diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 6c5d559a8a..e8f632af23 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -6,7 +6,6 @@ Make sure that: --> - [ ] You have read the [Spring Data contribution guidelines](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc). -- [ ] There is a ticket in the bug tracker for the project in our [JIRA](https://jira.spring.io/browse/DATAMONGO). - [ ] You use the code formatters provided [here](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide) and have them applied to your changes. Don’t submit any formatting related changes. - [ ] You submit test cases (unit or integration tests) that back your changes. - [ ] You added yourself as author in the headers of the classes you touched. Amend the date range in the Apache license header if needed. For new types, add the license header (copy from another file and set the current year only). 
diff --git a/.github/dco.yml b/.github/dco.yml new file mode 100644 index 0000000000..0c4b142e9a --- /dev/null +++ b/.github/dco.yml @@ -0,0 +1,2 @@ +require: + members: false diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml new file mode 100644 index 0000000000..a5f764579a --- /dev/null +++ b/.github/workflows/project.yml @@ -0,0 +1,40 @@ +# GitHub Actions to automate GitHub issues for Spring Data Project Management + +name: Spring Data GitHub Issues + +on: + issues: + types: [opened, edited, reopened] + issue_comment: + types: [created] + pull_request_target: + types: [opened, edited, reopened] + +jobs: + Inbox: + runs-on: ubuntu-latest + if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request == null && !contains(join(github.event.issue.labels.*.name, ', '), 'dependency-upgrade') && !contains(github.event.issue.title, 'Release ') + steps: + - name: Create or Update Issue Card + uses: actions/add-to-project@v1.0.2 + with: + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} + Pull-Request: + runs-on: ubuntu-latest + if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request != null + steps: + - name: Create or Update Pull Request Card + uses: actions/add-to-project@v1.0.2 + with: + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} + Feedback-Provided: + runs-on: ubuntu-latest + if: github.repository_owner == 'spring-projects' && github.event_name == 'issue_comment' && github.event.action == 'created' && github.actor != 'spring-projects-issues' && github.event.pull_request == null && github.event.issue.state == 'open' && contains(toJSON(github.event.issue.labels), 'waiting-for-feedback') + steps: + - name: 
Update Project Card + uses: actions/add-to-project@v1.0.2 + with: + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} diff --git a/.gitignore b/.gitignore index be372b6209..27b7a78896 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,9 @@ src/ant/.ant-targets-upload-dist.xml atlassian-ide-plugin.xml /.gradle/ /.idea/ -*.graphml \ No newline at end of file +*.graphml +build/ +node_modules +node +package-lock.json +.mvn/.develocity diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml new file mode 100644 index 0000000000..e0857eaa25 --- /dev/null +++ b/.mvn/extensions.xml @@ -0,0 +1,8 @@ + + + + io.spring.develocity.conventions + develocity-conventions-maven-extension + 0.0.22 + + diff --git a/.mvn/jvm.config b/.mvn/jvm.config new file mode 100644 index 0000000000..e27f6e8f5e --- /dev/null +++ b/.mvn/jvm.config @@ -0,0 +1,14 @@ +--add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED +--add-opens=java.base/java.util=ALL-UNNAMED +--add-opens=java.base/java.lang.reflect=ALL-UNNAMED +--add-opens=java.base/java.text=ALL-UNNAMED +--add-opens=java.desktop/java.awt.font=ALL-UNNAMED diff --git a/.mvn/wrapper/maven-wrapper.jar b/.mvn/wrapper/maven-wrapper.jar new file mode 100755 index 0000000000..01e6799737 Binary files /dev/null and b/.mvn/wrapper/maven-wrapper.jar differ diff --git 
a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties new file mode 100755 index 0000000000..5f3193b363 --- /dev/null +++ b/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,2 @@ +#Thu Nov 07 09:47:19 CET 2024 +distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 5b8e003615..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,46 +0,0 @@ -language: java - -jdk: - - oraclejdk8 - -before_script: - - mongod --version - - |- - echo "replication: - replSetName: rs0" | sudo tee -a /etc/mongod.conf - - sudo service mongod restart - - sleep 20 - - |- - mongo --eval "rs.initiate({_id: 'rs0', members:[{_id: 0, host: '127.0.0.1:27017'}]});" - - sleep 15 - -services: - - mongodb - -env: - matrix: - - PROFILE=ci - - PROFILE=mongo36-next - -# Current MongoDB version is 2.4.2 as of 2016-04, see https://github.com/travis-ci/travis-ci/issues/3694 -# apt-get starts a MongoDB instance so it's not started using before_script -addons: - apt: - sources: - - mongodb-3.4-precise - packages: - - mongodb-org-server - - mongodb-org-shell - - oracle-java8-installer - -sudo: false - -cache: - directories: - - $HOME/.m2 - -install: - - |- - mongo admin --eval "db.adminCommand({setFeatureCompatibilityVersion: '3.4'});" - -script: "mvn clean dependency:list test -P${PROFILE} -Dsort" diff --git a/CI.adoc b/CI.adoc new file mode 100644 index 0000000000..057100a955 --- /dev/null +++ b/CI.adoc @@ -0,0 +1,43 @@ += Continuous Integration + +image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Moore%20(main)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] +image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] 
+image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] + +== Running CI tasks locally + +Since this pipeline is purely Docker-based, it's easy to: + +* Debug what went wrong on your local machine. +* Test out a tweak to your test routine before sending it out. +* Experiment against a new image before submitting your pull request. + +All of these use cases are great reasons to essentially run what the CI server does on your local machine. + +IMPORTANT: To do this you must have Docker installed on your machine. + +1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3:latest /bin/bash` ++ +This will launch the Docker image and mount your source code at `spring-data-mongodb-github`. ++ +2. `cd spring-data-mongodb-github` ++ +Next, run the tests from inside the container: ++ +3. `./mvnw clean dependency:list test -Dsort -Dbundlor.enabled=false -B` (or with whatever profile you need to test out) + +Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs. + +If you need to package things up, do this: + +1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk8-with-mongodb-4.0:latest /bin/bash` ++ +This will launch the Docker image and mount your source code at `spring-data-mongodb-github`. ++ +2. `cd spring-data-mongodb-github` ++ +Next, package things from inside the container doing this: ++ +3. `./mvnw clean dependency:list package -Dsort -Dbundlor.enabled=false -B` + +NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images. 
diff --git a/CODE_OF_CONDUCT.adoc b/CODE_OF_CONDUCT.adoc deleted file mode 100644 index f64fb1b7a5..0000000000 --- a/CODE_OF_CONDUCT.adoc +++ /dev/null @@ -1,27 +0,0 @@ -= Contributor Code of Conduct - -As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities. - -We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality. - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, such as physical or electronic addresses, - without explicit permission -* Other unethical or unprofessional conduct - -Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. - -By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team. - -This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. 
- -Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting a project maintainer at spring-code-of-conduct@pivotal.io. -All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. -Maintainers are obligated to maintain confidentiality with regard to the reporter of an incident. - -This Code of Conduct is adapted from the http://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at http://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/]. \ No newline at end of file diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc index f007591467..740e8bd0bb 100644 --- a/CONTRIBUTING.adoc +++ b/CONTRIBUTING.adoc @@ -1,3 +1,3 @@ = Spring Data contribution guidelines -You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here]. +You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here]. 
diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 0000000000..0e83b47e2f --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,216 @@ +def p = [:] +node { + checkout scm + p = readProperties interpolate: true, file: 'ci/pipeline.properties' +} + +pipeline { + agent none + + triggers { + pollSCM 'H/10 * * * *' + upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS) + } + + options { + disableConcurrentBuilds() + buildDiscarder(logRotator(numToKeepStr: '14')) + } + + stages { + stage("Docker images") { + parallel { + stage('Publish JDK (Java 17) + MongoDB 6.0') { + when { + anyOf { + changeset "ci/openjdk17-mongodb-6.0/**" + changeset "ci/pipeline.properties" + } + } + agent { label 'data' } + options { timeout(time: 30, unit: 'MINUTES') } + + steps { + script { + def image = docker.build("springci/spring-data-with-mongodb-6.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.6.0.version']} ci/openjdk17-mongodb-6.0/") + docker.withRegistry(p['docker.registry'], p['docker.credentials']) { + image.push() + } + } + } + } + stage('Publish JDK (Java 17) + MongoDB 7.0') { + when { + anyOf { + changeset "ci/openjdk17-mongodb-7.0/**" + changeset "ci/pipeline.properties" + } + } + agent { label 'data' } + options { timeout(time: 30, unit: 'MINUTES') } + + steps { + script { + def image = docker.build("springci/spring-data-with-mongodb-7.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.7.0.version']} ci/openjdk17-mongodb-7.0/") + docker.withRegistry(p['docker.registry'], p['docker.credentials']) { + image.push() + } + } + } + } + stage('Publish JDK (Java.next) + MongoDB 8.0') { + when { + anyOf { + changeset "ci/openjdk17-mongodb-8.0/**" + changeset "ci/pipeline.properties" + } + } + agent { label 'data' } + options { timeout(time: 30, unit: 'MINUTES') } + + steps { + script { + def image = 
docker.build("springci/spring-data-with-mongodb-8.0:${p['java.next.tag']}", "--build-arg BASE=${p['docker.java.next.image']} --build-arg MONGODB=${p['docker.mongodb.8.0.version']} ci/openjdk23-mongodb-8.0/") + docker.withRegistry(p['docker.registry'], p['docker.credentials']) { + image.push() + } + } + } + } + } + } + + stage("test: baseline (main)") { + when { + beforeAgent(true) + anyOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + agent { + label 'data' + } + options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image("springci/spring-data-with-mongodb-6.0:${p['java.main.tag']}").inside(p['docker.java.inside.docker']) { + sh 'ci/start-replica.sh' + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb clean dependency:list test -Dsort -U -B" + } + } + } + } + } + + stage("Test other configurations") { + when { + beforeAgent(true) + allOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + parallel { + stage("test: MongoDB 7.0 (main)") { + agent { + label 'data' + } + options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image("springci/spring-data-with-mongodb-7.0:${p['java.main.tag']}").inside(p['docker.java.inside.docker']) { + 
sh 'ci/start-replica.sh' + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb clean dependency:list test -Dsort -U -B" + } + } + } + } + } + + stage("test: MongoDB 8.0") { + agent { + label 'data' + } + options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image("springci/spring-data-with-mongodb-8.0:${p['java.next.tag']}").inside(p['docker.java.inside.docker']) { + sh 'ci/start-replica.sh' + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb clean dependency:list test -Dsort -U -B" + } + } + } + } + } + } + } + + stage('Release to artifactory') { + when { + beforeAgent(true) + anyOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + agent { + label 'data' + } + options { timeout(time: 20, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.docker']) { + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Pci,artifactory " + + 
"-Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root " + + "-Dartifactory.server=${p['artifactory.url']} " + + "-Dartifactory.username=${ARTIFACTORY_USR} " + + "-Dartifactory.password=${ARTIFACTORY_PSW} " + + "-Dartifactory.staging-repository=${p['artifactory.repository.snapshot']} " + + "-Dartifactory.build-name=spring-data-mongodb " + + "-Dartifactory.build-number=spring-data-mongodb-${BRANCH_NAME}-build-${BUILD_NUMBER} " + + "-Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb " + + "-Dmaven.test.skip=true clean deploy -U -B" + } + } + } + } + } + } + + post { + changed { + script { + emailext( + subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}", + mimeType: 'text/html', + recipientProviders: [[$class: 'CulpritsRecipientProvider'], [$class: 'RequesterRecipientProvider']], + body: "${currentBuild.fullDisplayName} is reported as ${currentBuild.currentResult}") + } + } + } +} diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000000..ff77379631 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.adoc b/README.adoc new file mode 100644 index 0000000000..61b956fbfc --- /dev/null +++ b/README.adoc @@ -0,0 +1,231 @@ +image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start] + += Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] image:https://img.shields.io/badge/Revved%20up%20by-Develocity-06A0CE?logo=Gradle&labelColor=02303A["Revved up by Develocity", link="https://ge.spring.io/scans?search.rootProjectNames=Spring Data MongoDB"] + +The primary goal of the https://spring.io/projects/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services. + +The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. +The Spring Data MongoDB project provides integration with the MongoDB document database. 
+Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB `+Document+` and easily writing a repository style data access layer. + +[[code-of-conduct]] +== Code of Conduct + +This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io. + +[[getting-started]] +== Getting Started + +Here is a quick teaser of an application using Spring Data Repositories in Java: + +[source,java] +---- +public interface PersonRepository extends CrudRepository { + + List findByLastname(String lastname); + + List findByFirstnameLike(String firstname); +} + +@Service +public class MyService { + + private final PersonRepository repository; + + public MyService(PersonRepository repository) { + this.repository = repository; + } + + public void doWork() { + + repository.deleteAll(); + + Person person = new Person(); + person.setFirstname("Oliver"); + person.setLastname("Gierke"); + repository.save(person); + + List lastNameResults = repository.findByLastname("Gierke"); + List firstNameResults = repository.findByFirstnameLike("Oli*"); + } +} + +@Configuration +@EnableMongoRepositories +class ApplicationConfig extends AbstractMongoClientConfiguration { + + @Override + protected String getDatabaseName() { + return "springdata"; + } +} +---- + +[[maven-configuration]] +=== Maven configuration + +Add the Maven dependency: + +[source,xml] +---- + + org.springframework.data + spring-data-mongodb + ${version} + +---- + +If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository +and declare the appropriate dependency version. 
+ +[source,xml] +---- + + org.springframework.data + spring-data-mongodb + ${version}-SNAPSHOT + + + + spring-snapshot + Spring Snapshot Repository + https://repo.spring.io/snapshot + +---- + +[[upgrading]] +== Upgrading + +Instructions for how to upgrade from earlier versions of Spring Data are provided on the project https://github.com/spring-projects/spring-data-commons/wiki[wiki]. +Follow the links in the https://github.com/spring-projects/spring-data-commons/wiki#release-notes[release notes section] to find the version that you want to upgrade to. + +[[getting-help]] +== Getting Help + +Having trouble with Spring Data? We’d love to help! + +* Check the +https://docs.spring.io/spring-data/mongodb/reference/[reference documentation], and https://docs.spring.io/spring-data/mongodb/docs/current/api/[Javadocs] +* Learn the Spring basics – Spring Data builds on Spring Framework, check the https://spring.io[spring.io] web-site for a wealth of reference documentation. +If you are just starting out with Spring, try one of the https://spring.io/guides[guides]. +* If you are upgrading, check out the https://docs.spring.io/spring-data/mongodb/docs/current/changelog.txt[changelog] for "`new and noteworthy`" features. +* Ask a question - we monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-data[`spring-data-mongodb`]. +* Report bugs with Spring Data MongoDB at https://github.com/spring-projects/spring-data-mongodb/issues[github.com/spring-projects/spring-data-mongodb/issues]. + +[[reporting-issues]] +== Reporting Issues + +Spring Data uses Github as issue tracking system to record bugs and feature requests. +If you want to raise an issue, please follow the recommendations below: + +* Before you log a bug, please search the https://github.com/spring-projects/spring-data-mongodb/issues[issue tracker] to see if someone has already reported the problem. 
and accessible from Maven using the Maven configuration noted <<maven-configuration,below>>.
+Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link] +to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link] +to build a reactive one. + +However, if you want to try out the latest and greatest, Spring Data MongoDB can be easily built with the https://github.com/takari/maven-wrapper[Maven wrapper] +and minimally, JDK 17 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]). + +In order to build Spring Data MongoDB, you will need to https://www.mongodb.com/try/download/community[download] +and https://docs.mongodb.com/manual/installation/[install a MongoDB distribution]. + +Once you have installed MongoDB, you need to start a MongoDB server. It is convenient to set an environment variable to +your MongoDB installation directory (e.g. `MONGODB_HOME`). + +To run the full test suite, a https://docs.mongodb.com/manual/tutorial/deploy-replica-set/[MongoDB Replica Set] +is required. + +To run the MongoDB server enter the following command from a command-line: + +[source,bash] +---- +$ $MONGODB_HOME/bin/mongod --dbpath $MONGODB_HOME/runtime/data --ipv6 --port 27017 --replSet rs0 +... +"msg":"Successfully connected to host" +---- + +Once the MongoDB server starts up, you should see the message (`msg`), "_Successfully connected to host_". + +Notice the `--dbpath` option to the `mongod` command. You can set this to anything you like, but in this case, we set +the absolute path to a sub-directory (`runtime/data/`) under the MongoDB installation directory (in `$MONGODB_HOME`). 
+Finally, on UNIX-based systems (for example, Linux or Mac OS X) you may need to adjust the `ulimit`.
diff --git a/README.md b/README.md deleted file mode 100644 index 1425c0827d..0000000000 --- a/README.md +++ /dev/null @@ -1,150 +0,0 @@ -[![Spring Data MongoDB](https://spring.io/badges/spring-data-mongodb/ga.svg)](http://projects.spring.io/spring-data-mongodb#quick-start) -[![Spring Data MongoDB](https://spring.io/badges/spring-data-mongodb/snapshot.svg)](http://projects.spring.io/spring-data-mongodb#quick-start) - -# Spring Data MongoDB - -The primary goal of the [Spring Data](http://projects.spring.io/spring-data) project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services. - -The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. The Spring Data MongoDB project provides integration with the MongoDB document database. Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB DBCollection and easily writing a repository style data access layer. - -## Getting Help - -For a comprehensive treatment of all the Spring Data MongoDB features, please refer to: - -* the [User Guide](http://docs.spring.io/spring-data/mongodb/docs/current/reference/html/) -* the [JavaDocs](http://docs.spring.io/spring-data/mongodb/docs/current/api/) have extensive comments in them as well. -* the home page of [Spring Data MongoDB](http://projects.spring.io/spring-data-mongodb) contains links to articles and other resources. -* for more detailed questions, use [Spring Data Mongodb on Stackoverflow](http://stackoverflow.com/questions/tagged/spring-data-mongodb). - -If you are new to Spring as well as to Spring Data, look for information about [Spring projects](http://projects.spring.io/). 
- - -## Quick Start - -### Maven configuration - -Add the Maven dependency: - -```xml - - org.springframework.data - spring-data-mongodb - ${version}.RELEASE - -``` - -If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version. - -```xml - - org.springframework.data - spring-data-mongodb - ${version}.BUILD-SNAPSHOT - - - - spring-libs-snapshot - Spring Snapshot Repository - http://repo.spring.io/libs-snapshot - -``` - -### MongoTemplate - -MongoTemplate is the central support class for Mongo database operations. It provides: - -* Basic POJO mapping support to and from BSON -* Convenience methods to interact with the store (insert object, update objects) and MongoDB specific ones (geo-spatial operations, upserts, map-reduce etc.) -* Connection affinity callback -* Exception translation into Spring's [technology agnostic DAO exception hierarchy](http://docs.spring.io/spring/docs/current/spring-framework-reference/html/dao.html#dao-exceptions). - -### Spring Data repositories - -To simplify the creation of data repositories Spring Data MongoDB provides a generic repository programming model. It will automatically create a repository proxy for you that adds implementations of finder methods you specify on an interface. - -For example, given a `Person` class with first and last name properties, a `PersonRepository` interface that can query for `Person` by last name and when the first name matches a like expression is shown below: - -```java -public interface PersonRepository extends CrudRepository { - - List findByLastname(String lastname); - - List findByFirstnameLike(String firstname); -} -``` - -The queries issued on execution will be derived from the method name. Extending `CrudRepository` causes CRUD methods being pulled into the interface so that you can easily save and find single entities and collections of them. 
- -You can have Spring automatically create a proxy for the interface by using the following JavaConfig: - -```java -@Configuration -@EnableMongoRepositories -class ApplicationConfig extends AbstractMongoConfiguration { - - @Override - public MongoClient mongoClient() throws Exception { - return new MongoClient(); - } - - @Override - protected String getDatabaseName() { - return "springdata"; - } -} -``` - -This sets up a connection to a local MongoDB instance and enables the detection of Spring Data repositories (through `@EnableMongoRepositories`). The same configuration would look like this in XML: - -```xml - - - - - - - - - - - -``` - -This will find the repository interface and register a proxy object in the container. You can use it as shown below: - -```java -@Service -public class MyService { - - private final PersonRepository repository; - - @Autowired - public MyService(PersonRepository repository) { - this.repository = repository; - } - - public void doWork() { - - repository.deleteAll(); - - Person person = new Person(); - person.setFirstname("Oliver"); - person.setLastname("Gierke"); - person = repository.save(person); - - List lastNameResults = repository.findByLastname("Gierke"); - List firstNameResults = repository.findByFirstnameLike("Oli*"); - } -} -``` - -## Contributing to Spring Data - -Here are some ways for you to get involved in the community: - -* Get involved with the Spring community on Stackoverflow and help out on the [spring-data-mongodb](http://stackoverflow.com/questions/tagged/spring-data-mongodb) tag by responding to questions and joining the debate. -* Create [JIRA](https://jira.spring.io/browse/DATAMONGO) tickets for bugs and new features and comment and vote on the ones that you are interested in. -* Github is for social coding: if you want to write code, we encourage contributions through pull requests from [forks of this repository](http://help.github.com/forking/). 
+* Test out a tweak to your `test.sh` script before sending it out.
`docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash` ++ +This will launch the Docker image and mount your source code at `spring-data-mongodb-github`. ++ +Next, run the `test.sh` script from inside the container: ++ +2. `PROFILE=none spring-data-mongodb-github/ci/test.sh` + +Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs. + +If you need to test the `build.sh` script, do this: + +1. `mkdir /tmp/spring-data-mongodb-artifactory` +2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash` ++ +This will launch the Docker image and mount your source code at `spring-data-mongodb-github` and the temporary +artifactory output directory at `spring-data-mongodb-artifactory`. ++ +Next, run the `build.sh` script from inside the container: ++ +3. `spring-data-mongodb-github/ci/build.sh` + +IMPORTANT: `build.sh` doesn't actually push to Artifactory so don't worry about accidentally deploying anything. +It just deploys to a local folder. That way, the `artifactory-resource` later in the pipeline can pick up these artifacts +and deliver them to artifactory. + +NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images. diff --git a/ci/openjdk17-mongodb-6.0/Dockerfile b/ci/openjdk17-mongodb-6.0/Dockerfile new file mode 100644 index 0000000000..fd2580e23a --- /dev/null +++ b/ci/openjdk17-mongodb-6.0/Dockerfile @@ -0,0 +1,25 @@ +ARG BASE +FROM ${BASE} +# Any ARG statements before FROM are cleared. 
+ARG MONGODB + +ENV TZ=Etc/UTC +ENV DEBIAN_FRONTEND=noninteractive +ENV MONGO_VERSION=${MONGODB} + +RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \ + sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \ + sed -i -e 's/http/https/g' /etc/apt/sources.list && \ + apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \ + # MongoDB 6.0 release signing key + wget -qO - https://www.mongodb.org/static/pgp/server-6.0.asc | apt-key add - && \ + # Needed when MongoDB creates a 6.0 folder. + echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/6.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-6.0.list && \ + echo ${TZ} > /etc/timezone + +RUN apt-get update && \ + apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* diff --git a/ci/openjdk17-mongodb-7.0/Dockerfile b/ci/openjdk17-mongodb-7.0/Dockerfile new file mode 100644 index 0000000000..5701ab9fbc --- /dev/null +++ b/ci/openjdk17-mongodb-7.0/Dockerfile @@ -0,0 +1,25 @@ +ARG BASE +FROM ${BASE} +# Any ARG statements before FROM are cleared. 
+    # MongoDB 7.0 release signing key
+    # Needed when MongoDB creates an 8.0 folder.
$HOME:/tmp/jenkins-home + +# Credentials +docker.registry= +docker.credentials=hub.docker.com-springbuildmaster +docker.proxy.registry=https://docker-hub.usw1.packages.broadcom.com +docker.proxy.credentials=usw1_packages_broadcom_com-jenkins-token +artifactory.credentials=02bd1690-b54f-4c9f-819d-a77cb7a9822c +artifactory.url=https://repo.spring.io +artifactory.repository.snapshot=libs-snapshot-local +develocity.access-key=gradle_enterprise_secret_access_key +jenkins.user.name=spring-builds+jenkins diff --git a/ci/start-replica.sh b/ci/start-replica.sh new file mode 100755 index 0000000000..9124976f39 --- /dev/null +++ b/ci/start-replica.sh @@ -0,0 +1,6 @@ +#!/bin/sh +mkdir -p /tmp/mongodb/db /tmp/mongodb/log +mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log & +sleep 10 +mongosh --eval "rs.initiate({_id: 'rs0', members:[{_id: 0, host: '127.0.0.1:27017'}]});" +sleep 15 diff --git a/mvnw b/mvnw new file mode 100755 index 0000000000..8b9da3b8b6 --- /dev/null +++ b/mvnw @@ -0,0 +1,286 @@ +#!/bin/sh +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Maven2 Start Up Batch script +# +# Required ENV vars: +# ------------------ +# JAVA_HOME - location of a JDK home dir +# +# Optional ENV vars +# ----------------- +# M2_HOME - location of maven2's installed home dir +# MAVEN_OPTS - parameters passed to the Java VM when running Maven +# e.g. to debug Maven itself, use +# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +# MAVEN_SKIP_RC - flag to disable loading of mavenrc files +# ---------------------------------------------------------------------------- + +if [ -z "$MAVEN_SKIP_RC" ] ; then + + if [ -f /etc/mavenrc ] ; then + . /etc/mavenrc + fi + + if [ -f "$HOME/.mavenrc" ] ; then + . "$HOME/.mavenrc" + fi + +fi + +# OS specific support. $var _must_ be set to either true or false. 
+cygwin=false; +darwin=false; +mingw=false +case "`uname`" in + CYGWIN*) cygwin=true ;; + MINGW*) mingw=true;; + Darwin*) darwin=true + # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home + # See https://developer.apple.com/library/mac/qa/qa1170/_index.html + if [ -z "$JAVA_HOME" ]; then + if [ -x "/usr/libexec/java_home" ]; then + export JAVA_HOME="`/usr/libexec/java_home`" + else + export JAVA_HOME="/Library/Java/Home" + fi + fi + ;; +esac + +if [ -z "$JAVA_HOME" ] ; then + if [ -r /etc/gentoo-release ] ; then + JAVA_HOME=`java-config --jre-home` + fi +fi + +if [ -z "$M2_HOME" ] ; then + ## resolve links - $0 may be a link to maven's home + PRG="$0" + + # need this for relative symlinks + while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG="`dirname "$PRG"`/$link" + fi + done + + saveddir=`pwd` + + M2_HOME=`dirname "$PRG"`/.. + + # make it fully qualified + M2_HOME=`cd "$M2_HOME" && pwd` + + cd "$saveddir" + # echo Using m2 at $M2_HOME +fi + +# For Cygwin, ensure paths are in UNIX format before anything is touched +if $cygwin ; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --unix "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --unix "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --unix "$CLASSPATH"` +fi + +# For Mingw, ensure paths are in UNIX format before anything is touched +if $mingw ; then + [ -n "$M2_HOME" ] && + M2_HOME="`(cd "$M2_HOME"; pwd)`" + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" + # TODO classpath? +fi + +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! 
`expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + if $darwin ; then + javaHome="`dirname \"$javaExecutable\"`" + javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" + else + javaExecutable="`readlink -f \"$javaExecutable\"`" + fi + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "$JAVACMD" ] ; then + if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." >&2 + echo " We cannot execute $JAVACMD" >&2 + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." +fi + +CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher + +# traverses directory structure from process work directory to filesystem root +# first directory with .mvn subdirectory is considered project base directory +find_maven_basedir() { + + if [ -z "$1" ] + then + echo "Path not specified to find_maven_basedir" + return 1 + fi + + basedir="$1" + wdir="$1" + while [ "$wdir" != '/' ] ; do + if [ -d "$wdir"/.mvn ] ; then + basedir=$wdir + break + fi + # workaround for JBEAP-8937 (on Solaris 10/Sparc) + if [ -d "${wdir}" ]; then + wdir=`cd "$wdir/.."; pwd` + fi + # end of workaround + done + echo "${basedir}" +} + +# concatenates all lines of a file +concat_lines() { + if [ -f "$1" ]; then + echo "$(tr -s '\n' ' ' < "$1")" + fi +} + +BASE_DIR=`find_maven_basedir "$(pwd)"` +if [ -z "$BASE_DIR" ]; then + exit 1; +fi + +########################################################################################## +# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +# This allows using the maven wrapper in projects that prohibit 
checking in binary data. +########################################################################################## +if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found .mvn/wrapper/maven-wrapper.jar" + fi +else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." + fi + jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" + while IFS="=" read key value; do + case "$key" in (wrapperUrl) jarUrl="$value"; break ;; + esac + done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" + if [ "$MVNW_VERBOSE" = true ]; then + echo "Downloading from: $jarUrl" + fi + wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" + + if command -v wget > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found wget ... using wget" + fi + wget "$jarUrl" -O "$wrapperJarPath" + elif command -v curl > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found curl ... using curl" + fi + curl -o "$wrapperJarPath" "$jarUrl" + else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Falling back to using Java to download" + fi + javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" + if [ -e "$javaClass" ]; then + if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Compiling MavenWrapperDownloader.java ..." + fi + # Compiling the Java class + ("$JAVA_HOME/bin/javac" "$javaClass") + fi + if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + # Running the downloader + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Running MavenWrapperDownloader.java ..." 
+ fi + ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") + fi + fi + fi +fi +########################################################################################## +# End of extension +########################################################################################## + +export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} +if [ "$MVNW_VERBOSE" = true ]; then + echo $MAVEN_PROJECTBASEDIR +fi +MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" + +# For Cygwin, switch paths to Windows format before running java +if $cygwin; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --path --windows "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --windows "$CLASSPATH"` + [ -n "$MAVEN_PROJECTBASEDIR" ] && + MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` +fi + +WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +exec "$JAVACMD" \ + $MAVEN_OPTS \ + -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ + "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ + ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" diff --git a/mvnw.cmd b/mvnw.cmd new file mode 100755 index 0000000000..fef5a8f7f9 --- /dev/null +++ b/mvnw.cmd @@ -0,0 +1,161 @@ +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. 
You may obtain a copy of the License at +@REM +@REM https://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. +@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Maven2 Start Up Batch script +@REM +@REM Required ENV vars: +@REM JAVA_HOME - location of a JDK home dir +@REM +@REM Optional ENV vars +@REM M2_HOME - location of maven2's installed home dir +@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands +@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending +@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven +@REM e.g. 
to debug Maven itself, use +@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files +@REM ---------------------------------------------------------------------------- + +@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' +@echo off +@REM set title of command window +title %0 +@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' +@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% + +@REM set %HOME% to equivalent of $HOME +if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") + +@REM Execute a user defined script before this one +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre +@REM check for pre script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" +if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" +:skipRcPre + +@setlocal + +set ERROR_CODE=0 + +@REM To isolate internal variables from possible post scripts, we use another setlocal +@setlocal + +@REM ==== START VALIDATION ==== +if not "%JAVA_HOME%" == "" goto OkJHome + +echo. +echo Error: JAVA_HOME not found in your environment. >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +:OkJHome +if exist "%JAVA_HOME%\bin\java.exe" goto init + +echo. +echo Error: JAVA_HOME is set to an invalid directory. >&2 +echo JAVA_HOME = "%JAVA_HOME%" >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +@REM ==== END VALIDATION ==== + +:init + +@REM Find the project base dir, i.e. the directory that contains the folder ".mvn". +@REM Fallback to current working directory if not found. 
+ +set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% +IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir + +set EXEC_DIR=%CD% +set WDIR=%EXEC_DIR% +:findBaseDir +IF EXIST "%WDIR%"\.mvn goto baseDirFound +cd .. +IF "%WDIR%"=="%CD%" goto baseDirNotFound +set WDIR=%CD% +goto findBaseDir + +:baseDirFound +set MAVEN_PROJECTBASEDIR=%WDIR% +cd "%EXEC_DIR%" +goto endDetectBaseDir + +:baseDirNotFound +set MAVEN_PROJECTBASEDIR=%EXEC_DIR% +cd "%EXEC_DIR%" + +:endDetectBaseDir + +IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig + +@setlocal EnableExtensions EnableDelayedExpansion +for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a +@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% + +:endReadAdditionalConfig + +SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" +set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" +set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" +FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO ( + IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B +) + +@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +@REM This allows using the maven wrapper in projects that prohibit checking in binary data. +if exist %WRAPPER_JAR% ( + echo Found %WRAPPER_JAR% +) else ( + echo Couldn't find %WRAPPER_JAR%, downloading it ... 
+ echo Downloading from: %DOWNLOAD_URL% + powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')" + echo Finished downloading %WRAPPER_JAR% +) +@REM End of extension + +%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* +if ERRORLEVEL 1 goto error +goto end + +:error +set ERROR_CODE=1 + +:end +@endlocal & set ERROR_CODE=%ERROR_CODE% + +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost +@REM check for post script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" +if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" +:skipRcPost + +@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' +if "%MAVEN_BATCH_PAUSE%" == "on" pause + +if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% + +exit /B %ERROR_CODE% diff --git a/pom.xml b/pom.xml index 6ccfbc2f39..962ae73ffe 100644 --- a/pom.xml +++ b/pom.xml @@ -1,35 +1,33 @@ - + 4.0.0 org.springframework.data spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT + 4.5.0-SNAPSHOT pom Spring Data MongoDB MongoDB support for Spring Data - http://projects.spring.io/spring-data-mongodb + https://spring.io/projects/spring-data-mongodb org.springframework.data.build spring-data-parent - 2.1.0.BUILD-SNAPSHOT + 3.5.0-SNAPSHOT spring-data-mongodb - spring-data-mongodb-cross-store spring-data-mongodb-distribution multi spring-data-mongodb - 2.1.0.BUILD-SNAPSHOT - 3.6.3 - 1.7.1 + 3.5.0-SNAPSHOT + 5.5.0 1.19 @@ -39,7 +37,7 @@ Oliver Gierke ogierke at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Project Lead @@ -50,7 +48,7 @@ Thomas Risberg trisberg at vmware.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -61,7 +59,7 @@ Mark Pollack mpollack at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ 
-72,7 +70,7 @@ Jon Brisbin jbrisbin at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -83,7 +81,7 @@ Thomas Darimont tdarimont at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -94,7 +92,7 @@ Christoph Strobl cstrobl at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -105,7 +103,7 @@ Mark Paluch mpaluch at pivotal.io Pivotal - http://www.pivotal.io + https://www.pivotal.io Developer @@ -113,91 +111,66 @@ - - - + + GitHub + https://github.com/spring-projects/spring-data-mongodb/issues + + - - mongo36-next - - 3.6.0-SNAPSHOT - - + jmh - mongo-snapshots - https://oss.sonatype.org/content/repositories/snapshots + jitpack.io + https://jitpack.io - - - release - - - - org.jfrog.buildinfo - artifactory-maven-plugin - false - - - - - - - benchmarks - - spring-data-mongodb - spring-data-mongodb-cross-store - spring-data-mongodb-distribution - spring-data-mongodb-benchmarks - + mongo-4.x + + 4.11.1 + 1.8.0 + - - - - org.mongodb - mongo-java-driver - ${mongo} - - + + + + + org.mongodb + mongodb-driver-bom + ${mongo} + pom + import + + + + - spring-libs-snapshot - https://repo.spring.io/libs-snapshot + spring-snapshot + https://repo.spring.io/snapshot + + true + + + false + + + + spring-milestone + https://repo.spring.io/milestone - - - spring-plugins-release - https://repo.spring.io/plugins-release - - - spring-libs-milestone - https://repo.spring.io/libs-milestone - - - - diff --git a/settings.xml b/settings.xml new file mode 100644 index 0000000000..b3227cc110 --- /dev/null +++ b/settings.xml @@ -0,0 +1,29 @@ + + + + + spring-plugins-release + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-snapshot + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-milestone + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-release + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + + \ No newline at end of file diff --git 
a/spring-data-mongodb-benchmarks/README.md b/spring-data-mongodb-benchmarks/README.md deleted file mode 100644 index e11925b7fd..0000000000 --- a/spring-data-mongodb-benchmarks/README.md +++ /dev/null @@ -1,76 +0,0 @@ -# Benchmarks - -Benchmarks are based on [JMH](http://openjdk.java.net/projects/code-tools/jmh/). - -# Running Benchmarks - -Running benchmarks is disabled by default and can be activated via the `benchmarks` profile. -To run the benchmarks with default settings use. - -```bash -mvn -P benchmarks clean test -``` - -A basic report will be printed to the CLI. - -```bash -# Run complete. Total time: 00:00:15 - -Benchmark Mode Cnt Score Error Units -MappingMongoConverterBenchmark.readObject thrpt 10 1920157,631 ± 64310,809 ops/s -MappingMongoConverterBenchmark.writeObject thrpt 10 782732,857 ± 53804,130 ops/s -``` - -## Running all Benchmarks of a specific class - -To run all Benchmarks of a specific class, just provide its simple class name via the `benchmark` command line argument. - -```bash -mvn -P benchmarks clean test -D benchmark=MappingMongoConverterBenchmark -``` - -## Running a single Benchmark - -To run a single Benchmark provide its containing class simple name followed by `#` and the method name via the `benchmark` command line argument. - -```bash -mvn -P benchmarks clean test -D benchmark=MappingMongoConverterBenchmark#readObjectWith2Properties -``` - -# Saving Benchmark Results - -A detailed benchmark report is stored in JSON format in the `/target/reports/performance` directory. -To store the report in a different location use the `benchmarkReportDir` command line argument. - -## MongoDB - -Results can be directly piped to MongoDB by providing a valid [Connection String](https://docs.mongodb.com/manual/reference/connection-string/) via the `publishTo` command line argument. 
- -```bash -mvn -P benchmarks clean test -D publishTo=mongodb://127.0.0.1:27017 -``` - -NOTE: If the uri does not explicitly define a database the default `spring-data-mongodb-benchmarks` is used. - -## HTTP Endpoint - -The benchmark report can also be posted as `application/json` to an HTTP Endpoint by providing a valid URl via the `publishTo` command line argument. - -```bash -mvn -P benchmarks clean test -D publishTo=http://127.0.0.1:8080/capture-benchmarks -``` - -# Customizing Benchmarks - -Following options can be set via command line. - -Option | Default Value ---- | --- -warmupIterations | 10 -warmupTime | 1 (seconds) -measurementIterations | 10 -measurementTime | 1 (seconds) -forks | 1 -benchmarkReportDir | /target/reports/performance (always relative to project root dir) -benchmark | .* (single benchmark via `classname#benchmark`) -publishTo | \[not set\] (mongodb-uri or http-endpoint) \ No newline at end of file diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml deleted file mode 100644 index 9baccaa905..0000000000 --- a/spring-data-mongodb-benchmarks/pom.xml +++ /dev/null @@ -1,111 +0,0 @@ - - - - 4.0.0 - - - org.springframework.data - spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT - ../pom.xml - - - spring-data-mongodb-benchmarks - jar - - Spring Data MongoDB - Microbenchmarks - - - - true - - - - - - ${project.groupId} - spring-data-mongodb - ${project.version} - - - - junit - junit - ${junit} - compile - - - - org.openjdk.jmh - jmh-core - ${jmh.version} - - - - org.openjdk.jmh - jmh-generator-annprocess - ${jmh.version} - provided - - - - - - - - benchmarks - - false - - - - - - - - pl.project13.maven - git-commit-id-plugin - 2.2.2 - - - - revision - - - - - - maven-jar-plugin - - - default-jar - never - - - - - maven-surefire-plugin - - ${project.build.sourceDirectory} - ${project.build.outputDirectory} - - **/AbstractMicrobenchmark.java - **/*$*.class - **/generated/*.class - - - **/*Benchmark* - - - 
${project.build.directory}/reports/performance - ${project.version} - ${git.dirty} - ${git.commit.id} - ${git.branch} - - - - - - diff --git a/spring-data-mongodb-benchmarks/src/main/resources/logback.xml b/spring-data-mongodb-benchmarks/src/main/resources/logback.xml deleted file mode 100644 index bccb2dc4fa..0000000000 --- a/spring-data-mongodb-benchmarks/src/main/resources/logback.xml +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - %d %5p %40.40c:%4L - %m%n - - - - - - - - \ No newline at end of file diff --git a/spring-data-mongodb-cross-store/aop.xml b/spring-data-mongodb-cross-store/aop.xml deleted file mode 100644 index d11b1549e8..0000000000 --- a/spring-data-mongodb-cross-store/aop.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/spring-data-mongodb-cross-store/pom.xml b/spring-data-mongodb-cross-store/pom.xml deleted file mode 100644 index 8ba393d38b..0000000000 --- a/spring-data-mongodb-cross-store/pom.xml +++ /dev/null @@ -1,147 +0,0 @@ - - - - 4.0.0 - - - org.springframework.data - spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT - ../pom.xml - - - spring-data-mongodb-cross-store - Spring Data MongoDB - Cross-Store Support - - - 2.1.1 - 5.2.1.Final - spring.data.mongodb.cross.store - - - - - - - org.springframework - spring-beans - - - commons-logging - commons-logging - - - - - org.springframework - spring-tx - - - org.springframework - spring-aspects - - - org.springframework - spring-orm - - - - - org.springframework.data - spring-data-mongodb - 2.1.0.BUILD-SNAPSHOT - - - - - io.projectreactor - reactor-core - true - - - - org.aspectj - aspectjrt - ${aspectj} - - - - - org.eclipse.persistence - javax.persistence - ${jpa} - true - - - - - org.hibernate - hibernate-entitymanager - ${hibernate} - test - - - hsqldb - hsqldb - 1.8.0.10 - test - - - javax.validation - validation-api - ${validation} - test - - - org.hibernate - hibernate-validator - 5.2.4.Final - test - - - - - - - - org.codehaus.mojo - 
aspectj-maven-plugin - 1.6 - - - org.aspectj - aspectjrt - ${aspectj} - - - org.aspectj - aspectjtools - ${aspectj} - - - - - - compile - test-compile - - - - - true - - - org.springframework - spring-aspects - - - ${source.level} - ${source.level} - ${source.level} - aop.xml - - - - - - diff --git a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoChangeSetPersister.java b/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoChangeSetPersister.java deleted file mode 100644 index 2b9e07a9fe..0000000000 --- a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoChangeSetPersister.java +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.crossstore; - -import javax.persistence.EntityManagerFactory; - -import org.bson.Document; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.dao.DataAccessException; -import org.springframework.dao.DataAccessResourceFailureException; -import org.springframework.dao.DataIntegrityViolationException; -import org.springframework.data.crossstore.ChangeSet; -import org.springframework.data.crossstore.ChangeSetBacked; -import org.springframework.data.crossstore.ChangeSetPersister; -import org.springframework.data.mongodb.core.CollectionCallback; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.util.ClassUtils; - -import com.mongodb.MongoException; -import com.mongodb.client.MongoCollection; -import com.mongodb.client.model.Filters; -import com.mongodb.client.result.DeleteResult; - -/** - * @author Thomas Risberg - * @author Oliver Gierke - * @author Alex Vengrovsk - * @author Mark Paluch - * @deprecated will be removed without replacement. 
- */ -@Deprecated -public class MongoChangeSetPersister implements ChangeSetPersister { - - private static final String ENTITY_CLASS = "_entity_class"; - private static final String ENTITY_ID = "_entity_id"; - private static final String ENTITY_FIELD_NAME = "_entity_field_name"; - private static final String ENTITY_FIELD_CLASS = "_entity_field_class"; - - private final Logger log = LoggerFactory.getLogger(getClass()); - - private MongoTemplate mongoTemplate; - private EntityManagerFactory entityManagerFactory; - - public void setMongoTemplate(MongoTemplate mongoTemplate) { - this.mongoTemplate = mongoTemplate; - } - - public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) { - this.entityManagerFactory = entityManagerFactory; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentState(java.lang.Class, java.lang.Object, org.springframework.data.crossstore.ChangeSet) - */ - public void getPersistentState(Class entityClass, Object id, final ChangeSet changeSet) - throws DataAccessException, NotFoundException { - - if (id == null) { - log.debug("Unable to load MongoDB data for null id"); - return; - } - - String collName = getCollectionNameForEntity(entityClass); - - final Document dbk = new Document(); - dbk.put(ENTITY_ID, id); - dbk.put(ENTITY_CLASS, entityClass.getName()); - if (log.isDebugEnabled()) { - log.debug("Loading MongoDB data for {}", dbk); - } - mongoTemplate.execute(collName, new CollectionCallback() { - public Object doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - for (Document dbo : collection.find(dbk)) { - String key = (String) dbo.get(ENTITY_FIELD_NAME); - if (log.isDebugEnabled()) { - log.debug("Processing key: {}", key); - } - if (!changeSet.getValues().containsKey(key)) { - String className = (String) dbo.get(ENTITY_FIELD_CLASS); - if (className == null) { - throw new DataIntegrityViolationException( - "Unble to convert property " 
+ key + ": Invalid metadata, " + ENTITY_FIELD_CLASS + " not available"); - } - Class clazz = ClassUtils.resolveClassName(className, ClassUtils.getDefaultClassLoader()); - Object value = mongoTemplate.getConverter().read(clazz, dbo); - if (log.isDebugEnabled()) { - log.debug("Adding to ChangeSet: {}", key); - } - changeSet.set(key, value); - } - } - return null; - } - }); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet) - */ - public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException { - if (log.isDebugEnabled()) { - log.debug("getPersistentId called on {}", entity); - } - if (entityManagerFactory == null) { - throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null"); - } - - return entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.crossstore.ChangeSetPersister#persistState(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet) - */ - public Object persistState(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException { - if (cs == null) { - log.debug("Flush: changeset was null, nothing to flush."); - return 0L; - } - - if (log.isDebugEnabled()) { - log.debug("Flush: changeset: {}", cs.getValues()); - } - - String collName = getCollectionNameForEntity(entity.getClass()); - if (mongoTemplate.getCollection(collName) == null) { - mongoTemplate.createCollection(collName); - } - - for (String key : cs.getValues().keySet()) { - if (key != null && !key.startsWith("_") && !key.equals(ChangeSetPersister.ID_KEY)) { - Object value = cs.getValues().get(key); - final Document dbQuery = new Document(); - dbQuery.put(ENTITY_ID, getPersistentId(entity, cs)); - dbQuery.put(ENTITY_CLASS, entity.getClass().getName()); - 
dbQuery.put(ENTITY_FIELD_NAME, key); - final Document dbId = mongoTemplate.execute(collName, new CollectionCallback() { - public Document doInCollection(MongoCollection collection) - throws MongoException, DataAccessException { - Document id = collection.find(dbQuery).first(); - return id; - } - }); - - if (value == null) { - if (log.isDebugEnabled()) { - log.debug("Flush: removing: {}", dbQuery); - } - mongoTemplate.execute(collName, new CollectionCallback() { - public Object doInCollection(MongoCollection collection) - throws MongoException, DataAccessException { - DeleteResult dr = collection.deleteMany(dbQuery); - return null; - } - }); - } else { - final Document dbDoc = new Document(); - dbDoc.putAll(dbQuery); - if (log.isDebugEnabled()) { - log.debug("Flush: saving: {}", dbQuery); - } - mongoTemplate.getConverter().write(value, dbDoc); - dbDoc.put(ENTITY_FIELD_CLASS, value.getClass().getName()); - if (dbId != null) { - dbDoc.put("_id", dbId.get("_id")); - } - mongoTemplate.execute(collName, new CollectionCallback() { - public Object doInCollection(MongoCollection collection) - throws MongoException, DataAccessException { - - if (dbId != null) { - collection.replaceOne(Filters.eq("_id", dbId.get("_id")), dbDoc); - } else { - - if (dbDoc.containsKey("_id") && dbDoc.get("_id") == null) { - dbDoc.remove("_id"); - } - collection.insertOne(dbDoc); - } - return null; - } - }); - } - } - } - return 0L; - } - - /** - * Returns the collection the given entity type shall be persisted to. - * - * @param entityClass must not be {@literal null}. 
- * @return - */ - private String getCollectionNameForEntity(Class entityClass) { - return mongoTemplate.getCollectionName(entityClass); - } -} diff --git a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoDocumentBacking.aj b/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoDocumentBacking.aj deleted file mode 100644 index a032194c4b..0000000000 --- a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoDocumentBacking.aj +++ /dev/null @@ -1,272 +0,0 @@ -/* - * Copyright 2011-2017 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.crossstore; - -import java.lang.reflect.Field; - -import javax.persistence.EntityManager; -import javax.persistence.Transient; -import javax.persistence.Entity; - -import org.aspectj.lang.JoinPoint; -import org.aspectj.lang.reflect.FieldSignature; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.dao.DataAccessException; -import org.springframework.data.mongodb.crossstore.RelatedDocument; -import org.springframework.data.mongodb.crossstore.DocumentBacked; -import org.springframework.data.crossstore.ChangeSetBackedTransactionSynchronization; -import org.springframework.data.crossstore.ChangeSet; -import org.springframework.data.crossstore.ChangeSetPersister; -import org.springframework.data.crossstore.ChangeSetPersister.NotFoundException; -import org.springframework.data.crossstore.HashMapChangeSet; -import org.springframework.transaction.support.TransactionSynchronizationManager; - -/** - * Aspect to turn an object annotated with @Document into a persistent document using Mongo. - * - * @author Thomas Risberg - * @deprecated will be removed without replacement. 
- */ -@Deprecated -public aspect MongoDocumentBacking { - - private static final Logger LOGGER = LoggerFactory.getLogger(MongoDocumentBacking.class); - - // Aspect shared config - private ChangeSetPersister changeSetPersister; - - public void setChangeSetPersister(ChangeSetPersister changeSetPersister) { - this.changeSetPersister = changeSetPersister; - } - - // ITD to introduce N state to Annotated objects - declare parents : (@Entity *) implements DocumentBacked; - - // The annotated fields that will be persisted in MongoDB rather than with JPA - declare @field: @RelatedDocument * (@Entity+ *).*:@Transient; - - // ------------------------------------------------------------------------- - // Advise user-defined constructors of ChangeSetBacked objects to create a new - // backing ChangeSet - // ------------------------------------------------------------------------- - pointcut arbitraryUserConstructorOfChangeSetBackedObject(DocumentBacked entity) : - execution((DocumentBacked+).new(..)) && - !execution((DocumentBacked+).new(ChangeSet)) && - this(entity); - - pointcut finderConstructorOfChangeSetBackedObject(DocumentBacked entity, ChangeSet cs) : - execution((DocumentBacked+).new(ChangeSet)) && - this(entity) && - args(cs); - - protected pointcut entityFieldGet(DocumentBacked entity) : - get(@RelatedDocument * DocumentBacked+.*) && - this(entity) && - !get(* DocumentBacked.*); - - protected pointcut entityFieldSet(DocumentBacked entity, Object newVal) : - set(@RelatedDocument * DocumentBacked+.*) && - this(entity) && - args(newVal) && - !set(* DocumentBacked.*); - - // intercept EntityManager.merge calls - public pointcut entityManagerMerge(EntityManager em, Object entity) : - call(* EntityManager.merge(Object)) && - target(em) && - args(entity); - - // intercept EntityManager.remove calls - // public pointcut entityManagerRemove(EntityManager em, Object entity) : - // call(* EntityManager.remove(Object)) && - // target(em) && - // args(entity); - - // move 
changeSet from detached entity to the newly merged persistent object - Object around(EntityManager em, Object entity) : entityManagerMerge(em, entity) { - Object mergedEntity = proceed(em, entity); - if (entity instanceof DocumentBacked && mergedEntity instanceof DocumentBacked) { - ((DocumentBacked) mergedEntity).changeSet = ((DocumentBacked) entity).getChangeSet(); - } - return mergedEntity; - } - - // clear changeSet from removed entity - // Object around(EntityManager em, Object entity) : entityManagerRemove(em, entity) { - // if (entity instanceof DocumentBacked) { - // removeChangeSetValues((DocumentBacked)entity); - // } - // return proceed(em, entity); - // } - - private static void removeChangeSetValues(DocumentBacked entity) { - LOGGER.debug("Removing all change-set values for " + entity); - ChangeSet nulledCs = new HashMapChangeSet(); - DocumentBacked documentEntity = (DocumentBacked) entity; - @SuppressWarnings("unchecked") - ChangeSetPersister changeSetPersister = (ChangeSetPersister) documentEntity.itdChangeSetPersister; - try { - changeSetPersister.getPersistentState(documentEntity.getClass(), documentEntity.get_persistent_id(), - documentEntity.getChangeSet()); - } catch (DataAccessException e) { - } catch (NotFoundException e) { - } - for (String key : entity.getChangeSet().getValues().keySet()) { - nulledCs.set(key, null); - } - entity.setChangeSet(nulledCs); - } - - before(DocumentBacked entity) : arbitraryUserConstructorOfChangeSetBackedObject(entity) { - LOGGER.debug("User-defined constructor called on DocumentBacked object of class " + entity.getClass()); - // Populate all ITD fields - entity.setChangeSet(new HashMapChangeSet()); - entity.itdChangeSetPersister = changeSetPersister; - entity.itdTransactionSynchronization = new ChangeSetBackedTransactionSynchronization(changeSetPersister, entity); - // registerTransactionSynchronization(entity); - } - - private static void registerTransactionSynchronization(DocumentBacked entity) { - if 
(TransactionSynchronizationManager.isSynchronizationActive()) { - if (!TransactionSynchronizationManager.getSynchronizations().contains(entity.itdTransactionSynchronization)) { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Adding transaction synchronization for " + entity); - } - TransactionSynchronizationManager.registerSynchronization(entity.itdTransactionSynchronization); - } else { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Transaction synchronization already active for " + entity); - } - } - } else { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Transaction synchronization is not active for " + entity); - } - } - } - - // ------------------------------------------------------------------------- - // ChangeSet-related mixins - // ------------------------------------------------------------------------- - // Introduced field - @Transient - private ChangeSet DocumentBacked.changeSet; - - @Transient - private ChangeSetPersister DocumentBacked.itdChangeSetPersister; - - @Transient - private ChangeSetBackedTransactionSynchronization DocumentBacked.itdTransactionSynchronization; - - public void DocumentBacked.setChangeSet(ChangeSet cs) { - this.changeSet = cs; - } - - public ChangeSet DocumentBacked.getChangeSet() { - return changeSet; - } - - // Flush the entity state to the persistent store - public void DocumentBacked.flush() { - Object id = itdChangeSetPersister.getPersistentId(this, this.changeSet); - itdChangeSetPersister.persistState(this, this.changeSet); - } - - public Object DocumentBacked.get_persistent_id() { - return itdChangeSetPersister.getPersistentId(this, this.changeSet); - } - - // lifecycle methods - @javax.persistence.PostPersist - public void DocumentBacked.itdPostPersist() { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PrePersist: " + this.getClass().getName()); - } - registerTransactionSynchronization(this); - } - - @javax.persistence.PreUpdate - public void DocumentBacked.itdPreUpdate() { - if 
(LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PreUpdate: " + this.getClass().getName() + " :: " + this); - } - registerTransactionSynchronization(this); - } - - @javax.persistence.PostUpdate - public void DocumentBacked.itdPostUpdate() { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PostUpdate: " + this.getClass().getName() + " :: " + this); - } - registerTransactionSynchronization(this); - } - - @javax.persistence.PostRemove - public void DocumentBacked.itdPostRemove() { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PostRemove: " + this.getClass().getName() + " :: " + this); - } - registerTransactionSynchronization(this); - removeChangeSetValues(this); - } - - @javax.persistence.PostLoad - public void DocumentBacked.itdPostLoad() { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PostLoad: " + this.getClass().getName() + " :: " + this); - } - registerTransactionSynchronization(this); - } - - /** - * delegates field reads to the state accessors instance - */ - Object around(DocumentBacked entity): entityFieldGet(entity) { - Field f = field(thisJoinPoint); - String propName = f.getName(); - LOGGER.trace("GET " + f + " -> ChangeSet value property [" + propName + "] using: " + entity.getChangeSet()); - if (entity.getChangeSet().getValues().get(propName) == null) { - try { - this.changeSetPersister - .getPersistentState(entity.getClass(), entity.get_persistent_id(), entity.getChangeSet()); - } catch (NotFoundException e) { - } - } - Object fValue = entity.getChangeSet().getValues().get(propName); - if (fValue != null) { - return fValue; - } - return proceed(entity); - } - - /** - * delegates field writes to the state accessors instance - */ - Object around(DocumentBacked entity, Object newVal) : entityFieldSet(entity, newVal) { - Field f = field(thisJoinPoint); - String propName = f.getName(); - LOGGER.trace("SET " + f + " -> ChangeSet number value property [" + propName + "] with 
value=[" + newVal + "]"); - entity.getChangeSet().set(propName, newVal); - return proceed(entity, newVal); - } - - Field field(JoinPoint joinPoint) { - FieldSignature fieldSignature = (FieldSignature) joinPoint.getSignature(); - return fieldSignature.getField(); - } -} diff --git a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/package-info.java b/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/package-info.java deleted file mode 100644 index 7209091339..0000000000 --- a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/package-info.java +++ /dev/null @@ -1,5 +0,0 @@ -/** - * Infrastructure for Spring Data's MongoDB cross store support. - */ -package org.springframework.data.mongodb.crossstore; - diff --git a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/CrossStoreMongoTests.java b/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/CrossStoreMongoTests.java deleted file mode 100644 index 25aad5c2fb..0000000000 --- a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/CrossStoreMongoTests.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.crossstore; - -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; - -import org.bson.Document; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.crossstore.test.Address; -import org.springframework.data.mongodb.crossstore.test.Person; -import org.springframework.data.mongodb.crossstore.test.Resume; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.annotation.Transactional; -import org.springframework.transaction.support.TransactionCallback; -import org.springframework.transaction.support.TransactionTemplate; - -/** - * Integration tests for MongoDB cross-store persistence (mainly {@link MongoChangeSetPersister}). - * - * @author Thomas Risberg - * @author Oliver Gierke - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:/META-INF/spring/applicationContext.xml") -public class CrossStoreMongoTests { - - @Autowired MongoTemplate mongoTemplate; - - @PersistenceContext EntityManager entityManager; - - @Autowired PlatformTransactionManager transactionManager; - TransactionTemplate txTemplate; - - @Before - public void setUp() { - - txTemplate = new TransactionTemplate(transactionManager); - - clearData(Person.class); - - Address address = new Address(12, "MAin St.", "Boston", "MA", "02101"); - - Resume resume = new Resume(); - resume.addEducation("Skanstulls High School, 1975"); - resume.addEducation("Univ. 
of Stockholm, 1980"); - resume.addJob("DiMark, DBA, 1990-2000"); - resume.addJob("VMware, Developer, 2007-"); - - final Person person = new Person("Thomas", 20); - person.setAddress(address); - person.setResume(resume); - person.setId(1L); - - txTemplate.execute(new TransactionCallback() { - public Void doInTransaction(TransactionStatus status) { - entityManager.persist(person); - return null; - } - }); - } - - @After - public void tearDown() { - txTemplate.execute(new TransactionCallback() { - public Void doInTransaction(TransactionStatus status) { - entityManager.remove(entityManager.find(Person.class, 1L)); - return null; - } - }); - } - - private void clearData(Class domainType) { - - String collectionName = mongoTemplate.getCollectionName(domainType); - mongoTemplate.dropCollection(collectionName); - } - - @Test - @Transactional - public void testReadJpaToMongoEntityRelationship() { - - Person found = entityManager.find(Person.class, 1L); - Assert.assertNotNull(found); - Assert.assertEquals(Long.valueOf(1), found.getId()); - Assert.assertNotNull(found); - Assert.assertEquals(Long.valueOf(1), found.getId()); - Assert.assertNotNull(found.getResume()); - Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-", found.getResume().getJobs()); - } - - @Test - @Transactional - public void testUpdatedJpaToMongoEntityRelationship() { - - Person found = entityManager.find(Person.class, 1L); - found.setAge(44); - found.getResume().addJob("SpringDeveloper.com, Consultant, 2005-2006"); - - entityManager.merge(found); - - Assert.assertNotNull(found); - Assert.assertEquals(Long.valueOf(1), found.getId()); - Assert.assertNotNull(found); - Assert.assertEquals(Long.valueOf(1), found.getId()); - Assert.assertNotNull(found.getResume()); - Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-" + "; " - + "SpringDeveloper.com, Consultant, 2005-2006", found.getResume().getJobs()); - } - - @Test - public void 
testMergeJpaEntityWithMongoDocument() { - - final Person detached = entityManager.find(Person.class, 1L); - entityManager.detach(detached); - detached.getResume().addJob("TargetRx, Developer, 2000-2005"); - - Person merged = txTemplate.execute(new TransactionCallback() { - public Person doInTransaction(TransactionStatus status) { - Person result = entityManager.merge(detached); - entityManager.flush(); - return result; - } - }); - - Assert.assertTrue(detached.getResume().getJobs().contains("TargetRx, Developer, 2000-2005")); - Assert.assertTrue(merged.getResume().getJobs().contains("TargetRx, Developer, 2000-2005")); - final Person updated = entityManager.find(Person.class, 1L); - Assert.assertTrue(updated.getResume().getJobs().contains("TargetRx, Developer, 2000-2005")); - } - - @Test - public void testRemoveJpaEntityWithMongoDocument() { - - txTemplate.execute(new TransactionCallback() { - public Person doInTransaction(TransactionStatus status) { - Person p2 = new Person("Thomas", 20); - Resume r2 = new Resume(); - r2.addEducation("Skanstulls High School, 1975"); - r2.addJob("DiMark, DBA, 1990-2000"); - p2.setResume(r2); - p2.setId(2L); - entityManager.persist(p2); - Person p3 = new Person("Thomas", 20); - Resume r3 = new Resume(); - r3.addEducation("Univ. 
of Stockholm, 1980"); - r3.addJob("VMware, Developer, 2007-"); - p3.setResume(r3); - p3.setId(3L); - entityManager.persist(p3); - return null; - } - }); - txTemplate.execute(new TransactionCallback() { - public Person doInTransaction(TransactionStatus status) { - final Person found2 = entityManager.find(Person.class, 2L); - entityManager.remove(found2); - return null; - } - }); - - boolean weFound3 = false; - - for (Document dbo : this.mongoTemplate.getCollection(mongoTemplate.getCollectionName(Person.class)).find()) { - Assert.assertTrue(!dbo.get("_entity_id").equals(2L)); - if (dbo.get("_entity_id").equals(3L)) { - weFound3 = true; - } - } - Assert.assertTrue(weFound3); - } - -} diff --git a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Address.java b/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Address.java deleted file mode 100644 index b6db0fd8eb..0000000000 --- a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Address.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.crossstore.test; - -public class Address { - - private Integer streetNumber; - private String streetName; - private String city; - private String state; - private String zip; - - public Address(Integer streetNumber, String streetName, String city, String state, String zip) { - super(); - this.streetNumber = streetNumber; - this.streetName = streetName; - this.city = city; - this.state = state; - this.zip = zip; - } - - public Integer getStreetNumber() { - return streetNumber; - } - - public void setStreetNumber(Integer streetNumber) { - this.streetNumber = streetNumber; - } - - public String getStreetName() { - return streetName; - } - - public void setStreetName(String streetName) { - this.streetName = streetName; - } - - public String getCity() { - return city; - } - - public void setCity(String city) { - this.city = city; - } - - public String getState() { - return state; - } - - public void setState(String state) { - this.state = state; - } - - public String getZip() { - return zip; - } - - public void setZip(String zip) { - this.zip = zip; - } - -} diff --git a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Person.java b/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Person.java deleted file mode 100644 index be1e15ea38..0000000000 --- a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Person.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.crossstore.test; - -import javax.persistence.Entity; -import javax.persistence.Id; - -import org.springframework.data.mongodb.crossstore.RelatedDocument; - -@Entity -public class Person { - - @Id - Long id; - - private String name; - - private int age; - - private java.util.Date birthDate; - - @RelatedDocument - private Address address; - - @RelatedDocument - private Resume resume; - - public Person() { - } - - public Person(String name, int age) { - this.name = name; - this.age = age; - this.birthDate = new java.util.Date(); - } - - public void birthday() { - ++age; - } - - public Long getId() { - return id; - } - - public void setId(Long id) { - this.id = id; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public int getAge() { - return age; - } - - public void setAge(int age) { - this.age = age; - } - - public java.util.Date getBirthDate() { - return birthDate; - } - - public void setBirthDate(java.util.Date birthDate) { - this.birthDate = birthDate; - } - - public Resume getResume() { - return resume; - } - - public void setResume(Resume resume) { - this.resume = resume; - } - - public Address getAddress() { - return address; - } - - public void setAddress(Address address) { - this.address = address; - } - -} diff --git a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Resume.java 
b/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Resume.java deleted file mode 100644 index 71a01ad8ee..0000000000 --- a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Resume.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.crossstore.test; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.bson.types.ObjectId; -import org.springframework.data.annotation.Id; -import org.springframework.data.mongodb.core.mapping.Document; - -@Document -public class Resume { - - private static final Log LOGGER = LogFactory.getLog(Resume.class); - - @Id - private ObjectId id; - - private String education = ""; - - private String jobs = ""; - - public String getId() { - return id.toString(); - } - - public String getEducation() { - return education; - } - - public void addEducation(String education) { - LOGGER.debug("Adding education " + education); - this.education = this.education + (this.education.length() > 0 ? "; " : "") + education; - } - - public String getJobs() { - return jobs; - } - - public void addJob(String job) { - LOGGER.debug("Adding job " + job); - this.jobs = this.jobs + (this.jobs.length() > 0 ? 
"; " : "") + job; - } - - @Override - public String toString() { - return "Resume [education=" + education + ", jobs=" + jobs + "]"; - } - -} diff --git a/spring-data-mongodb-cross-store/src/test/resources/META-INF/persistence.xml b/spring-data-mongodb-cross-store/src/test/resources/META-INF/persistence.xml deleted file mode 100644 index 878fff47ba..0000000000 --- a/spring-data-mongodb-cross-store/src/test/resources/META-INF/persistence.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - org.hibernate.ejb.HibernatePersistence - org.springframework.data.mongodb.crossstore.test.Person - - - - - - - - diff --git a/spring-data-mongodb-cross-store/src/test/resources/META-INF/spring/applicationContext.xml b/spring-data-mongodb-cross-store/src/test/resources/META-INF/spring/applicationContext.xml deleted file mode 100644 index 3fad886b03..0000000000 --- a/spring-data-mongodb-cross-store/src/test/resources/META-INF/spring/applicationContext.xml +++ /dev/null @@ -1,72 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-data-mongodb-cross-store/src/test/resources/logback.xml b/spring-data-mongodb-cross-store/src/test/resources/logback.xml deleted file mode 100644 index 5ecc71909e..0000000000 --- a/spring-data-mongodb-cross-store/src/test/resources/logback.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - %d %5p %40.40c:%4L - %m%n - - - - - - - - - - \ No newline at end of file diff --git a/spring-data-mongodb-distribution/package.json b/spring-data-mongodb-distribution/package.json new file mode 100644 index 0000000000..4689506b3f --- /dev/null +++ b/spring-data-mongodb-distribution/package.json @@ -0,0 +1,10 @@ +{ + "dependencies": { + "antora": "3.2.0-alpha.6", + "@antora/atlas-extension": "1.0.0-alpha.2", + "@antora/collector-extension": "1.0.0-alpha.7", + "@asciidoctor/tabs": "1.0.0-beta.6", + "@springio/antora-extensions": "1.13.0", + "@springio/asciidoctor-extensions": "1.0.0-alpha.11" 
+ } +} diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index e5c865ea08..58c63dfc97 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -1,5 +1,7 @@ - + 4.0.0 @@ -13,30 +15,62 @@ org.springframework.data spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT + 4.5.0-SNAPSHOT ../pom.xml ${basedir}/.. - SDMONGO + ${project.basedir}/../src/main/antora/antora-playbook.yml + + + ${project.basedir}/../src/main/antora/resources/antora-resources + true + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.4.0 + + + timestamp-property + + timestamp-property + + validate + + current.year + yyyy + + + + org.apache.maven.plugins - maven-assembly-plugin + maven-resources-plugin + + + + resources + + + - org.codehaus.mojo - wagon-maven-plugin + org.apache.maven.plugins + maven-assembly-plugin - org.asciidoctor - asciidoctor-maven-plugin + org.antora + antora-maven-plugin + diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 784df0dc8c..b842a2def3 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -1,5 +1,7 @@ - + 4.0.0 @@ -11,7 +13,7 @@ org.springframework.data spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT + 4.5.0-SNAPSHOT ../pom.xml @@ -19,11 +21,36 @@ 1.3 1.7.8 spring.data.mongodb + ${basedir}/.. 
1.01 + + + org.mongodb + mongodb-driver-core + + + + org.mongodb + mongodb-driver-sync + true + + + + org.mongodb + mongodb-driver-reactivestreams + true + + + + org.mongodb + mongodb-crypt + true + + org.springframework @@ -64,6 +91,12 @@ querydsl-mongodb ${querydsl} true + + + org.mongodb + mongo-java-driver + + @@ -80,33 +113,15 @@ true - - org.mongodb - mongodb-driver-reactivestreams - ${mongo.reactivestreams} + com.google.code.findbugs + jsr305 + 3.0.2 true - org.mongodb - mongodb-driver-async - ${mongo} - true - - - org.mongodb - mongodb-driver-core - - - org.mongodb - bson - - - - - - io.projectreactor + io.projectreactor reactor-core true @@ -118,35 +133,21 @@ - io.reactivex - rxjava - ${rxjava} - true - - - - io.reactivex - rxjava-reactive-streams - ${rxjava-reactive-streams} - true + org.awaitility + awaitility + ${awaitility} + test - io.reactivex.rxjava2 + io.reactivex.rxjava3 rxjava - ${rxjava2} + ${rxjava3} true - - org.apache.geronimo.specs - geronimo-jcdi_2.0_spec - 1.0.1 - test - - javax.interceptor javax.interceptor-api @@ -155,17 +156,17 @@ - javax.enterprise - cdi-api + jakarta.enterprise + jakarta.enterprise.cdi-api ${cdi} provided true - javax.annotation - javax.annotation-api - ${javax-annotation-api} + jakarta.annotation + jakarta.annotation-api + ${jakarta-annotation-api} test @@ -178,8 +179,8 @@ - javax.validation - validation-api + jakarta.validation + jakarta.validation-api ${validation} true @@ -192,38 +193,44 @@ - org.hibernate - hibernate-validator - 5.2.4.Final - test + io.micrometer + micrometer-observation + true + + + + io.micrometer + micrometer-tracing + true - joda-time - joda-time - ${jodatime} + org.hibernate.validator + hibernate-validator + 7.0.1.Final test - org.threeten - threetenbp - ${threetenbp} + jakarta.el + jakarta.el-api + 4.0.0 + provided true - com.fasterxml.jackson.core - jackson-databind - ${jackson} + org.glassfish + jakarta.el + 4.0.2 + provided true - org.slf4j - jul-to-slf4j - ${slf4j} - test + 
com.fasterxml.jackson.core + jackson-databind + true @@ -253,52 +260,123 @@ test + + org.junit-pioneer + junit-pioneer + 0.5.3 + test + + + + org.junit.platform + junit-platform-launcher + test + + + + org.testcontainers + junit-jupiter + ${testcontainers} + test + + + + org.testcontainers + mongodb + ${testcontainers} + test + + + + jakarta.transaction + jakarta.transaction-api + 2.0.0 + test + + org.jetbrains.kotlin kotlin-stdlib - ${kotlin} true + org.jetbrains.kotlin kotlin-reflect - ${kotlin} true + - org.jetbrains.kotlin - kotlin-test - ${kotlin} + org.jetbrains.kotlinx + kotlinx-coroutines-core + true + + + + org.jetbrains.kotlinx + kotlinx-coroutines-reactor + true + + + + io.mockk + mockk-jvm + ${mockk} test + - com.nhaarman - mockito-kotlin - 1.5.0 + io.micrometer + micrometer-test test - org.jetbrains.kotlin - kotlin-stdlib - - - org.jetbrains.kotlin - kotlin-reflect - - - org.mockito - mockito-core + com.github.tomakehurst + wiremock-jre8-standalone + + io.micrometer + micrometer-tracing-test + test + + + + io.micrometer + micrometer-tracing-integration-test + test + + + + + org.jmolecules + jmolecules-ddd + ${jmolecules} + test + - + + + org.apache.maven.plugins + maven-compiler-plugin + + + + org.openjdk.jmh + jmh-generator-annprocess + ${jmh} + + + + + com.mysema.maven apt-maven-plugin @@ -317,8 +395,11 @@ test-process - target/generated-test-sources - org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor + target/generated-test-sources + + + org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor + @@ -328,6 +409,7 @@ org.apache.maven.plugins maven-surefire-plugin + false false **/*Tests.java @@ -337,19 +419,17 @@ **/ReactivePerformanceTests.java - src/test/resources/logging.properties + ${mongo} + ${env.MONGO_VERSION} + + src/test/resources/logging.properties + true - - - listener - org.springframework.data.mongodb.test.util.CleanMongoDBJunitRunListener - - - + - + diff --git 
a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java similarity index 94% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java index da457264c0..3b0c72cc0b 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,7 @@ import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.TearDown; + import org.springframework.beans.factory.annotation.Value; import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery; @@ -27,8 +28,8 @@ import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; -import com.mongodb.MongoClient; -import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; import com.mongodb.client.MongoCollection; /** @@ -56,7 +57,7 @@ public class 
ProjectionsBenchmark extends AbstractMicrobenchmark { @Setup public void setUp() { - client = new MongoClient(new ServerAddress()); + client = MongoClients.create(); template = new MongoTemplate(client, DB_NAME); source = new Person(); @@ -83,7 +84,7 @@ public void setUp() { @TearDown public void tearDown() { - client.dropDatabase(DB_NAME); + client.getDatabase(DB_NAME).drop(); client.close(); } diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java similarity index 77% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java index b10eb6b4ad..53f64f2a50 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,8 +18,6 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import lombok.Data; - import java.util.ArrayList; import java.util.List; @@ -29,14 +27,15 @@ import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.TearDown; + import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; -import com.mongodb.MongoClient; -import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; /** * @author Christoph Strobl @@ -55,7 +54,7 @@ public class DbRefMappingBenchmark extends AbstractMicrobenchmark { @Setup public void setUp() throws Exception { - client = new MongoClient(new ServerAddress()); + client = MongoClients.create(); template = new MongoTemplate(client, DB_NAME); List refObjects = new ArrayList<>(); @@ -80,7 +79,7 @@ public void setUp() throws Exception { @TearDown public void tearDown() { - client.dropDatabase(DB_NAME); + client.getDatabase(DB_NAME).drop(); client.close(); } @@ -94,18 +93,56 @@ public ObjectWithDBRef readMultipleDbRefs() { return template.findOne(queryObjectWithDBRefList, ObjectWithDBRef.class); } - @Data static class ObjectWithDBRef { private @Id ObjectId id; private @DBRef RefObject ref; private @DBRef List refList; + + public ObjectId getId() { + return id; + } + + public void setId(ObjectId id) { + this.id = id; + } + + 
public RefObject getRef() { + return ref; + } + + public void setRef(RefObject ref) { + this.ref = ref; + } + + public List getRefList() { + return refList; + } + + public void setRefList(List refList) { + this.refList = refList; + } } - @Data static class RefObject { private @Id String id; private String someValue; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getSomeValue() { + return someValue; + } + + public void setSomeValue(String someValue) { + this.someValue = someValue; + } } } diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java similarity index 74% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java index 3d6cd34c43..00d2e7034a 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,6 @@ */ package org.springframework.data.mongodb.core.convert; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.Getter; -import lombok.RequiredArgsConstructor; - import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; @@ -29,25 +24,29 @@ import org.bson.Document; import org.bson.types.ObjectId; +import org.junit.platform.commons.annotation.Testable; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.TearDown; + import org.springframework.data.annotation.Id; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; +import org.springframework.util.ObjectUtils; -import com.mongodb.MongoClient; -import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; /** * @author Christoph Strobl */ @State(Scope.Benchmark) +@Testable public class MappingMongoConverterBenchmark extends AbstractMicrobenchmark { private static final String DB_NAME = "mapping-mongo-converter-benchmark"; @@ -64,13 +63,13 @@ public class MappingMongoConverterBenchmark extends AbstractMicrobenchmark { @Setup public void setUp() throws Exception { - client = new MongoClient(new ServerAddress()); + client = 
MongoClients.create(); this.mappingContext = new MongoMappingContext(); this.mappingContext.setInitialEntitySet(Collections.singleton(Customer.class)); this.mappingContext.afterPropertiesSet(); - DbRefResolver dbRefResolver = new DefaultDbRefResolver(new SimpleMongoDbFactory(client, DB_NAME)); + DbRefResolver dbRefResolver = new DefaultDbRefResolver(new SimpleMongoClientDatabaseFactory(client, DB_NAME)); this.converter = new MappingMongoConverter(dbRefResolver, mappingContext); this.converter.setCustomConversions(new MongoCustomConversions(Collections.emptyList())); @@ -116,7 +115,7 @@ public void setUp() throws Exception { @TearDown public void tearDown() { - client.dropDatabase(DB_NAME); + client.getDatabase(DB_NAME).drop(); client.close(); } @@ -151,22 +150,36 @@ public Object writeObjectWithListAndMapsOfComplexType() { return sink; } - @Getter - @RequiredArgsConstructor static class Customer { private @Id ObjectId id; private final String firstname, lastname; private final Address address; + + public Customer(String firstname, String lastname, Address address) { + this.firstname = firstname; + this.lastname = lastname; + this.address = address; + } } - @Getter - @AllArgsConstructor static class Address { private String zipCode, city; + + public Address(String zipCode, String city) { + this.zipCode = zipCode; + this.city = city; + } + + public String getZipCode() { + return zipCode; + } + + public String getCity() { + return city; + } } - @Data static class SlightlyMoreComplexObject { @Id String id; @@ -177,5 +190,59 @@ static class SlightlyMoreComplexObject { Customer customer; List
addressList; Map customerMap; + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof SlightlyMoreComplexObject)) { + return false; + } + SlightlyMoreComplexObject that = (SlightlyMoreComplexObject) o; + if (intOne != that.intOne) { + return false; + } + if (intTwo != that.intTwo) { + return false; + } + if (!ObjectUtils.nullSafeEquals(id, that.id)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(stringOne, that.stringOne)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(stringTwo, that.stringTwo)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(renamedField, that.renamedField)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(location, that.location)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(customer, that.customer)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(addressList, that.addressList)) { + return false; + } + return ObjectUtils.nullSafeEquals(customerMap, that.customerMap); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(id); + result = 31 * result + intOne; + result = 31 * result + intTwo; + result = 31 * result + ObjectUtils.nullSafeHashCode(stringOne); + result = 31 * result + ObjectUtils.nullSafeHashCode(stringTwo); + result = 31 * result + ObjectUtils.nullSafeHashCode(renamedField); + result = 31 * result + ObjectUtils.nullSafeHashCode(location); + result = 31 * result + ObjectUtils.nullSafeHashCode(customer); + result = 31 * result + ObjectUtils.nullSafeHashCode(addressList); + result = 31 * result + ObjectUtils.nullSafeHashCode(customerMap); + return result; + } } } diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java similarity index 97% rename from 
spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java index f10e03c5de..615500904d 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,7 +21,6 @@ import java.util.Collection; import java.util.Date; -import org.junit.Test; import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Scope; @@ -33,6 +32,7 @@ import org.openjdk.jmh.runner.options.ChainedOptionsBuilder; import org.openjdk.jmh.runner.options.OptionsBuilder; import org.openjdk.jmh.runner.options.TimeValue; + import org.springframework.core.env.StandardEnvironment; import org.springframework.util.CollectionUtils; import org.springframework.util.ResourceUtils; @@ -41,8 +41,8 @@ /** * @author Christoph Strobl */ -@Warmup(iterations = AbstractMicrobenchmark.WARMUP_ITERATIONS) -@Measurement(iterations = AbstractMicrobenchmark.MEASUREMENT_ITERATIONS) +@Warmup(iterations = AbstractMicrobenchmark.WARMUP_ITERATIONS, time = 2) +@Measurement(iterations = AbstractMicrobenchmark.MEASUREMENT_ITERATIONS, time = 2) 
@Fork(AbstractMicrobenchmark.FORKS) @State(Scope.Thread) public class AbstractMicrobenchmark { @@ -62,7 +62,6 @@ public class AbstractMicrobenchmark { * @throws Exception * @see #options(String) */ - @Test public void run() throws Exception { String includes = includes(); @@ -322,7 +321,7 @@ private void publishResults(Collection results) { try { ResultsWriter.forUri(uri).write(results); } catch (Exception e) { - System.err.println(String.format("Cannot save benchmark results to '%s'. Error was %s.", uri, e)); + System.err.println(String.format("Cannot save benchmark results to '%s'; Error was %s", uri, e)); } } } diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java similarity index 89% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java index 2eec70441a..af56908755 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,8 @@ */ package org.springframework.data.mongodb.microbenchmark; -import lombok.SneakyThrows; +import java.io.IOException; import java.io.OutputStream; import java.net.HttpURLConnection; import java.net.URL; @@ -43,13 +43,20 @@ class HttpResultsWriter implements ResultsWriter { } @Override - @SneakyThrows public void write(Collection results) { if (CollectionUtils.isEmpty(results)) { return; } + try { + doWrite(results); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private void doWrite(Collection results) throws IOException { StandardEnvironment env = new StandardEnvironment(); String projectVersion = env.getProperty("project.version", "unknown"); diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java similarity index 84% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java index d7166863f8..2114d2a06a 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,15 +21,16 @@ import org.bson.Document; import org.openjdk.jmh.results.RunResult; + import org.springframework.core.env.StandardEnvironment; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; import com.mongodb.BasicDBObject; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientURI; +import com.mongodb.ConnectionString; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; import com.mongodb.client.MongoDatabase; -import com.mongodb.util.JSON; /** * MongoDB specific {@link ResultsWriter} implementation. @@ -56,13 +57,14 @@ public void write(Collection results) { String gitDirty = env.getProperty("git.dirty", "no"); String gitCommitId = env.getProperty("git.commit.id", "unknown"); - MongoClientURI uri = new MongoClientURI(this.uri); - MongoClient client = new MongoClient(uri); + ConnectionString connectionString = new ConnectionString(this.uri); + MongoClient client = MongoClients.create(this.uri); - String dbName = StringUtils.hasText(uri.getDatabase()) ? uri.getDatabase() : "spring-data-mongodb-benchmarks"; + String dbName = StringUtils.hasText(connectionString.getDatabase()) ? 
connectionString.getDatabase() + : "spring-data-mongodb-benchmarks"; MongoDatabase db = client.getDatabase(dbName); - for (BasicDBObject dbo : (List) JSON.parse(ResultsWriter.jsonifyResults(results))) { + for (Document dbo : (List) Document.parse(ResultsWriter.jsonifyResults(results))) { String collectionName = extractClass(dbo.get("benchmark").toString()); diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java similarity index 87% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java index 73b4d04b44..95da1750bc 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,6 @@ */ package org.springframework.data.mongodb.microbenchmark; -import lombok.SneakyThrows; - import java.io.ByteArrayOutputStream; import java.io.PrintStream; import java.nio.charset.StandardCharsets; @@ -54,13 +52,12 @@ static ResultsWriter forUri(String uri) { * * @param results * @return json string representation of results. 
- * @see org.openjdk.jmh.results.format.JSONResultFormat */ - @SneakyThrows static String jsonifyResults(Collection results) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); - ResultFormatFactory.getInstance(ResultFormatType.JSON, new PrintStream(baos, true, "UTF-8")).writeOut(results); + ResultFormatFactory.getInstance(ResultFormatType.JSON, new PrintStream(baos, true, StandardCharsets.UTF_8)) + .writeOut(results); return new String(baos.toByteArray(), StandardCharsets.UTF_8); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java new file mode 100644 index 0000000000..1f6875c080 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java @@ -0,0 +1,149 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import java.util.Arrays; + +import org.bson.Document; +import org.bson.codecs.DocumentCodec; +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * A {@link MongoExpression} using the {@link ParameterBindingDocumentCodec} for parsing a raw ({@literal json}) + * expression. The expression will be wrapped within { ... } if necessary. The actual parsing and parameter + * binding of placeholders like {@code ?0} is delayed upon first call on the target {@link Document} via + * {@link #toDocument()}. + *
+ * + *
+ * $toUpper : $name                -> { '$toUpper' : '$name' }
+ *
+ * { '$toUpper' : '$name' }        -> { '$toUpper' : '$name' }
+ *
+ * { '$toUpper' : '?0' }, "$name"  -> { '$toUpper' : '$name' }
+ * 
+ * + * Some types might require a special {@link org.bson.codecs.Codec}. If so, make sure to provide a {@link CodecRegistry} + * containing the required {@link org.bson.codecs.Codec codec} via {@link #withCodecRegistry(CodecRegistry)}. + * + * @author Christoph Strobl + * @author Giacomo Baso + * @since 3.2 + */ +public class BindableMongoExpression implements MongoExpression { + + private final String expressionString; + + private final @Nullable CodecRegistryProvider codecRegistryProvider; + + private final @Nullable Object[] args; + + private final Lazy target; + + /** + * Create a new instance of {@link BindableMongoExpression}. + * + * @param expression must not be {@literal null}. + * @param args can be {@literal null}. + */ + public BindableMongoExpression(String expression, @Nullable Object[] args) { + this(expression, null, args); + } + + /** + * Create a new instance of {@link BindableMongoExpression}. + * + * @param expression must not be {@literal null}. + * @param codecRegistryProvider can be {@literal null}. + * @param args can be {@literal null}. + */ + public BindableMongoExpression(String expression, @Nullable CodecRegistryProvider codecRegistryProvider, + @Nullable Object[] args) { + + Assert.notNull(expression, "Expression must not be null"); + + this.expressionString = expression; + this.codecRegistryProvider = codecRegistryProvider; + this.args = args; + this.target = Lazy.of(this::parse); + } + + /** + * Provide the {@link CodecRegistry} used to convert expressions. + * + * @param codecRegistry must not be {@literal null}. + * @return new instance of {@link BindableMongoExpression}. + */ + public BindableMongoExpression withCodecRegistry(CodecRegistry codecRegistry) { + return new BindableMongoExpression(expressionString, () -> codecRegistry, args); + } + + /** + * Provide the arguments to bind to the placeholders via their index. + * + * @param args must not be {@literal null}. + * @return new instance of {@link BindableMongoExpression}. 
+ */ + public BindableMongoExpression bind(Object... args) { + return new BindableMongoExpression(expressionString, codecRegistryProvider, args); + } + + @Override + public Document toDocument() { + return target.get(); + } + + @Override + public String toString() { + return "BindableMongoExpression{" + "expressionString='" + expressionString + '\'' + ", args=" + + Arrays.toString(args) + '}'; + } + + private Document parse() { + + String expression = wrapJsonIfNecessary(expressionString); + + if (ObjectUtils.isEmpty(args)) { + + if (codecRegistryProvider == null) { + return Document.parse(expression); + } + + return Document.parse(expression, codecRegistryProvider.getCodecFor(Document.class) + .orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry()))); + } + + ParameterBindingDocumentCodec codec = codecRegistryProvider == null ? new ParameterBindingDocumentCodec() + : new ParameterBindingDocumentCodec(codecRegistryProvider.getCodecRegistry()); + return codec.decode(expression, args); + } + + private static String wrapJsonIfNecessary(String json) { + + if(!StringUtils.hasText(json)) { + return json; + } + + String raw = json.trim(); + return (raw.startsWith("{") && raw.endsWith("}")) ? raw : "{%s}".formatted(raw); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java index 5b4120389d..b36382a58e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,9 +19,9 @@ import org.springframework.dao.DataAccessException; -import com.mongodb.BulkWriteError; -import com.mongodb.BulkWriteException; -import com.mongodb.BulkWriteResult; +import com.mongodb.MongoBulkWriteException; +import com.mongodb.bulk.BulkWriteError; +import com.mongodb.bulk.BulkWriteResult; /** * Is thrown when errors occur during bulk operations. @@ -38,12 +38,12 @@ public class BulkOperationException extends DataAccessException { private final BulkWriteResult result; /** - * Creates a new {@link BulkOperationException} with the given message and source {@link BulkWriteException}. + * Creates a new {@link BulkOperationException} with the given message and source {@link MongoBulkWriteException}. * * @param message must not be {@literal null}. * @param source must not be {@literal null}. */ - public BulkOperationException(String message, BulkWriteException source) { + public BulkOperationException(String message, MongoBulkWriteException source) { super(message, source); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CannotGetMongoDbConnectionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CannotGetMongoDbConnectionException.java deleted file mode 100644 index 4d7d4d8752..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CannotGetMongoDbConnectionException.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2010-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb; - -import org.springframework.dao.DataAccessResourceFailureException; -import org.springframework.data.authentication.UserCredentials; -import org.springframework.lang.Nullable; - -/** - * Exception being thrown in case we cannot connect to a MongoDB instance. - * - * @author Oliver Gierke - * @author Mark Paluch - */ -public class CannotGetMongoDbConnectionException extends DataAccessResourceFailureException { - - private final UserCredentials credentials; - private final @Nullable String database; - - private static final long serialVersionUID = 1172099106475265589L; - - public CannotGetMongoDbConnectionException(String msg, Throwable cause) { - super(msg, cause); - this.database = null; - this.credentials = UserCredentials.NO_CREDENTIALS; - } - - public CannotGetMongoDbConnectionException(String msg) { - this(msg, null, UserCredentials.NO_CREDENTIALS); - } - - public CannotGetMongoDbConnectionException(String msg, @Nullable String database, UserCredentials credentials) { - super(msg); - this.database = database; - this.credentials = credentials; - } - - /** - * Returns the {@link UserCredentials} that were used when trying to connect to the MongoDB instance. - * - * @return - */ - public UserCredentials getCredentials() { - return this.credentials; - } - - /** - * Returns the name of the database trying to be accessed. 
- * - * @return - */ - @Nullable - public String getDatabase() { - return database; - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java new file mode 100644 index 0000000000..53acf65470 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java @@ -0,0 +1,48 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.dao.NonTransientDataAccessException; +import org.springframework.lang.Nullable; + +/** + * {@link NonTransientDataAccessException} specific to MongoDB {@link com.mongodb.session.ClientSession} related data + * access failures such as reading data using an already closed session. + * + * @author Christoph Strobl + * @since 2.1 + */ +public class ClientSessionException extends NonTransientDataAccessException { + + /** + * Constructor for {@link ClientSessionException}. + * + * @param msg the detail message. Must not be {@literal null}. + */ + public ClientSessionException(String msg) { + super(msg); + } + + /** + * Constructor for {@link ClientSessionException}. + * + * @param msg the detail message. Can be {@literal null}. + * @param cause the root cause. Can be {@literal null}. 
+ */ + public ClientSessionException(@Nullable String msg, @Nullable Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java index 6c1992fc54..53515f9fcd 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -62,7 +62,7 @@ default boolean hasCodecFor(Class type) { */ default Optional> getCodecFor(Class type) { - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); try { return Optional.of(getCodecRegistry().get(type)); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolver.java new file mode 100644 index 0000000000..c07e2dbe4a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolver.java @@ -0,0 +1,59 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.util.Map; +import java.util.Set; + +import org.springframework.lang.Nullable; + +/** + * Default implementation of {@link MongoTransactionOptions} using {@literal mongo:} as {@link #getLabelPrefix() label + * prefix} creating {@link SimpleMongoTransactionOptions} out of a given argument {@link Map}. Uses + * {@link SimpleMongoTransactionOptions#KNOWN_KEYS} to validate entries in arguments to resolve and errors on unknown + * entries. + * + * @author Christoph Strobl + * @since 4.3 + */ +enum DefaultMongoTransactionOptionsResolver implements MongoTransactionOptionsResolver { + + INSTANCE; + + private static final String PREFIX = "mongo:"; + + @Override + public MongoTransactionOptions convert(Map options) { + + validateKeys(options.keySet()); + return SimpleMongoTransactionOptions.of(options); + } + + @Nullable + @Override + public String getLabelPrefix() { + return PREFIX; + } + + private static void validateKeys(Set keys) { + + if (!SimpleMongoTransactionOptions.KNOWN_KEYS.containsAll(keys)) { + + throw new IllegalArgumentException("Transaction labels contained invalid values. 
Has to be one of %s" + .formatted(SimpleMongoTransactionOptions.KNOWN_KEYS)); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java index 24d35908c5..f95a3c5310 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java index bbcd737854..3fc3f82fbf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -25,8 +25,10 @@ public class LazyLoadingException extends UncategorizedDataAccessException { private static final long serialVersionUID = -7089224903873220037L; /** - * @param msg - * @param cause + * Constructor for LazyLoadingException. + * + * @param msg the detail message. + * @param cause the exception thrown by underlying data access API. */ public LazyLoadingException(String msg, Throwable cause) { super(msg, cause); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java index 3fdff0ec6a..72b2794d05 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,8 +20,8 @@ /** * Helper class featuring helper methods for working with MongoDb collections. - *

- *

+ *
+ *
* Mainly intended for internal use within the framework. * * @author Thomas Risberg diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java new file mode 100644 index 0000000000..1fcd5de516 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java @@ -0,0 +1,112 @@ +/* + * Copyright 2011-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoDatabase; + +/** + * Interface for factories creating {@link MongoDatabase} instances. + * + * @author Mark Pollack + * @author Thomas Darimont + * @author Christoph Strobl + * @since 3.0 + */ +public interface MongoDatabaseFactory extends CodecRegistryProvider, MongoSessionProvider { + + /** + * Obtain a {@link MongoDatabase} from the underlying factory. + * + * @return never {@literal null}. 
+ * @throws DataAccessException + */ + MongoDatabase getMongoDatabase() throws DataAccessException; + + /** + * Obtain a {@link MongoDatabase} instance to access the database with the given name. + * + * @param dbName must not be {@literal null}. + * @return never {@literal null}. + * @throws DataAccessException + */ + MongoDatabase getMongoDatabase(String dbName) throws DataAccessException; + + /** + * Exposes a shared {@link MongoExceptionTranslator}. + * + * @return will never be {@literal null}. + */ + PersistenceExceptionTranslator getExceptionTranslator(); + + /** + * Get the underlying {@link CodecRegistry} used by the MongoDB Java driver. + * + * @return never {@literal null}. + */ + @Override + default CodecRegistry getCodecRegistry() { + return getMongoDatabase().getCodecRegistry(); + } + + /** + * Obtain a {@link ClientSession} for given ClientSessionOptions. + * + * @param options must not be {@literal null}. + * @return never {@literal null}. + * @since 2.1 + */ + ClientSession getSession(ClientSessionOptions options); + + /** + * Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase} + * instances that are aware and bound to a new session with given {@link ClientSessionOptions options}. + * + * @param options must not be {@literal null}. + * @return never {@literal null}. + * @since 2.1 + */ + default MongoDatabaseFactory withSession(ClientSessionOptions options) { + return withSession(getSession(options)); + } + + /** + * Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase} + * instances that are aware and bound to the given session. + * + * @param session must not be {@literal null}. + * @return never {@literal null}. 
+ * @since 2.1 + */ + MongoDatabaseFactory withSession(ClientSession session); + + /** + * Returns if the given {@link MongoDatabaseFactory} is bound to a {@link ClientSession} that has an + * {@link ClientSession#hasActiveTransaction() active transaction}. + * + * @return {@literal true} if there's an active transaction, {@literal false} otherwise. + * @since 2.1.3 + */ + default boolean isTransactionActive() { + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java new file mode 100644 index 0000000000..f73f9fb7ed --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java @@ -0,0 +1,227 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import org.springframework.lang.Nullable; +import org.springframework.transaction.support.ResourceHolderSynchronization; +import org.springframework.transaction.support.TransactionSynchronization; +import org.springframework.transaction.support.TransactionSynchronizationManager; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoDatabase; + +/** + * Helper class for managing a {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining + * {@link ClientSession session bound} resources, such as {@link MongoDatabase} and + * {@link com.mongodb.client.MongoCollection} suitable for transactional usage. + *
+ * Note: Intended for internal usage only. + * + * @author Christoph Strobl + * @author Mark Paluch + * @currentRead Shadow's Edge - Brent Weeks + * @since 2.1 + */ +public class MongoDatabaseUtils { + + /** + * Obtain the default {@link MongoDatabase database} from the given {@link MongoDatabaseFactory factory} using + * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. + *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current + * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static MongoDatabase getDatabase(MongoDatabaseFactory factory) { + return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + } + + /** + * Obtain the default {@link MongoDatabase database} from the given {@link MongoDatabaseFactory factory}. + *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current + * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @param sessionSynchronization the synchronization to use. Must not be {@literal null}. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static MongoDatabase getDatabase(MongoDatabaseFactory factory, SessionSynchronization sessionSynchronization) { + return doGetMongoDatabase(null, factory, sessionSynchronization); + } + + /** + * Obtain the {@link MongoDatabase database} with given name from the given {@link MongoDatabaseFactory factory} using + * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. + *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current + * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param dbName the name of the {@link MongoDatabase} to get. + * @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFactory factory) { + return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + } + + /** + * Obtain the {@link MongoDatabase database} with given name from the given {@link MongoDatabaseFactory factory}. + *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current + * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param dbName the name of the {@link MongoDatabase} to get. + * @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @param sessionSynchronization the synchronization to use. Must not be {@literal null}. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFactory factory, + SessionSynchronization sessionSynchronization) { + return doGetMongoDatabase(dbName, factory, sessionSynchronization); + } + + private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDatabaseFactory factory, + SessionSynchronization sessionSynchronization) { + + Assert.notNull(factory, "Factory must not be null"); + + if (sessionSynchronization == SessionSynchronization.NEVER + || !TransactionSynchronizationManager.isSynchronizationActive()) { + return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase(); + } + + ClientSession session = doGetSession(factory, sessionSynchronization); + + if (session == null) { + return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase(); + } + + MongoDatabaseFactory factoryToUse = factory.withSession(session); + return StringUtils.hasText(dbName) ? 
factoryToUse.getMongoDatabase(dbName) : factoryToUse.getMongoDatabase(); + } + + /** + * Check if the {@link MongoDatabaseFactory} is actually bound to a {@link ClientSession} that has an active + * transaction, or if a {@link TransactionSynchronization} has been registered for the {@link MongoDatabaseFactory + * resource} and if the associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active + * transaction}. + * + * @param dbFactory the resource to check transactions for. Must not be {@literal null}. + * @return {@literal true} if the factory has an ongoing transaction. + * @since 2.1.3 + */ + public static boolean isTransactionActive(MongoDatabaseFactory dbFactory) { + + if (dbFactory.isTransactionActive()) { + return true; + } + + MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager.getResource(dbFactory); + return resourceHolder != null && resourceHolder.hasActiveTransaction(); + } + + @Nullable + private static ClientSession doGetSession(MongoDatabaseFactory dbFactory, + SessionSynchronization sessionSynchronization) { + + MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager.getResource(dbFactory); + + // check for native MongoDB transaction + if (resourceHolder != null && (resourceHolder.hasSession() || resourceHolder.isSynchronizedWithTransaction())) { + + if (!resourceHolder.hasSession()) { + resourceHolder.setSession(createClientSession(dbFactory)); + } + + return resourceHolder.getSession(); + } + + if (SessionSynchronization.ON_ACTUAL_TRANSACTION.equals(sessionSynchronization)) { + return null; + } + + // init a non native MongoDB transaction by registering a MongoSessionSynchronization + + resourceHolder = new MongoResourceHolder(createClientSession(dbFactory), dbFactory); + resourceHolder.getRequiredSession().startTransaction(); + + TransactionSynchronizationManager + .registerSynchronization(new MongoSessionSynchronization(resourceHolder, 
dbFactory)); + resourceHolder.setSynchronizedWithTransaction(true); + TransactionSynchronizationManager.bindResource(dbFactory, resourceHolder); + + return resourceHolder.getSession(); + } + + private static ClientSession createClientSession(MongoDatabaseFactory dbFactory) { + return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + } + + /** + * MongoDB specific {@link ResourceHolderSynchronization} for resource cleanup at the end of a transaction when + * participating in a non-native MongoDB transaction, such as a Jta or JDBC transaction. + * + * @author Christoph Strobl + * @since 2.1 + */ + private static class MongoSessionSynchronization extends ResourceHolderSynchronization { + + private final MongoResourceHolder resourceHolder; + + MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDatabaseFactory dbFactory) { + + super(resourceHolder, dbFactory); + this.resourceHolder = resourceHolder; + } + + @Override + protected boolean shouldReleaseBeforeCompletion() { + return false; + } + + @Override + protected void processResourceAfterCommit(MongoResourceHolder resourceHolder) { + + if (resourceHolder.hasActiveTransaction()) { + resourceHolder.getRequiredSession().commitTransaction(); + } + } + + @Override + public void afterCompletion(int status) { + + if (status == TransactionSynchronization.STATUS_ROLLED_BACK && this.resourceHolder.hasActiveTransaction()) { + resourceHolder.getRequiredSession().abortTransaction(); + } + + super.afterCompletion(status); + } + + @Override + protected void releaseResource(MongoResourceHolder resourceHolder, Object resourceKey) { + + if (resourceHolder.hasActiveSession()) { + resourceHolder.getRequiredSession().close(); + } + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java deleted file mode 100644 index b46e624ce3..0000000000 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb; - -import org.bson.codecs.configuration.CodecRegistry; -import org.springframework.dao.DataAccessException; -import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.mongodb.core.MongoExceptionTranslator; - -import com.mongodb.DB; -import com.mongodb.client.MongoDatabase; - -/** - * Interface for factories creating {@link DB} instances. - * - * @author Mark Pollack - * @author Thomas Darimont - * @author Christoph Strobl - */ -public interface MongoDbFactory extends CodecRegistryProvider { - - /** - * Creates a default {@link DB} instance. - * - * @return - * @throws DataAccessException - */ - MongoDatabase getDb() throws DataAccessException; - - /** - * Creates a {@link DB} instance to access the database with the given name. - * - * @param dbName must not be {@literal null} or empty. - * @return - * @throws DataAccessException - */ - MongoDatabase getDb(String dbName) throws DataAccessException; - - /** - * Exposes a shared {@link MongoExceptionTranslator}. - * - * @return will never be {@literal null}. 
- */ - PersistenceExceptionTranslator getExceptionTranslator(); - - DB getLegacyDb(); - - /** - * Get the underlying {@link CodecRegistry} used by the MongoDB Java driver. - * - * @return never {@literal null}. - */ - @Override - default CodecRegistry getCodecRegistry() { - return getDb().getCodecRegistry(); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java new file mode 100644 index 0000000000..a087439d72 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java @@ -0,0 +1,73 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +/** + * Wrapper object for MongoDB expressions like {@code $toUpper : $name} that manifest as {@link org.bson.Document} when + * passed on to the driver. + *
+ * A set of predefined {@link MongoExpression expressions}, including a + * {@link org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression SpEL based variant} for method + * like expressions (eg. {@code toUpper(name)}) are available via the + * {@link org.springframework.data.mongodb.core.aggregation Aggregation API}. + * + * @author Christoph Strobl + * @since 3.2 + * @see org.springframework.data.mongodb.core.aggregation.ArithmeticOperators + * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators + * @see org.springframework.data.mongodb.core.aggregation.ComparisonOperators + * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators + * @see org.springframework.data.mongodb.core.aggregation.ConvertOperators + * @see org.springframework.data.mongodb.core.aggregation.DateOperators + * @see org.springframework.data.mongodb.core.aggregation.ObjectOperators + * @see org.springframework.data.mongodb.core.aggregation.SetOperators + * @see org.springframework.data.mongodb.core.aggregation.StringOperators + */ +@FunctionalInterface +public interface MongoExpression { + + /** + * Create a new {@link MongoExpression} from plain {@link String} (eg. {@code $toUpper : $name}).
+ * The given expression will be wrapped with { ... } to match an actual MongoDB {@link org.bson.Document} + * if necessary. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link MongoExpression}. + */ + static MongoExpression create(String expression) { + return new BindableMongoExpression(expression, null); + } + + /** + * Create a new {@link MongoExpression} from plain {@link String} containing placeholders (eg. {@code $toUpper : ?0}) + * that will be resolved on first call of {@link #toDocument()}.
+ * The given expression will be wrapped with { ... } to match an actual MongoDB {@link org.bson.Document} + * if necessary. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link MongoExpression}. + */ + static MongoExpression create(String expression, Object... args) { + return new BindableMongoExpression(expression, args); + } + + /** + * Obtain the native {@link org.bson.Document} representation. + * + * @return never {@literal null}. + */ + org.bson.Document toDocument(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoManagedTypes.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoManagedTypes.java new file mode 100644 index 0000000000..39c4815d47 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoManagedTypes.java @@ -0,0 +1,81 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import java.util.Arrays; +import java.util.function.Consumer; + +import org.springframework.data.domain.ManagedTypes; + +/** + * @author Christoph Strobl + * @since 4.0 + */ +public final class MongoManagedTypes implements ManagedTypes { + + private final ManagedTypes delegate; + + private MongoManagedTypes(ManagedTypes types) { + this.delegate = types; + } + + /** + * Wraps an existing {@link ManagedTypes} object with {@link MongoManagedTypes}. + * + * @param managedTypes + * @return + */ + public static MongoManagedTypes from(ManagedTypes managedTypes) { + return new MongoManagedTypes(managedTypes); + } + + /** + * Factory method used to construct {@link MongoManagedTypes} from the given array of {@link Class types}. + * + * @param types array of {@link Class types} used to initialize the {@link ManagedTypes}; must not be {@literal null}. + * @return new instance of {@link MongoManagedTypes} initialized from {@link Class types}. + */ + public static MongoManagedTypes from(Class... types) { + return fromIterable(Arrays.asList(types)); + } + + /** + * Factory method used to construct {@link MongoManagedTypes} from the given, required {@link Iterable} of + * {@link Class types}. + * + * @param types {@link Iterable} of {@link Class types} used to initialize the {@link ManagedTypes}; must not be + * {@literal null}. + * @return new instance of {@link MongoManagedTypes} initialized the given, required {@link Iterable} of {@link Class + * types}. + */ + public static MongoManagedTypes fromIterable(Iterable> types) { + return from(ManagedTypes.fromIterable(types)); + } + + /** + * Factory method to return an empty {@link MongoManagedTypes} object. + * + * @return an empty {@link MongoManagedTypes} object. 
+ */ + public static MongoManagedTypes empty() { + return from(ManagedTypes.empty()); + } + + @Override + public void forEach(Consumer> action) { + delegate.forEach(action); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java new file mode 100644 index 0000000000..a1e8344a9f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java @@ -0,0 +1,153 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.lang.Nullable; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.support.ResourceHolderSupport; + +import com.mongodb.client.ClientSession; + +/** + * MongoDB specific {@link ResourceHolderSupport resource holder}, wrapping a {@link ClientSession}. + * {@link MongoTransactionManager} binds instances of this class to the thread. + *
+ * Note: Intended for internal usage only. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + * @see MongoTransactionManager + * @see org.springframework.data.mongodb.core.MongoTemplate + */ +class MongoResourceHolder extends ResourceHolderSupport { + + private @Nullable ClientSession session; + private MongoDatabaseFactory dbFactory; + + /** + * Create a new {@link MongoResourceHolder} for a given {@link ClientSession session}. + * + * @param session the associated {@link ClientSession}. Can be {@literal null}. + * @param dbFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}. + */ + MongoResourceHolder(@Nullable ClientSession session, MongoDatabaseFactory dbFactory) { + + this.session = session; + this.dbFactory = dbFactory; + } + + /** + * @return the associated {@link ClientSession}. Can be {@literal null}. + */ + @Nullable + ClientSession getSession() { + return session; + } + + /** + * @return the required associated {@link ClientSession}. + * @throws IllegalStateException if no {@link ClientSession} is associated with this {@link MongoResourceHolder}. + * @since 2.1.3 + */ + ClientSession getRequiredSession() { + + ClientSession session = getSession(); + + if (session == null) { + throw new IllegalStateException("No session available"); + } + + return session; + } + + /** + * @return the associated {@link MongoDatabaseFactory}. + */ + public MongoDatabaseFactory getDbFactory() { + return dbFactory; + } + + /** + * Set the {@link ClientSession} to guard. + * + * @param session can be {@literal null}. + */ + public void setSession(@Nullable ClientSession session) { + this.session = session; + } + + /** + * Only set the timeout if it does not match the {@link TransactionDefinition#TIMEOUT_DEFAULT default timeout}. 
+ * + * @param seconds + */ + void setTimeoutIfNotDefaulted(int seconds) { + + if (seconds != TransactionDefinition.TIMEOUT_DEFAULT) { + setTimeoutInSeconds(seconds); + } + } + + /** + * @return {@literal true} if session is not {@literal null}. + */ + boolean hasSession() { + return session != null; + } + + /** + * @return {@literal true} if the session is active and has not been closed. + */ + boolean hasActiveSession() { + + if (!hasSession()) { + return false; + } + + return hasServerSession() && !getRequiredSession().getServerSession().isClosed(); + } + + /** + * @return {@literal true} if the session has an active transaction. + * @since 2.1.3 + * @see #hasActiveSession() + */ + boolean hasActiveTransaction() { + + if (!hasActiveSession()) { + return false; + } + + return getRequiredSession().hasActiveTransaction(); + } + + /** + * @return {@literal true} if the {@link ClientSession} has a {@link com.mongodb.session.ServerSession} associated + * that is accessible via {@link ClientSession#getServerSession()}. + */ + boolean hasServerSession() { + + try { + return getRequiredSession().getServerSession() != null; + } catch (IllegalStateException serverSessionClosed) { + // ignore + } + + return false; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java new file mode 100644 index 0000000000..645b3508db --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java @@ -0,0 +1,41 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.client.ClientSession; + +/** + * A simple interface for obtaining a {@link ClientSession} to be consumed by + * {@link org.springframework.data.mongodb.core.MongoOperations} and MongoDB native operations that support causal + * consistency and transactions. + * + * @author Christoph Strobl + * @currentRead Shadow's Edge - Brent Weeks + * @since 2.1 + */ +@FunctionalInterface +public interface MongoSessionProvider { + + /** + * Obtain a {@link ClientSession} with the given options. + * + * @param options must not be {@literal null}. + * @return never {@literal null}. + * @throws org.springframework.dao.DataAccessException + */ + ClientSession getSession(ClientSessionOptions options); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java new file mode 100644 index 0000000000..4215479f62 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.lang.Nullable; + +/** + * A specific {@link ClientSessionException} related to issues with a transaction such as aborted or non-existing + * transactions. + * + * @author Christoph Strobl + * @since 2.1 + */ +public class MongoTransactionException extends ClientSessionException { + + /** + * Constructor for {@link MongoTransactionException}. + * + * @param msg the detail message. Must not be {@literal null}. + */ + public MongoTransactionException(String msg) { + super(msg); + } + + /** + * Constructor for {@link MongoTransactionException}. + * + * @param msg the detail message. Can be {@literal null}. + * @param cause the root cause. Can be {@literal null}. + */ + public MongoTransactionException(@Nullable String msg, @Nullable Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java new file mode 100644 index 0000000000..eda657f5f1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java @@ -0,0 +1,493 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.beans.factory.InitializingBean; +import org.springframework.lang.Nullable; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.TransactionException; +import org.springframework.transaction.TransactionSystemException; +import org.springframework.transaction.support.AbstractPlatformTransactionManager; +import org.springframework.transaction.support.DefaultTransactionStatus; +import org.springframework.transaction.support.ResourceTransactionManager; +import org.springframework.transaction.support.SmartTransactionObject; +import org.springframework.transaction.support.TransactionSynchronizationManager; +import org.springframework.transaction.support.TransactionSynchronizationUtils; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.MongoException; +import com.mongodb.TransactionOptions; +import com.mongodb.client.ClientSession; + +/** + * A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages + * {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}. + *
+ * Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread. + *
+ * {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal + * consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction() + * commit} or {@link ClientSession#abortTransaction() abort} a transaction. + *
+ * Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via + * {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard + * {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as + * {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly. + *
+ * By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override + * {@link #doCommit(MongoTransactionObject)} to implement the + * Retry Commit Operation + * behavior as outlined in the MongoDB reference manual. + * + * @author Christoph Strobl + * @author Mark Paluch + * @currentRead Shadow's Edge - Brent Weeks + * @since 2.1 + * @see MongoDB Transaction Documentation + * @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization) + */ +public class MongoTransactionManager extends AbstractPlatformTransactionManager + implements ResourceTransactionManager, InitializingBean { + + private @Nullable MongoDatabaseFactory databaseFactory; + private MongoTransactionOptions options; + private final MongoTransactionOptionsResolver transactionOptionsResolver; + + /** + * Create a new {@link MongoTransactionManager} for bean-style usage.
+ * Note:The {@link MongoDatabaseFactory db factory} has to be + * {@link #setDatabaseFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a + * {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
+ * Optionally it is possible to set default {@link TransactionOptions transaction options} defining + * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}. + * + * @see #setDatabaseFactory(MongoDatabaseFactory) + * @see #setTransactionSynchronization(int) + */ + public MongoTransactionManager() { + this.transactionOptionsResolver = MongoTransactionOptionsResolver.defaultResolver(); + } + + /** + * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}. + * + * @param databaseFactory must not be {@literal null}. + */ + public MongoTransactionManager(MongoDatabaseFactory databaseFactory) { + this(databaseFactory, null); + } + + /** + * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory} + * applying the given {@link TransactionOptions options}, if present, when starting a new transaction. + * + * @param databaseFactory must not be {@literal null}. + * @param options can be {@literal null}. + */ + public MongoTransactionManager(MongoDatabaseFactory databaseFactory, @Nullable TransactionOptions options) { + this(databaseFactory, MongoTransactionOptionsResolver.defaultResolver(), MongoTransactionOptions.of(options)); + } + + /** + * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory} + * applying the given {@link TransactionOptions options}, if present, when starting a new transaction. + * + * @param databaseFactory must not be {@literal null}. + * @param transactionOptionsResolver must not be {@literal null}. + * @param defaultTransactionOptions can be {@literal null}. 
+ * @since 4.3 + */ + public MongoTransactionManager(MongoDatabaseFactory databaseFactory, + MongoTransactionOptionsResolver transactionOptionsResolver, MongoTransactionOptions defaultTransactionOptions) { + + Assert.notNull(databaseFactory, "MongoDatabaseFactory must not be null"); + Assert.notNull(transactionOptionsResolver, "MongoTransactionOptionsResolver must not be null"); + + this.databaseFactory = databaseFactory; + this.transactionOptionsResolver = transactionOptionsResolver; + this.options = defaultTransactionOptions; + } + + @Override + protected Object doGetTransaction() throws TransactionException { + + MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager + .getResource(getRequiredDbFactory()); + return new MongoTransactionObject(resourceHolder); + } + + @Override + protected boolean isExistingTransaction(Object transaction) throws TransactionException { + return extractMongoTransaction(transaction).hasResourceHolder(); + } + + @Override + protected void doBegin(Object transaction, TransactionDefinition definition) throws TransactionException { + + MongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction); + + MongoResourceHolder resourceHolder = newResourceHolder(definition, + ClientSessionOptions.builder().causallyConsistent(true).build()); + mongoTransactionObject.setResourceHolder(resourceHolder); + + if (logger.isDebugEnabled()) { + logger + .debug(String.format("About to start transaction for session %s.", debugString(resourceHolder.getSession()))); + } + + try { + MongoTransactionOptions mongoTransactionOptions = transactionOptionsResolver.resolve(definition).mergeWith(options); + mongoTransactionObject.startTransaction(mongoTransactionOptions.toDriverOptions()); + } catch (MongoException ex) { + throw new TransactionSystemException(String.format("Could not start Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex); + } + + if 
(logger.isDebugEnabled()) { + logger.debug(String.format("Started transaction for session %s.", debugString(resourceHolder.getSession()))); + } + + resourceHolder.setSynchronizedWithTransaction(true); + TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), resourceHolder); + } + + @Override + protected Object doSuspend(Object transaction) throws TransactionException { + + MongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction); + mongoTransactionObject.setResourceHolder(null); + + return TransactionSynchronizationManager.unbindResource(getRequiredDbFactory()); + } + + @Override + protected void doResume(@Nullable Object transaction, Object suspendedResources) { + TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), suspendedResources); + } + + @Override + protected final void doCommit(DefaultTransactionStatus status) throws TransactionException { + + MongoTransactionObject mongoTransactionObject = extractMongoTransaction(status); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to commit transaction for session %s.", + debugString(mongoTransactionObject.getSession()))); + } + + try { + doCommit(mongoTransactionObject); + } catch (Exception ex) { + + throw new TransactionSystemException(String.format("Could not commit Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex); + } + } + + /** + * Customization hook to perform an actual commit of the given transaction.
+ * If a commit operation encounters an error, the MongoDB driver throws a {@link MongoException} holding + * {@literal error labels}.
+ * By default those labels are ignored, nevertheless one might check for + * {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit error labels} and retry the + * commit.
+ *

+	 * 
+	 * int retries = 3;
+	 * do {
+	 *     try {
+	 *         transactionObject.commitTransaction();
+	 *         break;
+	 *     } catch (MongoException ex) {
+	 *         if (!ex.hasErrorLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)) {
+	 *             throw ex;
+	 *         }
+	 *     }
+	 *     Thread.sleep(500);
+	 * } while (--retries > 0);
+	 * 
+	 * 
+ * + * @param transactionObject never {@literal null}. + * @throws Exception in case of transaction errors. + */ + protected void doCommit(MongoTransactionObject transactionObject) throws Exception { + transactionObject.commitTransaction(); + } + + @Override + protected void doRollback(DefaultTransactionStatus status) throws TransactionException { + + MongoTransactionObject mongoTransactionObject = extractMongoTransaction(status); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to abort transaction for session %s.", + debugString(mongoTransactionObject.getSession()))); + } + + try { + mongoTransactionObject.abortTransaction(); + } catch (MongoException ex) { + + throw new TransactionSystemException(String.format("Could not abort Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex); + } + } + + @Override + protected void doSetRollbackOnly(DefaultTransactionStatus status) throws TransactionException { + + MongoTransactionObject transactionObject = extractMongoTransaction(status); + transactionObject.getRequiredResourceHolder().setRollbackOnly(); + } + + @Override + protected void doCleanupAfterCompletion(Object transaction) { + + Assert.isInstanceOf(MongoTransactionObject.class, transaction, + () -> String.format("Expected to find a %s but it turned out to be %s.", MongoTransactionObject.class, + transaction.getClass())); + + MongoTransactionObject mongoTransactionObject = (MongoTransactionObject) transaction; + + // Remove the connection holder from the thread. 
+ TransactionSynchronizationManager.unbindResource(getRequiredDbFactory()); + mongoTransactionObject.getRequiredResourceHolder().clear(); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to release Session %s after transaction.", + debugString(mongoTransactionObject.getSession()))); + } + + mongoTransactionObject.closeSession(); + } + + /** + * Set the {@link MongoDatabaseFactory} that this instance should manage transactions for. + * + * @param databaseFactory must not be {@literal null}. + */ + public void setDatabaseFactory(MongoDatabaseFactory databaseFactory) { + + Assert.notNull(databaseFactory, "DbFactory must not be null"); + this.databaseFactory = databaseFactory; + } + + /** + * Set the {@link TransactionOptions} to be applied when starting transactions. + * + * @param options can be {@literal null}. + */ + public void setOptions(@Nullable TransactionOptions options) { + this.options = MongoTransactionOptions.of(options); + } + + /** + * Get the {@link MongoDatabaseFactory} that this instance manages transactions for. + * + * @return can be {@literal null}. + */ + @Nullable + public MongoDatabaseFactory getDatabaseFactory() { + return databaseFactory; + } + + @Override + public MongoDatabaseFactory getResourceFactory() { + return getRequiredDbFactory(); + } + + @Override + public void afterPropertiesSet() { + getRequiredDbFactory(); + } + + private MongoResourceHolder newResourceHolder(TransactionDefinition definition, ClientSessionOptions options) { + + MongoDatabaseFactory dbFactory = getResourceFactory(); + + MongoResourceHolder resourceHolder = new MongoResourceHolder(dbFactory.getSession(options), dbFactory); + resourceHolder.setTimeoutIfNotDefaulted(determineTimeout(definition)); + + return resourceHolder; + } + + /** + * @throws IllegalStateException if {@link #databaseFactory} is {@literal null}. 
+ */ + private MongoDatabaseFactory getRequiredDbFactory() { + + Assert.state(databaseFactory != null, + "MongoTransactionManager operates upon a MongoDbFactory; Did you forget to provide one; It's required"); + + return databaseFactory; + } + + private static MongoTransactionObject extractMongoTransaction(Object transaction) { + + Assert.isInstanceOf(MongoTransactionObject.class, transaction, + () -> String.format("Expected to find a %s but it turned out to be %s.", MongoTransactionObject.class, + transaction.getClass())); + + return (MongoTransactionObject) transaction; + } + + private static MongoTransactionObject extractMongoTransaction(DefaultTransactionStatus status) { + + Assert.isInstanceOf(MongoTransactionObject.class, status.getTransaction(), + () -> String.format("Expected to find a %s but it turned out to be %s.", MongoTransactionObject.class, + status.getTransaction().getClass())); + + return (MongoTransactionObject) status.getTransaction(); + } + + private static String debugString(@Nullable ClientSession session) { + + if (session == null) { + return "null"; + } + + String debugString = String.format("[%s@%s ", ClassUtils.getShortName(session.getClass()), + Integer.toHexString(session.hashCode())); + + try { + if (session.getServerSession() != null) { + debugString += String.format("id = %s, ", session.getServerSession().getIdentifier()); + debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent()); + debugString += String.format("txActive = %s, ", session.hasActiveTransaction()); + debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber()); + debugString += String.format("closed = %b, ", session.getServerSession().isClosed()); + debugString += String.format("clusterTime = %s", session.getClusterTime()); + } else { + debugString += "id = n/a"; + debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent()); + debugString += String.format("txActive = %s, 
", session.hasActiveTransaction()); + debugString += String.format("clusterTime = %s", session.getClusterTime()); + } + } catch (RuntimeException e) { + debugString += String.format("error = %s", e.getMessage()); + } + + debugString += "]"; + + return debugString; + } + + /** + * MongoDB specific transaction object, representing a {@link MongoResourceHolder}. Used as transaction object by + * {@link MongoTransactionManager}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + * @see MongoResourceHolder + */ + protected static class MongoTransactionObject implements SmartTransactionObject { + + private @Nullable MongoResourceHolder resourceHolder; + + MongoTransactionObject(@Nullable MongoResourceHolder resourceHolder) { + this.resourceHolder = resourceHolder; + } + + /** + * Set the {@link MongoResourceHolder}. + * + * @param resourceHolder can be {@literal null}. + */ + void setResourceHolder(@Nullable MongoResourceHolder resourceHolder) { + this.resourceHolder = resourceHolder; + } + + /** + * @return {@literal true} if a {@link MongoResourceHolder} is set. + */ + final boolean hasResourceHolder() { + return resourceHolder != null; + } + + /** + * Start a MongoDB transaction optionally given {@link TransactionOptions}. + * + * @param options can be {@literal null} + */ + void startTransaction(@Nullable TransactionOptions options) { + + ClientSession session = getRequiredSession(); + if (options != null) { + session.startTransaction(options); + } else { + session.startTransaction(); + } + } + + /** + * Commit the transaction. + */ + public void commitTransaction() { + getRequiredSession().commitTransaction(); + } + + /** + * Rollback (abort) the transaction. + */ + public void abortTransaction() { + getRequiredSession().abortTransaction(); + } + + /** + * Close a {@link ClientSession} without regard to its transactional state. 
+ */ + void closeSession() { + + ClientSession session = getRequiredSession(); + if (session.getServerSession() != null && !session.getServerSession().isClosed()) { + session.close(); + } + } + + @Nullable + public ClientSession getSession() { + return resourceHolder != null ? resourceHolder.getSession() : null; + } + + private MongoResourceHolder getRequiredResourceHolder() { + + Assert.state(resourceHolder != null, "MongoResourceHolder is required but not present; o_O"); + return resourceHolder; + } + + private ClientSession getRequiredSession() { + + ClientSession session = getSession(); + Assert.state(session != null, "A Session is required but it turned out to be null"); + return session; + } + + @Override + public boolean isRollbackOnly() { + return this.resourceHolder != null && this.resourceHolder.isRollbackOnly(); + } + + @Override + public void flush() { + TransactionSynchronizationUtils.triggerFlush(); + } + + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptions.java new file mode 100644 index 0000000000..e411bd5d2d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptions.java @@ -0,0 +1,204 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import java.time.Duration; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; + +import org.springframework.data.mongodb.core.ReadConcernAware; +import org.springframework.data.mongodb.core.ReadPreferenceAware; +import org.springframework.data.mongodb.core.WriteConcernAware; +import org.springframework.lang.Nullable; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.TransactionOptions; +import com.mongodb.WriteConcern; + +/** + * Options to be applied within a specific transaction scope. + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface MongoTransactionOptions + extends TransactionMetadata, ReadConcernAware, ReadPreferenceAware, WriteConcernAware { + + /** + * Value Object representing empty options enforcing client defaults. Returns {@literal null} for all getter methods. + */ + MongoTransactionOptions NONE = new MongoTransactionOptions() { + + @Nullable + @Override + public Duration getMaxCommitTime() { + return null; + } + + @Nullable + @Override + public ReadConcern getReadConcern() { + return null; + } + + @Nullable + @Override + public ReadPreference getReadPreference() { + return null; + } + + @Nullable + @Override + public WriteConcern getWriteConcern() { + return null; + } + }; + + /** + * Merge current options with given ones. Will return first non {@literal null} value from getters whereas the + * {@literal this} has precedence over the given fallbackOptions. + * + * @param fallbackOptions can be {@literal null}. + * @return new instance of {@link MongoTransactionOptions} or this if {@literal fallbackOptions} is {@literal null} or + * {@link #NONE}. 
+ */ + default MongoTransactionOptions mergeWith(@Nullable MongoTransactionOptions fallbackOptions) { + + if (fallbackOptions == null || MongoTransactionOptions.NONE.equals(fallbackOptions)) { + return this; + } + + return new MongoTransactionOptions() { + + @Nullable + @Override + public Duration getMaxCommitTime() { + return MongoTransactionOptions.this.hasMaxCommitTime() ? MongoTransactionOptions.this.getMaxCommitTime() + : fallbackOptions.getMaxCommitTime(); + } + + @Nullable + @Override + public ReadConcern getReadConcern() { + return MongoTransactionOptions.this.hasReadConcern() ? MongoTransactionOptions.this.getReadConcern() + : fallbackOptions.getReadConcern(); + } + + @Nullable + @Override + public ReadPreference getReadPreference() { + return MongoTransactionOptions.this.hasReadPreference() ? MongoTransactionOptions.this.getReadPreference() + : fallbackOptions.getReadPreference(); + } + + @Nullable + @Override + public WriteConcern getWriteConcern() { + return MongoTransactionOptions.this.hasWriteConcern() ? MongoTransactionOptions.this.getWriteConcern() + : fallbackOptions.getWriteConcern(); + } + }; + } + + /** + * Apply the current options using the given mapping {@link Function} and return its result. + * + * @param mappingFunction + * @return result of the mapping function. + */ + default T map(Function mappingFunction) { + return mappingFunction.apply(this); + } + + /** + * @return MongoDB driver native {@link TransactionOptions}. 
+ * @see MongoTransactionOptions#map(Function) + */ + @Nullable + default TransactionOptions toDriverOptions() { + + return map(it -> { + + if (MongoTransactionOptions.NONE.equals(it)) { + return null; + } + + TransactionOptions.Builder builder = TransactionOptions.builder(); + if (it.hasMaxCommitTime()) { + builder.maxCommitTime(it.getMaxCommitTime().toMillis(), TimeUnit.MILLISECONDS); + } + if (it.hasReadConcern()) { + builder.readConcern(it.getReadConcern()); + } + if (it.hasReadPreference()) { + builder.readPreference(it.getReadPreference()); + } + if (it.hasWriteConcern()) { + builder.writeConcern(it.getWriteConcern()); + } + return builder.build(); + }); + } + + /** + * Factory method to wrap given MongoDB driver native {@link TransactionOptions} into {@link MongoTransactionOptions}. + * + * @param options + * @return {@link MongoTransactionOptions#NONE} if given object is {@literal null}. + */ + static MongoTransactionOptions of(@Nullable TransactionOptions options) { + + if (options == null) { + return NONE; + } + + return new MongoTransactionOptions() { + + @Nullable + @Override + public Duration getMaxCommitTime() { + + Long millis = options.getMaxCommitTime(TimeUnit.MILLISECONDS); + return millis != null ? 
Duration.ofMillis(millis) : null; + } + + @Nullable + @Override + public ReadConcern getReadConcern() { + return options.getReadConcern(); + } + + @Nullable + @Override + public ReadPreference getReadPreference() { + return options.getReadPreference(); + } + + @Nullable + @Override + public WriteConcern getWriteConcern() { + return options.getWriteConcern(); + } + + @Nullable + @Override + public TransactionOptions toDriverOptions() { + return options; + } + }; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptionsResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptionsResolver.java new file mode 100644 index 0000000000..b73b079a99 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptionsResolver.java @@ -0,0 +1,114 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import java.util.Map; +import java.util.stream.Collectors; + +import org.springframework.lang.Nullable; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.interceptor.TransactionAttribute; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * A {@link TransactionOptionResolver} reading MongoDB specific {@link MongoTransactionOptions transaction options} from + * a {@link TransactionDefinition}. Implementations of {@link MongoTransactionOptions} may choose a specific + * {@link #getLabelPrefix() prefix} for {@link TransactionAttribute#getLabels() transaction attribute labels} to avoid + * evaluating non-store specific ones. + *

+ * {@link TransactionAttribute#getLabels()} evaluated by default should follow the property style using {@code =} to + * separate key and value pairs. + *

+ * By default {@link #resolve(TransactionDefinition)} will filter labels by the {@link #getLabelPrefix() prefix} and + * strip the prefix from the label before handing the pruned {@link Map} to the {@link #convert(Map)} function. + *

+ * A transaction definition with labels targeting MongoDB may look like the following: + *

+ * + * @Transactional(label = { "mongo:readConcern=majority" }) + * + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface MongoTransactionOptionsResolver extends TransactionOptionResolver { + + /** + * Obtain the default {@link MongoTransactionOptionsResolver} implementation using a {@literal mongo:} + * {@link #getLabelPrefix() prefix}. + * + * @return instance of default {@link MongoTransactionOptionsResolver} implementation. + */ + static MongoTransactionOptionsResolver defaultResolver() { + return DefaultMongoTransactionOptionsResolver.INSTANCE; + } + + /** + * Get the prefix used to filter applicable {@link TransactionAttribute#getLabels() labels}. + * + * @return {@literal null} if no label defined. + */ + @Nullable + String getLabelPrefix(); + + /** + * Resolve {@link MongoTransactionOptions} from a given {@link TransactionDefinition} by evaluating + * {@link TransactionAttribute#getLabels()} labels if possible. + *

+ * Splits applicable labels property style using {@literal =} as delimiter and removes a potential + * {@link #getLabelPrefix() prefix} before calling {@link #convert(Map)} with filtered label values. + * + * @param definition + * @return {@link MongoTransactionOptions#NONE} in case the given {@link TransactionDefinition} is not a + * {@link TransactionAttribute} or if no matching {@link TransactionAttribute#getLabels() labels} could be found. + * @throws IllegalArgumentException for options that do not map to valid transaction options or malformed labels. + */ + @Override + default MongoTransactionOptions resolve(TransactionDefinition definition) { + + if (!(definition instanceof TransactionAttribute attribute)) { + return MongoTransactionOptions.NONE; + } + + if (attribute.getLabels().isEmpty()) { + return MongoTransactionOptions.NONE; + } + + Map attributeMap = attribute.getLabels().stream() + .filter(it -> !StringUtils.hasText(getLabelPrefix()) || it.startsWith(getLabelPrefix())) + .map(it -> StringUtils.hasText(getLabelPrefix()) ? it.substring(getLabelPrefix().length()) : it).map(it -> { + + String[] kvPair = StringUtils.split(it, "="); + Assert.isTrue(kvPair != null && kvPair.length == 2, + () -> "No value present for transaction option %s".formatted(kvPair != null ? kvPair[0] : it)); + return kvPair; + }) + + .collect(Collectors.toMap(it -> it[0].trim(), it -> it[1].trim())); + + return attributeMap.isEmpty() ? MongoTransactionOptions.NONE : convert(attributeMap); + } + + /** + * Convert the given {@link Map} into an instance of {@link MongoTransactionOptions}. + * + * @param options never {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException for invalid options.
+ */ + MongoTransactionOptions convert(Map options); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java index 4ce6a714b7..f2a6714a95 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,14 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.springframework.data.mongodb; +import reactor.core.publisher.Mono; + import org.bson.codecs.configuration.CodecRegistry; import org.springframework.dao.DataAccessException; import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.mongodb.core.MongoExceptionTranslator; +import com.mongodb.ClientSessionOptions; +import com.mongodb.reactivestreams.client.ClientSession; import com.mongodb.reactivestreams.client.MongoDatabase; /** @@ -28,6 +31,7 @@ * * @author Mark Paluch * @author Christoph Strobl + * @author Mathieu Ouellet * @since 2.0 */ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider { @@ -35,19 +39,19 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider { /** * Creates a default {@link MongoDatabase} instance. * - * @return + * @return never {@literal null}. * @throws DataAccessException */ - MongoDatabase getMongoDatabase() throws DataAccessException; + Mono getMongoDatabase() throws DataAccessException; /** - * Creates a {@link MongoDatabase} instance to access the database with the given name. + * Obtain a {@link MongoDatabase} instance to access the database with the given name. * * @param dbName must not be {@literal null} or empty. - * @return + * @return never {@literal null}. * @throws DataAccessException */ - MongoDatabase getMongoDatabase(String dbName) throws DataAccessException; + Mono getMongoDatabase(String dbName) throws DataAccessException; /** * Exposes a shared {@link MongoExceptionTranslator}. @@ -61,8 +65,36 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider { * * @return never {@literal null}. */ - @Override - default CodecRegistry getCodecRegistry() { - return getMongoDatabase().getCodecRegistry(); + CodecRegistry getCodecRegistry(); + + /** + * Obtain a {@link Mono} emitting a {@link ClientSession} for given {@link ClientSessionOptions options}. + * + * @param options must not be {@literal null}. 
+ * @return never {@literal null}. + * @since 2.1 + */ + Mono getSession(ClientSessionOptions options); + + /** + * Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoDatabaseFactory} returning + * {@link MongoDatabase} instances that are aware and bound to the given session. + * + * @param session must not be {@literal null}. + * @return never {@literal null}. + * @since 2.1 + */ + ReactiveMongoDatabaseFactory withSession(ClientSession session); + + /** + * Returns if the given {@link ReactiveMongoDatabaseFactory} is bound to a + * {@link com.mongodb.reactivestreams.client.ClientSession} that has an + * {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}. + * + * @return {@literal true} if there's an active transaction, {@literal false} otherwise. + * @since 2.2 + */ + default boolean isTransactionActive() { + return false; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java new file mode 100644 index 0000000000..f397818a4c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java @@ -0,0 +1,266 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import reactor.core.publisher.Mono; +import reactor.util.context.Context; + +import org.springframework.lang.Nullable; +import org.springframework.transaction.NoTransactionException; +import org.springframework.transaction.reactive.ReactiveResourceSynchronization; +import org.springframework.transaction.reactive.TransactionSynchronization; +import org.springframework.transaction.reactive.TransactionSynchronizationManager; +import org.springframework.transaction.support.ResourceHolderSynchronization; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for + * obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection} + * suitable for transactional usage. + *
+ * Note: Intended for internal usage only. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Mathieu Ouellet + * @since 2.2 + */ +public class ReactiveMongoDatabaseUtils { + + /** + * Check if the {@link ReactiveMongoDatabaseFactory} is actually bound to a + * {@link com.mongodb.reactivestreams.client.ClientSession} that has an active transaction, or if a + * {@link org.springframework.transaction.reactive.TransactionSynchronization} has been registered for the + * {@link ReactiveMongoDatabaseFactory resource} and if the associated + * {@link com.mongodb.reactivestreams.client.ClientSession} has an + * {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}. + * + * @param databaseFactory the resource to check transactions for. Must not be {@literal null}. + * @return a {@link Mono} emitting {@literal true} if the factory has an ongoing transaction. + */ + public static Mono isTransactionActive(ReactiveMongoDatabaseFactory databaseFactory) { + + if (databaseFactory.isTransactionActive()) { + return Mono.just(true); + } + + return TransactionSynchronizationManager.forCurrentTransaction() // + .map(it -> { + + ReactiveMongoResourceHolder holder = (ReactiveMongoResourceHolder) it.getResource(databaseFactory); + return holder != null && holder.hasActiveTransaction(); + }) // + .onErrorResume(NoTransactionException.class, e -> Mono.just(false)); + } + + /** + * Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory} using + * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. + *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber + * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static Mono getDatabase(ReactiveMongoDatabaseFactory factory) { + return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + } + + /** + * Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory}. + *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber + * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @param sessionSynchronization the synchronization to use. Must not be {@literal null}. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static Mono getDatabase(ReactiveMongoDatabaseFactory factory, + SessionSynchronization sessionSynchronization) { + return doGetMongoDatabase(null, factory, sessionSynchronization); + } + + /** + * Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory + * factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. + *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber + * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param dbName the name of the {@link MongoDatabase} to get. + * @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static Mono getDatabase(String dbName, ReactiveMongoDatabaseFactory factory) { + return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + } + + /** + * Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory + * factory}. + *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber + * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param dbName the name of the {@link MongoDatabase} to get. + * @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @param sessionSynchronization the synchronization to use. Must not be {@literal null}. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static Mono getDatabase(String dbName, ReactiveMongoDatabaseFactory factory, + SessionSynchronization sessionSynchronization) { + return doGetMongoDatabase(dbName, factory, sessionSynchronization); + } + + private static Mono doGetMongoDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory, + SessionSynchronization sessionSynchronization) { + + Assert.notNull(factory, "DatabaseFactory must not be null"); + + if (sessionSynchronization == SessionSynchronization.NEVER) { + return getMongoDatabaseOrDefault(dbName, factory); + } + + return TransactionSynchronizationManager.forCurrentTransaction() + .filter(TransactionSynchronizationManager::isSynchronizationActive) // + .flatMap(synchronizationManager -> { + + return doGetSession(synchronizationManager, factory, sessionSynchronization) // + .flatMap(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it))); + }) // + .onErrorResume(NoTransactionException.class, e -> getMongoDatabaseOrDefault(dbName, factory)) + .switchIfEmpty(getMongoDatabaseOrDefault(dbName, factory)); + } + + private static Mono getMongoDatabaseOrDefault(@Nullable String dbName, + ReactiveMongoDatabaseFactory factory) { + return StringUtils.hasText(dbName) ? 
factory.getMongoDatabase(dbName) : factory.getMongoDatabase(); + } + + private static Mono doGetSession(TransactionSynchronizationManager synchronizationManager, + ReactiveMongoDatabaseFactory dbFactory, SessionSynchronization sessionSynchronization) { + + final ReactiveMongoResourceHolder registeredHolder = (ReactiveMongoResourceHolder) synchronizationManager + .getResource(dbFactory); + + // check for native MongoDB transaction + if (registeredHolder != null + && (registeredHolder.hasSession() || registeredHolder.isSynchronizedWithTransaction())) { + + return registeredHolder.hasSession() ? Mono.just(registeredHolder.getSession()) + : createClientSession(dbFactory).map(registeredHolder::setSessionIfAbsent); + } + + if (SessionSynchronization.ON_ACTUAL_TRANSACTION.equals(sessionSynchronization)) { + return Mono.empty(); + } + + // init a non native MongoDB transaction by registering a MongoSessionSynchronization + return createClientSession(dbFactory).map(session -> { + + ReactiveMongoResourceHolder newHolder = new ReactiveMongoResourceHolder(session, dbFactory); + newHolder.getRequiredSession().startTransaction(); + + synchronizationManager + .registerSynchronization(new MongoSessionSynchronization(synchronizationManager, newHolder, dbFactory)); + newHolder.setSynchronizedWithTransaction(true); + synchronizationManager.bindResource(dbFactory, newHolder); + + return newHolder.getSession(); + }); + } + + private static Mono createClientSession(ReactiveMongoDatabaseFactory dbFactory) { + return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + } + + /** + * MongoDB specific {@link ResourceHolderSynchronization} for resource cleanup at the end of a transaction when + * participating in a non-native MongoDB transaction, such as a R2CBC transaction. 
+ * + * @author Mark Paluch + * @since 2.2 + */ + private static class MongoSessionSynchronization + extends ReactiveResourceSynchronization { + + private final ReactiveMongoResourceHolder resourceHolder; + + MongoSessionSynchronization(TransactionSynchronizationManager synchronizationManager, + ReactiveMongoResourceHolder resourceHolder, ReactiveMongoDatabaseFactory dbFactory) { + + super(resourceHolder, dbFactory, synchronizationManager); + this.resourceHolder = resourceHolder; + } + + @Override + protected boolean shouldReleaseBeforeCompletion() { + return false; + } + + @Override + protected Mono processResourceAfterCommit(ReactiveMongoResourceHolder resourceHolder) { + + if (isTransactionActive(resourceHolder)) { + return Mono.from(resourceHolder.getRequiredSession().commitTransaction()); + } + + return Mono.empty(); + } + + @Override + public Mono afterCompletion(int status) { + + return Mono.defer(() -> { + + if (status == TransactionSynchronization.STATUS_ROLLED_BACK && isTransactionActive(this.resourceHolder)) { + + return Mono.from(resourceHolder.getRequiredSession().abortTransaction()) // + .then(super.afterCompletion(status)); + } + + return super.afterCompletion(status); + }); + } + + @Override + protected Mono releaseResource(ReactiveMongoResourceHolder resourceHolder, Object resourceKey) { + + return Mono.fromRunnable(() -> { + if (resourceHolder.hasActiveSession()) { + resourceHolder.getRequiredSession().close(); + } + }); + } + + private boolean isTransactionActive(ReactiveMongoResourceHolder resourceHolder) { + + if (!resourceHolder.hasSession()) { + return false; + } + + return resourceHolder.getRequiredSession().hasActiveTransaction(); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java new file mode 100644 index 0000000000..33caa5e7fe --- /dev/null +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java @@ -0,0 +1,155 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.lang.Nullable; +import org.springframework.transaction.support.ResourceHolderSupport; + +import com.mongodb.reactivestreams.client.ClientSession; + +/** + * MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds + * instances of this class to the subscriber context. + *
+ * Note: Intended for internal usage only. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.2 + * @see ReactiveMongoTransactionManager + * @see ReactiveMongoTemplate + */ +class ReactiveMongoResourceHolder extends ResourceHolderSupport { + + private @Nullable ClientSession session; + private ReactiveMongoDatabaseFactory databaseFactory; + + /** + * Create a new {@link ReactiveMongoResourceHolder} for a given {@link ClientSession session}. + * + * @param session the associated {@link ClientSession}. Can be {@literal null}. + * @param databaseFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}. + */ + ReactiveMongoResourceHolder(@Nullable ClientSession session, ReactiveMongoDatabaseFactory databaseFactory) { + + this.session = session; + this.databaseFactory = databaseFactory; + } + + /** + * @return the associated {@link ClientSession}. Can be {@literal null}. + */ + @Nullable + ClientSession getSession() { + return session; + } + + /** + * @return the required associated {@link ClientSession}. + * @throws IllegalStateException if no session is associated. + */ + ClientSession getRequiredSession() { + + ClientSession session = getSession(); + + if (session == null) { + throw new IllegalStateException("No ClientSession associated"); + } + return session; + } + + /** + * @return the associated {@link ReactiveMongoDatabaseFactory}. + */ + public ReactiveMongoDatabaseFactory getDatabaseFactory() { + return databaseFactory; + } + + /** + * Set the {@link ClientSession} to guard. + * + * @param session can be {@literal null}. + */ + public void setSession(@Nullable ClientSession session) { + this.session = session; + } + + /** + * @return {@literal true} if session is not {@literal null}. 
+ */ + boolean hasSession() { + return session != null; + } + + /** + * If the {@link ReactiveMongoResourceHolder} is {@link #hasSession() not already associated} with a + * {@link ClientSession} the given value is {@link #setSession(ClientSession) set} and returned, otherwise the current + * bound session is returned. + * + * @param session + * @return + */ + @Nullable + public ClientSession setSessionIfAbsent(@Nullable ClientSession session) { + + if (!hasSession()) { + setSession(session); + } + + return session; + } + + /** + * @return {@literal true} if the session is active and has not been closed. + */ + boolean hasActiveSession() { + + if (!hasSession()) { + return false; + } + + return hasServerSession() && !getRequiredSession().getServerSession().isClosed(); + } + + /** + * @return {@literal true} if the session has an active transaction. + * @see #hasActiveSession() + */ + boolean hasActiveTransaction() { + + if (!hasActiveSession()) { + return false; + } + + return getRequiredSession().hasActiveTransaction(); + } + + /** + * @return {@literal true} if the {@link ClientSession} has a {@link com.mongodb.session.ServerSession} associated + * that is accessible via {@link ClientSession#getServerSession()}. + */ + boolean hasServerSession() { + + try { + return getRequiredSession().getServerSession() != null; + } catch (IllegalStateException serverSessionClosed) { + // ignore + } + + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java new file mode 100644 index 0000000000..2c65c26b79 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java @@ -0,0 +1,501 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import reactor.core.publisher.Mono; + +import org.springframework.beans.factory.InitializingBean; +import org.springframework.lang.Nullable; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.TransactionException; +import org.springframework.transaction.TransactionSystemException; +import org.springframework.transaction.reactive.AbstractReactiveTransactionManager; +import org.springframework.transaction.reactive.GenericReactiveTransaction; +import org.springframework.transaction.reactive.TransactionSynchronizationManager; +import org.springframework.transaction.support.SmartTransactionObject; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.MongoException; +import com.mongodb.TransactionOptions; +import com.mongodb.reactivestreams.client.ClientSession; + +/** + * A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages + * {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single + * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
+ * Binds a {@link ClientSession} from the specified + * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber + * {@link reactor.util.context.Context}.
+ * {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a + * {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start}, + * {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or + * {@link ClientSession#abortTransaction() abort} a transaction.
+ * Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via + * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead + * of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring + * classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly. + *
+ * By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override + * {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the + * Retry Commit Operation + * behavior as outlined in the MongoDB reference manual. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + * @see MongoDB Transaction Documentation + * @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization) + */ +public class ReactiveMongoTransactionManager extends AbstractReactiveTransactionManager implements InitializingBean { + + private @Nullable ReactiveMongoDatabaseFactory databaseFactory; + private @Nullable MongoTransactionOptions options; + private final MongoTransactionOptionsResolver transactionOptionsResolver; + + /** + * Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
+ * Note:The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to + * be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory)} set} before using the instance. Use this constructor + * to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}. + *
+ * Optionally it is possible to set default {@link TransactionOptions transaction options} defining + * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}. + * + * @see #setDatabaseFactory(ReactiveMongoDatabaseFactory) + */ + public ReactiveMongoTransactionManager() { + this.transactionOptionsResolver = MongoTransactionOptionsResolver.defaultResolver(); + } + + /** + * Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given + * {@link ReactiveMongoDatabaseFactory}. + * + * @param databaseFactory must not be {@literal null}. + */ + public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory) { + this(databaseFactory, null); + } + + /** + * Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given + * {@link ReactiveMongoDatabaseFactory} applying the given {@link TransactionOptions options}, if present, when + * starting a new transaction. + * + * @param databaseFactory must not be {@literal null}. + * @param options can be {@literal null}. + */ + public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory, + @Nullable TransactionOptions options) { + this(databaseFactory, MongoTransactionOptionsResolver.defaultResolver(), MongoTransactionOptions.of(options)); + } + + /** + * Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given + * {@link ReactiveMongoDatabaseFactory} applying the given {@link TransactionOptions options}, if present, when + * starting a new transaction. + * + * @param databaseFactory must not be {@literal null}. + * @param transactionOptionsResolver must not be {@literal null}. + * @param defaultTransactionOptions can be {@literal null}. 
+ * @since 4.3 + */ + public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory, + MongoTransactionOptionsResolver transactionOptionsResolver, + @Nullable MongoTransactionOptions defaultTransactionOptions) { + + Assert.notNull(databaseFactory, "DatabaseFactory must not be null"); + Assert.notNull(transactionOptionsResolver, "MongoTransactionOptionsResolver must not be null"); + + this.databaseFactory = databaseFactory; + this.transactionOptionsResolver = transactionOptionsResolver; + this.options = defaultTransactionOptions; + } + + @Override + protected Object doGetTransaction(TransactionSynchronizationManager synchronizationManager) + throws TransactionException { + + ReactiveMongoResourceHolder resourceHolder = (ReactiveMongoResourceHolder) synchronizationManager + .getResource(getRequiredDatabaseFactory()); + return new ReactiveMongoTransactionObject(resourceHolder); + } + + @Override + protected boolean isExistingTransaction(Object transaction) throws TransactionException { + return extractMongoTransaction(transaction).hasResourceHolder(); + } + + @Override + protected Mono doBegin(TransactionSynchronizationManager synchronizationManager, Object transaction, + TransactionDefinition definition) throws TransactionException { + + return Mono.defer(() -> { + + ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction); + + Mono holder = newResourceHolder(definition, + ClientSessionOptions.builder().causallyConsistent(true).build()); + + return holder.doOnNext(resourceHolder -> { + + mongoTransactionObject.setResourceHolder(resourceHolder); + + if (logger.isDebugEnabled()) { + logger.debug( + String.format("About to start transaction for session %s.", debugString(resourceHolder.getSession()))); + } + + }).doOnNext(resourceHolder -> { + + MongoTransactionOptions mongoTransactionOptions = transactionOptionsResolver.resolve(definition) + .mergeWith(options); + 
mongoTransactionObject.startTransaction(mongoTransactionOptions.toDriverOptions()); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("Started transaction for session %s.", debugString(resourceHolder.getSession()))); + } + + })// + .onErrorMap( + ex -> new TransactionSystemException(String.format("Could not start Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex)) + .doOnSuccess(resourceHolder -> { + + synchronizationManager.bindResource(getRequiredDatabaseFactory(), resourceHolder); + }).then(); + }); + } + + @Override + protected Mono doSuspend(TransactionSynchronizationManager synchronizationManager, Object transaction) + throws TransactionException { + + return Mono.fromSupplier(() -> { + + ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction); + mongoTransactionObject.setResourceHolder(null); + + return synchronizationManager.unbindResource(getRequiredDatabaseFactory()); + }); + } + + @Override + protected Mono doResume(TransactionSynchronizationManager synchronizationManager, @Nullable Object transaction, + Object suspendedResources) { + return Mono + .fromRunnable(() -> synchronizationManager.bindResource(getRequiredDatabaseFactory(), suspendedResources)); + } + + @Override + protected final Mono doCommit(TransactionSynchronizationManager synchronizationManager, + GenericReactiveTransaction status) throws TransactionException { + + return Mono.defer(() -> { + + ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to commit transaction for session %s.", + debugString(mongoTransactionObject.getSession()))); + } + + return doCommit(synchronizationManager, mongoTransactionObject).onErrorMap(ex -> { + return new TransactionSystemException(String.format("Could not commit Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex); + 
}); + }); + } + + /** + * Customization hook to perform an actual commit of the given transaction.
+ * If a commit operation encounters an error, the MongoDB driver throws a {@link MongoException} holding + * {@literal error labels}.
+ * By default those labels are ignored, nevertheless one might check for + * {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit errors labels} and retry the the + * commit. + * + * @param synchronizationManager reactive synchronization manager. + * @param transactionObject never {@literal null}. + */ + protected Mono doCommit(TransactionSynchronizationManager synchronizationManager, + ReactiveMongoTransactionObject transactionObject) { + return transactionObject.commitTransaction(); + } + + @Override + protected Mono doRollback(TransactionSynchronizationManager synchronizationManager, + GenericReactiveTransaction status) { + + return Mono.defer(() -> { + + ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to abort transaction for session %s.", + debugString(mongoTransactionObject.getSession()))); + } + + return mongoTransactionObject.abortTransaction().onErrorResume(MongoException.class, ex -> { + return Mono + .error(new TransactionSystemException(String.format("Could not abort Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex)); + }); + }); + } + + @Override + protected Mono doSetRollbackOnly(TransactionSynchronizationManager synchronizationManager, + GenericReactiveTransaction status) throws TransactionException { + + return Mono.fromRunnable(() -> { + ReactiveMongoTransactionObject transactionObject = extractMongoTransaction(status); + transactionObject.getRequiredResourceHolder().setRollbackOnly(); + }); + } + + @Override + protected Mono doCleanupAfterCompletion(TransactionSynchronizationManager synchronizationManager, + Object transaction) { + + Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction, + () -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class, + transaction.getClass())); + + return 
Mono.fromRunnable(() -> { + ReactiveMongoTransactionObject mongoTransactionObject = (ReactiveMongoTransactionObject) transaction; + + // Remove the connection holder from the thread. + synchronizationManager.unbindResource(getRequiredDatabaseFactory()); + mongoTransactionObject.getRequiredResourceHolder().clear(); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to release Session %s after transaction.", + debugString(mongoTransactionObject.getSession()))); + } + + mongoTransactionObject.closeSession(); + }); + } + + /** + * Set the {@link ReactiveMongoDatabaseFactory} that this instance should manage transactions for. + * + * @param databaseFactory must not be {@literal null}. + */ + public void setDatabaseFactory(ReactiveMongoDatabaseFactory databaseFactory) { + + Assert.notNull(databaseFactory, "DatabaseFactory must not be null"); + this.databaseFactory = databaseFactory; + } + + /** + * Set the {@link TransactionOptions} to be applied when starting transactions. + * + * @param options can be {@literal null}. + */ + public void setOptions(@Nullable TransactionOptions options) { + this.options = MongoTransactionOptions.of(options); + } + + /** + * Get the {@link ReactiveMongoDatabaseFactory} that this instance manages transactions for. + * + * @return can be {@literal null}. + */ + @Nullable + public ReactiveMongoDatabaseFactory getDatabaseFactory() { + return databaseFactory; + } + + @Override + public void afterPropertiesSet() { + getRequiredDatabaseFactory(); + } + + private Mono newResourceHolder(TransactionDefinition definition, + ClientSessionOptions options) { + + ReactiveMongoDatabaseFactory dbFactory = getRequiredDatabaseFactory(); + + return dbFactory.getSession(options).map(session -> new ReactiveMongoResourceHolder(session, dbFactory)); + } + + /** + * @throws IllegalStateException if {@link #databaseFactory} is {@literal null}. 
+ */ + private ReactiveMongoDatabaseFactory getRequiredDatabaseFactory() { + + Assert.state(databaseFactory != null, + "ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory; Did you forget to provide one; It's required"); + + return databaseFactory; + } + + private static ReactiveMongoTransactionObject extractMongoTransaction(Object transaction) { + + Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction, + () -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class, + transaction.getClass())); + + return (ReactiveMongoTransactionObject) transaction; + } + + private static ReactiveMongoTransactionObject extractMongoTransaction(GenericReactiveTransaction status) { + + Assert.isInstanceOf(ReactiveMongoTransactionObject.class, status.getTransaction(), + () -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class, + status.getTransaction().getClass())); + + return (ReactiveMongoTransactionObject) status.getTransaction(); + } + + private static String debugString(@Nullable ClientSession session) { + + if (session == null) { + return "null"; + } + + String debugString = String.format("[%s@%s ", ClassUtils.getShortName(session.getClass()), + Integer.toHexString(session.hashCode())); + + try { + if (session.getServerSession() != null) { + debugString += String.format("id = %s, ", session.getServerSession().getIdentifier()); + debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent()); + debugString += String.format("txActive = %s, ", session.hasActiveTransaction()); + debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber()); + debugString += String.format("closed = %b, ", session.getServerSession().isClosed()); + debugString += String.format("clusterTime = %s", session.getClusterTime()); + } else { + debugString += "id = n/a"; + debugString += 
String.format("causallyConsistent = %s, ", session.isCausallyConsistent()); + debugString += String.format("txActive = %s, ", session.hasActiveTransaction()); + debugString += String.format("clusterTime = %s", session.getClusterTime()); + } + } catch (RuntimeException e) { + debugString += String.format("error = %s", e.getMessage()); + } + + debugString += "]"; + + return debugString; + } + + /** + * MongoDB specific transaction object, representing a {@link MongoResourceHolder}. Used as transaction object by + * {@link ReactiveMongoTransactionManager}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + * @see ReactiveMongoResourceHolder + */ + protected static class ReactiveMongoTransactionObject implements SmartTransactionObject { + + private @Nullable ReactiveMongoResourceHolder resourceHolder; + + ReactiveMongoTransactionObject(@Nullable ReactiveMongoResourceHolder resourceHolder) { + this.resourceHolder = resourceHolder; + } + + /** + * Set the {@link MongoResourceHolder}. + * + * @param resourceHolder can be {@literal null}. + */ + void setResourceHolder(@Nullable ReactiveMongoResourceHolder resourceHolder) { + this.resourceHolder = resourceHolder; + } + + /** + * @return {@literal true} if a {@link MongoResourceHolder} is set. + */ + final boolean hasResourceHolder() { + return resourceHolder != null; + } + + /** + * Start a MongoDB transaction optionally given {@link TransactionOptions}. + * + * @param options can be {@literal null} + */ + void startTransaction(@Nullable TransactionOptions options) { + + ClientSession session = getRequiredSession(); + if (options != null) { + session.startTransaction(options); + } else { + session.startTransaction(); + } + } + + /** + * Commit the transaction. + */ + public Mono commitTransaction() { + return Mono.from(getRequiredSession().commitTransaction()); + } + + /** + * Rollback (abort) the transaction. 
+ */ + public Mono abortTransaction() { + return Mono.from(getRequiredSession().abortTransaction()); + } + + /** + * Close a {@link ClientSession} without regard to its transactional state. + */ + void closeSession() { + + ClientSession session = getRequiredSession(); + if (session.getServerSession() != null && !session.getServerSession().isClosed()) { + session.close(); + } + } + + @Nullable + public ClientSession getSession() { + return resourceHolder != null ? resourceHolder.getSession() : null; + } + + private ReactiveMongoResourceHolder getRequiredResourceHolder() { + + Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present; o_O"); + return resourceHolder; + } + + private ClientSession getRequiredSession() { + + ClientSession session = getSession(); + Assert.state(session != null, "A Session is required but it turned out to be null"); + return session; + } + + @Override + public boolean isRollbackOnly() { + return this.resourceHolder != null && this.resourceHolder.isRollbackOnly(); + } + + @Override + public void flush() { + throw new UnsupportedOperationException("flush() not supported"); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java new file mode 100644 index 0000000000..93dbf5db69 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java @@ -0,0 +1,207 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; +import java.util.Optional; +import java.util.function.BiFunction; + +import org.aopalliance.intercept.MethodInterceptor; +import org.aopalliance.intercept.MethodInvocation; +import org.springframework.core.MethodClassKey; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ConcurrentReferenceHashMap; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.WriteConcern; +import com.mongodb.session.ClientSession; + +/** + * {@link MethodInterceptor} implementation looking up and invoking an alternative target method having + * {@link ClientSession} as its first argument. This allows seamless integration with the existing code base. + *
+ * The {@link MethodInterceptor} is aware of methods on {@code MongoCollection} that my return new instances of itself + * like (eg. {@link com.mongodb.reactivestreams.client.MongoCollection#withWriteConcern(WriteConcern)} and decorate them + * if not already proxied. + * + * @param Type of the actual Mongo Database. + * @param Type of the actual Mongo Collection. + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public class SessionAwareMethodInterceptor implements MethodInterceptor { + + private static final MethodCache METHOD_CACHE = new MethodCache(); + + private final ClientSession session; + private final ClientSessionOperator collectionDecorator; + private final ClientSessionOperator databaseDecorator; + private final Object target; + private final Class targetType; + private final Class collectionType; + private final Class databaseType; + private final Class sessionType; + + /** + * Create a new SessionAwareMethodInterceptor for given target. + * + * @param session the {@link ClientSession} to be used on invocation. + * @param target the original target object. + * @param databaseType the MongoDB database type + * @param databaseDecorator a {@link ClientSessionOperator} used to create the proxy for an imperative / reactive + * {@code MongoDatabase}. + * @param collectionType the MongoDB collection type. + * @param collectionDecorator a {@link ClientSessionOperator} used to create the proxy for an imperative / reactive + * {@code MongoCollection}. + * @param target object type. 
+ */ + public SessionAwareMethodInterceptor(ClientSession session, T target, Class sessionType, + Class databaseType, ClientSessionOperator databaseDecorator, Class collectionType, + ClientSessionOperator collectionDecorator) { + + Assert.notNull(session, "ClientSession must not be null"); + Assert.notNull(target, "Target must not be null"); + Assert.notNull(sessionType, "SessionType must not be null"); + Assert.notNull(databaseType, "Database type must not be null"); + Assert.notNull(databaseDecorator, "Database ClientSessionOperator must not be null"); + Assert.notNull(collectionType, "Collection type must not be null"); + Assert.notNull(collectionDecorator, "Collection ClientSessionOperator must not be null"); + + this.session = session; + this.target = target; + this.databaseType = ClassUtils.getUserClass(databaseType); + this.collectionType = ClassUtils.getUserClass(collectionType); + this.collectionDecorator = collectionDecorator; + this.databaseDecorator = databaseDecorator; + + this.targetType = ClassUtils.isAssignable(databaseType, target.getClass()) ? databaseType : collectionType; + this.sessionType = sessionType; + } + + @Nullable + @Override + public Object invoke(MethodInvocation methodInvocation) throws Throwable { + + if (requiresDecoration(methodInvocation.getMethod())) { + + Object target = methodInvocation.proceed(); + if (target instanceof Proxy) { + return target; + } + + return decorate(target); + } + + if (!requiresSession(methodInvocation.getMethod())) { + return methodInvocation.proceed(); + } + + Optional targetMethod = METHOD_CACHE.lookup(methodInvocation.getMethod(), targetType, sessionType); + + return !targetMethod.isPresent() ? 
methodInvocation.proceed() + : ReflectionUtils.invokeMethod(targetMethod.get(), target, + prependSessionToArguments(session, methodInvocation)); + } + + private boolean requiresDecoration(Method method) { + + return ClassUtils.isAssignable(databaseType, method.getReturnType()) + || ClassUtils.isAssignable(collectionType, method.getReturnType()); + } + + @SuppressWarnings("unchecked") + protected Object decorate(Object target) { + + return ClassUtils.isAssignable(databaseType, target.getClass()) ? databaseDecorator.apply(session, target) + : collectionDecorator.apply(session, target); + } + + private static boolean requiresSession(Method method) { + + return method.getParameterCount() == 0 + || !ClassUtils.isAssignable(ClientSession.class, method.getParameterTypes()[0]); + } + + private static Object[] prependSessionToArguments(ClientSession session, MethodInvocation invocation) { + + Object[] args = new Object[invocation.getArguments().length + 1]; + + args[0] = session; + System.arraycopy(invocation.getArguments(), 0, args, 1, invocation.getArguments().length); + + return args; + } + + /** + * Simple {@link Method} to {@link Method} caching facility for {@link ClientSession} overloaded targets. + * + * @since 2.1 + * @author Christoph Strobl + */ + static class MethodCache { + + private final ConcurrentReferenceHashMap> cache = new ConcurrentReferenceHashMap<>(); + + /** + * Lookup the target {@link Method}. 
+ * + * @param method + * @param targetClass + * @return + */ + Optional lookup(Method method, Class targetClass, Class sessionType) { + + return cache.computeIfAbsent(new MethodClassKey(method, targetClass), + val -> Optional.ofNullable(findTargetWithSession(method, targetClass, sessionType))); + } + + @Nullable + private Method findTargetWithSession(Method sourceMethod, Class targetType, + Class sessionType) { + + Class[] argTypes = sourceMethod.getParameterTypes(); + Class[] args = new Class[argTypes.length + 1]; + args[0] = sessionType; + System.arraycopy(argTypes, 0, args, 1, argTypes.length); + + return ReflectionUtils.findMethod(targetType, sourceMethod.getName(), args); + } + + /** + * Check whether the cache contains an entry for {@link Method} and {@link Class}. + * + * @param method + * @param targetClass + * @return + */ + boolean contains(Method method, Class targetClass) { + return cache.containsKey(new MethodClassKey(method, targetClass)); + } + } + + /** + * Represents an operation upon two operands of the same type, producing a result of the same type as the operands + * accepting {@link ClientSession}. This is a specialization of {@link BiFunction} for the case where the operands and + * the result are all of the same type. + * + * @param the type of the operands and result of the operator + */ + public interface ClientSessionOperator extends BiFunction {} +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java new file mode 100644 index 0000000000..07b5c31586 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java @@ -0,0 +1,52 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; + +/** + * {@link SessionSynchronization} is used along with {@code MongoTemplate} to define in which type of transactions to + * participate if any. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + * @see MongoTemplate#setSessionSynchronization(SessionSynchronization) + * @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization) + * @see ReactiveMongoTemplate#setSessionSynchronization(SessionSynchronization) + * @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization) + */ +public enum SessionSynchronization { + + /** + * Synchronize with any transaction even with empty transactions and initiate a MongoDB transaction when doing so by + * registering a MongoDB specific {@link org.springframework.transaction.support.ResourceHolderSynchronization}. + */ + ALWAYS, + + /** + * Synchronize with native MongoDB transactions initiated via {@link MongoTransactionManager}. + */ + ON_ACTUAL_TRANSACTION, + + /** + * Do not participate in ongoing transactions. 
+ * + * @since 3.2.5 + */ + NEVER +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SimpleMongoTransactionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SimpleMongoTransactionOptions.java new file mode 100644 index 0000000000..b52fc0bd71 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SimpleMongoTransactionOptions.java @@ -0,0 +1,154 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.time.Duration; +import java.util.Arrays; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.Function; +import com.mongodb.ReadConcern; +import com.mongodb.ReadConcernLevel; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; + +/** + * Trivial implementation of {@link MongoTransactionOptions}. 
+ * + * @author Christoph Strobl + * @since 4.3 + */ +class SimpleMongoTransactionOptions implements MongoTransactionOptions { + + static final Set KNOWN_KEYS = Arrays.stream(OptionKey.values()).map(OptionKey::getKey) + .collect(Collectors.toSet()); + + private final Duration maxCommitTime; + private final ReadConcern readConcern; + private final ReadPreference readPreference; + private final WriteConcern writeConcern; + + static SimpleMongoTransactionOptions of(Map options) { + return new SimpleMongoTransactionOptions(options); + } + + private SimpleMongoTransactionOptions(Map options) { + + this.maxCommitTime = doGetMaxCommitTime(options); + this.readConcern = doGetReadConcern(options); + this.readPreference = doGetReadPreference(options); + this.writeConcern = doGetWriteConcern(options); + } + + @Nullable + @Override + public Duration getMaxCommitTime() { + return maxCommitTime; + } + + @Nullable + @Override + public ReadConcern getReadConcern() { + return readConcern; + } + + @Nullable + @Override + public ReadPreference getReadPreference() { + return readPreference; + } + + @Nullable + @Override + public WriteConcern getWriteConcern() { + return writeConcern; + } + + @Override + public String toString() { + + return "DefaultMongoTransactionOptions{" + "maxCommitTime=" + maxCommitTime + ", readConcern=" + readConcern + + ", readPreference=" + readPreference + ", writeConcern=" + writeConcern + '}'; + } + + @Nullable + private static Duration doGetMaxCommitTime(Map options) { + + return getValue(options, OptionKey.MAX_COMMIT_TIME, value -> { + + Duration timeout = Duration.parse(value); + Assert.isTrue(!timeout.isNegative(), "%s cannot be negative".formatted(OptionKey.MAX_COMMIT_TIME)); + return timeout; + }); + } + + @Nullable + private static ReadConcern doGetReadConcern(Map options) { + return getValue(options, OptionKey.READ_CONCERN, value -> new ReadConcern(ReadConcernLevel.fromString(value))); + } + + @Nullable + private static ReadPreference 
doGetReadPreference(Map options) { + return getValue(options, OptionKey.READ_PREFERENCE, ReadPreference::valueOf); + } + + @Nullable + private static WriteConcern doGetWriteConcern(Map options) { + + return getValue(options, OptionKey.WRITE_CONCERN, value -> { + + WriteConcern writeConcern = WriteConcern.valueOf(value); + if (writeConcern == null) { + throw new IllegalArgumentException("'%s' is not a valid WriteConcern".formatted(options.get("writeConcern"))); + } + return writeConcern; + }); + } + + @Nullable + private static T getValue(Map options, OptionKey key, Function convertFunction) { + + String value = options.get(key.getKey()); + return value != null ? convertFunction.apply(value) : null; + } + + enum OptionKey { + + MAX_COMMIT_TIME("maxCommitTime"), READ_CONCERN("readConcern"), READ_PREFERENCE("readPreference"), WRITE_CONCERN( + "writeConcern"); + + final String key; + + OptionKey(String key) { + this.key = key; + } + + public String getKey() { + return key; + } + + @Override + public String toString() { + return getKey(); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java new file mode 100644 index 0000000000..a3d600270f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java @@ -0,0 +1,77 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.data.util.Version; +import org.springframework.util.StringUtils; + +import com.mongodb.MongoDriverInformation; + +/** + * Class that exposes the SpringData MongoDB specific information like the current {@link Version} or + * {@link MongoDriverInformation driver information}. + * + * @author Christoph Strobl + * @since 3.0 + */ +public class SpringDataMongoDB { + + private static final Log LOGGER = LogFactory.getLog(SpringDataMongoDB.class); + + private static final Version FALLBACK_VERSION = new Version(3); + private static final MongoDriverInformation DRIVER_INFORMATION = MongoDriverInformation + .builder(MongoDriverInformation.builder().build()).driverName("spring-data").build(); + + /** + * Obtain the SpringData MongoDB specific driver information. + * + * @return never {@literal null}. + */ + public static MongoDriverInformation driverInformation() { + return DRIVER_INFORMATION; + } + + /** + * Fetches the "Implementation-Version" manifest attribute from the jar file. + *
+ * Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the + * version in all environments. In this case the current Major version is returned as a fallback. + * + * @return never {@literal null}. + */ + public static Version version() { + + Package pkg = SpringDataMongoDB.class.getPackage(); + String versionString = (pkg != null ? pkg.getImplementationVersion() : null); + + if (!StringUtils.hasText(versionString)) { + + LOGGER.debug("Unable to find Spring Data MongoDB version."); + return FALLBACK_VERSION; + } + + try { + return Version.parse(versionString); + } catch (Exception e) { + LOGGER.debug(String.format("Cannot read Spring Data MongoDB version '%s'.", versionString)); + } + + return FALLBACK_VERSION; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionMetadata.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionMetadata.java new file mode 100644 index 0000000000..cd5f58d5b1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionMetadata.java @@ -0,0 +1,42 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.time.Duration; + +import org.springframework.lang.Nullable; + +/** + * MongoDB-specific transaction metadata. 
+ * + * @author Christoph Strobl + * @since 4.3 + */ +public interface TransactionMetadata { + + /** + * @return the maximum commit time. Can be {@literal null} if not configured. + */ + @Nullable + Duration getMaxCommitTime(); + + /** + * @return {@literal true} if the max commit time is configured; {@literal false} otherwise. + */ + default boolean hasMaxCommitTime() { + return getMaxCommitTime() != null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionOptionResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionOptionResolver.java new file mode 100644 index 0000000000..37c7e3686b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionOptionResolver.java @@ -0,0 +1,38 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.lang.Nullable; +import org.springframework.transaction.TransactionDefinition; + +/** + * Interface that defines a resolver for {@link TransactionMetadata} based on a {@link TransactionDefinition}. + * Transaction metadata is used to enrich the MongoDB transaction with additional information. + * + * @author Christoph Strobl + * @since 4.3 + */ +interface TransactionOptionResolver { + + /** + * Resolves the transaction metadata from a given {@link TransactionDefinition}. 
+ * + * @param definition the {@link TransactionDefinition}. + * @return the resolved {@link TransactionMetadata} or {@literal null} if the resolver cannot resolve any metadata. + */ + @Nullable + T resolve(TransactionDefinition definition); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientClientSessionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientClientSessionException.java new file mode 100644 index 0000000000..5446170ff9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientClientSessionException.java @@ -0,0 +1,38 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.dao.TransientDataAccessException; + +/** + * {@link TransientDataAccessException} specific to MongoDB {@link com.mongodb.session.ClientSession} related data + * access failures such as reading data using an already closed session. + * + * @author Christoph Strobl + * @since 4.4 + */ +public class TransientClientSessionException extends TransientMongoDbException { + + /** + * Constructor for {@link TransientClientSessionException}. + * + * @param msg the detail message. + * @param cause the root cause. 
+ */ + public TransientClientSessionException(String msg, Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientMongoDbException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientMongoDbException.java new file mode 100644 index 0000000000..cad05ca17c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientMongoDbException.java @@ -0,0 +1,39 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.dao.TransientDataAccessException; + +/** + * Root of the hierarchy of MongoDB specific data access exceptions that are considered transient such as + * {@link com.mongodb.MongoException MongoExceptions} carrying {@link com.mongodb.MongoException#hasErrorLabel(String) + * specific labels}. + * + * @author Christoph Strobl + * @since 4.4 + */ +public class TransientMongoDbException extends TransientDataAccessException { + + /** + * Constructor for {@link TransientMongoDbException}. + * + * @param msg the detail message. + * @param cause the root cause. 
+ */ + public TransientMongoDbException(String msg, Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java index 1fb5868407..bec05d0d68 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,12 +16,13 @@ package org.springframework.data.mongodb; import org.springframework.dao.UncategorizedDataAccessException; +import org.springframework.lang.Nullable; public class UncategorizedMongoDbException extends UncategorizedDataAccessException { private static final long serialVersionUID = -2336595514062364929L; - public UncategorizedMongoDbException(String msg, Throwable cause) { + public UncategorizedMongoDbException(String msg, @Nullable Throwable cause) { super(msg, cause); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessor.java new file mode 100644 index 0000000000..2254b3c9a8 --- /dev/null +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessor.java @@ -0,0 +1,105 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import java.lang.annotation.Annotation; +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; + +import org.springframework.aot.generate.GenerationContext; +import org.springframework.aot.hint.TypeReference; +import org.springframework.core.annotation.AnnotatedElementUtils; +import org.springframework.core.annotation.MergedAnnotations; +import org.springframework.data.annotation.Reference; +import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory; +import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.LazyLoadingInterceptor; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.DocumentReference; + +/** + * @author Christoph Strobl + * @since 4.0 + */ +public class LazyLoadingProxyAotProcessor { + + private boolean generalLazyLoadingProxyContributed = false; + + public void registerLazyLoadingProxyIfNeeded(Class type, GenerationContext generationContext) { + + Set refFields = getFieldsWithAnnotationPresent(type, Reference.class); + if (refFields.isEmpty()) { + 
return; + } + + refFields.stream() // + .filter(LazyLoadingProxyAotProcessor::isLazyLoading) // + .forEach(field -> { + + if (!generalLazyLoadingProxyContributed) { + generationContext.getRuntimeHints().proxies().registerJdkProxy( + TypeReference.of(org.springframework.data.mongodb.core.convert.LazyLoadingProxy.class), + TypeReference.of(org.springframework.aop.SpringProxy.class), + TypeReference.of(org.springframework.aop.framework.Advised.class), + TypeReference.of(org.springframework.core.DecoratingProxy.class)); + generalLazyLoadingProxyContributed = true; + } + + if (field.getType().isInterface()) { + + List> interfaces = new ArrayList<>( + Arrays.asList(LazyLoadingProxyFactory.prepareFactory(field.getType()).getProxiedInterfaces())); + interfaces.add(org.springframework.aop.SpringProxy.class); + interfaces.add(org.springframework.aop.framework.Advised.class); + interfaces.add(org.springframework.core.DecoratingProxy.class); + + generationContext.getRuntimeHints().proxies().registerJdkProxy(interfaces.toArray(Class[]::new)); + } else { + + Class proxyClass = LazyLoadingProxyFactory.resolveProxyType(field.getType(), + LazyLoadingInterceptor::none); + + // see: spring-projects/spring-framework/issues/29309 + generationContext.getRuntimeHints().reflection().registerType(proxyClass, MongoAotReflectionHelper::cglibProxyReflectionMemberAccess); + } + }); + } + + private static boolean isLazyLoading(Field field) { + if (AnnotatedElementUtils.isAnnotated(field, DBRef.class)) { + return AnnotatedElementUtils.findMergedAnnotation(field, DBRef.class).lazy(); + } + if (AnnotatedElementUtils.isAnnotated(field, DocumentReference.class)) { + return AnnotatedElementUtils.findMergedAnnotation(field, DocumentReference.class).lazy(); + } + return false; + } + + private static Set getFieldsWithAnnotationPresent(Class type, Class annotation) { + + Set fields = new LinkedHashSet<>(); + for (Field field : type.getDeclaredFields()) { + if 
(MergedAnnotations.from(field).get(annotation).isPresent()) { + fields.add(field); + } + } + return fields; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotPredicates.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotPredicates.java new file mode 100644 index 0000000000..2fe27a2c9e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotPredicates.java @@ -0,0 +1,68 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import java.util.function.Predicate; + +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.util.ReactiveWrappers; +import org.springframework.data.util.ReactiveWrappers.ReactiveLibrary; +import org.springframework.data.util.TypeUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +/** + * Collection of {@link Predicate predicates} to determine dynamic library aspects during AOT computation. Intended for + * internal usage only. 
+ * + * @author Christoph Strobl + * @since 4.0 + */ +public class MongoAotPredicates { + + public static final Predicate> IS_SIMPLE_TYPE = (type) -> MongoSimpleTypes.HOLDER.isSimpleType(type) + || TypeUtils.type(type).isPartOf("org.bson"); + public static final Predicate IS_REACTIVE_LIBARARY_AVAILABLE = ReactiveWrappers::isAvailable; + public static final Predicate IS_SYNC_CLIENT_PRESENT = (classLoader) -> ClassUtils + .isPresent("com.mongodb.client.MongoClient", classLoader); + public static final Predicate IS_REACTIVE_CLIENT_PRESENT = (classLoader) -> ClassUtils + .isPresent("com.mongodb.reactivestreams.client.MongoClient", classLoader); + + /** + * @return {@literal true} if the Project Reactor is present. + */ + public static boolean isReactorPresent() { + return IS_REACTIVE_LIBARARY_AVAILABLE.test(ReactiveWrappers.ReactiveLibrary.PROJECT_REACTOR); + } + + /** + * @param classLoader can be {@literal null}. + * @return {@literal true} if the {@link com.mongodb.client.MongoClient} is present. + * @since 4.0 + */ + public static boolean isSyncClientPresent(@Nullable ClassLoader classLoader) { + return IS_SYNC_CLIENT_PRESENT.test(classLoader); + } + + /** + * @param classLoader can be {@literal null}. + * @return {@literal true} if the {@link com.mongodb.reactivestreams.client.MongoClient} is present. + * @since 4.3 + */ + public static boolean isReactiveClientPresent(@Nullable ClassLoader classLoader) { + return IS_REACTIVE_CLIENT_PRESENT.test(classLoader); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotReflectionHelper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotReflectionHelper.java new file mode 100644 index 0000000000..ff8d04b382 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotReflectionHelper.java @@ -0,0 +1,31 @@ +/* + * Copyright 2024-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.TypeHint.Builder; + +/** + * @author Christoph Strobl + */ +public final class MongoAotReflectionHelper { + + public static void cglibProxyReflectionMemberAccess(Builder builder) { + + builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, MemberCategory.INVOKE_DECLARED_METHODS, + MemberCategory.DECLARED_FIELDS); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoManagedTypesBeanRegistrationAotProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoManagedTypesBeanRegistrationAotProcessor.java new file mode 100644 index 0000000000..a33f20ffb6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoManagedTypesBeanRegistrationAotProcessor.java @@ -0,0 +1,56 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import org.springframework.aot.generate.GenerationContext; +import org.springframework.core.ResolvableType; +import org.springframework.data.aot.ManagedTypesBeanRegistrationAotProcessor; +import org.springframework.data.mongodb.MongoManagedTypes; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +/** + * @author Christoph Strobl + * @since 2022/06 + */ +class MongoManagedTypesBeanRegistrationAotProcessor extends ManagedTypesBeanRegistrationAotProcessor { + + private final LazyLoadingProxyAotProcessor lazyLoadingProxyAotProcessor = new LazyLoadingProxyAotProcessor(); + + public MongoManagedTypesBeanRegistrationAotProcessor() { + setModuleIdentifier("mongo"); + } + + @Override + protected boolean isMatch(@Nullable Class beanType, @Nullable String beanName) { + return isMongoManagedTypes(beanType) || super.isMatch(beanType, beanName); + } + + protected boolean isMongoManagedTypes(@Nullable Class beanType) { + return beanType != null && ClassUtils.isAssignable(MongoManagedTypes.class, beanType); + } + + @Override + protected void contributeType(ResolvableType type, GenerationContext generationContext) { + + if (MongoAotPredicates.IS_SIMPLE_TYPE.test(type.toClass())) { + return; + } + + super.contributeType(type, generationContext); + lazyLoadingProxyAotProcessor.registerLazyLoadingProxyIfNeeded(type.toClass(), generationContext); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoRuntimeHints.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoRuntimeHints.java new file mode 100644 index 0000000000..538fe4e812 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoRuntimeHints.java @@ -0,0 +1,129 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import static org.springframework.data.mongodb.aot.MongoAotPredicates.*; + +import java.util.Arrays; + +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.RuntimeHintsRegistrar; +import org.springframework.aot.hint.TypeReference; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback; +import 
org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.ServerAddress; +import com.mongodb.UnixServerAddress; +import com.mongodb.client.MapReduceIterable; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.reactivestreams.client.MapReducePublisher; + +/** + * {@link RuntimeHintsRegistrar} for repository types and entity callbacks. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.0 + */ +class MongoRuntimeHints implements RuntimeHintsRegistrar { + + @Override + public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + hints.reflection().registerTypes( + Arrays.asList(TypeReference.of(BeforeConvertCallback.class), TypeReference.of(BeforeSaveCallback.class), + TypeReference.of(AfterConvertCallback.class), TypeReference.of(AfterSaveCallback.class)), + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + + registerTransactionProxyHints(hints, classLoader); + registerMongoCompatibilityAdapterHints(hints, classLoader); + + if (isReactorPresent()) { + + hints.reflection() + .registerTypes(Arrays.asList(TypeReference.of(ReactiveBeforeConvertCallback.class), + TypeReference.of(ReactiveBeforeSaveCallback.class), TypeReference.of(ReactiveAfterConvertCallback.class), + TypeReference.of(ReactiveAfterSaveCallback.class)), + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + } + } + + private static void registerTransactionProxyHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + if (MongoAotPredicates.isSyncClientPresent(classLoader) + && ClassUtils.isPresent("org.springframework.aop.SpringProxy", classLoader)) { + + hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoDatabase"), + 
TypeReference.of("org.springframework.aop.SpringProxy"), + TypeReference.of("org.springframework.core.DecoratingProxy")); + hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoCollection"), + TypeReference.of("org.springframework.aop.SpringProxy"), + TypeReference.of("org.springframework.core.DecoratingProxy")); + } + } + + @SuppressWarnings("deprecation") + private static void registerMongoCompatibilityAdapterHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + hints.reflection() // + .registerType(MongoClientSettings.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(MongoClientSettings.Builder.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(IndexOptions.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(ServerAddress.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(UnixServerAddress.class, MemberCategory.INVOKE_PUBLIC_METHODS) // + .registerType(TypeReference.of("com.mongodb.connection.StreamFactoryFactory"), + MemberCategory.INTROSPECT_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.internal.connection.StreamFactoryFactory"), + MemberCategory.INTROSPECT_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.internal.build.MongoDriverVersion"), MemberCategory.PUBLIC_FIELDS); + + if (MongoAotPredicates.isSyncClientPresent(classLoader)) { + + hints.reflection() // + .registerType(MongoDatabase.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.client.internal.MongoDatabaseImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(MapReduceIterable.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.client.internal.MapReduceIterableImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS); + } + + if (MongoAotPredicates.isReactiveClientPresent(classLoader)) { + + hints.reflection() // + .registerType(com.mongodb.reactivestreams.client.MongoDatabase.class, 
MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.reactivestreams.client.internal.MongoDatabaseImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(MapReducePublisher.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.reactivestreams.client.internal.MapReducePublisherImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java new file mode 100644 index 0000000000..93033417fb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java @@ -0,0 +1,111 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.config; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.SpringDataMongoDB; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; + +/** + * Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.client.MongoClient}. + * + * @author Christoph Strobl + * @since 2.1 + * @see MongoConfigurationSupport + */ +@Configuration(proxyBeanMethods = false) +public abstract class AbstractMongoClientConfiguration extends MongoConfigurationSupport { + + /** + * Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a + * {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
+ * Override {@link #mongoClientSettings()} to configure connection details. + * + * @return never {@literal null}. + * @see #mongoClientSettings() + * @see #configureClientSettings(Builder) + */ + public MongoClient mongoClient() { + return createMongoClient(mongoClientSettings()); + } + + /** + * Creates a {@link MongoTemplate}. + * + * @see #mongoDbFactory() + * @see #mappingMongoConverter(MongoDatabaseFactory, MongoCustomConversions, MongoMappingContext) + */ + @Bean + public MongoTemplate mongoTemplate(MongoDatabaseFactory databaseFactory, MappingMongoConverter converter) { + return new MongoTemplate(databaseFactory, converter); + } + + /** + * Creates a {@link org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory} to be used by the + * {@link MongoTemplate}. Will use the {@link MongoClient} instance configured in {@link #mongoClient()}. + * + * @see #mongoClient() + * @see #mongoTemplate(MongoDatabaseFactory, MappingMongoConverter) + */ + @Bean + public MongoDatabaseFactory mongoDbFactory() { + return new SimpleMongoClientDatabaseFactory(mongoClient(), getDatabaseName()); + } + + /** + * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and + * {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get {@link #customConversions()} applied. 
+ * + * @see #customConversions() + * @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes) + * @see #mongoDbFactory() + */ + @Bean + public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory, + MongoCustomConversions customConversions, MongoMappingContext mappingContext) { + + DbRefResolver dbRefResolver = new DefaultDbRefResolver(databaseFactory); + MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext); + converter.setCustomConversions(customConversions); + converter.setCodecRegistryProvider(databaseFactory); + + return converter; + } + + /** + * Create the Reactive Streams {@link com.mongodb.reactivestreams.client.MongoClient} instance with given + * {@link MongoClientSettings}. + * + * @return never {@literal null}. + * @since 3.0 + */ + protected MongoClient createMongoClient(MongoClientSettings settings) { + return MongoClients.create(settings, SpringDataMongoDB.driverInformation()); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java deleted file mode 100644 index d7a3ad2e6a..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.config; - -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; -import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; -import org.springframework.data.mongodb.core.convert.MappingMongoConverter; -import org.springframework.data.mongodb.core.mapping.Document; -import org.springframework.lang.Nullable; - -import com.mongodb.MongoClient; - -/** - * Base class for Spring Data MongoDB configuration using JavaConfig. - * - * @author Mark Pollack - * @author Oliver Gierke - * @author Thomas Darimont - * @author Ryan Tenney - * @author Christoph Strobl - * @author Mark Paluch - * @see MongoConfigurationSupport - */ -@Configuration -public abstract class -AbstractMongoConfiguration extends MongoConfigurationSupport { - - /** - * Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a - * {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. - * - * @return - */ - public abstract MongoClient mongoClient(); - - /** - * Creates a {@link MongoTemplate}. - * - * @return - */ - @Bean - public MongoTemplate mongoTemplate() throws Exception { - return new MongoTemplate(mongoDbFactory(), mappingMongoConverter()); - } - - /** - * Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient} - * instance configured in {@link #mongo()}. 
- * - * @see #mongoClient() - * @see #mongoTemplate() - * @return - */ - @Bean - public MongoDbFactory mongoDbFactory() { - return new SimpleMongoDbFactory(mongoClient(), getDatabaseName()); - } - - /** - * Return the base package to scan for mapped {@link Document}s. Will return the package name of the configuration - * class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending - * {@link AbstractMongoConfiguration} the base package will be considered {@code com.acme} unless the method is - * overridden to implement alternate behavior. - * - * @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for - * entities. - * @deprecated use {@link #getMappingBasePackages()} instead. - */ - @Deprecated - @Nullable - protected String getMappingBasePackage() { - - Package mappingBasePackage = getClass().getPackage(); - return mappingBasePackage == null ? null : mappingBasePackage.getName(); - } - - /** - * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and - * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied. 
- * - * @see #customConversions() - * @see #mongoMappingContext() - * @see #mongoDbFactory() - * @return - * @throws Exception - */ - @Bean - public MappingMongoConverter mappingMongoConverter() throws Exception { - - DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory()); - MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext()); - converter.setCustomConversions(customConversions()); - - return converter; - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java index 292cf800ef..f93c4ae708 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,12 +18,19 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.SpringDataMongoDB; import org.springframework.data.mongodb.core.ReactiveMongoOperations; import org.springframework.data.mongodb.core.ReactiveMongoTemplate; import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoClients; /** * Base class for reactive Spring Data MongoDB configuration using JavaConfig. @@ -33,25 +40,33 @@ * @since 2.0 * @see MongoConfigurationSupport */ -@Configuration +@Configuration(proxyBeanMethods = false) public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurationSupport { /** * Return the Reactive Streams {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want - * to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. + * to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
+ * Override {@link #mongoClientSettings()} to configure connection details. * * @return never {@literal null}. + * @see #mongoClientSettings() + * @see #configureClientSettings(Builder) */ - public abstract MongoClient reactiveMongoClient(); + public MongoClient reactiveMongoClient() { + return createReactiveMongoClient(mongoClientSettings()); + } /** * Creates {@link ReactiveMongoOperations}. * + * @see #reactiveMongoDbFactory() + * @see #mappingMongoConverter(ReactiveMongoDatabaseFactory, MongoCustomConversions, MongoMappingContext) * @return never {@literal null}. */ @Bean - public ReactiveMongoOperations reactiveMongoTemplate() throws Exception { - return new ReactiveMongoTemplate(reactiveMongoDbFactory(), mappingMongoConverter()); + public ReactiveMongoTemplate reactiveMongoTemplate(ReactiveMongoDatabaseFactory databaseFactory, + MappingMongoConverter mongoConverter) { + return new ReactiveMongoTemplate(databaseFactory, mongoConverter); } /** @@ -59,7 +74,7 @@ public ReactiveMongoOperations reactiveMongoTemplate() throws Exception { * {@link MongoClient} instance configured in {@link #reactiveMongoClient()}. * * @see #reactiveMongoClient() - * @see #reactiveMongoTemplate() + * @see #reactiveMongoTemplate(ReactiveMongoDatabaseFactory, MappingMongoConverter) * @return never {@literal null}. */ @Bean @@ -69,21 +84,31 @@ public ReactiveMongoDatabaseFactory reactiveMongoDbFactory() { /** * Creates a {@link MappingMongoConverter} using the configured {@link #reactiveMongoDbFactory()} and - * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied. + * {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get {@link #customConversions()} applied. * * @see #customConversions() - * @see #mongoMappingContext() + * @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes) * @see #reactiveMongoDbFactory() * @return never {@literal null}. 
- * @throws Exception */ @Bean - public MappingMongoConverter mappingMongoConverter() throws Exception { + public MappingMongoConverter mappingMongoConverter(ReactiveMongoDatabaseFactory databaseFactory, + MongoCustomConversions customConversions, MongoMappingContext mappingContext) { - MappingMongoConverter converter = new MappingMongoConverter(ReactiveMongoTemplate.NO_OP_REF_RESOLVER, - mongoMappingContext()); - converter.setCustomConversions(customConversions()); + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(customConversions); + converter.setCodecRegistryProvider(databaseFactory); return converter; } + + /** + * Create the Reactive Streams {@link MongoClient} instance with given {@link MongoClientSettings}. + * + * @return never {@literal null}. + * @since 3.0 + */ + protected MongoClient createReactiveMongoClient(MongoClientSettings settings) { + return MongoClients.create(settings, SpringDataMongoDB.driverInformation()); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java index 7865028887..584fbfba30 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ConnectionStringPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ConnectionStringPropertyEditor.java new file mode 100644 index 0000000000..b070a0190f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ConnectionStringPropertyEditor.java @@ -0,0 +1,42 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import java.beans.PropertyEditorSupport; + +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +import com.mongodb.ConnectionString; + +/** + * Parse a {@link String} to a {@link com.mongodb.ConnectionString}. 
+ * + * @author Christoph Strobl + * @since 3.0 + */ +public class ConnectionStringPropertyEditor extends PropertyEditorSupport { + + @Override + public void setAsText(@Nullable String connectionString) { + + if (!StringUtils.hasText(connectionString)) { + return; + } + + setValue(new ConnectionString(connectionString)); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java index 9035da6115..d6ce19f3ee 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -42,29 +42,29 @@ /** * Configures the {@link AuditorAware} bean to be used to lookup the current principal. * - * @return + * @return empty {@link String} by default. */ String auditorAwareRef() default ""; /** * Configures whether the creation and modification dates are set. Defaults to {@literal true}. * - * @return + * @return {@literal true} by default. */ boolean setDates() default true; /** * Configures whether the entity shall be marked as modified on creation. Defaults to {@literal true}. * - * @return + * @return {@literal true} by default. 
*/ boolean modifyOnCreate() default true; /** - * Configures a {@link DateTimeProvider} bean name that allows customizing the {@link org.joda.time.DateTime} to be - * used for setting creation and modification dates. + * Configures a {@link DateTimeProvider} bean name that allows customizing the timestamp to be used for setting + * creation and modification dates. * - * @return + * @return empty {@link String} by default. */ String dateTimeProviderRef() default ""; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableReactiveMongoAuditing.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableReactiveMongoAuditing.java new file mode 100644 index 0000000000..21fadf86c6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableReactiveMongoAuditing.java @@ -0,0 +1,70 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.config; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.context.annotation.Import; +import org.springframework.data.auditing.DateTimeProvider; +import org.springframework.data.domain.ReactiveAuditorAware; + +/** + * Annotation to enable auditing in MongoDB using reactive infrastructure via annotation configuration. + * + * @author Mark Paluch + * @since 3.1 + */ +@Inherited +@Documented +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Import(ReactiveMongoAuditingRegistrar.class) +public @interface EnableReactiveMongoAuditing { + + /** + * Configures the {@link ReactiveAuditorAware} bean to be used to lookup the current principal. + * + * @return empty {@link String} by default. + */ + String auditorAwareRef() default ""; + + /** + * Configures whether the creation and modification dates are set. Defaults to {@literal true}. + * + * @return {@literal true} by default. + */ + boolean setDates() default true; + + /** + * Configures whether the entity shall be marked as modified on creation. Defaults to {@literal true}. + * + * @return {@literal true} by default. + */ + boolean modifyOnCreate() default true; + + /** + * Configures a {@link DateTimeProvider} bean name that allows customizing the timestamp to be used for setting + * creation and modification dates. + * + * @return empty {@link String} by default. 
+ */ + String dateTimeProviderRef() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java index 048913ed4e..3b10019cc0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java index a33674ef0a..b86da91dad 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -34,10 +34,6 @@ */ class GridFsTemplateParser extends AbstractBeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext) throws BeanDefinitionStoreException { @@ -46,10 +42,6 @@ protected String resolveId(Element element, AbstractBeanDefinition definition, P return StringUtils.hasText(id) ? id : BeanNames.GRID_FS_TEMPLATE_BEAN_NAME; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java index 55b84cf7ec..164b4defb6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,13 +18,10 @@ import static org.springframework.data.mongodb.config.BeanNames.*; import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; import java.util.List; import java.util.Set; import org.springframework.beans.BeanMetadataElement; -import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanDefinitionHolder; import org.springframework.beans.factory.config.RuntimeBeanReference; @@ -51,7 +48,6 @@ import org.springframework.core.type.filter.TypeFilter; import org.springframework.data.annotation.Persistent; import org.springframework.data.config.BeanComponentDefinitionBuilder; -import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory; import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; @@ -60,8 +56,10 @@ import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; import 
org.w3c.dom.Element; @@ -75,11 +73,13 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Zied Yaich + * @author Tomasz Forys */ public class MappingMongoConverterParser implements BeanDefinitionParser { private static final String BASE_PACKAGE = "base-package"; - private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("javax.validation.Validator", + private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("jakarta.validation.Validator", MappingMongoConverterParser.class.getClassLoader()); /* (non-Javadoc) @@ -95,12 +95,12 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE); id = StringUtils.hasText(id) ? id : DEFAULT_CONVERTER_BEAN_NAME; + boolean autoIndexCreationEnabled = isAutoIndexCreationEnabled(element); + parserContext.pushContainingComponent(new CompositeComponentDefinition("Mapping Mongo Converter", element)); BeanDefinition conversionsDefinition = getCustomConversions(element, parserContext); - String ctxRef = potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, id); - - createIsNewStrategyFactoryBeanDefinition(ctxRef, parserContext, element); + String ctxRef = potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, id, autoIndexCreationEnabled); // Need a reference to a Mongo instance String dbFactoryRef = element.getAttribute("db-factory-ref"); @@ -133,9 +133,7 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { new BeanComponentDefinition(indexOperationsProviderBuilder.getBeanDefinition(), "indexOperationsProvider")); } - try { - registry.getBeanDefinition(INDEX_HELPER_BEAN_NAME); - } catch (NoSuchBeanDefinitionException ignored) { + if (!registry.containsBeanDefinition(INDEX_HELPER_BEAN_NAME)) { BeanDefinitionBuilder indexHelperBuilder = BeanDefinitionBuilder 
.genericBeanDefinition(MongoPersistentEntityIndexCreator.class); @@ -149,7 +147,7 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { BeanDefinition validatingMongoEventListener = potentiallyCreateValidatingMongoEventListener(element, parserContext); - if (validatingMongoEventListener != null) { + if (validatingMongoEventListener != null && !registry.containsBeanDefinition(VALIDATING_EVENT_LISTENER_BEAN_NAME)) { parserContext.registerBeanComponent( new BeanComponentDefinition(validatingMongoEventListener, VALIDATING_EVENT_LISTENER_BEAN_NAME)); } @@ -159,18 +157,20 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { return null; } + @Nullable private BeanDefinition potentiallyCreateValidatingMongoEventListener(Element element, ParserContext parserContext) { String disableValidation = element.getAttribute("disable-validation"); - boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.valueOf(disableValidation); + boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.parseBoolean(disableValidation); if (!validationDisabled) { BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(); - RuntimeBeanReference validator = getValidator(builder, parserContext); + RuntimeBeanReference validator = getValidator(element, parserContext); if (validator != null) { builder.getRawBeanDefinition().setBeanClass(ValidatingMongoEventListener.class); + builder.getRawBeanDefinition().setSource(element); builder.addConstructorArgValue(validator); return builder.getBeanDefinition(); @@ -180,6 +180,7 @@ private BeanDefinition potentiallyCreateValidatingMongoEventListener(Element ele return null; } + @Nullable private RuntimeBeanReference getValidator(Object source, ParserContext parserContext) { if (!JSR_303_PRESENT) { @@ -191,13 +192,39 @@ private RuntimeBeanReference getValidator(Object source, ParserContext parserCon validatorDef.setSource(source); 
validatorDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); String validatorName = parserContext.getReaderContext().registerWithGeneratedName(validatorDef); - parserContext.registerBeanComponent(new BeanComponentDefinition(validatorDef, validatorName)); return new RuntimeBeanReference(validatorName); } + private static boolean isAutoIndexCreationEnabled(Element element) { + + String autoIndexCreation = element.getAttribute("auto-index-creation"); + return StringUtils.hasText(autoIndexCreation) && Boolean.parseBoolean(autoIndexCreation); + } + + /** + * Create and register the {@link BeanDefinition} for a {@link MongoMappingContext} if not explicitly referenced by a + * given {@literal mapping-context-ref} {@link Element#getAttribute(String) attribute}. + * + * @return the mapping context bean name. + * @deprecated since 4.3. Use + * {@link #potentiallyCreateMappingContext(Element, ParserContext, BeanDefinition, String, boolean)} + * instead. + */ + @Deprecated(since = "4.3", forRemoval = true) + public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext, + @Nullable BeanDefinition conversionsDefinition, @Nullable String converterId) { + return potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, converterId, false); + } + + /** + * Create and register the {@link BeanDefinition} for a {@link MongoMappingContext} if not explicitly referenced by a + * given {@literal mapping-context-ref} {@link Element#getAttribute(String) attribute}. + * + * @return the mapping context bean name. 
+ */ public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext, - BeanDefinition conversionsDefinition, String converterId) { + @Nullable BeanDefinition conversionsDefinition, @Nullable String converterId, boolean autoIndexCreation) { String ctxRef = element.getAttribute("mapping-context-ref"); @@ -211,7 +238,7 @@ public static String potentiallyCreateMappingContext(Element element, ParserCont BeanDefinitionBuilder mappingContextBuilder = BeanDefinitionBuilder .genericBeanDefinition(MongoMappingContext.class); - Set classesToAdd = getInititalEntityClasses(element); + Set classesToAdd = getInitialEntityClasses(element); if (classesToAdd != null) { mappingContextBuilder.addPropertyValue("initialEntitySet", classesToAdd); @@ -225,6 +252,8 @@ public static String potentiallyCreateMappingContext(Element element, ParserCont mappingContextBuilder.addPropertyValue("simpleTypeHolder", simpleTypesDefinition); } + mappingContextBuilder.addPropertyValue("autoIndexCreation", autoIndexCreation); + parseFieldNamingStrategy(element, parserContext.getReaderContext(), mappingContextBuilder); ctxRef = converterId == null || DEFAULT_CONVERTER_BEAN_NAME.equals(converterId) ? 
MAPPING_CONTEXT_BEAN_NAME @@ -244,7 +273,7 @@ private static void parseFieldNamingStrategy(Element element, ReaderContext cont && Boolean.parseBoolean(abbreviateFieldNames); if (fieldNamingStrategyReferenced && abbreviationActivated) { - context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured!", + context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured", element); return; } @@ -262,6 +291,7 @@ private static void parseFieldNamingStrategy(Element element, ReaderContext cont } } + @Nullable private BeanDefinition getCustomConversions(Element element, ParserContext parserContext) { List customConvertersElements = DomUtils.getChildElementsByTagName(element, "custom-converters"); @@ -269,10 +299,10 @@ private BeanDefinition getCustomConversions(Element element, ParserContext parse if (customConvertersElements.size() == 1) { Element customerConvertersElement = customConvertersElements.get(0); - ManagedList converterBeans = new ManagedList(); + ManagedList converterBeans = new ManagedList<>(); List converterElements = DomUtils.getChildElementsByTagName(customerConvertersElement, "converter"); - if (converterElements != null) { + if (!ObjectUtils.isEmpty(converterElements)) { for (Element listenerElement : converterElements) { converterBeans.add(parseConverter(listenerElement, parserContext)); } @@ -285,9 +315,7 @@ private BeanDefinition getCustomConversions(Element element, ParserContext parse provider.addExcludeFilter(new NegatingFilter(new AssignableTypeFilter(Converter.class), new AssignableTypeFilter(GenericConverter.class))); - for (BeanDefinition candidate : provider.findCandidateComponents(packageToScan)) { - converterBeans.add(candidate); - } + converterBeans.addAll(provider.findCandidateComponents(packageToScan)); } BeanDefinitionBuilder conversionsBuilder = BeanDefinitionBuilder.rootBeanDefinition(MongoCustomConversions.class); @@ -304,7 +332,8 @@ private 
BeanDefinition getCustomConversions(Element element, ParserContext parse return null; } - private static Set getInititalEntityClasses(Element element) { + @Nullable + private static Set getInitialEntityClasses(Element element) { String basePackage = element.getAttribute(BASE_PACKAGE); @@ -317,7 +346,7 @@ private static Set getInititalEntityClasses(Element element) { componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class)); componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class)); - Set classes = new ManagedSet(); + Set classes = new ManagedSet<>(); for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) { classes.add(candidate.getBeanClassName()); } @@ -325,6 +354,7 @@ private static Set getInititalEntityClasses(Element element) { return classes; } + @Nullable public BeanMetadataElement parseConverter(Element element, ParserContext parserContext) { String converterRef = element.getAttribute("ref"); @@ -343,20 +373,6 @@ public BeanMetadataElement parseConverter(Element element, ParserContext parserC return null; } - public static String createIsNewStrategyFactoryBeanDefinition(String mappingContextRef, ParserContext context, - Element element) { - - BeanDefinitionBuilder mappingContextStrategyFactoryBuilder = BeanDefinitionBuilder - .rootBeanDefinition(MappingContextIsNewStrategyFactory.class); - mappingContextStrategyFactoryBuilder.addConstructorArgReference(mappingContextRef); - - BeanComponentDefinitionBuilder builder = new BeanComponentDefinitionBuilder(element, context); - context.registerBeanComponent( - builder.getComponent(mappingContextStrategyFactoryBuilder, IS_NEW_STRATEGY_FACTORY_BEAN_NAME)); - - return IS_NEW_STRATEGY_FACTORY_BEAN_NAME; - } - /** * {@link TypeFilter} that returns {@literal false} in case any of the given delegates matches. * @@ -375,13 +391,9 @@ public NegatingFilter(TypeFilter... 
filters) { Assert.notNull(filters, "TypeFilters must not be null"); - this.delegates = new HashSet(Arrays.asList(filters)); + this.delegates = Set.of(filters); } - /* - * (non-Javadoc) - * @see org.springframework.core.type.filter.TypeFilter#match(org.springframework.core.type.classreading.MetadataReader, org.springframework.core.type.classreading.MetadataReaderFactory) - */ public boolean match(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory) throws IOException { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java index 991ffe0c44..4e05fe6c39 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,6 +18,7 @@ import static org.springframework.data.config.ParsingUtils.*; import static org.springframework.data.mongodb.config.BeanNames.*; +import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.beans.factory.support.RootBeanDefinition; @@ -26,40 +27,36 @@ import org.springframework.beans.factory.xml.ParserContext; import org.springframework.data.auditing.config.IsNewAwareAuditingHandlerBeanDefinitionParser; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener; +import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; import org.springframework.util.StringUtils; + import org.w3c.dom.Element; /** - * {@link BeanDefinitionParser} to register a {@link AuditingEventListener} to transparently set auditing information on - * an entity. + * {@link BeanDefinitionParser} to register a {@link AuditingEntityCallback} to transparently set auditing information + * on an entity. 
* * @author Oliver Gierke + * @author Mark Paluch */ public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element) - */ + private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono", + MongoAuditingRegistrar.class.getClassLoader()); + @Override protected Class getBeanClass(Element element) { - return AuditingEventListener.class; + return AuditingEntityCallback.class; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#shouldGenerateId() - */ @Override protected boolean shouldGenerateId() { return true; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) - */ @Override protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) { @@ -80,7 +77,24 @@ protected void doParse(Element element, ParserContext parserContext, BeanDefinit mappingContextRef); parser.parse(element, parserContext); - builder.addConstructorArgValue(getObjectFactoryBeanDefinition(parser.getResolvedBeanName(), - parserContext.extractSource(element))); + AbstractBeanDefinition isNewAwareAuditingHandler = getObjectFactoryBeanDefinition(parser.getResolvedBeanName(), + parserContext.extractSource(element)); + builder.addConstructorArgValue(isNewAwareAuditingHandler); + + if (PROJECT_REACTOR_AVAILABLE) { + registerReactiveAuditingEntityCallback(parserContext.getRegistry(), isNewAwareAuditingHandler, + parserContext.extractSource(element)); + } + } + + private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry, + AbstractBeanDefinition isNewAwareAuditingHandler, @Nullable Object source) { 
+ + BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class); + + builder.addConstructorArgValue(isNewAwareAuditingHandler); + builder.getRawBeanDefinition().setSource(source); + + registry.registerBeanDefinition(ReactiveAuditingEntityCallback.class.getName(), builder.getBeanDefinition()); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java index 4ad7944acc..37e509a38a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,22 +17,16 @@ import java.lang.annotation.Annotation; -import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; -import org.springframework.core.type.AnnotationMetadata; +import org.springframework.core.Ordered; import org.springframework.data.auditing.IsNewAwareAuditingHandler; import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport; import org.springframework.data.auditing.config.AuditingConfiguration; import org.springframework.data.config.ParsingUtils; -import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mongodb.core.convert.MappingMongoConverter; -import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener; +import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback; import org.springframework.util.Assert; /** @@ -40,123 +34,55 @@ * * @author Thomas Darimont * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl */ -class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport { +class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport implements Ordered { - /* - * (non-Javadoc) - * @see 
org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation() - */ @Override protected Class getAnnotation() { return EnableMongoAuditing.class; } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName() - */ @Override protected String getAuditingHandlerBeanName() { return "mongoAuditingHandler"; } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerBeanDefinitions(org.springframework.core.type.AnnotationMetadata, org.springframework.beans.factory.support.BeanDefinitionRegistry) - */ @Override - public void registerBeanDefinitions(AnnotationMetadata annotationMetadata, BeanDefinitionRegistry registry) { - - Assert.notNull(annotationMetadata, "AnnotationMetadata must not be null!"); - Assert.notNull(registry, "BeanDefinitionRegistry must not be null!"); + protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration, + BeanDefinitionRegistry registry) { - super.registerBeanDefinitions(annotationMetadata, registry); + builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext"); } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration) - */ @Override protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) { - Assert.notNull(configuration, "AuditingConfiguration must not be null!"); - - BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class); + Assert.notNull(configuration, "AuditingConfiguration must not be null"); - BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(MongoMappingContextLookup.class); - 
definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR); - - builder.addConstructorArgValue(definition.getBeanDefinition()); - return configureDefaultAuditHandlerAttributes(configuration, builder); + return configureDefaultAuditHandlerAttributes(configuration, + BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry) - */ @Override protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition, BeanDefinitionRegistry registry) { - Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!"); - Assert.notNull(registry, "BeanDefinitionRegistry must not be null!"); + Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null"); + Assert.notNull(registry, "BeanDefinitionRegistry must not be null"); BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder - .rootBeanDefinition(AuditingEventListener.class); + .rootBeanDefinition(AuditingEntityCallback.class); listenerBeanDefinitionBuilder .addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry)); registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(), - AuditingEventListener.class.getName(), registry); + AuditingEntityCallback.class.getName(), registry); } - /** - * Simple helper to be able to wire the {@link MappingContext} from a {@link MappingMongoConverter} bean available in - * the application context. 
- * - * @author Oliver Gierke - */ - static class MongoMappingContextLookup - implements FactoryBean, MongoPersistentProperty>> { - - private final MappingMongoConverter converter; - - /** - * Creates a new {@link MongoMappingContextLookup} for the given {@link MappingMongoConverter}. - * - * @param converter must not be {@literal null}. - */ - public MongoMappingContextLookup(MappingMongoConverter converter) { - this.converter = converter; - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObject() - */ - @Override - public MappingContext, MongoPersistentProperty> getObject() throws Exception { - return converter.getMappingContext(); - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - @Override - public Class getObjectType() { - return MappingContext.class; - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#isSingleton() - */ - @Override - public boolean isSingleton() { - return true; - } + @Override + public int getOrder() { + return Ordered.LOWEST_PRECEDENCE; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java index 5ae3725354..501c00b9d6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -35,10 +35,6 @@ */ public class MongoClientParser implements BeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ public BeanDefinition parse(Element element, ParserContext parserContext) { Object source = parserContext.extractSource(element); @@ -50,10 +46,11 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { ParsingUtils.setPropertyValue(builder, element, "port", "port"); ParsingUtils.setPropertyValue(builder, element, "host", "host"); - ParsingUtils.setPropertyValue(builder, element, "credentials", "credentials"); + ParsingUtils.setPropertyValue(builder, element, "credential", "credential"); + ParsingUtils.setPropertyValue(builder, element, "replica-set", "replicaSet"); + ParsingUtils.setPropertyValue(builder, element, "connection-string", "connectionString"); - MongoParsingUtils.parseMongoClientOptions(element, builder); - MongoParsingUtils.parseReplicaSet(element, builder); + MongoParsingUtils.parseMongoClientSettings(element, builder); String defaultedId = StringUtils.hasText(id) ? 
id : BeanNames.MONGO_BEAN_NAME; @@ -62,22 +59,34 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { BeanComponentDefinition mongoComponent = helper.getComponent(builder, defaultedId); parserContext.registerBeanComponent(mongoComponent); - BeanComponentDefinition serverAddressPropertyEditor = helper.getComponent(MongoParsingUtils - .getServerAddressPropertyEditorBuilder()); + BeanComponentDefinition connectionStringPropertyEditor = helper + .getComponent(MongoParsingUtils.getConnectionStringPropertyEditorBuilder()); + parserContext.registerBeanComponent(connectionStringPropertyEditor); + + BeanComponentDefinition serverAddressPropertyEditor = helper + .getComponent(MongoParsingUtils.getServerAddressPropertyEditorBuilder()); parserContext.registerBeanComponent(serverAddressPropertyEditor); - BeanComponentDefinition writeConcernEditor = helper.getComponent(MongoParsingUtils - .getWriteConcernPropertyEditorBuilder()); + BeanComponentDefinition writeConcernEditor = helper + .getComponent(MongoParsingUtils.getWriteConcernPropertyEditorBuilder()); parserContext.registerBeanComponent(writeConcernEditor); - BeanComponentDefinition readPreferenceEditor = helper.getComponent(MongoParsingUtils - .getReadPreferencePropertyEditorBuilder()); + BeanComponentDefinition readConcernEditor = helper + .getComponent(MongoParsingUtils.getReadConcernPropertyEditorBuilder()); + parserContext.registerBeanComponent(readConcernEditor); + + BeanComponentDefinition readPreferenceEditor = helper + .getComponent(MongoParsingUtils.getReadPreferencePropertyEditorBuilder()); parserContext.registerBeanComponent(readPreferenceEditor); - BeanComponentDefinition credentialsEditor = helper.getComponent(MongoParsingUtils - .getMongoCredentialPropertyEditor()); + BeanComponentDefinition credentialsEditor = helper + .getComponent(MongoParsingUtils.getMongoCredentialPropertyEditor()); parserContext.registerBeanComponent(credentialsEditor); + BeanComponentDefinition 
uuidRepresentationEditor = helper + .getComponent(MongoParsingUtils.getUUidRepresentationEditorBuilder()); + parserContext.registerBeanComponent(uuidRepresentationEditor); + parserContext.popAndRegisterContainingComponent(); return mongoComponent.getBeanDefinition(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java index 28c1821d61..0594f6176c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,32 @@ */ package org.springframework.data.mongodb.config; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Set; +import org.bson.UuidRepresentation; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider; import org.springframework.core.convert.converter.Converter; import org.springframework.core.type.filter.AnnotationTypeFilter; -import org.springframework.data.annotation.Persistent; import org.springframework.data.convert.CustomConversions; -import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory; -import org.springframework.data.mapping.context.PersistentEntities; import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy; import org.springframework.data.mapping.model.FieldNamingStrategy; import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; +import org.springframework.data.mongodb.MongoManagedTypes; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.support.CachingIsNewStrategyFactory; -import org.springframework.data.support.IsNewStrategyFactory; import org.springframework.util.ClassUtils; import 
org.springframework.util.StringUtils; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; + /** * Base class for Spring Data MongoDB to be extended for JavaConfiguration usage. * @@ -78,42 +77,56 @@ protected Collection getMappingBasePackages() { * * @see #getMappingBasePackages() * @return - * @throws ClassNotFoundException */ @Bean - public MongoMappingContext mongoMappingContext() throws ClassNotFoundException { + public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions, + MongoManagedTypes mongoManagedTypes) { MongoMappingContext mappingContext = new MongoMappingContext(); - mappingContext.setInitialEntitySet(getInitialEntitySet()); - mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder()); + mappingContext.setManagedTypes(mongoManagedTypes); + mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder()); mappingContext.setFieldNamingStrategy(fieldNamingStrategy()); + mappingContext.setAutoIndexCreation(autoIndexCreation()); return mappingContext; } /** - * Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}. - * - * @return + * @return new instance of {@link MongoManagedTypes}. * @throws ClassNotFoundException + * @since 4.0 */ @Bean - public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException { - - return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory( - new PersistentEntities(Arrays.> asList(new MappingContext[] { mongoMappingContext() })))); + public MongoManagedTypes mongoManagedTypes() throws ClassNotFoundException { + return MongoManagedTypes.fromIterable(getInitialEntitySet()); } /** * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These - * {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and - * {@link #mongoMappingContext()}. 
Returns an empty {@link MongoCustomConversions} instance by default. + * {@link CustomConversions} will be registered with the + * {@link org.springframework.data.mongodb.core.convert.MappingMongoConverter} and {@link MongoMappingContext}. + * Returns an empty {@link MongoCustomConversions} instance by default. + *

+ * NOTE: Use {@link #configureConverters(MongoConverterConfigurationAdapter)} to configure MongoDB + * native simple types and register custom {@link Converter converters}. * * @return must not be {@literal null}. */ @Bean - public CustomConversions customConversions() { - return new MongoCustomConversions(Collections.emptyList()); + public MongoCustomConversions customConversions() { + return MongoCustomConversions.create(this::configureConverters); + } + + /** + * Configuration hook for {@link MongoCustomConversions} creation. + * + * @param converterConfigurationAdapter never {@literal null}. + * @since 2.3 + * @see MongoConverterConfigurationAdapter#useNativeDriverJavaTimeCodecs() + * @see MongoConverterConfigurationAdapter#useSpringDataJavaTimeCodecs() + */ + protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) { + } /** @@ -136,8 +149,7 @@ protected Set> getInitialEntitySet() throws ClassNotFoundException { } /** - * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and - * {@link Persistent}. + * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document}. * * @param basePackage must not be {@literal null}. 
* @return @@ -157,7 +169,6 @@ protected Set> scanForEntities(String basePackage) throws ClassNotFound ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider( false); componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class)); - componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class)); for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) { @@ -171,8 +182,7 @@ protected Set> scanForEntities(String basePackage) throws ClassNotFound /** * Configures whether to abbreviate field names for domain objects by configuring a - * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced - * customization needs, consider overriding {@link #mappingMongoConverter()}. + * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. * * @return */ @@ -190,4 +200,41 @@ protected FieldNamingStrategy fieldNamingStrategy() { return abbreviateFieldNames() ? new CamelCaseAbbreviatingFieldNamingStrategy() : PropertyNameFieldNamingStrategy.INSTANCE; } + + /** + * Configure whether to automatically create indices for domain types by deriving the + * {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not. + * + * @return {@literal false} by default.
+ * INFO: As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}. + * @since 2.2 + */ + protected boolean autoIndexCreation() { + return false; + } + + /** + * Return the {@link MongoClientSettings} used to create the actual {@literal MongoClient}.
+ * Override either this method, or use {@link #configureClientSettings(Builder)} to alter the setup. + * + * @return never {@literal null}. + * @since 3.0 + */ + protected MongoClientSettings mongoClientSettings() { + + MongoClientSettings.Builder builder = MongoClientSettings.builder(); + builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY); + configureClientSettings(builder); + return builder.build(); + } + + /** + * Configure {@link MongoClientSettings} via its {@link Builder} API. + * + * @param builder never {@literal null}. + * @since 3.0 + */ + protected void configureClientSettings(MongoClientSettings.Builder builder) { + // customization hook + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java index 8e8214f858..b8f23a35af 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,8 +16,9 @@ package org.springframework.data.mongodb.config; import java.beans.PropertyEditorSupport; -import java.io.UnsupportedEncodingException; +import java.lang.reflect.Method; import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -26,6 +27,7 @@ import java.util.regex.Pattern; import org.springframework.lang.Nullable; +import org.springframework.util.ReflectionUtils; import org.springframework.util.StringUtils; import com.mongodb.MongoCredential; @@ -35,6 +37,8 @@ * * @author Christoph Strobl * @author Oliver Gierke + * @author Stephen Tyler Conrad + * @author Mark Paluch * @since 1.7 */ public class MongoCredentialPropertyEditor extends PropertyEditorSupport { @@ -47,10 +51,6 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport { private static final String OPTIONS_DELIMITER = "?"; private static final String OPTION_VALUE_DELIMITER = "&"; - /* - * (non-Javadoc) - * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String) - */ @Override public void setAsText(@Nullable String text) throws IllegalArgumentException { @@ -76,12 +76,23 @@ public void setAsText(@Nullable String text) throws IllegalArgumentException { verifyUserNamePresent(userNameAndPassword); credentials.add(MongoCredential.createGSSAPICredential(userNameAndPassword[0])); - } else if (MongoCredential.MONGODB_CR_MECHANISM.equals(authMechanism)) { + } else if ("MONGODB-CR".equals(authMechanism)) { verifyUsernameAndPasswordPresent(userNameAndPassword); verifyDatabasePresent(database); - credentials.add(MongoCredential.createMongoCRCredential(userNameAndPassword[0], database, - 
userNameAndPassword[1].toCharArray())); + + Method createCRCredentialMethod = ReflectionUtils.findMethod(MongoCredential.class, + "createMongoCRCredential", String.class, String.class, char[].class); + + if (createCRCredentialMethod == null) { + throw new IllegalArgumentException("MONGODB-CR is no longer supported."); + } + + MongoCredential credential = MongoCredential.class + .cast(ReflectionUtils.invokeMethod(createCRCredentialMethod, null, userNameAndPassword[0], database, + userNameAndPassword[1].toCharArray())); + credentials.add(credential); + } else if (MongoCredential.MONGODB_X509_MECHANISM.equals(authMechanism)) { verifyUserNamePresent(userNameAndPassword); @@ -98,9 +109,15 @@ public void setAsText(@Nullable String text) throws IllegalArgumentException { verifyDatabasePresent(database); credentials.add(MongoCredential.createScramSha1Credential(userNameAndPassword[0], database, userNameAndPassword[1].toCharArray())); + } else if (MongoCredential.SCRAM_SHA_256_MECHANISM.equals(authMechanism)) { + + verifyUsernameAndPasswordPresent(userNameAndPassword); + verifyDatabasePresent(database); + credentials.add(MongoCredential.createScramSha256Credential(userNameAndPassword[0], database, + userNameAndPassword[1].toCharArray())); } else { throw new IllegalArgumentException( - String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism)); + String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'", authMechanism)); } } } else { @@ -164,7 +181,7 @@ private static String extractDB(String text) { private static Properties extractOptions(String text) { int optionsSeparationIndex = text.lastIndexOf(OPTIONS_DELIMITER); - int dbSeparationIndex = text.lastIndexOf(OPTIONS_DELIMITER); + int dbSeparationIndex = text.lastIndexOf(DATABASE_DELIMITER); if (optionsSeparationIndex == -1 || dbSeparationIndex > optionsSeparationIndex) { return new Properties(); @@ -173,7 +190,13 @@ private static Properties extractOptions(String 
text) { Properties properties = new Properties(); for (String option : text.substring(optionsSeparationIndex + 1).split(OPTION_VALUE_DELIMITER)) { + String[] optionArgs = option.split("="); + + if (optionArgs.length == 1) { + throw new IllegalArgumentException(String.format("Query parameter '%s' has no value", optionArgs[0])); + } + properties.put(optionArgs[0], optionArgs[1]); } @@ -186,29 +209,25 @@ private static void verifyUsernameAndPasswordPresent(String[] source) { if (source.length != 2) { throw new IllegalArgumentException( - "Credentials need to specify username and password like in 'username:password@database'!"); + "Credentials need to specify username and password like in 'username:password@database'"); } } private static void verifyDatabasePresent(String source) { if (!StringUtils.hasText(source)) { - throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'!"); + throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'"); } } private static void verifyUserNamePresent(String[] source) { if (source.length == 0 || !StringUtils.hasText(source[0])) { - throw new IllegalArgumentException("Credentials need to specify username!"); + throw new IllegalArgumentException("Credentials need to specify username"); } } private static String decodeParameter(String it) { - try { - return URLDecoder.decode(it, "UTF-8"); - } catch (UnsupportedEncodingException e) { - throw new IllegalArgumentException("o_O UTF-8 not supported!", e); - } + return URLDecoder.decode(it, StandardCharsets.UTF_8); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java index e85a681a3d..2e733cc79f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,8 +18,6 @@ import static org.springframework.data.config.ParsingUtils.*; import static org.springframework.data.mongodb.config.MongoParsingUtils.*; -import java.util.Collections; -import java.util.HashSet; import java.util.Set; import org.springframework.beans.factory.BeanDefinitionStoreException; @@ -32,14 +30,12 @@ import org.springframework.beans.factory.xml.ParserContext; import org.springframework.data.config.BeanComponentDefinitionBuilder; import org.springframework.data.mongodb.core.MongoClientFactoryBean; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; import org.springframework.lang.Nullable; import org.springframework.util.StringUtils; import org.w3c.dom.Element; -import com.mongodb.Mongo; -import com.mongodb.MongoClientURI; -import com.mongodb.MongoURI; +import com.mongodb.ConnectionString; /** * {@link BeanDefinitionParser} to parse {@code db-factory} elements into {@link BeanDefinition}s. 
@@ -53,21 +49,8 @@ */ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser { - private static final Set MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES; + private static final Set MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES = Set.of("id", "write-concern"); - static { - - Set mongoUriAllowedAdditionalAttributes = new HashSet(); - mongoUriAllowedAdditionalAttributes.add("id"); - mongoUriAllowedAdditionalAttributes.add("write-concern"); - - MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES = Collections.unmodifiableSet(mongoUriAllowedAdditionalAttributes); - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext) throws BeanDefinitionStoreException { @@ -76,18 +59,15 @@ protected String resolveId(Element element, AbstractBeanDefinition definition, P return StringUtils.hasText(id) ? 
id : BeanNames.DB_FACTORY_BEAN_NAME; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { // Common setup - BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder.genericBeanDefinition(SimpleMongoDbFactory.class); + BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder + .genericBeanDefinition(SimpleMongoClientDatabaseFactory.class); setPropertyValue(dbFactoryBuilder, element, "write-concern", "writeConcern"); - BeanDefinition mongoUri = getMongoUri(element, parserContext); + BeanDefinition mongoUri = getConnectionString(element, parserContext); if (mongoUri != null) { @@ -97,7 +77,8 @@ protected AbstractBeanDefinition parseInternal(Element element, ParserContext pa BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext); - String mongoRef = element.getAttribute("mongo-ref"); + String mongoRef = element.getAttribute("mongo-client-ref"); + String dbname = element.getAttribute("dbname"); // Defaulting @@ -119,8 +100,8 @@ protected AbstractBeanDefinition parseInternal(Element element, ParserContext pa } /** - * Registers a default {@link BeanDefinition} of a {@link Mongo} instance and returns the name under which the - * {@link Mongo} instance was registered under. + * Registers a default {@link BeanDefinition} of a {@link com.mongodb.client.MongoClient} instance and returns the + * name under which the {@link com.mongodb.client.MongoClient} instance was registered. * * @param element must not be {@literal null}. * @param parserContext must not be {@literal null}.
@@ -136,8 +117,7 @@ private BeanDefinition registerMongoBeanDefinition(Element element, ParserContex } /** - * Creates a {@link BeanDefinition} for a {@link MongoURI} or {@link MongoClientURI} depending on configured - * attributes.
+ * Creates a {@link BeanDefinition} for a {@link ConnectionString} depending on configured attributes.
* Errors when configured element contains {@literal uri} or {@literal client-uri} along with other attributes except * {@literal write-concern} and/or {@literal id}. * @@ -146,11 +126,19 @@ private BeanDefinition registerMongoBeanDefinition(Element element, ParserContex * @return {@literal null} in case no client-/uri defined. */ @Nullable - private BeanDefinition getMongoUri(Element element, ParserContext parserContext) { + private BeanDefinition getConnectionString(Element element, ParserContext parserContext) { - boolean hasClientUri = element.hasAttribute("client-uri"); + String type = null; - if (!hasClientUri && !element.hasAttribute("uri")) { + if (element.hasAttribute("client-uri")) { + type = "client-uri"; + } else if (element.hasAttribute("connection-string")) { + type = "connection-string"; + } else if (element.hasAttribute("uri")) { + type = "uri"; + } + + if (!StringUtils.hasText(type)) { return null; } @@ -164,16 +152,12 @@ private BeanDefinition getMongoUri(Element element, ParserContext parserContext) if (element.getAttributes().getLength() > allowedAttributesCount) { - parserContext.getReaderContext().error( - "Configure either " + (hasClientUri ? "Mongo Client URI" : "Mongo URI") + " or details individually!", + parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually", parserContext.extractSource(element)); } - Class type = MongoClientURI.class; - String uri = hasClientUri ? 
element.getAttribute("client-uri") : element.getAttribute("uri"); - - BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(type); - builder.addConstructorArgValue(uri); + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(ConnectionString.class); + builder.addConstructorArgValue(element.getAttribute(type)); return builder.getBeanDefinition(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java index b2aa54d985..af1ffbbb02 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -32,7 +32,9 @@ * @author John Brisbin * @author Oliver Gierke * @author Christoph Strobl + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) public class MongoJmxParser implements BeanDefinitionParser { public BeanDefinition parse(Element element, ParserContext parserContext) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java index fb2ab93ccc..47519ca615 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -26,10 +26,6 @@ */ public class MongoNamespaceHandler extends NamespaceHandlerSupport { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.NamespaceHandler#init() - */ public void init() { registerBeanDefinitionParser("mapping-converter", new MappingMongoConverterParser()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java index 5caf989a70..95b56b58f3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,9 +22,12 @@ import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.CustomEditorConfigurer; import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionValidationException; import org.springframework.beans.factory.support.ManagedMap; import org.springframework.beans.factory.xml.BeanDefinitionParser; -import org.springframework.data.mongodb.core.MongoClientOptionsFactoryBean; +import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean; +import org.springframework.data.mongodb.core.MongoServerApiFactoryBean; +import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; import org.w3c.dom.Element; @@ -35,66 +38,99 @@ * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch */ -@SuppressWarnings("deprecation") abstract class MongoParsingUtils { private MongoParsingUtils() {} /** - * Parses the mongo replica-set element. - * - * @param parserContext the parser context - * @param element the mongo element - * @param mongoBuilder the bean definition builder to populate - * @return - */ - static void parseReplicaSet(Element element, BeanDefinitionBuilder mongoBuilder) { - setPropertyValue(mongoBuilder, element, "replica-set", "replicaSetSeeds"); - } - - /** - * Parses the {@code mongo:client-options} sub-element. Populates the given attribute factory with the proper + * Parses the {@code mongo:client-settings} sub-element. Populates the given attribute factory with the proper * attributes. * - * @param element must not be {@literal null}. 
- * @param mongoClientBuilder must not be {@literal null}. + * @param element + * @param mongoClientBuilder * @return - * @since 1.7 + * @since 3.0 */ - public static boolean parseMongoClientOptions(Element element, BeanDefinitionBuilder mongoClientBuilder) { - - Element optionsElement = DomUtils.getChildElementByTagName(element, "client-options"); + public static boolean parseMongoClientSettings(Element element, BeanDefinitionBuilder mongoClientBuilder) { - if (optionsElement == null) { + Element settingsElement = DomUtils.getChildElementByTagName(element, "client-settings"); + if (settingsElement == null) { return false; } BeanDefinitionBuilder clientOptionsDefBuilder = BeanDefinitionBuilder - .genericBeanDefinition(MongoClientOptionsFactoryBean.class); - - setPropertyValue(clientOptionsDefBuilder, optionsElement, "description", "description"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-connections-per-host", "minConnectionsPerHost"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "connections-per-host", "connectionsPerHost"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "threads-allowed-to-block-for-connection-multiplier", - "threadsAllowedToBlockForConnectionMultiplier"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-wait-time", "maxWaitTime"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-idle-time", "maxConnectionIdleTime"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-life-time", "maxConnectionLifeTime"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "connect-timeout", "connectTimeout"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-timeout", "socketTimeout"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-keep-alive", "socketKeepAlive"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "read-preference", "readPreference"); - 
setPropertyValue(clientOptionsDefBuilder, optionsElement, "write-concern", "writeConcern"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-frequency", "heartbeatFrequency"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-heartbeat-frequency", "minHeartbeatFrequency"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-connect-timeout", "heartbeatConnectTimeout"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-socket-timeout", "heartbeatSocketTimeout"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "ssl", "ssl"); - setPropertyReference(clientOptionsDefBuilder, optionsElement, "ssl-socket-factory-ref", "sslSocketFactory"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "server-selection-timeout", "serverSelectionTimeout"); - - mongoClientBuilder.addPropertyValue("mongoClientOptions", clientOptionsDefBuilder.getBeanDefinition()); + .genericBeanDefinition(MongoClientSettingsFactoryBean.class); + + setPropertyValue(clientOptionsDefBuilder, settingsElement, "application-name", "applicationName"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-preference", "readPreference"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-concern", "readConcern"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "write-concern", "writeConcern"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-reads", "retryReads"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-writes", "retryWrites"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "uuid-representation", "uUidRepresentation"); + + // SocketSettings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-connect-timeout", "socketConnectTimeoutMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-read-timeout", "socketReadTimeoutMS"); + setPropertyValue(clientOptionsDefBuilder, 
settingsElement, "socket-receive-buffer-size", "socketReceiveBufferSize"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-send-buffer-size", "socketSendBufferSize"); + + // Server Settings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-heartbeat-frequency", + "serverHeartbeatFrequencyMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-min-heartbeat-frequency", + "serverMinHeartbeatFrequencyMS"); + + // Cluster Settings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-srv-host", "clusterSrvHost"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-hosts", "clusterHosts"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-connection-mode", "clusterConnectionMode"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-type", "custerRequiredClusterType"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-local-threshold", "clusterLocalThresholdMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-server-selection-timeout", + "clusterServerSelectionTimeoutMS"); + + // Connection Pool Settings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-size", "poolMaxSize"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-min-size", "poolMinSize"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-wait-time", "poolMaxWaitTimeMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-life-time", + "poolMaxConnectionLifeTimeMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-idle-time", + "poolMaxConnectionIdleTimeMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-initial-delay", + "poolMaintenanceInitialDelayMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, 
"connection-pool-maintenance-frequency", + "poolMaintenanceFrequencyMS"); + + // SSL Settings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-enabled", "sslEnabled"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-invalid-host-name-allowed", + "sslInvalidHostNameAllowed"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-provider", "sslProvider"); + + // Field level encryption + setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings"); + + // ServerAPI + if (StringUtils.hasText(settingsElement.getAttribute("server-api-version"))) { + + MongoServerApiFactoryBean serverApiFactoryBean = new MongoServerApiFactoryBean(); + serverApiFactoryBean.setVersion(settingsElement.getAttribute("server-api-version")); + try { + clientOptionsDefBuilder.addPropertyValue("serverApi", serverApiFactoryBean.getObject()); + } catch (Exception exception) { + throw new BeanDefinitionValidationException("Non parsable server-api.", exception); + } + } else { + setPropertyReference(clientOptionsDefBuilder, settingsElement, "server-api-ref", "serverApi"); + } + + // and the rest + + mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition()); return true; } @@ -116,6 +152,24 @@ static BeanDefinitionBuilder getWriteConcernPropertyEditorBuilder() { return builder; } + /** + * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a + * {@link ReadConcernPropertyEditor}. 
+ * + * @return + * @since 3.0 + */ + static BeanDefinitionBuilder getReadConcernPropertyEditorBuilder() { + + Map> customEditors = new ManagedMap<>(); + customEditors.put("com.mongodb.ReadConcern", ReadConcernPropertyEditor.class); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); + builder.addPropertyValue("customEditors", customEditors); + + return builder; + } + /** * One should only register one bean definition but want to have the convenience of using * AbstractSingleBeanDefinitionParser but have the side effect of registering a 'default' property editor with the @@ -123,7 +177,7 @@ static BeanDefinitionBuilder getWriteConcernPropertyEditorBuilder() { */ static BeanDefinitionBuilder getServerAddressPropertyEditorBuilder() { - Map customEditors = new ManagedMap(); + Map customEditors = new ManagedMap<>(); customEditors.put("com.mongodb.ServerAddress[]", "org.springframework.data.mongodb.config.ServerAddressPropertyEditor"); @@ -141,7 +195,7 @@ static BeanDefinitionBuilder getServerAddressPropertyEditorBuilder() { */ static BeanDefinitionBuilder getReadPreferencePropertyEditorBuilder() { - Map> customEditors = new ManagedMap>(); + Map> customEditors = new ManagedMap<>(); customEditors.put("com.mongodb.ReadPreference", ReadPreferencePropertyEditor.class); BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); @@ -167,4 +221,41 @@ static BeanDefinitionBuilder getMongoCredentialPropertyEditor() { return builder; } + + /** + * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a + * {@link ConnectionStringPropertyEditor}. 
+ * + * @return + * @since 3.0 + */ + static BeanDefinitionBuilder getConnectionStringPropertyEditorBuilder() { + + Map> customEditors = new ManagedMap<>(); + customEditors.put("com.mongodb.ConnectionString", ConnectionStringPropertyEditor.class); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); + builder.addPropertyValue("customEditors", customEditors); + + return builder; + } + + /** + * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a + * {@link UUidRepresentationPropertyEditor}. + * + * @return + * @since 3.0 + */ + static BeanDefinitionBuilder getUUidRepresentationEditorBuilder() { + + Map> customEditors = new ManagedMap<>(); + customEditors.put("org.bson.UuidRepresentation", UUidRepresentationPropertyEditor.class); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); + builder.addPropertyValue("customEditors", customEditors); + + return builder; + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java index 96763fd2d9..1e1b11356f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -39,10 +39,6 @@ */ class MongoTemplateParser extends AbstractBeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext) throws BeanDefinitionStoreException { @@ -51,10 +47,6 @@ protected String resolveId(Element element, AbstractBeanDefinition definition, P return StringUtils.hasText(id) ? id : BeanNames.MONGO_TEMPLATE_BEAN_NAME; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java new file mode 100644 index 0000000000..e46701a7f3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java @@ -0,0 +1,53 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import org.springframework.beans.factory.FactoryBean; +import org.springframework.data.mapping.context.PersistentEntities; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; + +/** + * Simple helper to be able to wire the {@link PersistentEntities} from a {@link MappingMongoConverter} bean available + * in the application context. + * + * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl + * @since 3.1 + */ +public class PersistentEntitiesFactoryBean implements FactoryBean { + + private final MappingMongoConverter converter; + + /** + * Creates a new {@link PersistentEntitiesFactoryBean} for the given {@link MappingMongoConverter}. + * + * @param converter must not be {@literal null}. 
+ */ + public PersistentEntitiesFactoryBean(MappingMongoConverter converter) { + this.converter = converter; + } + + @Override + public PersistentEntities getObject() { + return PersistentEntities.of(converter.getMappingContext()); + } + + @Override + public Class getObjectType() { + return PersistentEntities.class; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReactiveMongoAuditingRegistrar.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReactiveMongoAuditingRegistrar.java new file mode 100644 index 0000000000..80cf404434 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReactiveMongoAuditingRegistrar.java @@ -0,0 +1,81 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.config; + +import java.lang.annotation.Annotation; + +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; +import org.springframework.data.auditing.ReactiveIsNewAwareAuditingHandler; +import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport; +import org.springframework.data.auditing.config.AuditingConfiguration; +import org.springframework.data.config.ParsingUtils; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback; +import org.springframework.util.Assert; + +/** + * {@link ImportBeanDefinitionRegistrar} to enable {@link EnableReactiveMongoAuditing} annotation. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 3.1 + */ +class ReactiveMongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport { + + @Override + protected Class getAnnotation() { + return EnableReactiveMongoAuditing.class; + } + + @Override + protected String getAuditingHandlerBeanName() { + return "reactiveMongoAuditingHandler"; + } + + @Override + protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration, + BeanDefinitionRegistry registry) { + builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext"); + } + + @Override + protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) { + + Assert.notNull(configuration, "AuditingConfiguration must not be null"); + + return configureDefaultAuditHandlerAttributes(configuration, + BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class)); + } + + @Override + protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition, + 
BeanDefinitionRegistry registry) { + + Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null"); + Assert.notNull(registry, "BeanDefinitionRegistry must not be null"); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class); + + builder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry)); + builder.getRawBeanDefinition().setSource(auditingHandlerDefinition.getSource()); + + registerInfrastructureBeanWithId(builder.getBeanDefinition(), ReactiveAuditingEntityCallback.class.getName(), + registry); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadConcernPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadConcernPropertyEditor.java new file mode 100644 index 0000000000..60bf126ae7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadConcernPropertyEditor.java @@ -0,0 +1,44 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.config; + +import java.beans.PropertyEditorSupport; + +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadConcernLevel; + +/** + * Parse a {@link String} into a {@link ReadConcern}, resolving well-known concern levels via + * {@link ReadConcernLevel#fromString(String)}. + * + * @author Christoph Strobl + * @since 3.0 + */ +public class ReadConcernPropertyEditor extends PropertyEditorSupport { + + @Override + public void setAsText(@Nullable String readConcernString) { + + if (!StringUtils.hasText(readConcernString)) { + return; + } + + setValue(new ReadConcern(ReadConcernLevel.fromString(readConcernString))); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java index dbbe7aadfd..5ed9b66619 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -29,10 +29,6 @@ */ public class ReadPreferencePropertyEditor extends PropertyEditorSupport { - /* - * (non-Javadoc) - * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String) - */ @Override public void setAsText(@Nullable String readPreferenceString) throws IllegalArgumentException { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java index c2ba9a675d..9c51900902 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,8 +21,8 @@ import java.util.HashSet; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -43,13 +43,9 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport { * A port is a number without a leading 0 at the end of the address that is proceeded by just a single :. */ private static final String HOST_PORT_SPLIT_PATTERN = "(? 2) { - LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source); + if(LOG.isWarnEnabled()) { + LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source)); + } return null; } @@ -105,9 +105,13 @@ private ServerAddress parseServerAddress(String source) { return port == null ? 
new ServerAddress(hostAddress) : new ServerAddress(hostAddress, port); } catch (UnknownHostException e) { - LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]); + if(LOG.isWarnEnabled()) { + LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0])); + } } catch (NumberFormatException e) { - LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]); + if(LOG.isWarnEnabled()) { + LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1])); + } } return null; @@ -121,7 +125,7 @@ private ServerAddress parseServerAddress(String source) { */ private String[] extractHostAddressAndPort(String addressAndPortSource) { - Assert.notNull(addressAndPortSource, "Address and port source must not be null!"); + Assert.notNull(addressAndPortSource, "Address and port source must not be null"); String[] hostAndPort = addressAndPortSource.split(HOST_PORT_SPLIT_PATTERN); String hostAddress = hostAndPort[0]; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java index a3583df058..9f579b8fe9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -26,10 +26,6 @@ */ public class StringToWriteConcernConverter implements Converter { - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ public WriteConcern convert(String source) { WriteConcern writeConcern = WriteConcern.valueOf(source); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/UUidRepresentationPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/UUidRepresentationPropertyEditor.java new file mode 100644 index 0000000000..b777969967 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/UUidRepresentationPropertyEditor.java @@ -0,0 +1,41 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import java.beans.PropertyEditorSupport; + +import org.bson.UuidRepresentation; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Parse a {@link String} to a {@link UuidRepresentation}. 
+ * + * @author Christoph Strobl + * @since 3.0 + */ +public class UUidRepresentationPropertyEditor extends PropertyEditorSupport { + + @Override + public void setAsText(@Nullable String value) { + + if (!StringUtils.hasText(value)) { + return; + } + + setValue(UuidRepresentation.valueOf(value)); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java index f8c26ece38..ee0d09e555 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -34,7 +34,7 @@ public class WriteConcernPropertyEditor extends PropertyEditorSupport { /** - * Parse a string to a List + * Parse a string to a {@link WriteConcern}. 
*/ @Override public void setAsText(@Nullable String writeConcernString) { @@ -51,6 +51,5 @@ public void setAsText(@Nullable String writeConcernString) { // pass on the string to the constructor setValue(new WriteConcern(writeConcernString)); } - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java new file mode 100644 index 0000000000..a00d95a9ad --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java @@ -0,0 +1,102 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.List; + +import org.bson.Document; + +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions.DomainTypeMapping; +import org.springframework.data.mongodb.core.aggregation.FieldLookupPolicy; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; + +/** + * Utility methods to map {@link org.springframework.data.mongodb.core.aggregation.Aggregation} pipeline definitions and + * create type-bound {@link AggregationOperationContext}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +class AggregationUtil { + + final QueryMapper queryMapper; + final MappingContext, MongoPersistentProperty> mappingContext; + final Lazy untypedMappingContext; + + AggregationUtil(QueryMapper queryMapper, + MappingContext, MongoPersistentProperty> mappingContext) { + + this.queryMapper = queryMapper; + this.mappingContext = mappingContext; + this.untypedMappingContext = Lazy.of(() -> new TypeBasedAggregationOperationContext(Object.class, mappingContext, + queryMapper, FieldLookupPolicy.relaxed())); + } + + AggregationOperationContext createAggregationContext(Aggregation aggregation, @Nullable Class inputType) { + + DomainTypeMapping domainTypeMapping = aggregation.getOptions().getDomainTypeMapping(); + + if (domainTypeMapping == DomainTypeMapping.NONE) { + return Aggregation.DEFAULT_CONTEXT; + } + + FieldLookupPolicy lookupPolicy = domainTypeMapping == DomainTypeMapping.STRICT + && !aggregation.getPipeline().containsUnionWith() ? FieldLookupPolicy.strict() : FieldLookupPolicy.relaxed(); + + if (aggregation instanceof TypedAggregation ta) { + return new TypeBasedAggregationOperationContext(ta.getInputType(), mappingContext, queryMapper, lookupPolicy); + } + + if (inputType == null) { + return untypedMappingContext.get(); + } + + return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper, lookupPolicy); + } + + /** + * Extract and map the aggregation pipeline into a {@link List} of {@link Document}. + * + * @param aggregation + * @param context + * @return + */ + List createPipeline(Aggregation aggregation, AggregationOperationContext context) { + return aggregation.toPipeline(context); + } + + /** + * Extract the command and map the aggregation pipeline. 
+ * + * @param aggregation + * @param context + * @return + */ + Document createCommand(String collection, Aggregation aggregation, AggregationOperationContext context) { + return aggregation.toDocument(collection, context); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java index 179e1475bb..4820c2355c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,20 +17,36 @@ import java.util.List; +import org.springframework.data.domain.Sort; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.util.Pair; import com.mongodb.bulk.BulkWriteResult; /** - * Bulk operations for insert/update/remove actions on a collection. These bulks operation are available since MongoDB - * 2.6 and make use of low level bulk commands on the protocol level. 
This interface defines a fluent API to add - * multiple single operations or list of similar operations in sequence which can then eventually be executed by calling + * Bulk operations for insert/update/remove actions on a collection. Bulk operations are available since MongoDB 2.6 and + * make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple single + * operations or list of similar operations in sequence which can then eventually be executed by calling * {@link #execute()}. * + *

+ * MongoOperations ops = …;
+ *
+ * ops.bulkOps(BulkMode.UNORDERED, Person.class)
+ * 				.insert(newPerson)
+ * 				.updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
+ * 				.execute();
+ * 
+ *

+ * Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations + * that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and + * the version field remains not populated. + * * @author Tobias Trelle * @author Oliver Gierke + * @author Minsu Kim * @since 1.9 */ public interface BulkOperations { @@ -45,7 +61,7 @@ enum BulkMode { /** Perform bulk operations in parallel. Processing will continue on errors. */ UNORDERED - }; + } /** * Add a single insert to the bulk operation. @@ -66,11 +82,25 @@ enum BulkMode { /** * Add a single update to the bulk operation. For the update request, only the first matching document is updated. * - * @param query update criteria, must not be {@literal null}. + * @param query update criteria, must not be {@literal null}. The {@link Query} may define a {@link Query#with(Sort) + * sort order} to influence which document to update when potentially matching multiple candidates. + * @param update {@link Update} operation to perform, must not be {@literal null}. + * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + */ + default BulkOperations updateOne(Query query, Update update) { + return updateOne(query, (UpdateDefinition) update); + } + + /** + * Add a single update to the bulk operation. For the update request, only the first matching document is updated. + * + * @param query update criteria, must not be {@literal null}. The {@link Query} may define a {@link Query#with(Sort) + * sort order} to influence which document to update when potentially matching multiple candidates. * @param update {@link Update} operation to perform, must not be {@literal null}. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. 
+ * @since 4.1 */ - BulkOperations updateOne(Query query, Update update); + BulkOperations updateOne(Query query, UpdateDefinition update); /** * Add a list of updates to the bulk operation. For each update request, only the first matching document is updated. @@ -78,7 +108,18 @@ enum BulkMode { * @param updates Update operations to perform. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ - BulkOperations updateOne(List> updates); + BulkOperations updateOne(List> updates); + + /** + * Add a single update to the bulk operation. For the update request, all matching documents are updated. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + */ + default BulkOperations updateMulti(Query query, Update update) { + return updateMulti(query, (UpdateDefinition) update); + } /** * Add a single update to the bulk operation. For the update request, all matching documents are updated. @@ -86,17 +127,17 @@ enum BulkMode { * @param query Update criteria. * @param update Update operation to perform. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + * @since 4.1 */ - BulkOperations updateMulti(Query query, Update update); + BulkOperations updateMulti(Query query, UpdateDefinition update); /** * Add a list of updates to the bulk operation. For each update request, all matching documents are updated. * * @param updates Update operations to perform. - * @return The bulk operation. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ - BulkOperations updateMulti(List> updates); + BulkOperations updateMulti(List> updates); /** * Add a single upsert to the bulk operation. 
An upsert is an update if the set of matching documents is not empty, @@ -104,17 +145,28 @@ enum BulkMode { * * @param query Update criteria. * @param update Update operation to perform. - * @return The bulk operation. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ - BulkOperations upsert(Query query, Update update); + default BulkOperations upsert(Query query, Update update) { + return upsert(query, (UpdateDefinition) update); + } + + /** + * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty, + * else an insert. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + * @since 4.1 + */ + BulkOperations upsert(Query query, UpdateDefinition update); /** * Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty, * else an insert. * * @param updates Updates/insert operations to perform. - * @return The bulk operation. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ BulkOperations upsert(List> updates); @@ -135,6 +187,31 @@ enum BulkMode { */ BulkOperations remove(List removes); + /** + * Add a single replace operation to the bulk operation. + * + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}. 
+ * @since 2.2 + */ + default BulkOperations replaceOne(Query query, Object replacement) { + return replaceOne(query, replacement, FindAndReplaceOptions.empty()); + } + + /** + * Add a single replace operation to the bulk operation. + * + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. + * @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}. + * @since 2.2 + */ + BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options); + /** * Execute all bulk operations using the default write concern. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperationsSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperationsSupport.java new file mode 100644 index 0000000000..1f5509cd60 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperationsSupport.java @@ -0,0 +1,243 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.context.ApplicationEvent; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.util.Assert; + +import com.mongodb.client.model.BulkWriteOptions; +import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.DeleteOneModel; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.UpdateManyModel; +import com.mongodb.client.model.UpdateOneModel; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.model.WriteModel; + +/** + * Support class for bulk operations. 
+ * + * @author Mark Paluch + * @since 4.1 + */ +abstract class BulkOperationsSupport { + + private final String collectionName; + + BulkOperationsSupport(String collectionName) { + + Assert.hasText(collectionName, "CollectionName must not be null nor empty"); + + this.collectionName = collectionName; + } + + /** + * Emit a {@link BeforeSaveEvent}. + * + * @param holder + */ + void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) { + + if (holder.model() instanceof InsertOneModel) { + + Document target = ((InsertOneModel) holder.model()).getDocument(); + maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName)); + } else if (holder.model() instanceof ReplaceOneModel) { + + Document target = ((ReplaceOneModel) holder.model()).getReplacement(); + maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName)); + } + } + + /** + * Emit a {@link AfterSaveEvent}. + * + * @param holder + */ + void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) { + + if (holder.model() instanceof InsertOneModel) { + + Document target = ((InsertOneModel) holder.model()).getDocument(); + maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName)); + } else if (holder.model() instanceof ReplaceOneModel) { + + Document target = ((ReplaceOneModel) holder.model()).getReplacement(); + maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName)); + } + } + + WriteModel mapWriteModel(Object source, WriteModel writeModel) { + + if (writeModel instanceof UpdateOneModel model) { + + Bson sort = model.getOptions().getSort(); + if (sort instanceof Document sortDocument) { + model.getOptions().sort(updateMapper().getMappedSort(sortDocument, entity().orElse(null))); + } + + if (source instanceof AggregationUpdate aggregationUpdate) { + + List pipeline = mapUpdatePipeline(aggregationUpdate); + return new UpdateOneModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions()); + } + + return new 
UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), + model.getOptions()); + } + + if (writeModel instanceof UpdateManyModel model) { + + if (source instanceof AggregationUpdate aggregationUpdate) { + + List pipeline = mapUpdatePipeline(aggregationUpdate); + return new UpdateManyModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions()); + } + + return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), + model.getOptions()); + } + + if (writeModel instanceof DeleteOneModel model) { + return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions()); + } + + if (writeModel instanceof DeleteManyModel model) { + return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions()); + } + + if (writeModel instanceof ReplaceOneModel model) { + + Bson sort = model.getReplaceOptions().getSort(); + + if (sort instanceof Document sortDocument) { + model.getReplaceOptions().sort(updateMapper().getMappedSort(sortDocument, entity().orElse(null))); + } + return new ReplaceOneModel<>(getMappedQuery(model.getFilter()), model.getReplacement(), + model.getReplaceOptions()); + } + + return writeModel; + } + + private List mapUpdatePipeline(AggregationUpdate source) { + + Class type = entity().isPresent() ? entity().map(PersistentEntity::getType).get() : Object.class; + AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, + updateMapper().getMappingContext(), queryMapper()); + + return new AggregationUtil(queryMapper(), queryMapper().getMappingContext()).createPipeline(source, context); + } + + /** + * Emit a {@link ApplicationEvent} if event multicasting is enabled. + * + * @param event + */ + protected abstract void maybeEmitEvent(ApplicationEvent event); + + /** + * @return the {@link UpdateMapper} to use. + */ + protected abstract UpdateMapper updateMapper(); + + /** + * @return the {@link QueryMapper} to use. 
+ */ + protected abstract QueryMapper queryMapper(); + + /** + * @return the associated {@link PersistentEntity}. Can be {@link Optional#empty()}. + */ + protected abstract Optional> entity(); + + protected Bson getMappedUpdate(Bson update) { + return updateMapper().getMappedObject(update, entity()); + } + + protected Bson getMappedQuery(Bson query) { + return queryMapper().getMappedObject(query, entity()); + } + + protected static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) { + + BulkWriteOptions options = new BulkWriteOptions(); + + return switch (bulkMode) { + case ORDERED -> options.ordered(true); + case UNORDERED -> options.ordered(false); + }; + } + + /** + * @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}. + * @param update The {@link Update} to apply + * @param upsert flag to indicate if document should be upserted. + * @param multi flag to indicate if update might affect multiple documents. + * @return new instance of {@link UpdateOptions}. + */ + protected UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert, + boolean multi) { + + UpdateOptions options = new UpdateOptions(); + options.upsert(upsert); + + if (update.hasArrayFilters()) { + List list = new ArrayList<>(update.getArrayFilters().size()); + for (ArrayFilter arrayFilter : update.getArrayFilters()) { + list.add(arrayFilter.asDocument()); + } + options.arrayFilters(list); + } + + if (!multi && filterQuery.isSorted()) { + options.sort(filterQuery.getSortObject()); + } + + filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation); + return options; + } + + /** + * Value object chaining together an actual source with its {@link WriteModel} representation. 
+ * + * @author Christoph Strobl + */ + record SourceAwareWriteModelHolder(Object source, WriteModel model) { + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java index a5930833f3..17b8835b7e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,20 @@ */ package org.springframework.data.mongodb.core; -import lombok.EqualsAndHashCode; - -import java.util.concurrent.atomic.AtomicReference; +import java.time.Instant; +import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; +import org.bson.BsonTimestamp; +import org.bson.BsonValue; import org.bson.Document; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.messaging.Message; import org.springframework.lang.Nullable; import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; import com.mongodb.client.model.changestream.ChangeStreamDocument; +import com.mongodb.client.model.changestream.OperationType; /** * {@link Message} implementation specific to MongoDB Change @@ -33,16 +36,29 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Myroslav Kosinskyi * 
@since 2.1 */ -@EqualsAndHashCode public class ChangeStreamEvent { + @SuppressWarnings("rawtypes") // + private static final AtomicReferenceFieldUpdater CONVERTED_FULL_DOCUMENT_UPDATER = AtomicReferenceFieldUpdater + .newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocument"); + + @SuppressWarnings("rawtypes") // + private static final AtomicReferenceFieldUpdater CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER = AtomicReferenceFieldUpdater + .newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocumentBeforeChange"); + private final @Nullable ChangeStreamDocument raw; private final Class targetType; private final MongoConverter converter; - private final AtomicReference converted = new AtomicReference<>(); + + // accessed through CONVERTED_FULL_DOCUMENT_UPDATER. + private volatile @Nullable T convertedFullDocument; + + // accessed through CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER. + private volatile @Nullable T convertedFullDocumentBeforeChange; /** * @param raw can be {@literal null}. @@ -67,6 +83,69 @@ public ChangeStreamDocument getRaw() { return raw; } + /** + * Get the {@link ChangeStreamDocument#getClusterTime() cluster time} as {@link Instant} the event was emitted at. + * + * @return can be {@literal null}. + */ + @Nullable + public Instant getTimestamp() { + + return getBsonTimestamp() != null ? converter.getConversionService().convert(raw.getClusterTime(), Instant.class) + : null; + } + + /** + * Get the {@link ChangeStreamDocument#getClusterTime() cluster time}. + * + * @return can be {@literal null}. + * @since 2.2 + */ + @Nullable + public BsonTimestamp getBsonTimestamp() { + return raw != null ? raw.getClusterTime() : null; + } + + /** + * Get the {@link ChangeStreamDocument#getResumeToken() resume token} for this event. + * + * @return can be {@literal null}. + */ + @Nullable + public BsonValue getResumeToken() { + return raw != null ? 
raw.getResumeToken() : null; + } + + /** + * Get the {@link ChangeStreamDocument#getOperationType() operation type} for this event. + * + * @return can be {@literal null}. + */ + @Nullable + public OperationType getOperationType() { + return raw != null ? raw.getOperationType() : null; + } + + /** + * Get the database name the event was originated at. + * + * @return can be {@literal null}. + */ + @Nullable + public String getDatabaseName() { + return raw != null ? raw.getNamespace().getDatabaseName() : null; + } + + /** + * Get the collection name the event was originated at. + * + * @return can be {@literal null}. + */ + @Nullable + public String getCollectionName() { + return raw != null ? raw.getNamespace().getCollectionName() : null; + } + /** * Get the potentially converted {@link ChangeStreamDocument#getFullDocument()}. * @@ -76,42 +155,89 @@ public ChangeStreamDocument getRaw() { @Nullable public T getBody() { - if (raw == null) { + if (raw == null || raw.getFullDocument() == null) { return null; } - if (raw.getFullDocument() == null) { - return targetType.cast(raw.getFullDocument()); + return getConvertedFullDocument(raw.getFullDocument()); + } + + /** + * Get the potentially converted {@link ChangeStreamDocument#getFullDocumentBeforeChange() document} before being changed. + * + * @return {@literal null} when {@link #getRaw()} or {@link ChangeStreamDocument#getFullDocumentBeforeChange()} is + * {@literal null}. 
+ * @since 4.0 + */ + @Nullable + public T getBodyBeforeChange() { + + if (raw == null || raw.getFullDocumentBeforeChange() == null) { + return null; } - return getConverted(); + return getConvertedFullDocumentBeforeChange(raw.getFullDocumentBeforeChange()); } - private T getConverted() { + @SuppressWarnings("unchecked") + private T getConvertedFullDocumentBeforeChange(Document fullDocument) { + return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER); + } + + @SuppressWarnings("unchecked") + private T getConvertedFullDocument(Document fullDocument) { + return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_UPDATER); + } + + private Object doGetConverted(Document fullDocument, AtomicReferenceFieldUpdater updater) { + + Object result = updater.get(this); - T result = converted.get(); if (result != null) { return result; } - if (ClassUtils.isAssignable(Document.class, raw.getFullDocument().getClass())) { + if (ClassUtils.isAssignable(Document.class, fullDocument.getClass())) { - result = converter.read(targetType, raw.getFullDocument()); - return converted.compareAndSet(null, result) ? result : converted.get(); + result = converter.read(targetType, fullDocument); + return updater.compareAndSet(this, null, result) ? result : updater.get(this); } - if (converter.getConversionService().canConvert(raw.getFullDocument().getClass(), targetType)) { + if (converter.getConversionService().canConvert(fullDocument.getClass(), targetType)) { - result = converter.getConversionService().convert(raw.getFullDocument(), targetType); - return converted.compareAndSet(null, result) ? result : converted.get(); + result = converter.getConversionService().convert(fullDocument, targetType); + return updater.compareAndSet(this, null, result) ? 
result : updater.get(this); } - throw new IllegalArgumentException(String.format("No converter found capable of converting %s to %s", - raw.getFullDocument().getClass(), targetType)); + throw new IllegalArgumentException( + String.format("No converter found capable of converting %s to %s", fullDocument.getClass(), targetType)); } @Override public String toString() { return "ChangeStreamEvent {" + "raw=" + raw + ", targetType=" + targetType + '}'; } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ChangeStreamEvent that = (ChangeStreamEvent) o; + + if (!ObjectUtils.nullSafeEquals(this.raw, that.raw)) { + return false; + } + return ObjectUtils.nullSafeEquals(this.targetType, that.targetType); + } + + @Override + public int hashCode() { + int result = raw != null ? raw.hashCode() : 0; + result = 31 * result + ObjectUtils.nullSafeHashCode(targetType); + return result; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java index e5c4fa5d84..aaee3b76af 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,20 +15,24 @@ */ package org.springframework.data.mongodb.core; -import lombok.EqualsAndHashCode; - +import java.time.Instant; import java.util.Arrays; import java.util.Optional; +import org.bson.BsonDocument; +import org.bson.BsonTimestamp; import org.bson.BsonValue; import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; import com.mongodb.client.model.changestream.ChangeStreamDocument; import com.mongodb.client.model.changestream.FullDocument; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; /** * Options applicable to MongoDB Change Streams. Intended @@ -37,15 +41,18 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Myroslav Kosinskyi * @since 2.1 */ -@EqualsAndHashCode public class ChangeStreamOptions { private @Nullable Object filter; private @Nullable BsonValue resumeToken; private @Nullable FullDocument fullDocumentLookup; + private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup; private @Nullable Collation collation; + private @Nullable Object resumeTimestamp; + private Resume resume = Resume.UNDEFINED; protected ChangeStreamOptions() {} @@ -70,6 +77,14 @@ public Optional getFullDocumentLookup() { return Optional.ofNullable(fullDocumentLookup); } + /** + * @return {@link Optional#empty()} if not set. 
+ * @since 4.0 + */ + public Optional getFullDocumentBeforeChangeLookup() { + return Optional.ofNullable(fullDocumentBeforeChangeLookup); + } + /** * @return {@link Optional#empty()} if not set. */ @@ -77,6 +92,37 @@ public Optional getCollation() { return Optional.ofNullable(collation); } + /** + * @return {@link Optional#empty()} if not set. + */ + public Optional getResumeTimestamp() { + return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, Instant.class)); + } + + /** + * @return {@link Optional#empty()} if not set. + * @since 2.2 + */ + public Optional getResumeBsonTimestamp() { + return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, BsonTimestamp.class)); + } + + /** + * @return {@literal true} if the change stream should be started after the {@link #getResumeToken() token}. + * @since 2.2 + */ + public boolean isStartAfter() { + return Resume.START_AFTER.equals(resume); + } + + /** + * @return {@literal true} if the change stream should be resumed after the {@link #getResumeToken() token}. + * @since 2.2 + */ + public boolean isResumeAfter() { + return Resume.RESUME_AFTER.equals(resume); + } + /** * @return empty {@link ChangeStreamOptions}. */ @@ -86,7 +132,7 @@ public static ChangeStreamOptions empty() { /** * Obtain a shiny new {@link ChangeStreamOptionsBuilder} and start defining options in this fancy fluent way. Just - * don't forget to call {@link ChangeStreamOptionsBuilder#build() build()} when your're done. + * don't forget to call {@link ChangeStreamOptionsBuilder#build() build()} when done. * * @return new instance of {@link ChangeStreamOptionsBuilder}. 
*/ @@ -94,6 +140,90 @@ public static ChangeStreamOptionsBuilder builder() { return new ChangeStreamOptionsBuilder(); } + private static T asTimestampOfType(Object timestamp, Class targetType) { + return targetType.cast(doGetTimestamp(timestamp, targetType)); + } + + private static Object doGetTimestamp(Object timestamp, Class targetType) { + + if (ClassUtils.isAssignableValue(targetType, timestamp)) { + return timestamp; + } + + if (timestamp instanceof Instant instant) { + return new BsonTimestamp((int) instant.getEpochSecond(), 0); + } + + if (timestamp instanceof BsonTimestamp bsonTimestamp) { + return Instant.ofEpochSecond(bsonTimestamp.getTime()); + } + + throw new IllegalArgumentException( + "o_O that should actually not happen; The timestamp should be an Instant or a BsonTimestamp but was " + + ObjectUtils.nullSafeClassName(timestamp)); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ChangeStreamOptions that = (ChangeStreamOptions) o; + + if (!ObjectUtils.nullSafeEquals(this.filter, that.filter)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.resumeToken, that.resumeToken)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.fullDocumentLookup, that.fullDocumentLookup)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.fullDocumentBeforeChangeLookup, that.fullDocumentBeforeChangeLookup)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.collation, that.collation)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.resumeTimestamp, that.resumeTimestamp)) { + return false; + } + return resume == that.resume; + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(filter); + result = 31 * result + ObjectUtils.nullSafeHashCode(resumeToken); + result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentLookup); + result = 31 * result + 
ObjectUtils.nullSafeHashCode(fullDocumentBeforeChangeLookup); + result = 31 * result + ObjectUtils.nullSafeHashCode(collation); + result = 31 * result + ObjectUtils.nullSafeHashCode(resumeTimestamp); + result = 31 * result + ObjectUtils.nullSafeHashCode(resume); + return result; + } + + /** + * @author Christoph Strobl + * @since 2.2 + */ + enum Resume { + + UNDEFINED, + + /** + * @see com.mongodb.client.ChangeStreamIterable#startAfter(BsonDocument) + */ + START_AFTER, + + /** + * @see com.mongodb.client.ChangeStreamIterable#resumeAfter(BsonDocument) + */ + RESUME_AFTER + } + /** * Builder for creating {@link ChangeStreamOptions}. * @@ -105,7 +235,10 @@ public static class ChangeStreamOptionsBuilder { private @Nullable Object filter; private @Nullable BsonValue resumeToken; private @Nullable FullDocument fullDocumentLookup; + private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup; private @Nullable Collation collation; + private @Nullable Object resumeTimestamp; + private Resume resume = Resume.UNDEFINED; private ChangeStreamOptionsBuilder() {} @@ -117,7 +250,7 @@ private ChangeStreamOptionsBuilder() {} */ public ChangeStreamOptionsBuilder collation(Collation collation) { - Assert.notNull(collation, "Collation must not be null nor empty!"); + Assert.notNull(collation, "Collation must not be null nor empty"); this.collation = collation; return this; @@ -125,13 +258,13 @@ public ChangeStreamOptionsBuilder collation(Collation collation) { /** * Set the filter to apply. - *

+ *
* Fields on aggregation expression root level are prefixed to map to fields contained in * {@link ChangeStreamDocument#getFullDocument() fullDocument}. However {@literal operationType}, {@literal ns}, * {@literal documentKey} and {@literal fullDocument} are reserved words that will be omitted, and therefore taken * as given, during the mapping procedure. You may want to have a look at the * structure of Change Events. - *

+ *
* Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to ensure filter expressions are * mapped to domain type fields. * @@ -141,7 +274,7 @@ public ChangeStreamOptionsBuilder collation(Collation collation) { */ public ChangeStreamOptionsBuilder filter(Aggregation filter) { - Assert.notNull(filter, "Filter must not be null!"); + Assert.notNull(filter, "Filter must not be null"); this.filter = filter; return this; @@ -170,9 +303,14 @@ public ChangeStreamOptionsBuilder filter(Document... filter) { */ public ChangeStreamOptionsBuilder resumeToken(BsonValue resumeToken) { - Assert.notNull(resumeToken, "ResumeToken must not be null!"); + Assert.notNull(resumeToken, "ResumeToken must not be null"); this.resumeToken = resumeToken; + + if (this.resume == Resume.UNDEFINED) { + this.resume = Resume.RESUME_AFTER; + } + return this; } @@ -194,12 +332,97 @@ public ChangeStreamOptionsBuilder returnFullDocumentOnUpdate() { */ public ChangeStreamOptionsBuilder fullDocumentLookup(FullDocument lookup) { - Assert.notNull(lookup, "Lookup must not be null!"); + Assert.notNull(lookup, "Lookup must not be null"); this.fullDocumentLookup = lookup; return this; } + /** + * Set the {@link FullDocumentBeforeChange} lookup to use. + * + * @param lookup must not be {@literal null}. + * @return this. + * @since 4.0 + */ + public ChangeStreamOptionsBuilder fullDocumentBeforeChangeLookup(FullDocumentBeforeChange lookup) { + + Assert.notNull(lookup, "Lookup must not be null"); + + this.fullDocumentBeforeChangeLookup = lookup; + return this; + } + + /** + * Return the full document before being changed if it is available. + * + * @return this. + * @since 4.0 + * @see #fullDocumentBeforeChangeLookup(FullDocumentBeforeChange) + */ + public ChangeStreamOptionsBuilder returnFullDocumentBeforeChange() { + return fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.WHEN_AVAILABLE); + } + + /** + * Set the cluster time to resume from. 
+ * + * @param resumeTimestamp must not be {@literal null}. + * @return this. + */ + public ChangeStreamOptionsBuilder resumeAt(Instant resumeTimestamp) { + + Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null"); + + this.resumeTimestamp = resumeTimestamp; + return this; + } + + /** + * Set the cluster time to resume from. + * + * @param resumeTimestamp must not be {@literal null}. + * @return this. + * @since 2.2 + */ + public ChangeStreamOptionsBuilder resumeAt(BsonTimestamp resumeTimestamp) { + + Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null"); + + this.resumeTimestamp = resumeTimestamp; + return this; + } + + /** + * Set the resume token after which to continue emitting notifications. + * + * @param resumeToken must not be {@literal null}. + * @return this. + * @since 2.2 + */ + public ChangeStreamOptionsBuilder resumeAfter(BsonValue resumeToken) { + + resumeToken(resumeToken); + this.resume = Resume.RESUME_AFTER; + + return this; + } + + /** + * Set the resume token after which to start emitting notifications. + * + * @param resumeToken must not be {@literal null}. + * @return this. 
+ * @since 2.2 + */ + public ChangeStreamOptionsBuilder startAfter(BsonValue resumeToken) { + + resumeToken(resumeToken); + this.resume = Resume.START_AFTER; + + return this; + } + /** * @return the built {@link ChangeStreamOptions} */ @@ -207,10 +430,13 @@ public ChangeStreamOptions build() { ChangeStreamOptions options = new ChangeStreamOptions(); - options.filter = filter; - options.resumeToken = resumeToken; - options.fullDocumentLookup = fullDocumentLookup; - options.collation = collation; + options.filter = this.filter; + options.resumeToken = this.resumeToken; + options.fullDocumentLookup = this.fullDocumentLookup; + options.fullDocumentBeforeChangeLookup = this.fullDocumentBeforeChangeLookup; + options.collation = this.collation; + options.resumeTimestamp = this.resumeTimestamp; + options.resume = this.resume; return options; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java index c7ad700cba..c142aca173 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -29,7 +29,7 @@ * @author Grame Rocher * @author Oliver Gierke * @author John Brisbin - * @auhtor Christoph Strobl + * @author Christoph Strobl * @since 1.0 */ public interface CollectionCallback { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java index 7abfbb86e8..5df30e0b92 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,37 @@ */ package org.springframework.data.mongodb.core; -import lombok.RequiredArgsConstructor; - +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; +import java.util.function.Function; +import java.util.stream.StreamSupport; +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.bson.BsonNull; +import org.bson.Document; +import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.QueryableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.QueryCharacteristic; +import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.core.timeseries.GranularityDefinition; import org.springframework.data.mongodb.core.validation.Validator; import org.springframework.data.util.Optionals; +import org.springframework.lang.CheckReturnValue; +import org.springframework.lang.Contract; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; import com.mongodb.client.model.ValidationAction; import com.mongodb.client.model.ValidationLevel; @@ -36,6 +57,8 @@ * @author Christoph Strobl * 
@author Mark Paluch * @author Andreas Zink + * @author Ben Foster + * @author Ross Lawley */ public class CollectionOptions { @@ -44,29 +67,23 @@ public class CollectionOptions { private @Nullable Boolean capped; private @Nullable Collation collation; private ValidationOptions validationOptions; - - /** - * Constructs a new CollectionOptions instance. - * - * @param size the collection size in bytes, this data space is preallocated. Can be {@literal null}. - * @param maxDocuments the maximum number of documents in the collection. Can be {@literal null}. - * @param capped true to created a "capped" collection (fixed size with auto-FIFO behavior based on insertion order), - * false otherwise. Can be {@literal null}. - * @deprecated since 2.0 please use {@link CollectionOptions#empty()} as entry point. - */ - @Deprecated - public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) { - this(size, maxDocuments, capped, null, ValidationOptions.none()); - } + private @Nullable TimeSeriesOptions timeSeriesOptions; + private @Nullable CollectionChangeStreamOptions changeStreamOptions; + private @Nullable EncryptedFieldsOptions encryptedFieldsOptions; private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped, - @Nullable Collation collation, ValidationOptions validationOptions) { + @Nullable Collation collation, ValidationOptions validationOptions, @Nullable TimeSeriesOptions timeSeriesOptions, + @Nullable CollectionChangeStreamOptions changeStreamOptions, + @Nullable EncryptedFieldsOptions encryptedFieldsOptions) { this.maxDocuments = maxDocuments; this.size = size; this.capped = capped; this.collation = collation; this.validationOptions = validationOptions; + this.timeSeriesOptions = timeSeriesOptions; + this.changeStreamOptions = changeStreamOptions; + this.encryptedFieldsOptions = encryptedFieldsOptions; } /** @@ -78,9 +95,9 @@ private CollectionOptions(@Nullable Long size, @Nullable 
Long maxDocuments, @Nul */ public static CollectionOptions just(Collation collation) { - Assert.notNull(collation, "Collation must not be null!"); + Assert.notNull(collation, "Collation must not be null"); - return new CollectionOptions(null, null, null, collation, ValidationOptions.none()); + return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null, null, null); } /** @@ -90,18 +107,97 @@ public static CollectionOptions just(Collation collation) { * @since 2.0 */ public static CollectionOptions empty() { - return new CollectionOptions(null, null, null, null, ValidationOptions.none()); + return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null, null, null); + } + + /** + * Quick way to set up {@link CollectionOptions} for a Time Series collection. For more advanced settings use + * {@link #timeSeries(String, Function)}. + * + * @param timeField The name of the property which contains the date in each time series document. Must not be + * {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @see #timeSeries(TimeSeriesOptions) + * @since 3.3 + */ + public static CollectionOptions timeSeries(String timeField) { + return timeSeries(timeField, it -> it); + } + + /** + * Set up {@link CollectionOptions} for a Time Series collection. + * + * @param timeField the name of the field that contains the date in each time series document. + * @param options a function to apply additional settings to {@link TimeSeriesOptions}. + * @return new instance of {@link CollectionOptions}. + * @since 4.4 + */ + public static CollectionOptions timeSeries(String timeField, Function options) { + return empty().timeSeries(options.apply(TimeSeriesOptions.timeSeries(timeField))); + } + + /** + * Quick way to set up {@link CollectionOptions} for emitting (pre & post) change events. + * + * @return new instance of {@link CollectionOptions}. 
+ * @see #changeStream(CollectionChangeStreamOptions) + * @see CollectionChangeStreamOptions#preAndPostImages(boolean) + * @since 4.0 + */ + public static CollectionOptions emitChangedRevisions() { + return empty().changeStream(CollectionChangeStreamOptions.preAndPostImages(true)); + } + + /** + * Create new {@link CollectionOptions} with the given {@code encryptedFields}. + * + * @param encryptedFieldsOptions can be null + * @return new instance of {@link CollectionOptions}. + * @since 4.5.0 + */ + @Contract("_ -> new") + @CheckReturnValue + public static CollectionOptions encryptedCollection(@Nullable EncryptedFieldsOptions encryptedFieldsOptions) { + return new CollectionOptions(null, null, null, null, ValidationOptions.NONE, null, null, encryptedFieldsOptions); + } + + /** + * Create new {@link CollectionOptions} reading encryption options from the given {@link MongoJsonSchema}. + * + * @param schema must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @since 4.5.0 + */ + @Contract("_ -> new") + @CheckReturnValue + public static CollectionOptions encryptedCollection(MongoJsonSchema schema) { + return encryptedCollection(EncryptedFieldsOptions.fromSchema(schema)); + } + + /** + * Create new {@link CollectionOptions} building encryption options in a fluent style. + * + * @param optionsFunction must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @since 4.5.0 + */ + @Contract("_ -> new") + @CheckReturnValue + public static CollectionOptions encryptedCollection( + Function optionsFunction) { + return encryptedCollection(optionsFunction.apply(new EncryptedFieldsOptions())); } /** * Create new {@link CollectionOptions} with already given settings and capped set to {@literal true}.
- * NOTE Using capped collections requires defining {@link #size(int)}. + * NOTE: Using capped collections requires defining {@link #size(long)}. * * @return new {@link CollectionOptions}. * @since 2.0 */ public CollectionOptions capped() { - return new CollectionOptions(size, maxDocuments, true, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** @@ -112,7 +208,8 @@ public CollectionOptions capped() { * @since 2.0 */ public CollectionOptions maxDocuments(long maxDocuments) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** @@ -123,7 +220,8 @@ public CollectionOptions maxDocuments(long maxDocuments) { * @since 2.0 */ public CollectionOptions size(long size) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** @@ -134,18 +232,19 @@ public CollectionOptions size(long size) { * @since 2.0 */ public CollectionOptions collation(@Nullable Collation collation) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** * Create new {@link CollectionOptions} with already given settings and {@code validationOptions} set to given * {@link MongoJsonSchema}. * - * @param schema can be {@literal null}. + * @param schema must not be {@literal null}. * @return new {@link CollectionOptions}. 
* @since 2.1 */ - public CollectionOptions schema(@Nullable MongoJsonSchema schema) { + public CollectionOptions schema(MongoJsonSchema schema) { return validator(Validator.schema(schema)); } @@ -226,7 +325,7 @@ public CollectionOptions failOnValidationError() { */ public CollectionOptions schemaValidationLevel(ValidationLevel validationLevel) { - Assert.notNull(validationLevel, "ValidationLevel must not be null!"); + Assert.notNull(validationLevel, "ValidationLevel must not be null"); return validation(validationOptions.validationLevel(validationLevel)); } @@ -240,7 +339,7 @@ public CollectionOptions schemaValidationLevel(ValidationLevel validationLevel) */ public CollectionOptions schemaValidationAction(ValidationAction validationAction) { - Assert.notNull(validationAction, "ValidationAction must not be null!"); + Assert.notNull(validationAction, "ValidationAction must not be null"); return validation(validationOptions.validationAction(validationAction)); } @@ -253,8 +352,52 @@ public CollectionOptions schemaValidationAction(ValidationAction validationActio */ public CollectionOptions validation(ValidationOptions validationOptions) { - Assert.notNull(validationOptions, "ValidationOptions must not be null!"); - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + Assert.notNull(validationOptions, "ValidationOptions must not be null"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); + } + + /** + * Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}. + * + * @param timeSeriesOptions must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. 
+ * @since 3.3 + */ + public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) { + + Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); + } + + /** + * Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}. + * + * @param changeStreamOptions must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @since 3.3 + */ + public CollectionOptions changeStream(CollectionChangeStreamOptions changeStreamOptions) { + + Assert.notNull(changeStreamOptions, "ChangeStreamOptions must not be null"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); + } + + /** + * Set the {@link EncryptedFieldsOptions} for collections using queryable encryption. + * + * @param encryptedFieldsOptions must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + */ + @Contract("_ -> new") + @CheckReturnValue + public CollectionOptions encrypted(EncryptedFieldsOptions encryptedFieldsOptions) { + + Assert.notNull(encryptedFieldsOptions, "EncryptedCollectionOptions must not be null"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** @@ -305,6 +448,94 @@ public Optional getValidationOptions() { return validationOptions.isEmpty() ? Optional.empty() : Optional.of(validationOptions); } + /** + * Get the {@link TimeSeriesOptions} if available. + * + * @return {@link Optional#empty()} if not specified. + * @since 3.3 + */ + public Optional getTimeSeriesOptions() { + return Optional.ofNullable(timeSeriesOptions); + } + + /** + * Get the {@link CollectionChangeStreamOptions} if available. 
+ * + * @return {@link Optional#empty()} if not specified. + * @since 4.0 + */ + public Optional getChangeStreamOptions() { + return Optional.ofNullable(changeStreamOptions); + } + + /** + * Get the {@code encryptedFields} if available. + * + * @return {@link Optional#empty()} if not specified. + * @since 4.5 + */ + public Optional getEncryptedFieldsOptions() { + return Optional.ofNullable(encryptedFieldsOptions); + } + + @Override + public String toString() { + return "CollectionOptions{" + "maxDocuments=" + maxDocuments + ", size=" + size + ", capped=" + capped + + ", collation=" + collation + ", validationOptions=" + validationOptions + ", timeSeriesOptions=" + + timeSeriesOptions + ", changeStreamOptions=" + changeStreamOptions + ", encryptedCollectionOptions=" + + encryptedFieldsOptions + ", disableValidation=" + disableValidation() + ", strictValidation=" + + strictValidation() + ", moderateValidation=" + moderateValidation() + ", warnOnValidationError=" + + warnOnValidationError() + ", failOnValidationError=" + failOnValidationError() + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + CollectionOptions that = (CollectionOptions) o; + + if (!ObjectUtils.nullSafeEquals(maxDocuments, that.maxDocuments)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(size, that.size)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(capped, that.capped)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(collation, that.collation)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(validationOptions, that.validationOptions)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(timeSeriesOptions, that.timeSeriesOptions)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(changeStreamOptions, that.changeStreamOptions)) { + return false; + } + return ObjectUtils.nullSafeEquals(encryptedFieldsOptions, 
that.encryptedFieldsOptions); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(maxDocuments); + result = 31 * result + ObjectUtils.nullSafeHashCode(size); + result = 31 * result + ObjectUtils.nullSafeHashCode(capped); + result = 31 * result + ObjectUtils.nullSafeHashCode(collation); + result = 31 * result + ObjectUtils.nullSafeHashCode(validationOptions); + result = 31 * result + ObjectUtils.nullSafeHashCode(timeSeriesOptions); + result = 31 * result + ObjectUtils.nullSafeHashCode(changeStreamOptions); + result = 31 * result + ObjectUtils.nullSafeHashCode(encryptedFieldsOptions); + return result; + } + /** * Encapsulation of ValidationOptions options. * @@ -312,7 +543,6 @@ public Optional getValidationOptions() { * @author Andreas Zink * @since 2.1 */ - @RequiredArgsConstructor public static class ValidationOptions { private static final ValidationOptions NONE = new ValidationOptions(null, null, null); @@ -321,6 +551,14 @@ public static class ValidationOptions { private final @Nullable ValidationLevel validationLevel; private final @Nullable ValidationAction validationAction; + public ValidationOptions(@Nullable Validator validator, @Nullable ValidationLevel validationLevel, + @Nullable ValidationAction validationAction) { + + this.validator = validator; + this.validationLevel = validationLevel; + this.validationAction = validationAction; + } + /** * Create an empty {@link ValidationOptions}. * @@ -381,7 +619,7 @@ public Optional getValidationLevel() { /** * Get the {@code validationAction} to perform. * - * @return @return {@link Optional#empty()} if not set. + * @return {@link Optional#empty()} if not set. 
*/ public Optional getValidationAction() { return Optional.ofNullable(validationAction); @@ -393,5 +631,418 @@ public Optional getValidationAction() { boolean isEmpty() { return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel()); } + + @Override + public String toString() { + + return "ValidationOptions{" + "validator=" + validator + ", validationLevel=" + validationLevel + + ", validationAction=" + validationAction + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + ValidationOptions that = (ValidationOptions) o; + + if (!ObjectUtils.nullSafeEquals(validator, that.validator)) { + return false; + } + if (validationLevel != that.validationLevel) + return false; + return validationAction == that.validationAction; + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(validator); + result = 31 * result + ObjectUtils.nullSafeHashCode(validationLevel); + result = 31 * result + ObjectUtils.nullSafeHashCode(validationAction); + return result; + } + } + + /** + * Encapsulation of Encryption options for collections. + * + * @author Christoph Strobl + * @since 4.5 + */ + public static class EncryptedFieldsOptions { + + private static final EncryptedFieldsOptions NONE = new EncryptedFieldsOptions(); + + private final @Nullable MongoJsonSchema schema; + private final List queryableProperties; + + EncryptedFieldsOptions() { + this(null, List.of()); + } + + private EncryptedFieldsOptions(@Nullable MongoJsonSchema schema, + List queryableProperties) { + + this.schema = schema; + this.queryableProperties = queryableProperties; + } + + /** + * @return {@link EncryptedFieldsOptions#NONE} + */ + public static EncryptedFieldsOptions none() { + return NONE; + } + + /** + * @return new instance of {@link EncryptedFieldsOptions}. 
+ */ + public static EncryptedFieldsOptions fromSchema(MongoJsonSchema schema) { + return new EncryptedFieldsOptions(schema, List.of()); + } + + /** + * @return new instance of {@link EncryptedFieldsOptions}. + */ + public static EncryptedFieldsOptions fromProperties(List properties) { + return new EncryptedFieldsOptions(null, List.copyOf(properties)); + } + + /** + * Add a new {@link QueryableJsonSchemaProperty queryable property} for the given source property. + *

+ * Please note that, a given {@link JsonSchemaProperty} may override options from a given {@link MongoJsonSchema} if + * set. + * + * @param property the queryable source - typically + * {@link org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty + * encrypted}. + * @param characteristics the query options to set. + * @return new instance of {@link EncryptedFieldsOptions}. + */ + @Contract("_, _ -> new") + @CheckReturnValue + public EncryptedFieldsOptions queryable(JsonSchemaProperty property, QueryCharacteristic... characteristics) { + + List targetPropertyList = new ArrayList<>(queryableProperties.size() + 1); + targetPropertyList.addAll(queryableProperties); + targetPropertyList.add(JsonSchemaProperty.queryable(property, List.of(characteristics))); + + return new EncryptedFieldsOptions(schema, targetPropertyList); + } + + public Document toDocument() { + return new Document("fields", selectPaths()); + } + + private List selectPaths() { + + Map fields = new LinkedHashMap<>(); + for (Document field : fromSchema()) { + fields.put(field.get("path", String.class), field); + } + for (Document field : fromProperties()) { + fields.put(field.get("path", String.class), field); + } + return List.copyOf(fields.values()); + } + + private List fromProperties() { + + if (queryableProperties.isEmpty()) { + return List.of(); + } + + List converted = new ArrayList<>(queryableProperties.size()); + for (QueryableJsonSchemaProperty property : queryableProperties) { + + Document field = new Document("path", property.getIdentifier()); + + if (!property.getTypes().isEmpty()) { + field.append("bsonType", property.getTypes().iterator().next().toBsonType().value()); + } + + if (property + .getTargetProperty() instanceof IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty encrypted) { + if (encrypted.getKeyId() != null) { + if (encrypted.getKeyId() instanceof String stringKey) { + field.append("keyId", + new 
BsonBinary(BsonBinarySubType.UUID_STANDARD, stringKey.getBytes(StandardCharsets.UTF_8))); + } else { + field.append("keyId", encrypted.getKeyId()); + } + } + } + + field.append("queries", StreamSupport.stream(property.getCharacteristics().spliterator(), false) + .map(QueryCharacteristic::toDocument).toList()); + + if (!field.containsKey("keyId")) { + field.append("keyId", BsonNull.VALUE); + } + + converted.add(field); + } + return converted; + } + + private List fromSchema() { + + if (schema == null) { + return List.of(); + } + + Document root = schema.schemaDocument(); + Map paths = new LinkedHashMap<>(); + collectPaths(root, null, paths); + + List fields = new ArrayList<>(); + if (!paths.isEmpty()) { + + for (Entry entry : paths.entrySet()) { + Document field = new Document("path", entry.getKey()); + field.append("keyId", entry.getValue().getOrDefault("keyId", BsonNull.VALUE)); + if (entry.getValue().containsKey("bsonType")) { + field.append("bsonType", entry.getValue().get("bsonType")); + } + field.put("queries", entry.getValue().get("queries")); + fields.add(field); + } + } + + return fields; + } + } + + private static void collectPaths(Document document, @Nullable String currentPath, Map paths) { + + if (document.containsKey("type") && document.get("type").equals("object")) { + Object o = document.get("properties"); + if (o == null) { + return; + } + + if (o instanceof Document properties) { + for (Entry entry : properties.entrySet()) { + if (entry.getValue() instanceof Document nested) { + + String path = currentPath == null ? entry.getKey() : (currentPath + "." 
+ entry.getKey()); + if (nested.containsKey("encrypt")) { + Document target = new Document(nested.get("encrypt", Document.class)); + if (nested.containsKey("queries")) { + List queries = nested.get("queries", List.class); + if (!queries.isEmpty() && queries.iterator().next() instanceof Document qd) { + target.putAll(qd); + } + } + paths.put(path, target); + } else { + collectPaths(nested, path, paths); + } + } + } + } + } + } + + /** + * Encapsulation of options applied to define collections change stream behaviour. + * + * @author Christoph Strobl + * @since 4.0 + */ + public static class CollectionChangeStreamOptions { + + private final boolean preAndPostImages; + + private CollectionChangeStreamOptions(boolean emitChangedRevisions) { + this.preAndPostImages = emitChangedRevisions; + } + + /** + * Output the version of a document before and after changes (the document pre- and post-images). + * + * @return new instance of {@link CollectionChangeStreamOptions}. + */ + public static CollectionChangeStreamOptions preAndPostImages(boolean emitChangedRevisions) { + return new CollectionChangeStreamOptions(true); + } + + public boolean getPreAndPostImages() { + return preAndPostImages; + } + + @Override + public String toString() { + return "CollectionChangeStreamOptions{" + "preAndPostImages=" + preAndPostImages + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + CollectionChangeStreamOptions that = (CollectionChangeStreamOptions) o; + + return preAndPostImages == that.preAndPostImages; + } + + @Override + public int hashCode() { + return (preAndPostImages ? 1 : 0); + } + } + + /** + * Options applicable to Time Series collections. 
+ * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/core/timeseries-collections + * @since 3.3 + */ + public static class TimeSeriesOptions { + + private final String timeField; + + private @Nullable final String metaField; + + private final GranularityDefinition granularity; + + private final Duration expireAfter; + + private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity, + Duration expireAfter) { + Assert.hasText(timeField, "Time field must not be empty or null"); + + this.timeField = timeField; + this.metaField = metaField; + this.granularity = granularity; + this.expireAfter = expireAfter; + } + + /** + * Create a new instance of {@link TimeSeriesOptions} using the given field as its {@literal timeField}. The one, + * that contains the date in each time series document.
+ * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @param timeField must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + */ + public static TimeSeriesOptions timeSeries(String timeField) { + return new TimeSeriesOptions(timeField, null, Granularity.DEFAULT, Duration.ofSeconds(-1)); + } + + /** + * Set the name of the field which contains metadata in each time series document. Should not be the {@literal id} + * nor {@link TimeSeriesOptions#timeSeries(String)} timeField} nor point to an {@literal array} or + * {@link java.util.Collection}.
+ * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @param metaField must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + */ + public TimeSeriesOptions metaField(String metaField) { + return new TimeSeriesOptions(timeField, metaField, granularity, expireAfter); + } + + /** + * Select the {@link GranularityDefinition} parameter to define how data in the time series collection is organized. + * Select one that is closest to the time span between incoming measurements. + * + * @return new instance of {@link TimeSeriesOptions}. + * @see Granularity + */ + public TimeSeriesOptions granularity(GranularityDefinition granularity) { + return new TimeSeriesOptions(timeField, metaField, granularity, expireAfter); + } + + /** + * Set the {@link Duration} for automatic removal of documents older than a specified value. + * + * @param ttl must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + * @see com.mongodb.client.model.CreateCollectionOptions#expireAfter(long, java.util.concurrent.TimeUnit) + * @since 4.4 + */ + public TimeSeriesOptions expireAfter(Duration ttl) { + return new TimeSeriesOptions(timeField, metaField, granularity, ttl); + } + + /** + * @return never {@literal null}. + */ + public String getTimeField() { + return timeField; + } + + /** + * @return can be {@literal null}. Might be an {@literal empty} {@link String} as well, so maybe check via + * {@link org.springframework.util.StringUtils#hasText(String)}. + */ + @Nullable + public String getMetaField() { + return metaField; + } + + /** + * @return never {@literal null}. + */ + public GranularityDefinition getGranularity() { + return granularity; + } + + /** + * Get the {@link Duration} for automatic removal of documents. + * + * @return a {@link Duration#isNegative() negative} value if not specified. 
+ * @since 4.4 + */ + public Duration getExpireAfter() { + return expireAfter; + } + + @Override + public String toString() { + + return "TimeSeriesOptions{" + "timeField='" + timeField + '\'' + ", metaField='" + metaField + '\'' + + ", granularity=" + granularity + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + TimeSeriesOptions that = (TimeSeriesOptions) o; + + if (!ObjectUtils.nullSafeEquals(timeField, that.timeField)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(metaField, that.metaField)) { + return false; + } + return ObjectUtils.nullSafeEquals(granularity, that.granularity); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(timeField); + result = 31 * result + ObjectUtils.nullSafeHashCode(metaField); + result = 31 * result + ObjectUtils.nullSafeHashCode(granularity); + return result; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparer.java new file mode 100644 index 0000000000..f3769355c7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparer.java @@ -0,0 +1,61 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.util.Assert; + +import com.mongodb.client.MongoCollection; + +/** + * Interface for functional preparation of a {@link MongoCollection}. + * + * @author Mark Paluch + * @since 4.1 + */ +public interface CollectionPreparer { + + /** + * Returns a preparer that always returns its input collection. + * + * @return a preparer that always returns its input collection. + */ + static CollectionPreparer identity() { + return it -> it; + } + + /** + * Prepare the {@code collection}. + * + * @param collection the collection to prepare. + * @return the prepared collection. + */ + T prepare(T collection); + + /** + * Returns a composed {@code CollectionPreparer} that first applies this preparer to the collection, and then applies + * the {@code after} preparer to the result. If evaluation of either function throws an exception, it is relayed to + * the caller of the composed function. + * + * @param after the collection preparer to apply after this function is applied. + * @return a composed {@code CollectionPreparer} that first applies this preparer and then applies the {@code after} + * preparer. + */ + default CollectionPreparer andThen(CollectionPreparer after) { + Assert.notNull(after, "After CollectionPreparer must not be null"); + return c -> after.prepare(prepare(c)); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparerSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparerSupport.java new file mode 100644 index 0000000000..644a3a54d1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparerSupport.java @@ -0,0 +1,182 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Arrays; +import java.util.List; +import java.util.function.BiFunction; +import java.util.function.Function; + +import org.bson.Document; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.client.MongoCollection; + +/** + * Support class for delegate implementations to apply {@link ReadConcern} and {@link ReadPreference} settings upon + * {@link CollectionPreparer preparing a collection}. 
+ * + * @author Mark Paluch + * @since 4.1 + */ +class CollectionPreparerSupport implements ReadConcernAware, ReadPreferenceAware { + + private final List sources; + + private CollectionPreparerSupport(List sources) { + this.sources = sources; + } + + T doPrepare(T collection, Function concernAccessor, BiFunction concernFunction, + Function preferenceAccessor, BiFunction preferenceFunction) { + + T collectionToUse = collection; + + for (Object source : sources) { + if (source instanceof ReadConcernAware rca && rca.hasReadConcern()) { + + ReadConcern concern = rca.getReadConcern(); + if (concernAccessor.apply(collectionToUse) != concern) { + collectionToUse = concernFunction.apply(collectionToUse, concern); + } + break; + } + } + + for (Object source : sources) { + if (source instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) { + + ReadPreference preference = rpa.getReadPreference(); + if (preferenceAccessor.apply(collectionToUse) != preference) { + collectionToUse = preferenceFunction.apply(collectionToUse, preference); + } + break; + } + } + + return collectionToUse; + } + + @Override + public boolean hasReadConcern() { + + for (Object aware : sources) { + if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) { + return true; + } + } + + return false; + } + + @Override + public ReadConcern getReadConcern() { + + for (Object aware : sources) { + if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) { + return rca.getReadConcern(); + } + } + + return null; + } + + @Override + public boolean hasReadPreference() { + + for (Object aware : sources) { + if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) { + return true; + } + } + + return false; + } + + @Override + public ReadPreference getReadPreference() { + + for (Object aware : sources) { + if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) { + return rpa.getReadPreference(); + } + } + + return null; + } + + static class 
CollectionPreparerDelegate extends CollectionPreparerSupport + implements CollectionPreparer> { + + private CollectionPreparerDelegate(List sources) { + super(sources); + } + + public static CollectionPreparerDelegate of(ReadPreferenceAware... awares) { + return of((Object[]) awares); + } + + public static CollectionPreparerDelegate of(Object... mixedAwares) { + + if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) { + return (CollectionPreparerDelegate) mixedAwares[0]; + } + + return new CollectionPreparerDelegate(Arrays.asList(mixedAwares)); + } + + @Override + public MongoCollection prepare(MongoCollection collection) { + return doPrepare(collection, MongoCollection::getReadConcern, MongoCollection::withReadConcern, + MongoCollection::getReadPreference, MongoCollection::withReadPreference); + } + + } + + static class ReactiveCollectionPreparerDelegate extends CollectionPreparerSupport + implements CollectionPreparer> { + + private ReactiveCollectionPreparerDelegate(List sources) { + super(sources); + } + + public static ReactiveCollectionPreparerDelegate of(ReadPreferenceAware... awares) { + return of((Object[]) awares); + } + + public static ReactiveCollectionPreparerDelegate of(Object... 
mixedAwares) { + + if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) { + return (ReactiveCollectionPreparerDelegate) mixedAwares[0]; + } + + return new ReactiveCollectionPreparerDelegate(Arrays.asList(mixedAwares)); + } + + @Override + public com.mongodb.reactivestreams.client.MongoCollection prepare( + com.mongodb.reactivestreams.client.MongoCollection collection) { + return doPrepare(collection, // + com.mongodb.reactivestreams.client.MongoCollection::getReadConcern, + com.mongodb.reactivestreams.client.MongoCollection::withReadConcern, + com.mongodb.reactivestreams.client.MongoCollection::getReadPreference, + com.mongodb.reactivestreams.client.MongoCollection::withReadPreference); + } + + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CountQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CountQuery.java new file mode 100644 index 0000000000..4fa6b3e97d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CountQuery.java @@ -0,0 +1,260 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import org.bson.Document; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.query.MetricConversion; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +/** + * Value object representing a count query. Count queries using {@code $near} or {@code $nearSphere} require a rewrite + * to {@code $geoWithin}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +class CountQuery { + + private final Document source; + + private CountQuery(Document source) { + this.source = source; + } + + public static CountQuery of(Document source) { + return new CountQuery(source); + } + + /** + * Returns the query {@link Document} that can be used with {@code countDocuments()}. Potentially rewrites the query + * to be usable with {@code countDocuments()}. + * + * @return the query {@link Document} that can be used with {@code countDocuments()}. + */ + public Document toQueryDocument() { + + if (!requiresRewrite(source)) { + return source; + } + + Document target = new Document(); + + for (Map.Entry entry : source.entrySet()) { + + if (entry.getValue() instanceof Document document && requiresRewrite(entry.getValue())) { + + target.putAll(createGeoWithin(entry.getKey(), document, source.get("$and"))); + continue; + } + + if (entry.getValue() instanceof Collection collection && requiresRewrite(entry.getValue())) { + + target.put(entry.getKey(), rewriteCollection(collection)); + continue; + } + + if ("$and".equals(entry.getKey()) && target.containsKey("$and")) { + // Expect $and to be processed with Document and createGeoWithin. 
+ continue; + } + + target.put(entry.getKey(), entry.getValue()); + } + + return target; + } + + /** + * @param valueToInspect + * @return {@code true} if the enclosing element needs to be rewritten. + */ + private boolean requiresRewrite(Object valueToInspect) { + + if (valueToInspect instanceof Document document) { + return requiresRewrite(document); + } + + if (valueToInspect instanceof Collection collection) { + return requiresRewrite(collection); + } + + return false; + } + + private boolean requiresRewrite(Collection collection) { + + for (Object o : collection) { + if (o instanceof Document document && requiresRewrite(document)) { + return true; + } + } + + return false; + } + + private boolean requiresRewrite(Document document) { + + if (containsNear(document)) { + return true; + } + + for (Object entry : document.values()) { + + if (requiresRewrite(entry)) { + return true; + } + } + + return false; + } + + private Collection rewriteCollection(Collection source) { + + Collection rewrittenCollection = new ArrayList<>(source.size()); + + for (Object item : source) { + if (item instanceof Document document && requiresRewrite(item)) { + rewrittenCollection.add(CountQuery.of(document).toQueryDocument()); + } else { + rewrittenCollection.add(item); + } + } + + return rewrittenCollection; + } + + /** + * Rewrite the near query for field {@code key} to {@code $geoWithin}. + * + * @param key the queried field. + * @param source source {@link Document}. + * @param $and potentially existing {@code $and} condition. + * @return the rewritten query {@link Document}. + */ + @SuppressWarnings("unchecked") + private static Document createGeoWithin(String key, Document source, @Nullable Object $and) { + + boolean spheric = source.containsKey("$nearSphere"); + Object $near = spheric ? 
source.get("$nearSphere") : source.get("$near"); + + Number maxDistance = getMaxDistance(source, $near, spheric); + + List $centerMax = Arrays.asList(toCenterCoordinates($near), maxDistance); + Document $geoWithinMax = new Document("$geoWithin", + new Document(spheric ? "$centerSphere" : "$center", $centerMax)); + + if (!containsNearWithMinDistance(source)) { + return new Document(key, $geoWithinMax); + } + + Number minDistance = (Number) source.get("$minDistance"); + List $centerMin = Arrays.asList(toCenterCoordinates($near), minDistance); + Document $geoWithinMin = new Document("$geoWithin", + new Document(spheric ? "$centerSphere" : "$center", $centerMin)); + + List criteria; + + if ($and != null) { + if ($and instanceof Collection) { + Collection andElements = (Collection) $and; + criteria = new ArrayList<>(andElements.size() + 2); + criteria.addAll(andElements); + } else { + throw new IllegalArgumentException( + "Cannot rewrite query as it contains an '$and' element that is not a Collection: Offending element: " + + $and); + } + } else { + criteria = new ArrayList<>(2); + } + + criteria.add(new Document("$nor", Collections.singletonList(new Document(key, $geoWithinMin)))); + criteria.add(new Document(key, $geoWithinMax)); + + return new Document("$and", criteria); + } + + private static Number getMaxDistance(Document source, Object $near, boolean spheric) { + + Number maxDistance = Double.MAX_VALUE; + + if (source.containsKey("$maxDistance")) { // legacy coordinate pair + return (Number) source.get("$maxDistance"); + } + + if ($near instanceof Document nearDoc) { + + if (nearDoc.containsKey("$maxDistance")) { + + maxDistance = (Number) nearDoc.get("$maxDistance"); + // geojson is in Meters but we need radians x/(6378.1*1000) + if (spheric && nearDoc.containsKey("$geometry")) { + maxDistance = MetricConversion.metersToRadians(maxDistance.doubleValue()); + } + } + } + + return maxDistance; + } + + private static boolean containsNear(Document source) { + return 
source.containsKey("$near") || source.containsKey("$nearSphere"); + } + + private static boolean containsNearWithMinDistance(Document source) { + + if (!containsNear(source)) { + return false; + } + + return source.containsKey("$minDistance"); + } + + private static Object toCenterCoordinates(Object value) { + + if (ObjectUtils.isArray(value)) { + return value; + } + + if (value instanceof Point point) { + return Arrays.asList(point.getX(), point.getY()); + } + + if (value instanceof Document document) { + + if (document.containsKey("x")) { + return Arrays.asList(document.get("x"), document.get("y")); + } + + if (document.containsKey("$geometry")) { + Document geoJsonPoint = document.get("$geometry", Document.class); + return geoJsonPoint.get("coordinates"); + } + } + + return value; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java index 0a6ea266c3..9b7408b0cf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,15 @@ */ package org.springframework.data.mongodb.core; +import java.util.function.Function; + import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import com.mongodb.ReadPreference; import com.mongodb.client.FindIterable; +import com.mongodb.client.MongoCollection; /** * Simple callback interface to allow customization of a {@link FindIterable}. @@ -25,12 +31,53 @@ * @author Oliver Gierke * @author Christoph Strobl */ -interface CursorPreparer { +public interface CursorPreparer extends ReadPreferenceAware { + + /** + * Default {@link CursorPreparer} just passing on the given {@link FindIterable}. + * + * @since 2.2 + */ + CursorPreparer NO_OP_PREPARER = (iterable -> iterable); /** * Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor. * - * @param cursor + * @param iterable must not be {@literal null}. + * @return never {@literal null}. + */ + FindIterable prepare(FindIterable iterable); + + /** + * Apply query specific settings to {@link MongoCollection} and initiate a find operation returning a + * {@link FindIterable} via the given {@link Function find} function. + * + * @param collection must not be {@literal null}. + * @param find must not be {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException if one of the required arguments is {@literal null}. 
+ * @since 2.2 + */ + default FindIterable initiateFind(MongoCollection collection, + Function, FindIterable> find) { + + Assert.notNull(collection, "Collection must not be null"); + Assert.notNull(find, "Find function must not be null"); + + if (hasReadPreference()) { + collection = collection.withReadPreference(getReadPreference()); + } + + return prepare(find.apply(collection)); + } + + /** + * @return the {@link ReadPreference} to apply or {@literal null} if none defined. + * @since 2.2 */ - FindIterable prepare(FindIterable cursor); + @Override + @Nullable + default ReadPreference getReadPreference() { + return null; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java index 0ce27d2737..9d588ad16d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java index 63c3256dce..52343522a7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,37 +15,44 @@ */ package org.springframework.data.mongodb.core; -import lombok.NonNull; -import lombok.Value; - import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import org.bson.Document; -import org.bson.conversions.Bson; -import org.springframework.dao.DataAccessException; -import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.context.ApplicationEvent; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.data.mapping.callback.EntityCallback; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.mongodb.BulkOperationException; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.convert.UpdateMapper; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import 
org.springframework.data.util.Pair; import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.BulkWriteException; +import com.mongodb.MongoBulkWriteException; import com.mongodb.WriteConcern; +import com.mongodb.bulk.BulkWriteResult; import com.mongodb.client.MongoCollection; import com.mongodb.client.model.BulkWriteOptions; import com.mongodb.client.model.DeleteManyModel; -import com.mongodb.client.model.DeleteOneModel; import com.mongodb.client.model.DeleteOptions; import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.ReplaceOptions; import com.mongodb.client.model.UpdateManyModel; import com.mongodb.client.model.UpdateOneModel; import com.mongodb.client.model.UpdateOptions; @@ -58,18 +65,21 @@ * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Minsu Kim + * @author Jens Schauder + * @author Michail Nikolaev + * @author Roman Puchkovskiy + * @author Jacob Botuck * @since 1.9 */ -class DefaultBulkOperations implements BulkOperations { +class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperations { private final MongoOperations mongoOperations; private final String collectionName; private final BulkOperationContext bulkOperationContext; - private final List> models = new ArrayList<>(); + private final List models = new ArrayList<>(); - private PersistenceExceptionTranslator exceptionTranslator; private @Nullable WriteConcern defaultWriteConcern; - private BulkWriteOptions bulkOptions; /** @@ -84,24 +94,15 @@ class DefaultBulkOperations implements BulkOperations { DefaultBulkOperations(MongoOperations mongoOperations, String collectionName, BulkOperationContext bulkOperationContext) { - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); - Assert.hasText(collectionName, "CollectionName must not be null nor empty!"); - Assert.notNull(bulkOperationContext, 
"BulkOperationContext must not be null!"); + super(collectionName); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); + Assert.hasText(collectionName, "CollectionName must not be null nor empty"); + Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null"); this.mongoOperations = mongoOperations; this.collectionName = collectionName; this.bulkOperationContext = bulkOperationContext; - this.exceptionTranslator = new MongoExceptionTranslator(); - this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode()); - } - - /** - * Configures the {@link PersistenceExceptionTranslator} to be used. Defaults to {@link MongoExceptionTranslator}. - * - * @param exceptionTranslator can be {@literal null}. - */ - public void setExceptionTranslator(@Nullable PersistenceExceptionTranslator exceptionTranslator) { - this.exceptionTranslator = exceptionTranslator == null ? new MongoExceptionTranslator() : exceptionTranslator; + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); } /** @@ -113,116 +114,77 @@ void setDefaultWriteConcern(@Nullable WriteConcern defaultWriteConcern) { this.defaultWriteConcern = defaultWriteConcern; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#insert(java.lang.Object) - */ @Override public BulkOperations insert(Object document) { - Assert.notNull(document, "Document must not be null!"); - - if (document instanceof Document) { - - models.add(new InsertOneModel<>((Document) document)); - return this; - } - - Document sink = new Document(); - mongoOperations.getConverter().write(document, sink); + Assert.notNull(document, "Document must not be null"); - models.add(new InsertOneModel<>(sink)); + maybeEmitEvent(new BeforeConvertEvent<>(document, collectionName)); + Object source = maybeInvokeBeforeConvertCallback(document); + addModel(source, new InsertOneModel<>(getMappedObject(source))); return this; } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.BulkOperations#insert(java.util.List) - */ @Override public BulkOperations insert(List documents) { - Assert.notNull(documents, "Documents must not be null!"); + Assert.notNull(documents, "Documents must not be null"); documents.forEach(this::insert); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateOne(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update) - */ @Override - @SuppressWarnings("unchecked") - public BulkOperations updateOne(Query query, Update update) { + public BulkOperations updateOne(Query query, UpdateDefinition update) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); - return updateOne(Collections.singletonList(Pair.of(query, update))); + return update(query, update, false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateOne(java.util.List) - */ @Override - public BulkOperations updateOne(List> updates) { + public BulkOperations updateOne(List> updates) { - Assert.notNull(updates, "Updates must not be null!"); + Assert.notNull(updates, "Updates must not be null"); - for (Pair update : updates) { + for (Pair update : updates) { update(update.getFirst(), update.getSecond(), false, false); } return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update) - */ @Override - @SuppressWarnings("unchecked") - public BulkOperations updateMulti(Query query, Update update) { + public BulkOperations updateMulti(Query query, UpdateDefinition update) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + 
Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); - return updateMulti(Collections.singletonList(Pair.of(query, update))); + update(query, update, false, true); + + return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(java.util.List) - */ @Override - public BulkOperations updateMulti(List> updates) { + public BulkOperations updateMulti(List> updates) { - Assert.notNull(updates, "Updates must not be null!"); + Assert.notNull(updates, "Updates must not be null"); - for (Pair update : updates) { + for (Pair update : updates) { update(update.getFirst(), update.getSecond(), false, true); } return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update) - */ @Override - public BulkOperations upsert(Query query, Update update) { + public BulkOperations upsert(Query query, UpdateDefinition update) { return update(query, update, true, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#upsert(java.util.List) - */ @Override public BulkOperations upsert(List> updates) { @@ -233,31 +195,23 @@ public BulkOperations upsert(List> updates) { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#remove(org.springframework.data.mongodb.core.query.Query) - */ @Override public BulkOperations remove(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); DeleteOptions deleteOptions = new DeleteOptions(); query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation); - models.add(new DeleteManyModel<>(query.getQueryObject(), deleteOptions)); + addModel(query, new DeleteManyModel<>(query.getQueryObject(), deleteOptions)); return this; } - /* - * (non-Javadoc) - 
* @see org.springframework.data.mongodb.core.BulkOperations#remove(java.util.List) - */ @Override public BulkOperations remove(List removes) { - Assert.notNull(removes, "Removals must not be null!"); + Assert.notNull(removes, "Removals must not be null"); for (Query query : removes) { remove(query); @@ -266,30 +220,87 @@ public BulkOperations remove(List removes) { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#executeBulk() - */ + @Override + public BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null"); + + ReplaceOptions replaceOptions = new ReplaceOptions(); + replaceOptions.upsert(options.isUpsert()); + if (query.isSorted()) { + replaceOptions.sort(query.getSortObject()); + } + query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation); + + maybeEmitEvent(new BeforeConvertEvent<>(replacement, collectionName)); + Object source = maybeInvokeBeforeConvertCallback(replacement); + addModel(source, new ReplaceOneModel<>(query.getQueryObject(), getMappedObject(source), replaceOptions)); + + return this; + } + @Override public com.mongodb.bulk.BulkWriteResult execute() { try { - MongoCollection collection = mongoOperations.getCollection(collectionName); - if (defaultWriteConcern != null) { - collection = collection.withWriteConcern(defaultWriteConcern); + com.mongodb.bulk.BulkWriteResult result = mongoOperations.execute(collectionName, this::bulkWriteTo); + + Assert.state(result != null, "Result must not be null"); + + models.forEach(this::maybeEmitAfterSaveEvent); + models.forEach(this::maybeInvokeAfterSaveCallback); + + return result; + } finally { + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); + } + } + + private BulkWriteResult 
bulkWriteTo(MongoCollection collection) { + + if (defaultWriteConcern != null) { + collection = collection.withWriteConcern(defaultWriteConcern); + } + + try { + + return collection.bulkWrite( // + models.stream() // + .map(this::extractAndMapWriteModel) // + .collect(Collectors.toList()), // + bulkOptions); + } catch (RuntimeException ex) { + + if (ex instanceof MongoBulkWriteException mongoBulkWriteException) { + + if (mongoBulkWriteException.getWriteConcernError() != null) { + throw new DataIntegrityViolationException(ex.getMessage(), ex); + } + throw new BulkOperationException(ex.getMessage(), mongoBulkWriteException); } - return collection.bulkWrite(models.stream().map(this::mapWriteModel).collect(Collectors.toList()), bulkOptions); + throw ex; + } + } + + private WriteModel extractAndMapWriteModel(SourceAwareWriteModelHolder it) { - } catch (BulkWriteException o_O) { + maybeEmitBeforeSaveEvent(it); - DataAccessException toThrow = exceptionTranslator.translateExceptionIfPossible(o_O); - throw toThrow == null ? o_O : toThrow; + if (it.model() instanceof InsertOneModel model) { - } finally { - this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode()); + Document target = model.getDocument(); + maybeInvokeBeforeSaveCallback(it.source(), target); + } else if (it.model() instanceof ReplaceOneModel model) { + + Document target = model.getReplacement(); + maybeInvokeBeforeSaveCallback(it.source(), target); } + + return mapWriteModel(it.source(), it.model()); } /** @@ -301,95 +312,135 @@ public com.mongodb.bulk.BulkWriteResult execute() { * @param multi whether to issue a multi-update. * @return the {@link BulkOperations} with the update registered. 
*/ - private BulkOperations update(Query query, Update update, boolean upsert, boolean multi) { + private BulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); - UpdateOptions options = new UpdateOptions(); - options.upsert(upsert); - query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation); + UpdateOptions options = computeUpdateOptions(query, update, upsert, multi); if (multi) { - models.add(new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options)); + addModel(update, new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options)); } else { - models.add(new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options)); + addModel(update, new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options)); } return this; } - private WriteModel mapWriteModel(WriteModel writeModel) { - - if (writeModel instanceof UpdateOneModel) { + @Override + protected void maybeEmitEvent(ApplicationEvent event) { + bulkOperationContext.publishEvent(event); + } - UpdateOneModel model = (UpdateOneModel) writeModel; + @Override + protected UpdateMapper updateMapper() { + return bulkOperationContext.updateMapper(); + } - return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), - model.getOptions()); - } + @Override + protected QueryMapper queryMapper() { + return bulkOperationContext.queryMapper(); + } - if (writeModel instanceof UpdateManyModel) { + @Override + protected Optional> entity() { + return bulkOperationContext.entity(); + } - UpdateManyModel model = (UpdateManyModel) writeModel; + private Document getMappedObject(Object source) { - return new 
UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), - model.getOptions()); + if (source instanceof Document document) { + return document; } - if (writeModel instanceof DeleteOneModel) { + Document sink = new Document(); - DeleteOneModel model = (DeleteOneModel) writeModel; + mongoOperations.getConverter().write(source, sink); + return sink; + } - return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions()); - } + private void addModel(Object source, WriteModel model) { + models.add(new SourceAwareWriteModelHolder(source, model)); + } - if (writeModel instanceof DeleteManyModel) { + private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) { - DeleteManyModel model = (DeleteManyModel) writeModel; + if (holder.model() instanceof InsertOneModel model) { - return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions()); - } + Document target = model.getDocument(); + maybeInvokeAfterSaveCallback(holder.source(), target); + } else if (holder.model() instanceof ReplaceOneModel model) { - return writeModel; + Document target = model.getReplacement(); + maybeInvokeAfterSaveCallback(holder.source(), target); + } } - private Bson getMappedUpdate(Bson update) { - return bulkOperationContext.getUpdateMapper().getMappedObject(update, bulkOperationContext.getEntity()); + private void publishEvent(MongoMappingEvent event) { + bulkOperationContext.publishEvent(event); } - private Bson getMappedQuery(Bson query) { - return bulkOperationContext.getQueryMapper().getMappedObject(query, bulkOperationContext.getEntity()); + private Object maybeInvokeBeforeConvertCallback(Object value) { + return bulkOperationContext.callback(BeforeConvertCallback.class, value, collectionName); } - private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) { - - BulkWriteOptions options = new BulkWriteOptions(); - - switch (bulkMode) { - case ORDERED: - return options.ordered(true); - case 
UNORDERED: - return options.ordered(false); - } + private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(BeforeSaveCallback.class, value, mappedDocument, collectionName); + } - throw new IllegalStateException("BulkMode was null!"); + private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(AfterSaveCallback.class, value, mappedDocument, collectionName); } /** - * {@link BulkOperationContext} holds information about - * {@link org.springframework.data.mongodb.core.BulkOperations.BulkMode} the entity in use as well as references to + * {@link BulkOperationContext} holds information about {@link BulkMode} the entity in use as well as references to * {@link QueryMapper} and {@link UpdateMapper}. * * @author Christoph Strobl * @since 2.0 */ - @Value - static class BulkOperationContext { + record BulkOperationContext(BulkMode bulkMode, Optional> entity, + QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher, + @Nullable EntityCallbacks entityCallbacks) { + + public boolean skipEntityCallbacks() { + return entityCallbacks == null; + } - @NonNull BulkMode bulkMode; - @NonNull Optional> entity; - @NonNull QueryMapper queryMapper; - @NonNull UpdateMapper updateMapper; + public boolean skipEventPublishing() { + return eventPublisher == null; + } + + @SuppressWarnings("rawtypes") + public T callback(Class callbackType, T entity, String collectionName) { + + if (skipEntityCallbacks()) { + return entity; + } + + return entityCallbacks.callback(callbackType, entity, collectionName); + } + + @SuppressWarnings("rawtypes") + public T callback(Class callbackType, T entity, Document document, + String collectionName) { + + if (skipEntityCallbacks()) { + return entity; + } + + return entityCallbacks.callback(callbackType, entity, document, collectionName); + } + + public void 
publishEvent(ApplicationEvent event) { + + if (skipEventPublishing()) { + return; + } + + eventPublisher.publishEvent(event); + } } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java index 7871532f10..2057e2f046 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,14 @@ */ package org.springframework.data.mongodb.core; -import static org.springframework.data.mongodb.core.MongoTemplate.*; - import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.bson.Document; import org.springframework.dao.DataAccessException; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.UncategorizedMongoDbException; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.index.IndexDefinition; import org.springframework.data.mongodb.core.index.IndexInfo; @@ -31,6 +30,7 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.lang.Nullable; import 
org.springframework.util.Assert; +import org.springframework.util.NumberUtils; import com.mongodb.MongoException; import com.mongodb.client.MongoCollection; @@ -50,19 +50,23 @@ public class DefaultIndexOperations implements IndexOperations { private static final String PARTIAL_FILTER_EXPRESSION_KEY = "partialFilterExpression"; - private final MongoDbFactory mongoDbFactory; private final String collectionName; private final QueryMapper mapper; private final @Nullable Class type; + private final MongoOperations mongoOperations; + /** * Creates a new {@link DefaultIndexOperations}. * * @param mongoDbFactory must not be {@literal null}. * @param collectionName must not be {@literal null}. * @param queryMapper must not be {@literal null}. + * @deprecated since 2.1. Please use + * {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}. */ - public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) { + @Deprecated + public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) { this(mongoDbFactory, collectionName, queryMapper, null); } @@ -74,41 +78,56 @@ public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionNa * @param queryMapper must not be {@literal null}. * @param type Type used for mapping potential partial index filter expression. Can be {@literal null}. * @since 1.10 + * @deprecated since 2.1. Please use + * {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}. 
*/ - public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper, + @Deprecated + public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper, @Nullable Class type) { - Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!"); - Assert.notNull(collectionName, "Collection name can not be null!"); - Assert.notNull(queryMapper, "QueryMapper must not be null!"); + Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null"); + Assert.notNull(collectionName, "Collection name can not be null"); + Assert.notNull(queryMapper, "QueryMapper must not be null"); - this.mongoDbFactory = mongoDbFactory; this.collectionName = collectionName; this.mapper = queryMapper; this.type = type; + this.mongoOperations = new MongoTemplate(mongoDbFactory); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition) + /** + * Creates a new {@link DefaultIndexOperations}. + * + * @param mongoOperations must not be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @param type can be {@literal null}. 
+ * @since 2.1 */ - public String ensureIndex(final IndexDefinition indexDefinition) { + public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName, @Nullable Class type) { - return execute(collection -> { + Assert.notNull(mongoOperations, "MongoOperations must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - Document indexOptions = indexDefinition.getIndexOptions(); + this.mongoOperations = mongoOperations; + this.mapper = new QueryMapper(mongoOperations.getConverter()); + this.collectionName = collectionName; + this.type = type; + } + + @Override + public String ensureIndex(IndexDefinition indexDefinition) { - IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition); + return execute(collection -> { - if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) { + MongoPersistentEntity entity = lookupPersistentEntity(type, collectionName); - Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY)); + IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition); - ops.partialFilterExpression(mapper.getMappedObject((Document) indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), - lookupPersistentEntity(type, collectionName))); - } + indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity); + indexOptions = addDefaultCollationIfRequired(indexOptions, entity); - return collection.createIndex(indexDefinition.getIndexKeys(), ops); + Document mappedKeys = mapper.getMappedSort(indexDefinition.getIndexKeys(), entity); + return collection.createIndex(mappedKeys, indexOptions); }); } @@ -130,11 +149,8 @@ private MongoPersistentEntity lookupPersistentEntity(@Nullable Class entit return null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexOperations#dropIndex(java.lang.String) - */ - public void dropIndex(final 
String name) { + @Override + public void dropIndex(String name) { execute(collection -> { collection.dropIndex(name); @@ -143,18 +159,27 @@ public void dropIndex(final String name) { } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexOperations#dropAllIndexes() - */ + @Override + public void alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) { + + Document indexOptions = new Document("name", name); + indexOptions.putAll(options.toDocument()); + + Document result = mongoOperations + .execute(db -> db.runCommand(new Document("collMod", collectionName).append("index", indexOptions))); + + if (NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) { + throw new UncategorizedMongoDbException( + "Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null); + } + } + + @Override public void dropAllIndexes() { dropIndex("*"); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexOperations#getIndexInfo() - */ + @Override public List getIndexInfo() { return execute(new CollectionCallback>() { @@ -168,7 +193,8 @@ public List doInCollection(MongoCollection collection) private List getIndexData(MongoCursor cursor) { - List indexInfoList = new ArrayList(); + int available = cursor.available(); + List indexInfoList = available > 0 ? 
new ArrayList<>(available) : new ArrayList<>(); while (cursor.hasNext()) { @@ -185,13 +211,30 @@ private List getIndexData(MongoCursor cursor) { @Nullable public T execute(CollectionCallback callback) { - Assert.notNull(callback, "CollectionCallback must not be null!"); + Assert.notNull(callback, "CollectionCallback must not be null"); + + return mongoOperations.execute(collectionName, callback); + } - try { - MongoCollection collection = mongoDbFactory.getDb().getCollection(collectionName); - return callback.doInCollection(collection); - } catch (RuntimeException e) { - throw potentiallyConvertRuntimeException(e, mongoDbFactory.getExceptionTranslator()); + private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions, + @Nullable MongoPersistentEntity entity) { + + if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) { + return ops; } + + Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY)); + return ops.partialFilterExpression( + mapper.getMappedSort((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity)); + } + + private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, + @Nullable MongoPersistentEntity entity) { + + if (ops.getCollation() != null || entity == null || !entity.hasCollation()) { + return ops; + } + + return ops.collation(entity.getCollation().toMongoCollation()); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java index d20404af85..e2471dbb14 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. 
+ * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,13 @@ */ package org.springframework.data.mongodb.core; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.index.IndexOperationsProvider; /** - * {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDbFactory}. + * {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDatabaseFactory}. * * @author Mark Paluch * @author Christoph Strobl @@ -29,25 +29,21 @@ */ class DefaultIndexOperationsProvider implements IndexOperationsProvider { - private final MongoDbFactory mongoDbFactory; + private final MongoDatabaseFactory mongoDbFactory; private final QueryMapper mapper; /** * @param mongoDbFactory must not be {@literal null}. * @param mapper must not be {@literal null}. 
*/ - DefaultIndexOperationsProvider(MongoDbFactory mongoDbFactory, QueryMapper mapper) { + DefaultIndexOperationsProvider(MongoDatabaseFactory mongoDbFactory, QueryMapper mapper) { this.mongoDbFactory = mongoDbFactory; this.mapper = mapper; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexOperationsProvider#reactiveIndexOps(java.lang.String) - */ @Override - public IndexOperations indexOps(String collectionName) { - return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper); + public IndexOperations indexOps(String collectionName, Class type) { + return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper, type); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperations.java new file mode 100644 index 0000000000..59b7ccd63e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperations.java @@ -0,0 +1,393 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.context.ApplicationEvent; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.mapping.callback.EntityCallback; +import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.WriteConcern; +import com.mongodb.bulk.BulkWriteResult; +import com.mongodb.client.model.BulkWriteOptions; +import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.UpdateManyModel; +import com.mongodb.client.model.UpdateOneModel; +import 
com.mongodb.client.model.UpdateOptions; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * Default implementation for {@link ReactiveBulkOperations}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.1 + */ +class DefaultReactiveBulkOperations extends BulkOperationsSupport implements ReactiveBulkOperations { + + private final ReactiveMongoOperations mongoOperations; + private final String collectionName; + private final ReactiveBulkOperationContext bulkOperationContext; + private final List> models = new ArrayList<>(); + + private @Nullable WriteConcern defaultWriteConcern; + + private BulkWriteOptions bulkOptions; + + /** + * Creates a new {@link DefaultReactiveBulkOperations} for the given {@link MongoOperations}, collection name and + * {@link ReactiveBulkOperationContext}. + * + * @param mongoOperations must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param bulkOperationContext must not be {@literal null}. + */ + DefaultReactiveBulkOperations(ReactiveMongoOperations mongoOperations, String collectionName, + ReactiveBulkOperationContext bulkOperationContext) { + + super(collectionName); + + Assert.notNull(mongoOperations, "MongoOperations must not be null"); + Assert.hasText(collectionName, "CollectionName must not be null nor empty"); + Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null"); + + this.mongoOperations = mongoOperations; + this.collectionName = collectionName; + this.bulkOperationContext = bulkOperationContext; + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); + } + + /** + * Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}. + * + * @param defaultWriteConcern can be {@literal null}. 
+ */ + void setDefaultWriteConcern(@Nullable WriteConcern defaultWriteConcern) { + this.defaultWriteConcern = defaultWriteConcern; + } + + @Override + public ReactiveBulkOperations insert(Object document) { + + Assert.notNull(document, "Document must not be null"); + + this.models.add(Mono.just(document).flatMap(it -> { + maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName)); + return maybeInvokeBeforeConvertCallback(it); + }).map(it -> new SourceAwareWriteModelHolder(it, new InsertOneModel<>(getMappedObject(it))))); + + return this; + } + + @Override + public ReactiveBulkOperations insert(List documents) { + + Assert.notNull(documents, "Documents must not be null"); + + documents.forEach(this::insert); + + return this; + } + + @Override + public ReactiveBulkOperations updateOne(Query query, UpdateDefinition update) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + + update(query, update, false, false); + return this; + } + + @Override + public ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + + update(query, update, false, true); + return this; + } + + @Override + public ReactiveBulkOperations upsert(Query query, UpdateDefinition update) { + return update(query, update, true, true); + } + + @Override + public ReactiveBulkOperations remove(Query query) { + + Assert.notNull(query, "Query must not be null"); + + DeleteOptions deleteOptions = new DeleteOptions(); + query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation); + + this.models.add(Mono.just(query) + .map(it -> new SourceAwareWriteModelHolder(it, new DeleteManyModel<>(it.getQueryObject(), deleteOptions)))); + + return this; + } + + @Override + public ReactiveBulkOperations remove(List removes) { + + Assert.notNull(removes, "Removals must not be null"); + + for (Query query : 
removes) { + remove(query); + } + + return this; + } + + @Override + public ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null"); + + ReplaceOptions replaceOptions = new ReplaceOptions(); + replaceOptions.upsert(options.isUpsert()); + if (query.isSorted()) { + replaceOptions.sort(query.getSortObject()); + } + query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation); + + this.models.add(Mono.just(replacement).flatMap(it -> { + maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName)); + return maybeInvokeBeforeConvertCallback(it); + }).map(it -> new SourceAwareWriteModelHolder(it, + new ReplaceOneModel<>(query.getQueryObject(), getMappedObject(it), replaceOptions)))); + + return this; + } + + @Override + public Mono execute() { + + try { + return mongoOperations.execute(collectionName, this::bulkWriteTo).next(); + } finally { + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); + } + } + + private Mono bulkWriteTo(MongoCollection collection) { + + if (defaultWriteConcern != null) { + collection = collection.withWriteConcern(defaultWriteConcern); + } + + Flux concat = Flux.concat(models).flatMapSequential(it -> { + + if (it.model() instanceof InsertOneModel iom) { + + Document target = iom.getDocument(); + maybeEmitBeforeSaveEvent(it); + return maybeInvokeBeforeSaveCallback(it.source(), target) + .map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, iom))); + } else if (it.model() instanceof ReplaceOneModel rom) { + + Document target = rom.getReplacement(); + maybeEmitBeforeSaveEvent(it); + return maybeInvokeBeforeSaveCallback(it.source(), target) + .map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, rom))); 
+ } + + return Mono.just(new SourceAwareWriteModelHolder(it.source(), mapWriteModel(it.source(), it.model()))); + }); + + MongoCollection theCollection = collection; + return concat.collectList().flatMap(it -> { + + return Mono + .from(theCollection + .bulkWrite(it.stream().map(SourceAwareWriteModelHolder::model).collect(Collectors.toList()), bulkOptions)) + .doOnSuccess(state -> { + it.forEach(this::maybeEmitAfterSaveEvent); + }).flatMap(state -> { + List> monos = it.stream().map(this::maybeInvokeAfterSaveCallback).collect(Collectors.toList()); + + return Flux.concat(monos).then(Mono.just(state)); + }); + }); + } + + /** + * Performs update and upsert bulk operations. + * + * @param query the {@link Query} to determine documents to update. + * @param update the {@link Update} to perform, must not be {@literal null}. + * @param upsert whether to upsert. + * @param multi whether to issue a multi-update. + * @return the {@link BulkOperations} with the update registered. + */ + private ReactiveBulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + + UpdateOptions options = computeUpdateOptions(query, update, upsert, multi); + + this.models.add(Mono.just(update).map(it -> { + if (multi) { + return new SourceAwareWriteModelHolder(update, + new UpdateManyModel<>(query.getQueryObject(), it.getUpdateObject(), options)); + } + return new SourceAwareWriteModelHolder(update, + new UpdateOneModel<>(query.getQueryObject(), it.getUpdateObject(), options)); + })); + + return this; + } + + @Override + protected void maybeEmitEvent(ApplicationEvent event) { + bulkOperationContext.publishEvent(event); + } + + @Override + protected UpdateMapper updateMapper() { + return bulkOperationContext.updateMapper(); + } + + @Override + protected QueryMapper queryMapper() { + return bulkOperationContext.queryMapper(); + } + + @Override + protected 
Optional> entity() { + return bulkOperationContext.entity(); + } + + private Document getMappedObject(Object source) { + + if (source instanceof Document) { + return (Document) source; + } + + Document sink = new Document(); + + mongoOperations.getConverter().write(source, sink); + return sink; + } + + private Mono maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) { + + if (holder.model() instanceof InsertOneModel) { + + Document target = ((InsertOneModel) holder.model()).getDocument(); + return maybeInvokeAfterSaveCallback(holder.source(), target); + } else if (holder.model() instanceof ReplaceOneModel) { + + Document target = ((ReplaceOneModel) holder.model()).getReplacement(); + return maybeInvokeAfterSaveCallback(holder.source(), target); + } + return Mono.just(holder.source()); + } + + private Mono maybeInvokeBeforeConvertCallback(Object value) { + return bulkOperationContext.callback(ReactiveBeforeConvertCallback.class, value, collectionName); + } + + private Mono maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(ReactiveBeforeSaveCallback.class, value, mappedDocument, collectionName); + } + + private Mono maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(ReactiveAfterSaveCallback.class, value, mappedDocument, collectionName); + } + + /** + * {@link ReactiveBulkOperationContext} holds information about {@link BulkMode} the entity in use as well as + * references to {@link QueryMapper} and {@link UpdateMapper}. 
+ * + * @author Christoph Strobl + * @since 2.0 + */ + record ReactiveBulkOperationContext(BulkMode bulkMode, Optional> entity, + QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher, + @Nullable ReactiveEntityCallbacks entityCallbacks) { + + public boolean skipEntityCallbacks() { + return entityCallbacks == null; + } + + public boolean skipEventPublishing() { + return eventPublisher == null; + } + + @SuppressWarnings("rawtypes") + public Mono callback(Class callbackType, T entity, String collectionName) { + + if (skipEntityCallbacks()) { + return Mono.just(entity); + } + + return entityCallbacks.callback(callbackType, entity, collectionName); + } + + @SuppressWarnings("rawtypes") + public Mono callback(Class callbackType, T entity, Document document, + String collectionName) { + + if (skipEntityCallbacks()) { + return Mono.just(entity); + } + + return entityCallbacks.callback(callbackType, entity, document, collectionName); + } + + public void publishEvent(ApplicationEvent event) { + + if (skipEventPublishing()) { + return; + } + + eventPublisher.publishEvent(event); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java index a2d027d162..8e78f421f4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,6 +22,7 @@ import java.util.Optional; import org.bson.Document; +import org.springframework.data.mongodb.UncategorizedMongoDbException; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.index.IndexDefinition; import org.springframework.data.mongodb.core.index.IndexInfo; @@ -29,6 +30,7 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.NumberUtils; import com.mongodb.client.model.IndexOptions; @@ -76,9 +78,9 @@ public DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, S private DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, String collectionName, QueryMapper queryMapper, Optional> type) { - Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null!"); - Assert.notNull(collectionName, "Collection must not be null!"); - Assert.notNull(queryMapper, "QueryMapper must not be null!"); + Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null"); + Assert.notNull(collectionName, "Collection must not be null"); + Assert.notNull(queryMapper, "QueryMapper must not be null"); this.mongoOperations = mongoOperations; this.collectionName = collectionName; @@ -86,35 +88,42 @@ private DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, this.type = type; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition) - */ - public Mono ensureIndex(final IndexDefinition indexDefinition) { + 
@Override + public Mono ensureIndex(IndexDefinition indexDefinition) { return mongoOperations.execute(collectionName, collection -> { - Document indexOptions = indexDefinition.getIndexOptions(); - - IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition); - - if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) { + MongoPersistentEntity entity = type + .map(val -> (MongoPersistentEntity) queryMapper.getMappingContext().getRequiredPersistentEntity(val)) + .orElseGet(() -> lookupPersistentEntity(collectionName)); - Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY)); + IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition); - MongoPersistentEntity entity = type - .map(val -> (MongoPersistentEntity) queryMapper.getMappingContext().getRequiredPersistentEntity(val)) - .orElseGet(() -> lookupPersistentEntity(collectionName)); + indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity); + indexOptions = addDefaultCollationIfRequired(indexOptions, entity); - ops = ops.partialFilterExpression( - queryMapper.getMappedObject(indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY, Document.class), entity)); - } - - return collection.createIndex(indexDefinition.getIndexKeys(), ops); + return collection.createIndex(indexDefinition.getIndexKeys(), indexOptions); }).next(); } + @Override + public Mono alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) { + + return mongoOperations.execute(db -> { + Document indexOptions = new Document("name", name); + indexOptions.putAll(options.toDocument()); + + return Flux.from(db.runCommand(new Document("collMod", collectionName).append("index", indexOptions))) + .doOnNext(result -> { + if (NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) { + throw new UncategorizedMongoDbException( + "Index '%s' 
could not be modified. Response was %s".formatted(name, result.toJson()), null); + } + }); + }).then(); + } + @Nullable private MongoPersistentEntity lookupPersistentEntity(String collection) { @@ -126,26 +135,42 @@ private MongoPersistentEntity lookupPersistentEntity(String collection) { .orElse(null); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropIndex(java.lang.String) - */ - public Mono dropIndex(final String name) { + @Override + public Mono dropIndex(String name) { return mongoOperations.execute(collectionName, collection -> collection.dropIndex(name)).then(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropAllIndexes() - */ + @Override public Mono dropAllIndexes() { return dropIndex("*"); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#getIndexInfo() - */ + @Override public Flux getIndexInfo() { return mongoOperations.execute(collectionName, collection -> collection.listIndexes(Document.class)) // .map(IndexConverters.documentToIndexInfoConverter()::convert); } + + private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions, + @Nullable MongoPersistentEntity entity) { + + if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) { + return ops; + } + + Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY)); + return ops.partialFilterExpression( + queryMapper.getMappedObject((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity)); + } + + private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, + @Nullable MongoPersistentEntity entity) { + + if (ops.getCollation() != null || entity == null || !entity.hasCollation()) { + return ops; + } + + return ops.collation(entity.getCollation().toMongoCollation()); + } } diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java index 1792eca4f2..b236b4df28 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -29,9 +29,9 @@ import org.bson.Document; import org.bson.types.ObjectId; import org.springframework.dao.DataAccessException; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.script.ExecutableMongoScript; import org.springframework.data.mongodb.core.script.NamedMongoScript; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; @@ -42,13 +42,15 @@ import com.mongodb.client.MongoDatabase; /** - * Default implementation of {@link ScriptOperations} capable of saving and executing {@link ServerSideJavaScript}. + * Default implementation of {@link ScriptOperations} capable of saving and executing {@link ExecutableMongoScript}. * * @author Christoph Strobl * @author Oliver Gierke * @author Mark Paluch * @since 1.7 + * @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0. 
*/ +@Deprecated class DefaultScriptOperations implements ScriptOperations { private static final String SCRIPT_COLLECTION_NAME = "system.js"; @@ -63,41 +65,29 @@ class DefaultScriptOperations implements ScriptOperations { */ public DefaultScriptOperations(MongoOperations mongoOperations) { - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); this.mongoOperations = mongoOperations; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#register(org.springframework.data.mongodb.core.script.ExecutableMongoScript) - */ @Override public NamedMongoScript register(ExecutableMongoScript script) { return register(new NamedMongoScript(generateScriptName(), script)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#register(org.springframework.data.mongodb.core.script.NamedMongoScript) - */ @Override public NamedMongoScript register(NamedMongoScript script) { - Assert.notNull(script, "Script must not be null!"); + Assert.notNull(script, "Script must not be null"); mongoOperations.save(script, SCRIPT_COLLECTION_NAME); return script; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#execute(org.springframework.data.mongodb.core.script.ExecutableMongoScript, java.lang.Object[]) - */ @Override - public Object execute(final ExecutableMongoScript script, final Object... args) { + public Object execute(ExecutableMongoScript script, Object... 
args) { - Assert.notNull(script, "Script must not be null!"); + Assert.notNull(script, "Script must not be null"); return mongoOperations.execute(new DbCallback() { @@ -113,14 +103,10 @@ public Object doInDB(MongoDatabase db) throws MongoException, DataAccessExceptio }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#call(java.lang.String, java.lang.Object[]) - */ @Override - public Object call(final String scriptName, final Object... args) { + public Object call(String scriptName, Object... args) { - Assert.hasText(scriptName, "ScriptName must not be null or empty!"); + Assert.hasText(scriptName, "ScriptName must not be null or empty"); return mongoOperations.execute(new DbCallback() { @@ -133,22 +119,15 @@ public Object doInDB(MongoDatabase db) throws MongoException, DataAccessExceptio }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#exists(java.lang.String) - */ @Override public boolean exists(String scriptName) { - Assert.hasText(scriptName, "ScriptName must not be null or empty!"); + Assert.hasText(scriptName, "ScriptName must not be null or empty"); - return mongoOperations.exists(query(where("_id").is(scriptName)), NamedMongoScript.class, SCRIPT_COLLECTION_NAME); + return mongoOperations.exists(query(where(FieldName.ID.name()).is(scriptName)), NamedMongoScript.class, + SCRIPT_COLLECTION_NAME); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#getScriptNames() - */ @Override public Set getScriptNames() { @@ -173,7 +152,7 @@ private Object[] convertScriptArgs(boolean quote, Object... args) { return args; } - List convertedValues = new ArrayList(args.length); + List convertedValues = new ArrayList<>(args.length); for (Object arg : args) { convertedValues.add(arg instanceof String && quote ? 
String.format("'%s'", arg) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java index 016a3b0a28..8b4de14e05 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java index 7d07ab775d..54f85051fb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
 * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,8 +22,8 @@ /** * An interface used by {@link MongoTemplate} for processing documents returned from a MongoDB query on a per-document - * basis. Implementations of this interface perform the actual work of prcoessing each document but don't need to worry - * about exception handling. {@MongoException}s will be caught and translated by the calling MongoTemplate An + * basis. Implementations of this interface perform the actual work of processing each document but don't need to worry + * about exception handling. {@link MongoException}s will be caught and translated by the calling MongoTemplate. A * DocumentCallbackHandler is typically stateful: It keeps the result state within the object, to be available later for * later inspection. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java new file mode 100644 index 0000000000..601b6898b8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java @@ -0,0 +1,31 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +/** + * Encryption algorithms supported by MongoDB Client Side Field Level Encryption. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 3.3 + */ +public final class EncryptionAlgorithms { + + public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic"; + public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Random = "AEAD_AES_256_CBC_HMAC_SHA_512-Random"; + public static final String RANGE = "Range"; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityLifecycleEventDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityLifecycleEventDelegate.java new file mode 100644 index 0000000000..94352ad65c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityLifecycleEventDelegate.java @@ -0,0 +1,60 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.lang.Nullable; + +/** + * Delegate class to encapsulate lifecycle event configuration and publishing. 
+ * + * @author Mark Paluch + * @since 4.0 + * @see ApplicationEventPublisher + */ +class EntityLifecycleEventDelegate { + + private @Nullable ApplicationEventPublisher publisher; + private boolean eventsEnabled = true; + + public void setPublisher(@Nullable ApplicationEventPublisher publisher) { + this.publisher = publisher; + } + + public boolean isEventsEnabled() { + return eventsEnabled; + } + + public void setEventsEnabled(boolean eventsEnabled) { + this.eventsEnabled = eventsEnabled; + } + + /** + * Publish an application event if event publishing is enabled. + * + * @param event the application event. + */ + public void publishEvent(Object event) { + + if (canPublishEvent()) { + publisher.publishEvent(event); + } + } + + private boolean canPublishEvent() { + return publisher != null && eventsEnabled; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java new file mode 100644 index 0000000000..38269787cb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java @@ -0,0 +1,1176 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.time.Duration; +import java.util.Collection; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.function.Predicate; + +import org.bson.BsonNull; +import org.bson.Document; +import org.springframework.core.convert.ConversionService; +import org.springframework.core.env.Environment; +import org.springframework.core.env.EnvironmentCapable; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.expression.ValueEvaluationContext; +import org.springframework.data.mapping.IdentifierAccessor; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PropertyPath; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.ConvertingPropertyAccessor; +import org.springframework.data.mongodb.core.CollectionOptions.EncryptedFieldsOptions; +import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper; +import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import 
 org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.core.validation.Validator; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.mongodb.util.DurationUtil; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.EntityProjectionIntrospector; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.TargetAware; +import org.springframework.data.util.Optionals; +import org.springframework.expression.spel.support.SimpleEvaluationContext; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.client.model.ChangeStreamPreAndPostImagesOptions; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.TimeSeriesGranularity; +import com.mongodb.client.model.ValidationOptions; + +/** + * Common operations performed on an entity in the context of its mapping metadata. 
+ * + * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl + * @author Ben Foster + * @author Ross Lawley + * @since 2.1 + * @see MongoTemplate + * @see ReactiveMongoTemplate + */ +class EntityOperations { + + private static final String ID_FIELD = FieldName.ID.name(); + + private final MappingContext, MongoPersistentProperty> context; + private final QueryMapper queryMapper; + + private final EntityProjectionIntrospector introspector; + + private final MongoJsonSchemaMapper schemaMapper; + + private @Nullable Environment environment; + + EntityOperations(MongoConverter converter) { + this(converter, new QueryMapper(converter)); + } + + EntityOperations(MongoConverter converter, QueryMapper queryMapper) { + this(converter, converter.getMappingContext(), converter.getCustomConversions(), converter.getProjectionFactory(), + queryMapper); + } + + EntityOperations(MongoConverter converter, + MappingContext, MongoPersistentProperty> context, + CustomConversions conversions, ProjectionFactory projectionFactory, QueryMapper queryMapper) { + this.context = context; + this.queryMapper = queryMapper; + this.introspector = EntityProjectionIntrospector.create(projectionFactory, + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and(((target, underlyingType) -> !conversions.isSimpleType(target))), + context); + this.schemaMapper = new MongoJsonSchemaMapper(converter); + if (converter instanceof EnvironmentCapable environmentCapable) { + this.environment = environmentCapable.getEnvironment(); + } + } + + /** + * Creates a new {@link Entity} for the given bean. + * + * @param entity must not be {@literal null}. + * @return new instance of {@link Entity}. 
+ */ + @SuppressWarnings({ "unchecked", "rawtypes" }) + Entity forEntity(T entity) { + + Assert.notNull(entity, "Bean must not be null"); + + if (entity instanceof TargetAware targetAware) { + return new SimpleMappedEntity((Map) targetAware.getTarget(), this); + } + + if (entity instanceof String) { + return new UnmappedEntity(parse(entity.toString()), this); + } + + if (entity instanceof Map) { + return new SimpleMappedEntity((Map) entity, this); + } + + return MappedEntity.of(entity, context, this); + } + + /** + * Creates a new {@link AdaptibleEntity} for the given bean and {@link ConversionService}. + * + * @param entity must not be {@literal null}. + * @param conversionService must not be {@literal null}. + * @return new instance of {@link AdaptibleEntity}. + */ + @SuppressWarnings({ "unchecked", "rawtypes" }) + AdaptibleEntity forEntity(T entity, ConversionService conversionService) { + + Assert.notNull(entity, "Bean must not be null"); + Assert.notNull(conversionService, "ConversionService must not be null"); + + if (entity instanceof String) { + return new UnmappedEntity(parse(entity.toString()), this); + } + + if (entity instanceof Map) { + return new SimpleMappedEntity((Map) entity, this); + } + + return AdaptibleMappedEntity.of(entity, context, conversionService, this); + } + + /** + * @param source can be {@literal null}. + * @return {@literal true} if the given value is an {@literal array}, {@link Collection} or {@link Iterator}. + * @since 3.2 + */ + static boolean isCollectionLike(@Nullable Object source) { + + if (source == null) { + return false; + } + + return ObjectUtils.isArray(source) || source instanceof Collection || source instanceof Iterator; + } + + /** + * @param entityClass should not be null. + * @return the {@link MongoPersistentEntity#getCollection() collection name}. 
+ */ + public String determineCollectionName(@Nullable Class entityClass) { + + if (entityClass == null) { + throw new InvalidDataAccessApiUsageException( + "No class parameter provided, entity collection can't be determined"); + } + + MongoPersistentEntity persistentEntity = context.getPersistentEntity(entityClass); + + if (persistentEntity == null) { + throw new MappingException(String.format( + "Cannot determine collection name from type '%s'. Is it a store native type?", entityClass.getName())); + } + + return persistentEntity.getCollection(); + } + + public Query getByIdInQuery(Collection entities) { + + MultiValueMap byIds = new LinkedMultiValueMap<>(); + + entities.stream() // + .map(this::forEntity) // + .forEach(it -> byIds.add(it.getIdFieldName(), it.getId())); + + Criteria[] criterias = byIds.entrySet().stream() // + .map(it -> Criteria.where(it.getKey()).in(it.getValue())) // + .toArray(Criteria[]::new); + + return new Query(criterias.length == 1 ? criterias[0] : new Criteria().orOperator(criterias)); + } + + /** + * Returns the name of the identifier property. Considers mapping information but falls back to the MongoDB default of + * {@code _id} if no identifier property can be found. + * + * @param type must not be {@literal null}. + * @return never {@literal null}. + */ + public String getIdPropertyName(Class type) { + + Assert.notNull(type, "Type must not be null"); + + MongoPersistentEntity persistentEntity = context.getPersistentEntity(type); + + if (persistentEntity != null && persistentEntity.getIdProperty() != null) { + return persistentEntity.getRequiredIdProperty().getName(); + } + + return ID_FIELD; + } + + /** + * Return the name used for {@code $geoNear.distanceField} avoiding clashes with potentially existing properties. + * + * @param domainType must not be {@literal null}. + * @return the name of the distanceField to use. {@literal dis} by default. 
+ * @since 2.2 + */ + public String nearQueryDistanceFieldName(Class domainType) { + + MongoPersistentEntity persistentEntity = context.getPersistentEntity(domainType); + if (persistentEntity == null || persistentEntity.getPersistentProperty("dis") == null) { + return "dis"; + } + + String distanceFieldName = "calculated-distance"; + int counter = 0; + while (persistentEntity.getPersistentProperty(distanceFieldName) != null) { + distanceFieldName += "-" + (counter++); + } + + return distanceFieldName; + } + + private static Document parse(String source) { + + try { + return Document.parse(source); + } catch (org.bson.json.JsonParseException o_O) { + throw new MappingException("Could not parse given String to save into a JSON document", o_O); + } catch (RuntimeException o_O) { + + // legacy 3.x exception + if (ClassUtils.matchesTypeName(o_O.getClass(), "JSONParseException")) { + throw new MappingException("Could not parse given String to save into a JSON document", o_O); + } + throw o_O; + } + } + + public TypedOperations forType(@Nullable Class entityClass) { + + if (entityClass != null) { + + MongoPersistentEntity entity = context.getPersistentEntity(entityClass); + + if (entity != null) { + return new TypedEntityOperations(entity, environment); + } + + } + return UntypedOperations.instance(); + } + + /** + * Introspect the given {@link Class result type} in the context of the {@link Class entity type} whether the returned + * type is a projection and what property paths are participating in the projection. + * + * @param resultType the type to project on. Must not be {@literal null}. + * @param entityType the source domain type. Must not be {@literal null}. + * @return the introspection result. 
+ * @since 3.4 + * @see EntityProjectionIntrospector#introspect(Class, Class) + */ + public EntityProjection introspectProjection(Class resultType, Class entityType) { + + MongoPersistentEntity persistentEntity = queryMapper.getMappingContext().getPersistentEntity(entityType); + if (persistentEntity == null && !resultType.isInterface() || ClassUtils.isAssignable(Document.class, resultType)) { + return (EntityProjection) EntityProjection.nonProjecting(resultType); + } + return introspector.introspect(resultType, entityType); + } + + /** + * Convert {@link CollectionOptions} to {@link CreateCollectionOptions} using {@link Class entityType} to obtain + * mapping metadata. + * + * @param collectionOptions + * @param entityType + * @return + * @since 3.4 + */ + public CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions, + Class entityType) { + + Optional collation = Optionals.firstNonEmpty( + () -> Optional.ofNullable(collectionOptions).flatMap(CollectionOptions::getCollation), + () -> forType(entityType).getCollation());// + + CreateCollectionOptions result = new CreateCollectionOptions(); + collation.map(Collation::toMongoCollation).ifPresent(result::collation); + + if (collectionOptions == null) { + return result; + } + + collectionOptions.getCapped().ifPresent(result::capped); + collectionOptions.getSize().ifPresent(result::sizeInBytes); + collectionOptions.getMaxDocuments().ifPresent(result::maxDocuments); + collectionOptions.getCollation().map(Collation::toMongoCollation).ifPresent(result::collation); + + collectionOptions.getValidationOptions().ifPresent(it -> { + + ValidationOptions validationOptions = new ValidationOptions(); + + it.getValidationAction().ifPresent(validationOptions::validationAction); + it.getValidationLevel().ifPresent(validationOptions::validationLevel); + + it.getValidator().ifPresent(val -> validationOptions.validator(getMappedValidator(val, entityType))); + + 
result.validationOptions(validationOptions); + }); + + collectionOptions.getTimeSeriesOptions().map(forType(entityType)::mapTimeSeriesOptions).ifPresent(it -> { + + com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions( + it.getTimeField()); + + if (StringUtils.hasText(it.getMetaField())) { + options.metaField(it.getMetaField()); + } + if (!Granularity.DEFAULT.equals(it.getGranularity())) { + options.granularity(TimeSeriesGranularity.valueOf(it.getGranularity().name().toUpperCase())); + } + + if (!it.getExpireAfter().isNegative()) { + result.expireAfter(it.getExpireAfter().toSeconds(), TimeUnit.SECONDS); + } + + result.timeSeriesOptions(options); + }); + + collectionOptions.getChangeStreamOptions() // + .map(CollectionOptions.CollectionChangeStreamOptions::getPreAndPostImages) // + .map(ChangeStreamPreAndPostImagesOptions::new) // + .ifPresent(result::changeStreamPreAndPostImagesOptions); + + collectionOptions.getEncryptedFieldsOptions() // + .map(EncryptedFieldsOptions::toDocument) // + .filter(Predicate.not(Document::isEmpty)) // + .ifPresent(result::encryptedFields); + + return result; + } + + private Document getMappedValidator(Validator validator, Class domainType) { + + Document validationRules = validator.toDocument(); + + if (validationRules.containsKey("$jsonSchema")) { + return schemaMapper.mapSchema(validationRules, domainType); + } + + return queryMapper.getMappedObject(validationRules, context.getPersistentEntity(domainType)); + } + + /** + * A representation of information about an entity. + * + * @author Oliver Gierke + * @author Christoph Strobl + * @since 2.1 + */ + interface Entity { + + /** + * Returns the field name of the identifier of the entity. + * + * @return + */ + String getIdFieldName(); + + /** + * Returns the identifier of the entity. + * + * @return + */ + Object getId(); + + /** + * Returns the property value for {@code key}. 
+ * + * @param key + * @return + * @since 4.1 + */ + @Nullable + Object getPropertyValue(String key); + + /** + * Returns the {@link Query} to find the entity by its identifier. + * + * @return + */ + Query getByIdQuery(); + + /** + * Returns the {@link Query} to remove an entity by its {@literal id} and if applicable {@literal version}. + * + * @return the {@link Query} to use for removing the entity. Never {@literal null}. + * @since 2.2 + */ + default Query getRemoveByQuery() { + return isVersionedEntity() ? getQueryForVersion() : getByIdQuery(); + } + + /** + * Returns the {@link Query} to find the entity in its current version. + * + * @return + */ + Query getQueryForVersion(); + + /** + * Maps the backing entity into a {@link MappedDocument} using the given {@link MongoWriter}. + * + * @param writer must not be {@literal null}. + * @return + */ + MappedDocument toMappedDocument(MongoWriter writer); + + /** + * Asserts that the identifier type is updatable in case it is not already set. + */ + default void assertUpdateableIdIfNotSet() {} + + /** + * Returns whether the entity is versioned, i.e. if it contains a version property. + * + * @return + */ + default boolean isVersionedEntity() { + return false; + } + + /** + * Returns the value of the version if the entity {@link #isVersionedEntity() has a version property}. + * + * @return the entity version. Can be {@literal null}. + * @throws IllegalStateException if the entity does not define a {@literal version} property. Make sure to check + * {@link #isVersionedEntity()}. + */ + @Nullable + Object getVersion(); + + /** + * Returns the underlying bean. + * + * @return + */ + T getBean(); + + /** + * Returns whether the entity is considered to be new. + * + * @return + * @since 2.1.2 + */ + boolean isNew(); + + /** + * @param sortObject + * @return + * @since 4.1 + * @throws IllegalStateException if a sort key yields {@literal null}. 
+ */ + Map extractKeys(Document sortObject, Class sourceType); + + } + + /** + * Information and commands on an entity. + * + * @author Oliver Gierke + * @since 2.1 + */ + interface AdaptibleEntity extends Entity { + + /** + * Populates the identifier of the backing entity if it has an identifier property and there's no identifier + * currently present. + * + * @param id must not be {@literal null}. + * @return + */ + @Nullable + T populateIdIfNecessary(@Nullable Object id); + + /** + * Initializes the version property of the current entity if available. + * + * @return the entity with the version property updated if available. + */ + T initializeVersionProperty(); + + /** + * Increments the value of the version property if available. + * + * @return the entity with the version property incremented if available. + */ + T incrementVersion(); + + /** + * Returns the current version value if the entity has a version property. + * + * @return the current version or {@literal null} in case it's uninitialized. + * @throws IllegalStateException if the entity does not define a {@literal version} property. 
+ */ + @Nullable + Number getVersion(); + } + + private static class UnmappedEntity> implements AdaptibleEntity { + + private final T map; + private final EntityOperations entityOperations; + + protected UnmappedEntity(T map, EntityOperations entityOperations) { + this.map = map; + this.entityOperations = entityOperations; + } + + @Override + public String getIdFieldName() { + return ID_FIELD; + } + + @Override + public Object getId() { + return getPropertyValue(ID_FIELD); + } + + @Override + public Object getPropertyValue(String key) { + return map.get(key); + } + + @Override + public Query getByIdQuery() { + return Query.query(Criteria.where(ID_FIELD).is(map.get(ID_FIELD))); + } + + @Nullable + @Override + public T populateIdIfNecessary(@Nullable Object id) { + + map.put(ID_FIELD, id); + + return map; + } + + @Override + public Query getQueryForVersion() { + throw new MappingException("Cannot query for version on plain Documents"); + } + + @Override + public MappedDocument toMappedDocument(MongoWriter writer) { + return MappedDocument.of(map instanceof Document document // + ? 
document // + : new Document(map)); + } + + @Override + public T initializeVersionProperty() { + return map; + } + + @Override + @Nullable + public Number getVersion() { + return null; + } + + @Override + public T incrementVersion() { + return map; + } + + @Override + public T getBean() { + return map; + } + + @Override + public boolean isNew() { + return map.get(ID_FIELD) != null; + } + + @Override + public Map extractKeys(Document sortObject, Class sourceType) { + + Map keyset = new LinkedHashMap<>(); + MongoPersistentEntity sourceEntity = entityOperations.context.getPersistentEntity(sourceType); + if (sourceEntity != null && sourceEntity.hasIdProperty()) { + keyset.put(sourceEntity.getRequiredIdProperty().getName(), getId()); + } else { + keyset.put(ID_FIELD, getId()); + } + + for (String key : sortObject.keySet()) { + + Object value = resolveValue(key, sourceEntity); + + if (value == null) { + throw new IllegalStateException( + String.format("Cannot extract value for key %s because its value is null", key)); + } + + keyset.put(key, value); + } + + return keyset; + } + + @Nullable + private Object resolveValue(String key, @Nullable MongoPersistentEntity sourceEntity) { + + if (sourceEntity == null) { + return BsonUtils.resolveValue(map, key); + } + PropertyPath from = PropertyPath.from(key, sourceEntity.getTypeInformation()); + PersistentPropertyPath persistentPropertyPath = entityOperations.context + .getPersistentPropertyPath(from); + return BsonUtils.resolveValue(map, persistentPropertyPath.toDotPath(MongoPersistentProperty::getFieldName)); + } + } + + private static class SimpleMappedEntity> extends UnmappedEntity { + + protected SimpleMappedEntity(T map, EntityOperations entityOperations) { + super(map, entityOperations); + } + + @Override + @SuppressWarnings("unchecked") + public MappedDocument toMappedDocument(MongoWriter writer) { + + T bean = getBean(); + bean = (T) (bean instanceof Document document// + ? 
document // + : new Document(bean)); + Document document = new Document(); + writer.write(bean, document); + + return MappedDocument.of(document); + } + } + + private static class MappedEntity implements Entity { + + private final MongoPersistentEntity entity; + private final IdentifierAccessor idAccessor; + private final PersistentPropertyAccessor propertyAccessor; + private final EntityOperations entityOperations; + + protected MappedEntity(MongoPersistentEntity entity, IdentifierAccessor idAccessor, + PersistentPropertyAccessor propertyAccessor, EntityOperations entityOperations) { + + this.entity = entity; + this.idAccessor = idAccessor; + this.propertyAccessor = propertyAccessor; + this.entityOperations = entityOperations; + } + + private static MappedEntity of(T bean, + MappingContext, MongoPersistentProperty> context, + EntityOperations entityOperations) { + + MongoPersistentEntity entity = context.getRequiredPersistentEntity(bean.getClass()); + IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean); + PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(bean); + + return new MappedEntity<>(entity, identifierAccessor, propertyAccessor, entityOperations); + } + + @Override + public String getIdFieldName() { + return entity.getRequiredIdProperty().getFieldName(); + } + + @Override + public Object getId() { + return idAccessor.getRequiredIdentifier(); + } + + @Override + public Object getPropertyValue(String key) { + return propertyAccessor.getProperty(entity.getRequiredPersistentProperty(key)); + } + + @Override + public Query getByIdQuery() { + + if (!entity.hasIdProperty()) { + throw new MappingException("No id property found for object of type " + entity.getType()); + } + + MongoPersistentProperty idProperty = entity.getRequiredIdProperty(); + + return Query.query(Criteria.where(idProperty.getName()).is(getId())); + } + + @Override + public Query getQueryForVersion() { + + MongoPersistentProperty idProperty = 
entity.getRequiredIdProperty(); + MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty(); + + return new Query(Criteria.where(idProperty.getName()).is(getId())// + .and(versionProperty.getName()).is(getVersion())); + } + + @Override + public MappedDocument toMappedDocument(MongoWriter writer) { + + T bean = propertyAccessor.getBean(); + + Document document = new Document(); + writer.write(bean, document); + + if (document.containsKey(ID_FIELD) && document.get(ID_FIELD) == null) { + document.remove(ID_FIELD); + } + + return MappedDocument.of(document); + } + + public void assertUpdateableIdIfNotSet() { + + if (!entity.hasIdProperty()) { + return; + } + + MongoPersistentProperty property = entity.getRequiredIdProperty(); + Object propertyValue = idAccessor.getIdentifier(); + + if (propertyValue != null) { + return; + } + + if (!MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(property.getType())) { + throw new InvalidDataAccessApiUsageException( + String.format("Cannot autogenerate id of type %s for entity of type %s", property.getType().getName(), + entity.getType().getName())); + } + } + + @Override + public boolean isVersionedEntity() { + return entity.hasVersionProperty(); + } + + @Override + @Nullable + public Object getVersion() { + return propertyAccessor.getProperty(entity.getRequiredVersionProperty()); + } + + @Override + public T getBean() { + return propertyAccessor.getBean(); + } + + @Override + public boolean isNew() { + return entity.isNew(propertyAccessor.getBean()); + } + + @Override + public Map extractKeys(Document sortObject, Class sourceType) { + + Map keyset = new LinkedHashMap<>(); + MongoPersistentEntity sourceEntity = entityOperations.context.getPersistentEntity(sourceType); + if (sourceEntity != null && sourceEntity.hasIdProperty()) { + keyset.put(sourceEntity.getRequiredIdProperty().getName(), getId()); + } else { + keyset.put(entity.getRequiredIdProperty().getName(), getId()); + } + + for (String key : 
sortObject.keySet()) { + + Object value; + if (key.indexOf('.') != -1) { + + // follow the path across nested levels. + // TODO: We should have a MongoDB-specific property path abstraction to allow diving into Document. + value = getNestedPropertyValue(key); + } else { + value = getPropertyValue(key); + } + + if (value == null) { + throw new IllegalStateException( + String.format("Cannot extract value for key %s because its value is null", key)); + } + + keyset.put(key, value); + } + + return keyset; + } + + @Nullable + private Object getNestedPropertyValue(String key) { + + String[] segments = key.split("\\."); + Entity currentEntity = this; + Object currentValue = BsonNull.VALUE; + + for (int i = 0; i < segments.length; i++) { + + String segment = segments[i]; + currentValue = currentEntity.getPropertyValue(segment); + + if (i < segments.length - 1) { + currentEntity = entityOperations.forEntity(currentValue); + } + } + + return currentValue != null ? currentValue : BsonNull.VALUE; + } + } + + private static class AdaptibleMappedEntity extends MappedEntity implements AdaptibleEntity { + + private final MongoPersistentEntity entity; + private final ConvertingPropertyAccessor propertyAccessor; + private final IdentifierAccessor identifierAccessor; + + private AdaptibleMappedEntity(MongoPersistentEntity entity, IdentifierAccessor identifierAccessor, + ConvertingPropertyAccessor propertyAccessor, EntityOperations entityOperations) { + + super(entity, identifierAccessor, propertyAccessor, entityOperations); + + this.entity = entity; + this.propertyAccessor = propertyAccessor; + this.identifierAccessor = identifierAccessor; + } + + private static AdaptibleEntity of(T bean, + MappingContext, MongoPersistentProperty> context, + ConversionService conversionService, EntityOperations entityOperations) { + + MongoPersistentEntity entity = context.getRequiredPersistentEntity(bean.getClass()); + IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean); + 
PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(bean); + + return new AdaptibleMappedEntity<>(entity, identifierAccessor, + new ConvertingPropertyAccessor<>(propertyAccessor, conversionService), entityOperations); + } + + @Nullable + @Override + public T populateIdIfNecessary(@Nullable Object id) { + + if (id == null) { + return propertyAccessor.getBean(); + } + + MongoPersistentProperty idProperty = entity.getIdProperty(); + if (idProperty == null) { + return propertyAccessor.getBean(); + } + + if (identifierAccessor.getIdentifier() != null) { + return propertyAccessor.getBean(); + } + + propertyAccessor.setProperty(idProperty, id); + return propertyAccessor.getBean(); + } + + @Override + @Nullable + public Number getVersion() { + + MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty(); + + return propertyAccessor.getProperty(versionProperty, Number.class); + } + + @Override + public T initializeVersionProperty() { + + if (!entity.hasVersionProperty()) { + return propertyAccessor.getBean(); + } + + MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty(); + + propertyAccessor.setProperty(versionProperty, versionProperty.getType().isPrimitive() ? 1 : 0); + + return propertyAccessor.getBean(); + } + + @Override + public T incrementVersion() { + + MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty(); + Number version = getVersion(); + Number nextVersion = version == null ? 0 : version.longValue() + 1; + + propertyAccessor.setProperty(versionProperty, nextVersion); + + return propertyAccessor.getBean(); + } + } + + /** + * Type-specific operations abstraction. + * + * @author Mark Paluch + * @param + * @since 2.2 + */ + interface TypedOperations { + + /** + * Return the optional {@link Collation} for the underlying entity. 
+ * + * @return + */ + Optional getCollation(); + + /** + * Return the optional {@link Collation} from the given {@link Query} and fall back to the collation configured for + * the underlying entity. + * + * @return + */ + Optional getCollation(Query query); + + /** + * Derive the applicable {@link CollectionOptions} for the given type. + * + * @return never {@literal null}. + * @since 3.3 + */ + CollectionOptions getCollectionOptions(); + + /** + * Map the fields of a given {@link TimeSeriesOptions} against the target domain type to consider potentially + * annotated field names. + * + * @param options must not be {@literal null}. + * @return never {@literal null}. + * @since 3.3 + */ + TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options); + + /** + * @return the name of the id field. + * @since 4.1 + */ + default String getIdKeyName() { + return ID_FIELD; + } + } + + /** + * {@link TypedOperations} for generic entities that are not represented with {@link PersistentEntity} (e.g. custom + * conversions). + */ + enum UntypedOperations implements TypedOperations { + + INSTANCE; + + UntypedOperations() {} + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public static TypedOperations instance() { + return (TypedOperations) INSTANCE; + } + + @Override + public Optional getCollation() { + return Optional.empty(); + } + + @Override + public Optional getCollation(Query query) { + + if (query == null) { + return Optional.empty(); + } + + return query.getCollation(); + } + + @Override + public CollectionOptions getCollectionOptions() { + return CollectionOptions.empty(); + } + + @Override + public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options) { + return options; + } + } + + /** + * {@link TypedOperations} backed by {@link MongoPersistentEntity}. 
+ * + * @param + */ + static class TypedEntityOperations implements TypedOperations { + + private final MongoPersistentEntity entity; + + @Nullable private final Environment environment; + + protected TypedEntityOperations(MongoPersistentEntity entity, @Nullable Environment environment) { + + this.entity = entity; + this.environment = environment; + } + + @Override + public Optional getCollation() { + return Optional.ofNullable(entity.getCollation()); + } + + @Override + public Optional getCollation(Query query) { + + if (query.getCollation().isPresent()) { + return query.getCollation(); + } + + return Optional.ofNullable(entity.getCollation()); + } + + @Override + public CollectionOptions getCollectionOptions() { + + CollectionOptions collectionOptions = CollectionOptions.empty(); + if (entity.hasCollation()) { + collectionOptions = collectionOptions.collation(entity.getCollation()); + } + + if (entity.isAnnotationPresent(TimeSeries.class)) { + + TimeSeries timeSeries = entity.getRequiredAnnotation(TimeSeries.class); + + if (entity.getPersistentProperty(timeSeries.timeField()) == null) { + throw new MappingException(String.format("Time series field '%s' does not exist in type %s", + timeSeries.timeField(), entity.getName())); + } + + TimeSeriesOptions options = TimeSeriesOptions.timeSeries(timeSeries.timeField()); + if (StringUtils.hasText(timeSeries.metaField())) { + + if (entity.getPersistentProperty(timeSeries.metaField()) == null) { + throw new MappingException( + String.format("Meta field '%s' does not exist in type %s", timeSeries.metaField(), entity.getName())); + } + + options = options.metaField(timeSeries.metaField()); + } + if (!Granularity.DEFAULT.equals(timeSeries.granularity())) { + options = options.granularity(timeSeries.granularity()); + } + + if (StringUtils.hasText(timeSeries.expireAfter())) { + + Duration timeout = computeIndexTimeout(timeSeries.expireAfter(), getEvaluationContextForEntity(entity)); + if (!timeout.isNegative()) { + options = 
options.expireAfter(timeout); + } + } + + collectionOptions = collectionOptions.timeSeries(options); + } + + return collectionOptions; + } + + @Override + public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions source) { + + TimeSeriesOptions target = TimeSeriesOptions.timeSeries(mappedNameOrDefault(source.getTimeField())); + + if (StringUtils.hasText(source.getMetaField())) { + target = target.metaField(mappedNameOrDefault(source.getMetaField())); + } + return target.granularity(source.getGranularity()).expireAfter(source.getExpireAfter()); + } + + @Override + public String getIdKeyName() { + return entity.getIdProperty().getName(); + } + + private String mappedNameOrDefault(String name) { + MongoPersistentProperty persistentProperty = entity.getPersistentProperty(name); + return persistentProperty != null ? persistentProperty.getFieldName() : name; + } + + /** + * Get the {@link ValueEvaluationContext} for a given {@link PersistentEntity entity} the default one. + * + * @param persistentEntity can be {@literal null} + * @return the context to use. + */ + private ValueEvaluationContext getEvaluationContextForEntity(@Nullable PersistentEntity persistentEntity) { + + if (persistentEntity instanceof BasicMongoPersistentEntity mongoEntity) { + return mongoEntity.getValueEvaluationContext(null); + } + + return ValueEvaluationContext.of(this.environment, SimpleEvaluationContext.forReadOnlyDataBinding().build()); + } + + /** + * Compute the index timeout value by evaluating a potential + * {@link org.springframework.expression.spel.standard.SpelExpression} and parsing the final value. + * + * @param timeoutValue must not be {@literal null}. + * @param evaluationContext must not be {@literal null}. + * @return never {@literal null} + * @throws IllegalArgumentException for invalid duration values. 
+ */ + private static Duration computeIndexTimeout(String timeoutValue, ValueEvaluationContext evaluationContext) { + return DurationUtil.evaluate(timeoutValue, evaluationContext); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java index 421ce3feb1..67ed188655 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,10 @@ */ package org.springframework.data.mongodb.core; +import java.util.stream.Stream; + import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationResults; -import org.springframework.data.util.CloseableIterator; /** * {@link ExecutableAggregationOperation} allows creation and execution of MongoDB aggregation operations in a fluent @@ -88,12 +89,12 @@ interface TerminatingAggregation { /** * Apply pipeline operations as specified and stream all matching elements.
- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.Cursor} + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} * - * @return a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.Cursor} that needs to be closed. - * Never {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). */ - CloseableIterator stream(); + Stream stream(); } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java index e6867a89df..ca5aa7a513 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,11 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; +import java.util.stream.Stream; import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationResults; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; -import org.springframework.data.util.CloseableIterator; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -39,27 +34,14 @@ class ExecutableAggregationOperationSupport implements ExecutableAggregationOper private final MongoTemplate template; - /** - * Create new instance of {@link ExecutableAggregationOperationSupport}. - * - * @param template must not be {@literal null}. - * @throws IllegalArgumentException if template is {@literal null}. 
- */ ExecutableAggregationOperationSupport(MongoTemplate template) { - - Assert.notNull(template, "Template must not be null!"); - this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ExecutableAggregation aggregateAndReturn(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ExecutableAggregationSupport<>(template, domainType, null, null); } @@ -68,55 +50,45 @@ public ExecutableAggregation aggregateAndReturn(Class domainType) { * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) static class ExecutableAggregationSupport implements AggregationWithAggregation, ExecutableAggregation, TerminatingAggregation { - @NonNull MongoTemplate template; - @NonNull Class domainType; - @Nullable Aggregation aggregation; - @Nullable String collection; + private final MongoTemplate template; + private final Class domainType; + private final Aggregation aggregation; + private final String collection; + + public ExecutableAggregationSupport(MongoTemplate template, Class domainType, Aggregation aggregation, + String collection) { + this.template = template; + this.domainType = domainType; + this.aggregation = aggregation; + this.collection = collection; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.AggregationWithCollection#inCollection(java.lang.String) - */ @Override public AggregationWithAggregation inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ExecutableAggregationSupport<>(template, domainType, aggregation, collection); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ExecutableAggregationOperation.AggregationWithAggregation#by(org.springframework.data.mongodb.core.aggregation.Aggregation) - */ @Override public TerminatingAggregation by(Aggregation aggregation) { - Assert.notNull(aggregation, "Aggregation must not be null!"); + Assert.notNull(aggregation, "Aggregation must not be null"); return new ExecutableAggregationSupport<>(template, domainType, aggregation, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.TerminatingAggregation#all() - */ @Override public AggregationResults all() { return template.aggregate(aggregation, getCollectionName(aggregation), domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.TerminatingAggregation#stream() - */ @Override - public CloseableIterator stream() { + public Stream stream() { return template.aggregateStream(aggregation, getCollectionName(aggregation), domainType); } @@ -126,16 +98,14 @@ private String getCollectionName(Aggregation aggregation) { return collection; } - if (aggregation instanceof TypedAggregation) { - - TypedAggregation typedAggregation = (TypedAggregation) aggregation; + if (aggregation instanceof TypedAggregation typedAggregation) { if (typedAggregation.getInputType() != null) { - return template.determineCollectionName(typedAggregation.getInputType()); + return template.getCollectionName(typedAggregation.getInputType()); } } - return template.determineCollectionName(domainType); + return template.getCollectionName(domainType); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java index d7becb8cb6..3358ff2b17 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,7 +20,11 @@ import java.util.stream.Stream; import org.springframework.dao.DataAccessException; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.geo.GeoResults; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.lang.Nullable; @@ -43,7 +47,7 @@ * query(Human.class) * .inCollection("star-wars") * .as(Jedi.class) - * .matching(query(where("firstname").is("luke"))) + * .matching(where("firstname").is("luke")) * .all(); * * @@ -117,13 +121,34 @@ default Optional first() { /** * Stream all matching elements. * - * @return a {@link Stream} that wraps the a Mongo DB {@link com.mongodb.Cursor} that needs to be closed. Never - * {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). */ Stream stream(); /** - * Get the number of matching elements. + * Return a window of elements either starting or resuming at + * {@link org.springframework.data.domain.ScrollPosition}. + *

+ * When using {@link KeysetScrollPosition}, make sure to use non-nullable + * {@link org.springframework.data.domain.Sort sort properties} as MongoDB does not support criteria to reconstruct + * a query result from absent document fields or {@code null} values through {@code $gt/$lt} operators. + * + * @param scrollPosition the scroll position. + * @return a window of the resulting elements. + * @since 4.1 + * @see org.springframework.data.domain.OffsetScrollPosition + * @see org.springframework.data.domain.KeysetScrollPosition + */ + Window scroll(ScrollPosition scrollPosition); + + /** + * Get the number of matching elements.
+ * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but + * guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the applications + * needs use {@link MongoOperations#estimatedCount(String)} for empty queries instead. * * @return total number of matching elements. */ @@ -170,6 +195,18 @@ interface FindWithQuery extends TerminatingFind { */ TerminatingFind matching(Query query); + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingFind}. + * @throws IllegalArgumentException if criteria is {@literal null}. + * @since 3.0 + */ + default TerminatingFind matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + /** * Set the filter query for the geoNear execution. * @@ -291,9 +328,21 @@ interface DistinctWithQuery extends DistinctWithProjection { * * @param query must not be {@literal null}. * @return new instance of {@link TerminatingDistinct}. - * @throws IllegalArgumentException if resultType is {@literal null}. + * @throws IllegalArgumentException if query is {@literal null}. */ TerminatingDistinct matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingDistinct}. + * @throws IllegalArgumentException if criteria is {@literal null}. 
+ * @since 3.0 + */ + default TerminatingDistinct matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java index 1987a9ccef..4e6c3547c5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,27 +15,24 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; - import java.util.List; import java.util.Optional; import java.util.stream.Stream; import org.bson.Document; + import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.SerializationUtils; -import org.springframework.data.util.CloseableIterator; -import org.springframework.data.util.StreamUtils; import 
org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; +import com.mongodb.ReadPreference; import com.mongodb.client.FindIterable; /** @@ -51,27 +48,14 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation { private final MongoTemplate template; - /** - * Create new {@link ExecutableFindOperationSupport}. - * - * @param template must not be {@literal null}. - * @throws IllegalArgumentException if template is {@literal null}. - */ ExecutableFindOperationSupport(MongoTemplate template) { - - Assert.notNull(template, "Template must not be null!"); - this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation#query(java.lang.Class) - */ @Override public ExecutableFind query(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ExecutableFindSupport<>(template, domainType, domainType, null, ALL_QUERY); } @@ -81,57 +65,48 @@ public ExecutableFind query(Class domainType) { * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) static class ExecutableFindSupport implements ExecutableFind, FindWithCollection, FindWithProjection, FindWithQuery { - @NonNull MongoTemplate template; - @NonNull Class domainType; - Class returnType; - @Nullable String collection; - Query query; + private final MongoTemplate template; + private final Class domainType; + private final Class returnType; + private final @Nullable String collection; + private final Query query; + + ExecutableFindSupport(MongoTemplate template, Class domainType, Class returnType, @Nullable String collection, + Query query) { + this.template = template; + this.domainType = domainType; + this.returnType = returnType; + this.collection = collection; + 
this.query = query; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithCollection#inCollection(java.lang.String) - */ @Override public FindWithProjection inCollection(String collection) { - Assert.hasText(collection, "Collection name must not be null nor empty!"); + Assert.hasText(collection, "Collection name must not be null nor empty"); return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithProjection#as(Class) - */ @Override public FindWithQuery as(Class returnType) { - Assert.notNull(returnType, "ReturnType must not be null!"); + Assert.notNull(returnType, "ReturnType must not be null"); return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override public TerminatingFind matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#oneValue() - */ @Override public T oneValue() { @@ -142,16 +117,12 @@ public T oneValue() { } if (result.size() > 1) { - throw new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result.", 1); + throw new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result", 1); } return result.iterator().next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#firstValue() - */ @Override public T firstValue() { @@ -160,60 +131,41 @@ public T firstValue() 
{ return ObjectUtils.isEmpty(result) ? null : result.iterator().next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#all() - */ @Override public List all() { return doFind(null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#stream() - */ @Override public Stream stream() { - return StreamUtils.createStreamFromIterator(doStream()); + return doStream(); + } + + @Override + public Window scroll(ScrollPosition scrollPosition) { + return template.doScroll(query.with(scrollPosition), domainType, returnType, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery#near(org.springframework.data.mongodb.core.query.NearQuery) - */ @Override public TerminatingFindNear near(NearQuery nearQuery) { return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#count() - */ @Override public long count() { return template.count(query, domainType, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#exists() - */ @Override public boolean exists() { return template.exists(query, domainType, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindDistinct#distinct(java.lang.String) - */ @SuppressWarnings("unchecked") @Override public TerminatingDistinct distinct(String field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return new DistinctOperationSupport(this, field); } @@ -223,8 +175,8 @@ private List doFind(@Nullable CursorPreparer preparer) { Document queryObject = query.getQueryObject(); Document fieldsObject = query.getFieldsObject(); - 
return template.doFind(getCollectionName(), queryObject, fieldsObject, domainType, returnType, - getCursorPreparer(query, preparer)); + return template.doFind(template.createDelegate(query), getCollectionName(), queryObject, fieldsObject, domainType, + returnType, getCursorPreparer(query, preparer)); } private List doFindDistinct(String field) { @@ -233,7 +185,7 @@ private List doFindDistinct(String field) { returnType == domainType ? (Class) Object.class : returnType); } - private CloseableIterator doStream() { + private Stream doStream() { return template.doStream(query, domainType, getCollectionName(), returnType); } @@ -242,7 +194,7 @@ private CursorPreparer getCursorPreparer(Query query, @Nullable CursorPreparer p } private String getCollectionName() { - return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType); + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); } private String asString() { @@ -254,7 +206,7 @@ private String asString() { * @author Christoph Strobl * @since 2.0 */ - static class DelegatingQueryCursorPreparer implements CursorPreparer { + static class DelegatingQueryCursorPreparer implements SortingQueryCursorPreparer { private final @Nullable CursorPreparer delegate; private Optional limit = Optional.empty(); @@ -263,14 +215,10 @@ static class DelegatingQueryCursorPreparer implements CursorPreparer { this.delegate = delegate; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.CursorPreparer#prepare(com.mongodb.clientFindIterable) - */ @Override - public FindIterable prepare(FindIterable cursor) { + public FindIterable prepare(FindIterable iterable) { - FindIterable target = delegate != null ? delegate.prepare(cursor) : cursor; + FindIterable target = delegate != null ? 
delegate.prepare(iterable) : iterable; return limit.map(target::limit).orElse(target); } @@ -279,6 +227,18 @@ CursorPreparer limit(int limit) { this.limit = Optional.of(limit); return this; } + + @Override + @Nullable + public ReadPreference getReadPreference() { + return delegate.getReadPreference(); + } + + @Override + @Nullable + public Document getSortObject() { + return delegate instanceof SortingQueryCursorPreparer sqcp ? sqcp.getSortObject() : null; + } } /** @@ -296,35 +256,23 @@ public DistinctOperationSupport(ExecutableFindSupport delegate, String field) this.field = field; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.DistinctWithProjection#as(java.lang.Class) - */ @Override @SuppressWarnings("unchecked") public TerminatingDistinct as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new DistinctOperationSupport<>((ExecutableFindSupport) delegate.as(resultType), field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.DistinctWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override public TerminatingDistinct matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new DistinctOperationSupport<>((ExecutableFindSupport) delegate.matching(query), field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingDistinct#all() - */ @Override public List all() { return delegate.doFindDistinct(field); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java index 8a9e0411d4..c2b08c7e59 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -63,17 +63,19 @@ interface TerminatingInsert extends TerminatingBulkInsert { * Insert exactly one object. * * @param object must not be {@literal null}. + * @return the inserted object. * @throws IllegalArgumentException if object is {@literal null}. */ - void one(T object); + T one(T object); /** * Insert a collection of objects. * * @param objects must not be {@literal null}. + * @return the inserted objects. * @throws IllegalArgumentException if objects is {@literal null}. */ - void all(Collection objects); + Collection all(Collection objects); } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java index aee2fed70a..47b7127deb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,6 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; - import java.util.ArrayList; import java.util.Collection; @@ -41,27 +36,14 @@ class ExecutableInsertOperationSupport implements ExecutableInsertOperation { private final MongoTemplate template; - /** - * Create new {@link ExecutableInsertOperationSupport}. - * - * @param template must not be {@literal null}. - * @throws IllegalArgumentException if template is {@literal null}. - */ ExecutableInsertOperationSupport(MongoTemplate template) { - - Assert.notNull(template, "Template must not be null!"); - this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.coreExecutableInsertOperation#insert(java.lan.Class) - */ @Override public ExecutableInsert insert(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ExecutableInsertSupport<>(template, domainType, null, null); } @@ -70,78 +52,64 @@ public ExecutableInsert insert(Class domainType) { * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) static class ExecutableInsertSupport implements ExecutableInsert { - @NonNull MongoTemplate template; - @NonNull Class domainType; - @Nullable String collection; - @Nullable BulkMode bulkMode; + private final MongoTemplate template; + private final Class 
domainType; + @Nullable private final String collection; + @Nullable private final BulkMode bulkMode; + + ExecutableInsertSupport(MongoTemplate template, Class domainType, String collection, BulkMode bulkMode) { + + this.template = template; + this.domainType = domainType; + this.collection = collection; + this.bulkMode = bulkMode; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingInsert#insert(java.lang.Class) - */ @Override - public void one(T object) { + public T one(T object) { - Assert.notNull(object, "Object must not be null!"); + Assert.notNull(object, "Object must not be null"); - template.insert(object, getCollectionName()); + return template.insert(object, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingInsert#all(java.util.Collection) - */ @Override - public void all(Collection objects) { + public Collection all(Collection objects) { - Assert.notNull(objects, "Objects must not be null!"); + Assert.notNull(objects, "Objects must not be null"); - template.insert(objects, getCollectionName()); + return template.insert(objects, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingBulkInsert#bulk(java.util.Collection) - */ @Override public BulkWriteResult bulk(Collection objects) { - Assert.notNull(objects, "Objects must not be null!"); + Assert.notNull(objects, "Objects must not be null"); return template.bulkOps(bulkMode != null ? 
bulkMode : BulkMode.ORDERED, domainType, getCollectionName()) .insert(new ArrayList<>(objects)).execute(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.InsertWithCollection#inCollection(java.lang.String) - */ @Override public InsertWithBulkMode inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty."); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ExecutableInsertSupport<>(template, domainType, collection, bulkMode); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.InsertWithBulkMode#withBulkMode(org.springframework.data.mongodb.core.BulkMode) - */ @Override public TerminatingBulkInsert withBulkMode(BulkMode bulkMode) { - Assert.notNull(bulkMode, "BulkMode must not be null!"); + Assert.notNull(bulkMode, "BulkMode must not be null"); return new ExecutableInsertSupport<>(template, domainType, collection, bulkMode); } private String getCollectionName() { - return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType); + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java new file mode 100644 index 0000000000..2d13ad3ea0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java @@ -0,0 +1,215 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.List; + +import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.data.mongodb.core.query.Query; + +/** + * {@link ExecutableMapReduceOperation} allows creation and execution of MongoDB mapReduce operations in a fluent API + * style. The starting {@literal domainType} is used for mapping an optional {@link Query} provided via {@code matching} + * into the MongoDB specific representation. By default, the originating {@literal domainType} is also used for mapping + * back the results from the {@link org.bson.Document}. However, it is possible to define a different + * {@literal returnType} via {@code as} to map the result.
+ * The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there + * via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows to override the + * collection name for the execution. + * + *
+ *     
+ *         mapReduce(Human.class)
+ *             .map("function() { emit(this.id, this.firstname) }")
+ *             .reduce("function(id, name) { return sum(id, name); }")
+ *             .inCollection("star-wars")
+ *             .as(Jedi.class)
+ *             .matching(query(where("lastname").is("skywalker")))
+ *             .all();
+ *     
+ * 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public interface ExecutableMapReduceOperation { + + /** + * Start creating a mapReduce operation for the given {@literal domainType}. + * + * @param domainType must not be {@literal null}. + * @return new instance of {@link MapReduceWithMapFunction}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + MapReduceWithMapFunction mapReduce(Class domainType); + + /** + * Trigger mapReduce execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface TerminatingMapReduce { + + /** + * Get the mapReduce results. + * + * @return never {@literal null}. + */ + List all(); + } + + /** + * Provide the Javascript {@code function()} used to map matching documents. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithMapFunction { + + /** + * Set the Javascript map {@code function()}. + * + * @param mapFunction must not be {@literal null} nor empty. + * @return new instance of {@link MapReduceWithReduceFunction}. + * @throws IllegalArgumentException if {@literal mapFunction} is {@literal null} or empty. + */ + MapReduceWithReduceFunction map(String mapFunction); + + } + + /** + * Provide the Javascript {@code function()} used to reduce matching documents. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithReduceFunction { + + /** + * Set the Javascript reduce {@code function()}. + * + * @param reduceFunction must not be {@literal null} nor empty. + * @return new instance of {@link ExecutableMapReduce}. + * @throws IllegalArgumentException if {@literal reduceFunction} is {@literal null} or empty. + */ + ExecutableMapReduce reduce(String reduceFunction); + + } + + /** + * Collection override (Optional). 
+ * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithCollection extends MapReduceWithQuery { + + /** + * Explicitly set the name of the collection to perform the mapReduce operation on.
+ * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link MapReduceWithProjection}. + * @throws IllegalArgumentException if collection is {@literal null}. + */ + MapReduceWithProjection inCollection(String collection); + } + + /** + * Input document filter query (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithQuery extends TerminatingMapReduce { + + /** + * Set the filter query to be used. + * + * @param query must not be {@literal null}. + * @return new instance of {@link TerminatingMapReduce}. + * @throws IllegalArgumentException if query is {@literal null}. + */ + TerminatingMapReduce matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingMapReduce}. + * @throws IllegalArgumentException if criteria is {@literal null}. + * @since 3.0 + */ + default TerminatingMapReduce matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + } + + /** + * Result type override (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithProjection extends MapReduceWithQuery { + + /** + * Define the target type fields should be mapped to.
+ * Skip this step if you are anyway only interested in the original domain type. + * + * @param resultType must not be {@literal null}. + * @param result type. + * @return new instance of {@link TerminatingMapReduce}. + * @throws IllegalArgumentException if resultType is {@literal null}. + */ + MapReduceWithQuery as(Class resultType); + } + + /** + * Additional mapReduce options (Optional). + * + * @author Christoph Strobl + * @since 2.1 + * @deprecated since 4.0 in favor of {@link org.springframework.data.mongodb.core.aggregation}. + */ + @Deprecated + interface MapReduceWithOptions { + + /** + * Set additional options to apply to the mapReduce operation. + * + * @param options must not be {@literal null}. + * @return new instance of {@link ExecutableMapReduce}. + * @throws IllegalArgumentException if options is {@literal null}. + */ + ExecutableMapReduce with(MapReduceOptions options); + } + + /** + * {@link ExecutableMapReduce} provides methods for constructing mapReduce operations in a fluent way. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface ExecutableMapReduce extends MapReduceWithMapFunction, MapReduceWithReduceFunction, + MapReduceWithCollection, MapReduceWithProjection, MapReduceWithOptions { + + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java new file mode 100644 index 0000000000..9f78693540 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java @@ -0,0 +1,179 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.List; + +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Implementation of {@link ExecutableMapReduceOperation}. + * + * @author Christoph Strobl + * @since 2.1 + */ +class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperation { + + private static final Query ALL_QUERY = new Query(); + + private final MongoTemplate template; + + ExecutableMapReduceOperationSupport(MongoTemplate template) { + + Assert.notNull(template, "Template must not be null"); + this.template = template; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation#mapReduce(java.lang.Class) + */ + @Override + public ExecutableMapReduceSupport mapReduce(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + + return new ExecutableMapReduceSupport<>(template, domainType, domainType, null, ALL_QUERY, null, null, null); + } + + /** + * @author Christoph Strobl + * @since 2.1 + */ + static class ExecutableMapReduceSupport + implements ExecutableMapReduce, MapReduceWithOptions, MapReduceWithCollection, + MapReduceWithProjection, MapReduceWithQuery, MapReduceWithReduceFunction, MapReduceWithMapFunction { + + private final MongoTemplate template; + private final Class domainType; + private 
final Class returnType; + private final @Nullable String collection; + private final Query query; + private final @Nullable String mapFunction; + private final @Nullable String reduceFunction; + private final @Nullable MapReduceOptions options; + + ExecutableMapReduceSupport(MongoTemplate template, Class domainType, Class returnType, + @Nullable String collection, Query query, @Nullable String mapFunction, @Nullable String reduceFunction, + @Nullable MapReduceOptions options) { + + this.template = template; + this.domainType = domainType; + this.returnType = returnType; + this.collection = collection; + this.query = query; + this.mapFunction = mapFunction; + this.reduceFunction = reduceFunction; + this.options = options; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.TerminatingMapReduce#all() + */ + @Override + public List all() { + return template.mapReduce(query, domainType, getCollectionName(), mapFunction, reduceFunction, options, + returnType); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithCollection#inCollection(java.lang.String) + */ + @Override + public MapReduceWithProjection inCollection(String collection) { + + Assert.hasText(collection, "Collection name must not be null nor empty"); + + return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithQuery#query(org.springframework.data.mongodb.core.query.Query) + */ + @Override + public TerminatingMapReduce matching(Query query) { + + Assert.notNull(query, "Query must not be null"); + + return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javadoc) + * @see 
org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithProjection#as(java.lang.Class) + */ + @Override + public MapReduceWithQuery as(Class resultType) { + + Assert.notNull(resultType, "ResultType must not be null"); + + return new ExecutableMapReduceSupport<>(template, domainType, resultType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithOptions#with(org.springframework.data.mongodb.core.mapreduce.MapReduceOptions) + */ + @Override + public ExecutableMapReduce with(MapReduceOptions options) { + + Assert.notNull(options, "Options must not be null Please consider empty MapReduceOptions#options() instead"); + + return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithMapFunction#map(java.lang.String) + */ + @Override + public MapReduceWithReduceFunction map(String mapFunction) { + + Assert.hasText(mapFunction, "MapFunction name must not be null nor empty"); + + return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.MapReduceWithReduceFunction#reduce(java.lang.String) + */ + @Override + public ExecutableMapReduce reduce(String reduceFunction) { + + Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty"); + + return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + private String getCollectionName() { + return StringUtils.hasText(collection) ? 
collection : template.getCollectionName(domainType); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java index 2c55bc982a..a10cd0317f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,6 +17,7 @@ import java.util.List; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.Query; import com.mongodb.client.result.DeleteResult; @@ -54,41 +55,28 @@ public interface ExecutableRemoveOperation { ExecutableRemove remove(Class domainType); /** - * Collection override (optional). - * - * @param * @author Christoph Strobl * @since 2.0 */ - interface RemoveWithCollection extends RemoveWithQuery { + interface TerminatingRemove { /** - * Explicitly set the name of the collection to perform the query on.
- * Skip this step to use the default collection derived from the domain type. + * Remove all documents matching. * - * @param collection must not be {@literal null} nor {@literal empty}. - * @return new instance of {@link RemoveWithCollection}. - * @throws IllegalArgumentException if collection is {@literal null}. + * @return the {@link DeleteResult}. Never {@literal null}. */ - RemoveWithQuery inCollection(String collection); - } - - /** - * @author Christoph Strobl - * @since 2.0 - */ - interface TerminatingRemove { + DeleteResult all(); /** - * Remove all documents matching. + * Remove the first matching document. * * @return the {@link DeleteResult}. Never {@literal null}. */ - DeleteResult all(); + DeleteResult one(); /** * Remove and return all matching documents.
- * NOTE The entire list of documents will be fetched before sending the actual delete commands. + * NOTE: The entire list of documents will be fetched before sending the actual delete commands. * Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete * operation. * @@ -97,6 +85,27 @@ interface TerminatingRemove { List findAndRemove(); } + /** + * Collection override (optional). + * + * @param + * @author Christoph Strobl + * @since 2.0 + */ + interface RemoveWithCollection extends RemoveWithQuery { + + /** + * Explicitly set the name of the collection to perform the query on.
+ * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link RemoveWithCollection}. + * @throws IllegalArgumentException if collection is {@literal null}. + */ + RemoveWithQuery inCollection(String collection); + } + + /** * @author Christoph Strobl * @since 2.0 @@ -111,6 +120,18 @@ interface RemoveWithQuery extends TerminatingRemove { * @throws IllegalArgumentException if query is {@literal null}. */ TerminatingRemove matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingRemove}. + * @throws IllegalArgumentException if query is {@literal null}. + * @since 3.0 + */ + default TerminatingRemove matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java index cba5343de8..8e84aa7dd6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,6 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; - import java.util.List; import org.springframework.data.mongodb.core.query.Query; @@ -42,27 +37,14 @@ class ExecutableRemoveOperationSupport implements ExecutableRemoveOperation { private final MongoTemplate tempate; - /** - * Create new {@link ExecutableRemoveOperationSupport}. - * - * @param template must not be {@literal null}. - * @throws IllegalArgumentException if template is {@literal null}. - */ - ExecutableRemoveOperationSupport(MongoTemplate template) { - - Assert.notNull(template, "Template must not be null!"); - - this.tempate = template; + public ExecutableRemoveOperationSupport(MongoTemplate tempate) { + this.tempate = tempate; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation#remove(java.lang.Class) - */ @Override public ExecutableRemove remove(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ExecutableRemoveSupport<>(tempate, domainType, ALL_QUERY, null); } @@ -71,55 +53,46 @@ public ExecutableRemove remove(Class domainType) { * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) static class ExecutableRemoveSupport implements ExecutableRemove, RemoveWithCollection { - @NonNull MongoTemplate template; - @NonNull Class domainType; - Query query; - @Nullable String collection; + private final MongoTemplate template; + private final Class 
domainType; + private final Query query; + @Nullable private final String collection; + + public ExecutableRemoveSupport(MongoTemplate template, Class domainType, Query query, String collection) { + this.template = template; + this.domainType = domainType; + this.query = query; + this.collection = collection; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.RemoveWithCollection#inCollection(java.lang.String) - */ @Override public RemoveWithQuery inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ExecutableRemoveSupport<>(template, domainType, query, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.RemoveWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override public TerminatingRemove matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ExecutableRemoveSupport<>(template, domainType, query, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.TerminatingRemove#all() - */ @Override public DeleteResult all() { + return template.doRemove(getCollectionName(), query, domainType, true); + } - String collectionName = getCollectionName(); - - return template.doRemove(collectionName, query, domainType); + @Override + public DeleteResult one() { + return template.doRemove(getCollectionName(), query, domainType, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.TerminatingRemove#findAndRemove() - */ @Override public List findAndRemove() { @@ -129,7 +102,7 @@ public List findAndRemove() { } private String getCollectionName() { - return StringUtils.hasText(collection) ? 
collection : template.determineCollectionName(domainType); + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java index 8951e34ecb..a5c63e9b67 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,15 +17,18 @@ import java.util.Optional; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Nullable; import com.mongodb.client.result.UpdateResult; -import org.springframework.lang.Nullable; /** - * {@link ExecutableUpdateOperation} allows creation and execution of MongoDB update / findAndModify operations in a - * fluent API style.
+ * {@link ExecutableUpdateOperation} allows creation and execution of MongoDB update / findAndModify / findAndReplace + * operations in a fluent API style.
* The starting {@literal domainType} is used for mapping the {@link Query} provided via {@code matching}, as well as * the {@link Update} via {@code apply} into the MongoDB specific representations. The collection to operate on is by * default derived from the initial {@literal domainType} and can be defined there via @@ -57,6 +60,108 @@ public interface ExecutableUpdateOperation { */ ExecutableUpdate update(Class domainType); + /** + * Trigger findAndModify execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.0 + */ + interface TerminatingFindAndModify { + + /** + * Find, modify and return the first matching document. + * + * @return {@link Optional#empty()} if nothing found. + */ + default Optional findAndModify() { + return Optional.ofNullable(findAndModifyValue()); + } + + /** + * Find, modify and return the first matching document. + * + * @return {@literal null} if nothing found. + */ + @Nullable + T findAndModifyValue(); + } + + /** + * Trigger replaceOne + * execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 4.2 + */ + interface TerminatingReplace { + + /** + * Find first and replace/upsert. + * + * @return never {@literal null}. + */ + UpdateResult replaceFirst(); + } + + /** + * Trigger + * findOneAndReplace + * execution by calling one of the terminating methods. + * + * @author Mark Paluch + * @since 2.1 + */ + interface TerminatingFindAndReplace extends TerminatingReplace { + + /** + * Find, replace and return the first matching document. + * + * @return {@link Optional#empty()} if nothing found. + */ + default Optional findAndReplace() { + return Optional.ofNullable(findAndReplaceValue()); + } + + /** + * Find, replace and return the first matching document. + * + * @return {@literal null} if nothing found. + */ + @Nullable + T findAndReplaceValue(); + } + + /** + * Trigger update execution by calling one of the terminating methods. 
+ * + * @author Christoph Strobl + * @since 2.0 + */ + interface TerminatingUpdate extends TerminatingFindAndModify, FindAndModifyWithOptions { + + /** + * Update all matching documents in the collection. + * + * @return never {@literal null}. + */ + UpdateResult all(); + + /** + * Update the first document in the collection. + * + * @return never {@literal null}. + */ + UpdateResult first(); + + /** + * Creates a new document if no documents match the filter query or updates the matching ones. + * + * @return never {@literal null}. + */ + UpdateResult upsert(); + } + /** * Declare the {@link Update} to apply. * @@ -66,13 +171,26 @@ public interface ExecutableUpdateOperation { interface UpdateWithUpdate { /** - * Set the {@link Update} to be applied. + * Set the {@link UpdateDefinition} to be applied. * * @param update must not be {@literal null}. * @return new instance of {@link TerminatingUpdate}. * @throws IllegalArgumentException if update is {@literal null}. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - TerminatingUpdate apply(Update update); + TerminatingUpdate apply(UpdateDefinition update); + + /** + * Specify {@code replacement} object. + * + * @param replacement must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. + * @throws IllegalArgumentException if options is {@literal null}. + * @since 2.1 + */ + FindAndReplaceWithProjection replaceWith(T replacement); } /** @@ -110,6 +228,18 @@ interface UpdateWithQuery extends UpdateWithUpdate { * @throws IllegalArgumentException if query is {@literal null}. */ UpdateWithUpdate matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link UpdateWithUpdate}. + * @throws IllegalArgumentException if query is {@literal null}. 
+ * @since 3.0 + */ + default UpdateWithUpdate matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } } /** @@ -131,56 +261,59 @@ interface FindAndModifyWithOptions { } /** - * Trigger findAndModify execution by calling one of the terminating methods. + * @author Christoph Strobl + * @since 4.2 */ - interface TerminatingFindAndModify { + interface ReplaceWithOptions extends TerminatingReplace { /** - * Find, modify and return the first matching document. + * Explicitly define {@link ReplaceOptions}. * - * @return {@link Optional#empty()} if nothing found. - */ - default Optional findAndModify() { - return Optional.ofNullable(findAndModifyValue()); - } - - /** - * Find, modify and return the first matching document. - * - * @return {@literal null} if nothing found. + * @param options must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. + * @throws IllegalArgumentException if options is {@literal null}. */ - @Nullable - T findAndModifyValue(); + TerminatingReplace withOptions(ReplaceOptions options); } /** - * Trigger update execution by calling one of the terminating methods. + * Define {@link FindAndReplaceOptions}. * + * @author Mark Paluch * @author Christoph Strobl - * @since 2.0 + * @since 2.1 */ - interface TerminatingUpdate extends TerminatingFindAndModify, FindAndModifyWithOptions { + interface FindAndReplaceWithOptions extends TerminatingFindAndReplace, ReplaceWithOptions { /** - * Update all matching documents in the collection. + * Explicitly define {@link FindAndReplaceOptions} for the {@link Update}. * - * @return never {@literal null}. + * @param options must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. + * @throws IllegalArgumentException if options is {@literal null}. */ - UpdateResult all(); + FindAndReplaceWithProjection withOptions(FindAndReplaceOptions options); + } - /** - * Update the first document in the collection. 
- * - * @return never {@literal null}. - */ - UpdateResult first(); + /** + * Result type override (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface FindAndReplaceWithProjection extends FindAndReplaceWithOptions { /** - * Creates a new document if no documents match the filter query or updates the matching ones. + * Define the target type fields should be mapped to.
+ * Skip this step if you are anyway only interested in the original domain type. * - * @return never {@literal null}. + * @param resultType must not be {@literal null}. + * @param result type. + * @return new instance of {@link FindAndReplaceWithProjection}. + * @throws IllegalArgumentException if resultType is {@literal null}. */ - UpdateResult upsert(); + FindAndReplaceWithOptions as(Class resultType); + } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java index e42a0dc44b..593d863d39 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,8 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; - import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -41,128 +36,166 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation { private final MongoTemplate template; - /** - * Creates new {@link ExecutableUpdateOperationSupport}. - * - * @param template must not be {@literal null}. 
- */ ExecutableUpdateOperationSupport(MongoTemplate template) { - - Assert.notNull(template, "Template must not be null!"); - this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation#update(java.lang.Class) - */ @Override public ExecutableUpdate update(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); - return new ExecutableUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null); + return new ExecutableUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null, null, null, domainType); } /** * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) static class ExecutableUpdateSupport - implements ExecutableUpdate, UpdateWithCollection, UpdateWithQuery, TerminatingUpdate { - - @NonNull MongoTemplate template; - @NonNull Class domainType; - Query query; - @Nullable Update update; - @Nullable String collection; - @Nullable FindAndModifyOptions options; - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#apply(Update) - */ + implements ExecutableUpdate, UpdateWithCollection, UpdateWithQuery, TerminatingUpdate, + FindAndReplaceWithOptions, TerminatingFindAndReplace, FindAndReplaceWithProjection { + + private final MongoTemplate template; + private final Class domainType; + private final Query query; + @Nullable private final UpdateDefinition update; + @Nullable private final String collection; + @Nullable private final FindAndModifyOptions findAndModifyOptions; + @Nullable private final FindAndReplaceOptions findAndReplaceOptions; + @Nullable private final Object replacement; + private final Class targetType; + + ExecutableUpdateSupport(MongoTemplate template, Class domainType, Query query, UpdateDefinition update, + String collection, FindAndModifyOptions 
findAndModifyOptions, FindAndReplaceOptions findAndReplaceOptions, + Object replacement, Class targetType) { + + this.template = template; + this.domainType = domainType; + this.query = query; + this.update = update; + this.collection = collection; + this.findAndModifyOptions = findAndModifyOptions; + this.findAndReplaceOptions = findAndReplaceOptions; + this.replacement = replacement; + this.targetType = targetType; + } + @Override - public TerminatingUpdate apply(Update update) { + public TerminatingUpdate apply(UpdateDefinition update) { - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(update, "Update must not be null"); - return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, options); + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithCollection#inCollection(java.lang.String) - */ @Override public UpdateWithQuery inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); - return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, options); + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.FindAndModifyWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndModifyOptions) - */ @Override public TerminatingFindAndModify withOptions(FindAndModifyOptions options) { - Assert.notNull(options, "Options must not be null!"); + Assert.notNull(options, "Options must not be null"); + + return new ExecutableUpdateSupport<>(template, domainType, 
query, update, collection, options, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public FindAndReplaceWithProjection replaceWith(T replacement) { + + Assert.notNull(replacement, "Replacement must not be null"); + + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public FindAndReplaceWithProjection withOptions(FindAndReplaceOptions options) { + + Assert.notNull(options, "Options must not be null"); - return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, options); + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + options, replacement, targetType); + } + + @Override + public TerminatingReplace withOptions(ReplaceOptions options) { + + FindAndReplaceOptions target = new FindAndReplaceOptions(); + if (options.isUpsert()) { + target.upsert(); + } + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + target, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override public UpdateWithUpdate matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); - return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, options); + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public FindAndReplaceWithOptions as(Class resultType) { + + Assert.notNull(resultType, "ResultType must not be null"); + + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + 
findAndReplaceOptions, replacement, resultType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate#all() - */ @Override public UpdateResult all() { return doUpdate(true, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate#first() - */ @Override public UpdateResult first() { return doUpdate(false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate#upsert() - */ @Override public UpdateResult upsert() { return doUpdate(true, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingFindAndModify#findAndModifyValue() - */ @Override public @Nullable T findAndModifyValue() { - return template.findAndModify(query, update, options != null ? options : new FindAndModifyOptions(), domainType, getCollectionName()); + + return template.findAndModify(query, update, + findAndModifyOptions != null ? findAndModifyOptions : new FindAndModifyOptions(), targetType, + getCollectionName()); + } + + @Override + public @Nullable T findAndReplaceValue() { + + return (T) template.findAndReplace(query, replacement, + findAndReplaceOptions != null ? findAndReplaceOptions : FindAndReplaceOptions.empty(), domainType, + getCollectionName(), targetType); + } + + @Override + public UpdateResult replaceFirst() { + + if (replacement != null) { + return template.replace(query, domainType, replacement, + findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); + } + + return template.replace(query, domainType, update, + findAndReplaceOptions != null ? 
findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); } private UpdateResult doUpdate(boolean multi, boolean upsert) { @@ -170,7 +203,7 @@ private UpdateResult doUpdate(boolean multi, boolean upsert) { } private String getCollectionName() { - return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType); + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java index ce5e13e5d8..51a2c5b86a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -33,18 +33,55 @@ public class FindAndModifyOptions { private @Nullable Collation collation; + private static final FindAndModifyOptions NONE = new FindAndModifyOptions() { + + private static final String ERROR_MSG = "FindAndModifyOptions.none() cannot be changed; Please use FindAndModifyOptions.options() instead"; + + @Override + public FindAndModifyOptions returnNew(boolean returnNew) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public FindAndModifyOptions upsert(boolean upsert) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public FindAndModifyOptions remove(boolean remove) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public FindAndModifyOptions collation(@Nullable Collation collation) { + throw new UnsupportedOperationException(ERROR_MSG); + } + }; + /** * Static factory method to create a FindAndModifyOptions instance * - * @return a new instance + * @return new instance of {@link FindAndModifyOptions}. */ public static FindAndModifyOptions options() { return new FindAndModifyOptions(); } /** - * @param options - * @return + * Static factory method returning an unmodifiable {@link FindAndModifyOptions} instance. + * + * @return unmodifiable {@link FindAndModifyOptions} instance. + * @since 2.2 + */ + public static FindAndModifyOptions none() { + return NONE; + } + + /** + * Create new {@link FindAndModifyOptions} based on option of given {@literal source}. + * + * @param source can be {@literal null}. + * @return new instance of {@link FindAndModifyOptions}. 
* @since 2.0 */ public static FindAndModifyOptions of(@Nullable FindAndModifyOptions source) { @@ -80,8 +117,8 @@ public FindAndModifyOptions remove(boolean remove) { /** * Define the {@link Collation} specifying language-specific rules for string comparison. * - * @param collation - * @return + * @param collation can be {@literal null}. + * @return this. * @since 2.0 */ public FindAndModifyOptions collation(@Nullable Collation collation) { @@ -105,7 +142,7 @@ public boolean isRemove() { /** * Get the {@link Collation} specifying language-specific rules for string comparison. * - * @return + * @return never {@literal null}. * @since 2.0 */ public Optional getCollation() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java new file mode 100644 index 0000000000..266a0742c2 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java @@ -0,0 +1,124 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +/** + * Options for + * findOneAndReplace. + *
+ * Defaults to + *
+ *
returnNew
+ *
false
+ *
upsert
+ *
false
+ *
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ +public class FindAndReplaceOptions extends ReplaceOptions { + + private boolean returnNew; + + private static final FindAndReplaceOptions NONE = new FindAndReplaceOptions() { + + private static final String ERROR_MSG = "FindAndReplaceOptions.none() cannot be changed; Please use FindAndReplaceOptions.options() instead"; + + @Override + public FindAndReplaceOptions returnNew() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public FindAndReplaceOptions upsert() { + throw new UnsupportedOperationException(ERROR_MSG); + } + }; + + /** + * Static factory method to create a {@link FindAndReplaceOptions} instance. + *
+ *
returnNew
+ *
false
+ *
upsert
+ *
false
+ *
+ * + * @return new instance of {@link FindAndReplaceOptions}. + */ + public static FindAndReplaceOptions options() { + return new FindAndReplaceOptions(); + } + + /** + * Static factory method returning an unmodifiable {@link FindAndReplaceOptions} instance. + * + * @return unmodifiable {@link FindAndReplaceOptions} instance. + * @since 2.2 + */ + public static FindAndReplaceOptions none() { + return NONE; + } + + /** + * Static factory method to create a {@link FindAndReplaceOptions} instance with + *
+ *
returnNew
+ *
false
+ *
upsert
+ *
false
+ *
+ * + * @return new instance of {@link FindAndReplaceOptions}. + */ + public static FindAndReplaceOptions empty() { + return new FindAndReplaceOptions(); + } + + /** + * Return the replacement document. + * + * @return this. + */ + public FindAndReplaceOptions returnNew() { + + this.returnNew = true; + return this; + } + + /** + * Insert a new document if not exists. + * + * @return this. + */ + public FindAndReplaceOptions upsert() { + + super.upsert(); + return this; + } + + /** + * Get the bit indicating to return the replacement document. + * + * @return {@literal true} if set. + */ + public boolean isReturnNew() { + return returnNew; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java index edb902dea7..625a85950e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,69 @@ */ package org.springframework.data.mongodb.core; +import java.util.function.Function; + +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.ReadPreference; import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MongoCollection; /** * Simple callback interface to allow customization of a {@link FindPublisher}. * * @author Mark Paluch + * @author Christoph Strobl + * @author Konstantin Volivach */ -interface FindPublisherPreparer { +public interface FindPublisherPreparer extends ReadPreferenceAware { + + /** + * Default {@link FindPublisherPreparer} just passing on the given {@link FindPublisher}. + * + * @since 2.2 + */ + FindPublisherPreparer NO_OP_PREPARER = (findPublisher -> findPublisher); /** * Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor. * * @param findPublisher must not be {@literal null}. */ - FindPublisher prepare(FindPublisher findPublisher); + FindPublisher prepare(FindPublisher findPublisher); + + /** + * Apply query specific settings to {@link MongoCollection} and initiate a find operation returning a + * {@link FindPublisher} via the given {@link Function find} function. + * + * @param collection must not be {@literal null}. + * @param find must not be {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException if one of the required arguments is {@literal null}. 
+ * @since 2.2 + */ + default FindPublisher initiateFind(MongoCollection collection, + Function, FindPublisher> find) { + + Assert.notNull(collection, "Collection must not be null"); + Assert.notNull(find, "Find function must not be null"); + + if (hasReadPreference()) { + collection = collection.withReadPreference(getReadPreference()); + } + + return prepare(find.apply(collection)); + } + + /** + * @return the {@link ReadPreference} to apply or {@literal null} if none defined. + * @since 2.2 + */ + @Override + @Nullable + default ReadPreference getReadPreference() { + return null; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java index ece0cb776c..906afddd4a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,4 +22,4 @@ * @since 2.0 */ public interface FluentMongoOperations extends ExecutableFindOperation, ExecutableInsertOperation, - ExecutableUpdateOperation, ExecutableRemoveOperation, ExecutableAggregationOperation {} + ExecutableUpdateOperation, ExecutableRemoveOperation, ExecutableAggregationOperation, ExecutableMapReduceOperation {} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java index 62a37a0b14..654e7d4330 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -39,7 +39,7 @@ class GeoCommandStatistics { */ private GeoCommandStatistics(Document source) { - Assert.notNull(source, "Source document must not be null!"); + Assert.notNull(source, "Source document must not be null"); this.source = source; } @@ -47,11 +47,11 @@ private GeoCommandStatistics(Document source) { * Creates a new {@link GeoCommandStatistics} from the given command result extracting the statistics. * * @param commandResult must not be {@literal null}. - * @return + * @return never {@literal null}. */ public static GeoCommandStatistics from(Document commandResult) { - Assert.notNull(commandResult, "Command result must not be null!"); + Assert.notNull(commandResult, "Command result must not be null"); Object stats = commandResult.get("stats"); return stats == null ? NONE : new GeoCommandStatistics((Document) stats); @@ -61,7 +61,7 @@ public static GeoCommandStatistics from(Document commandResult) { * Returns the average distance reported by the command result. Mitigating a removal of the field in case the command * didn't return any result introduced in MongoDB 3.2 RC1. * - * @return + * @return never {@literal null}, uses {@link Double#NaN} if {@literal avgDistance} does not exist. * @see MongoDB Jira SERVER-21024 */ public double getAverageDistance() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/HintFunction.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/HintFunction.java new file mode 100644 index 0000000000..57abe9a529 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/HintFunction.java @@ -0,0 +1,129 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.function.Function; + +import org.bson.conversions.Bson; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Function object to apply a query hint. Can be an index name or a BSON document. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.1 + */ +class HintFunction { + + private static final HintFunction EMPTY = new HintFunction(null); + + private final @Nullable Object hint; + + private HintFunction(@Nullable Object hint) { + this.hint = hint; + } + + /** + * Return an empty hint function. + * + * @return + */ + static HintFunction empty() { + return EMPTY; + } + + /** + * Create a {@link HintFunction} from a {@link Bson document} or {@link String index name}. + * + * @param hint + * @return + */ + static HintFunction from(@Nullable Object hint) { + return new HintFunction(hint); + } + + /** + * Return whether a hint is present. + * + * @return + */ + public boolean isPresent() { + return (hint instanceof String hintString && StringUtils.hasText(hintString)) || hint instanceof Bson; + } + + /** + * If a hint is not present, returns {@code true}, otherwise {@code false}. + * + * @return {@code true} if a hint is not present, otherwise {@code false}. 
+ */ + public boolean isEmpty() { + return !isPresent(); + } + + /** + * Apply the hint to consumers depending on the hint format if {@link #isPresent() present}. + * + * @param registryProvider + * @param stringConsumer + * @param bsonConsumer + * @param + */ + public void ifPresent(@Nullable CodecRegistryProvider registryProvider, Function stringConsumer, + Function bsonConsumer) { + + if (isEmpty()) { + return; + } + apply(registryProvider, stringConsumer, bsonConsumer); + } + + /** + * Apply the hint to consumers depending on the hint format. + * + * @param registryProvider + * @param stringConsumer + * @param bsonConsumer + * @return + * @param + */ + public R apply(@Nullable CodecRegistryProvider registryProvider, Function stringConsumer, + Function bsonConsumer) { + + if (isEmpty()) { + throw new IllegalStateException("No hint present"); + } + + if (hint instanceof Bson bson) { + return bsonConsumer.apply(bson); + } + + if (hint instanceof String hintString) { + + if (BsonUtils.isJsonDocument(hintString)) { + return bsonConsumer.apply(BsonUtils.parse(hintString, registryProvider)); + } + return stringConsumer.apply(hintString); + } + + throw new IllegalStateException( + "Unable to read hint of type %s".formatted(hint != null ? hint.getClass() : "null")); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java index 343d2527de..f5856100d0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,9 +18,11 @@ import java.util.concurrent.TimeUnit; import org.bson.Document; + import org.springframework.core.convert.converter.Converter; import org.springframework.data.mongodb.core.index.IndexDefinition; import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; import org.springframework.lang.Nullable; import org.springframework.util.ObjectUtils; @@ -89,7 +91,7 @@ private static Converter getIndexDefinitionIndexO ops = ops.bits((Integer) indexOptions.get("bits")); } if (indexOptions.containsKey("bucketSize")) { - ops = ops.bucketSize(((Number) indexOptions.get("bucketSize")).doubleValue()); + MongoCompatibilityAdapter.indexOptionsAdapter(ops).setBucketSize(((Number) indexOptions.get("bucketSize")).doubleValue()); } if (indexOptions.containsKey("default_language")) { ops = ops.defaultLanguage(indexOptions.get("default_language").toString()); @@ -115,6 +117,14 @@ private static Converter getIndexDefinitionIndexO ops = ops.collation(fromDocument(indexOptions.get("collation", Document.class))); } + if (indexOptions.containsKey("wildcardProjection")) { + ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class)); + } + + if (indexOptions.containsKey("hidden")) { + ops = ops.hidden((Boolean) indexOptions.get("hidden")); + } + return ops; }; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java new file mode 100644 index 0000000000..da4766343a --- /dev/null +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java @@ -0,0 +1,156 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Collection; +import java.util.List; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.util.StreamUtils; + +/** + * A MongoDB document in its mapped state. I.e. after a source document has been mapped using mapping information of the + * entity the source document was supposed to represent. 
+ * + * @author Oliver Gierke + * @since 2.1 + */ +public class MappedDocument { + + private static final String ID_FIELD = FieldName.ID.name(); + private static final Document ID_ONLY_PROJECTION = new Document(ID_FIELD, 1); + + private final Document document; + + private MappedDocument(Document document) { + this.document = document; + } + + public static MappedDocument of(Document document) { + return new MappedDocument(document); + } + + public static Document getIdOnlyProjection() { + return ID_ONLY_PROJECTION; + } + + public static Document getIdIn(Collection ids) { + return new Document(ID_FIELD, new Document("$in", ids)); + } + + public static List toIds(Collection documents) { + + return documents.stream()// + .map(it -> it.get(ID_FIELD))// + .collect(StreamUtils.toUnmodifiableList()); + } + + public boolean hasId() { + return document.containsKey(ID_FIELD); + } + + public boolean hasNonNullId() { + return hasId() && document.get(ID_FIELD) != null; + } + + public Object getId() { + return document.get(ID_FIELD); + } + + public T getId(Class type) { + return document.get(ID_FIELD, type); + } + + public boolean isIdPresent(Class type) { + return type.isInstance(getId()); + } + + public Bson getIdFilter() { + return new Document(ID_FIELD, document.get(ID_FIELD)); + } + + public Object get(String key) { + return document.get(key); + } + + public UpdateDefinition updateWithoutId() { + return new MappedUpdate(Update.fromDocument(document, ID_FIELD)); + } + + public Document getDocument() { + return this.document; + } + + /** + * Updates the documents {@link #ID_FIELD}. + * + * @param value the {@literal _id} value to set. + * @since 3.4.3 + */ + public void updateId(Object value) { + document.put(ID_FIELD, value); + } + + /** + * An {@link UpdateDefinition} that indicates that the {@link #getUpdateObject() update object} has already been + * mapped to the specific domain type. 
+ * + * @author Christoph Strobl + * @since 2.2 + */ + static class MappedUpdate implements UpdateDefinition { + + private final Update delegate; + + MappedUpdate(Update delegate) { + this.delegate = delegate; + } + + @Override + public Document getUpdateObject() { + return delegate.getUpdateObject(); + } + + @Override + public boolean modifies(String key) { + return delegate.modifies(key); + } + + @Override + public void inc(String version) { + delegate.inc(version); + } + + @Override + public Boolean isIsolated() { + return delegate.isIsolated(); + } + + @Override + public List getArrayFilters() { + return delegate.getArrayFilters(); + } + + @Override + public boolean hasArrayFilters() { + return delegate.hasArrayFilters(); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java new file mode 100644 index 0000000000..bc26dfb68c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java @@ -0,0 +1,452 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumSet; +import java.util.List; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import org.bson.Document; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.Queryable; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ArrayJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.QueryableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder; +import org.springframework.data.mongodb.core.schema.QueryCharacteristic; +import org.springframework.data.mongodb.core.schema.QueryCharacteristics; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject; +import org.springframework.data.util.TypeInformation; +import org.springframework.util.Assert; 
+import org.springframework.util.ClassUtils; +import org.springframework.util.CollectionUtils; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain + * domain type meta information which considers {@link org.springframework.data.mongodb.core.mapping.Field field names} + * and {@link org.springframework.data.mongodb.core.convert.MongoCustomConversions custom conversions}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + */ +class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator { + + private final MongoConverter converter; + private final MappingContext, MongoPersistentProperty> mappingContext; + private final Predicate filter; + private final LinkedMultiValueMap> mergeProperties; + + /** + * Create a new instance of {@link MappingMongoJsonSchemaCreator}. + * + * @param converter must not be {@literal null}. 
+ */ + @SuppressWarnings("unchecked") + MappingMongoJsonSchemaCreator(MongoConverter converter) { + + this(converter, (MappingContext, MongoPersistentProperty>) converter.getMappingContext(), + (property) -> true, new LinkedMultiValueMap<>()); + } + + @SuppressWarnings("unchecked") + MappingMongoJsonSchemaCreator(MongoConverter converter, + MappingContext, MongoPersistentProperty> mappingContext, + Predicate filter, LinkedMultiValueMap> mergeProperties) { + + Assert.notNull(converter, "Converter must not be null"); + this.converter = converter; + this.mappingContext = mappingContext; + this.filter = filter; + this.mergeProperties = mergeProperties; + } + + @Override + public MongoJsonSchemaCreator filter(Predicate filter) { + return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter, mergeProperties); + } + + @Override + public PropertySpecifier property(String path) { + return types -> withTypesFor(path, types); + } + + /** + * Specify additional types to be considered when rendering the schema for the given path. + * + * @param path path the path using {@literal dot '.'} notation. + * @param types must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.4 + */ + public MongoJsonSchemaCreator withTypesFor(String path, Class... 
types) { + + LinkedMultiValueMap> clone = mergeProperties.clone(); + for (Class type : types) { + clone.add(path, type); + } + return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter, clone); + } + + @Override + public MongoJsonSchema createSchemaFor(Class type) { + + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(type); + MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder(); + + Encrypted encrypted = entity.findAnnotation(Encrypted.class); + if (encrypted != null) { + schemaBuilder.encryptionMetadata(getEncryptionMetadata(entity, encrypted)); + } + + List schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity); + schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0])); + + return schemaBuilder.build(); + } + + private static Document getEncryptionMetadata(MongoPersistentEntity entity, Encrypted encrypted) { + + Document encryptionMetadata = new Document(); + + Collection encryptionKeyIds = entity.getEncryptionKeyIds(); + if (!CollectionUtils.isEmpty(encryptionKeyIds)) { + encryptionMetadata.append("keyId", encryptionKeyIds); + } + + if (StringUtils.hasText(encrypted.algorithm())) { + encryptionMetadata.append("algorithm", encrypted.algorithm()); + } + + return encryptionMetadata; + } + + private List computePropertiesForEntity(List path, + MongoPersistentEntity entity) { + + List schemaProperties = new ArrayList<>(); + + for (MongoPersistentProperty nested : entity) { + + List currentPath = new ArrayList<>(path); + + String stringPath = currentPath.stream().map(PersistentProperty::getName).collect(Collectors.joining(".")); + stringPath = StringUtils.hasText(stringPath) ? (stringPath + "." 
+ nested.getName()) : nested.getName(); + if (!filter.test(new PropertyContext(stringPath, nested))) { + if (!mergeProperties.containsKey(stringPath)) { + continue; + } + } + + if (path.contains(nested)) { // cycle guard + schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)), + Object.class, false)); + break; + } + + currentPath.add(nested); + schemaProperties.add(computeSchemaForProperty(currentPath)); + } + + return schemaProperties; + } + + private JsonSchemaProperty computeSchemaForProperty(List path) { + + String stringPath = path.stream().map(MongoPersistentProperty::getName).collect(Collectors.joining(".")); + MongoPersistentProperty property = CollectionUtils.lastElement(path); + + boolean required = isRequiredProperty(property); + Class rawTargetType = computeTargetType(property); // target type before conversion + Class targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type + + if ((rawTargetType.isPrimitive() || ClassUtils.isPrimitiveArray(rawTargetType)) && targetType == Object.class + || ClassUtils.isAssignable(targetType, rawTargetType)) { + targetType = rawTargetType; + } + + if (!isCollection(property) && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) { + if (property.isEntity() || mergeProperties.containsKey(stringPath)) { + List targetProperties = new ArrayList<>(); + + if (property.isEntity()) { + targetProperties.add(createObjectSchemaPropertyForEntity(path, property, required)); + } + if (mergeProperties.containsKey(stringPath)) { + for (Class theType : mergeProperties.get(stringPath)) { + + ObjectJsonSchemaProperty target = JsonSchemaProperty.object(property.getName()); + List nestedProperties = computePropertiesForEntity(path, + mappingContext.getRequiredPersistentEntity(theType)); + + targetProperties.add(createPotentiallyRequiredSchemaProperty( + target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required)); + 
} + } + JsonSchemaProperty schemaProperty = targetProperties.size() == 1 ? targetProperties.iterator().next() + : JsonSchemaProperty.merged(targetProperties); + return applyEncryptionDataIfNecessary(property, schemaProperty); + } + } + + String fieldName = computePropertyFieldName(property); + + JsonSchemaProperty schemaProperty; + if (isCollection(property)) { + schemaProperty = createArraySchemaProperty(fieldName, property, required); + } else if (property.isMap()) { + schemaProperty = createSchemaProperty(fieldName, Type.objectType(), required); + } else if (ClassUtils.isAssignable(Enum.class, targetType)) { + schemaProperty = createEnumSchemaProperty(fieldName, targetType, required); + } else { + schemaProperty = createSchemaProperty(fieldName, targetType, required); + } + + return applyEncryptionDataIfNecessary(property, schemaProperty); + } + + private JsonSchemaProperty createArraySchemaProperty(String fieldName, MongoPersistentProperty property, + boolean required) { + + ArrayJsonSchemaProperty schemaProperty = JsonSchemaProperty.array(fieldName); + + if (isSpecificType(property)) { + schemaProperty = potentiallyEnhanceArraySchemaProperty(property, schemaProperty); + } + + return createPotentiallyRequiredSchemaProperty(schemaProperty, required); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private ArrayJsonSchemaProperty potentiallyEnhanceArraySchemaProperty(MongoPersistentProperty property, + ArrayJsonSchemaProperty schemaProperty) { + + MongoPersistentEntity persistentEntity = mappingContext + .getPersistentEntity(property.getTypeInformation().getRequiredComponentType()); + + if (persistentEntity != null) { + + List nestedProperties = computePropertiesForEntity(Collections.emptyList(), persistentEntity); + + if (nestedProperties.isEmpty()) { + return schemaProperty; + } + + return schemaProperty + .items(JsonSchemaObject.object().properties(nestedProperties.toArray(new JsonSchemaProperty[0]))); + } + + if (ClassUtils.isAssignable(Enum.class, 
property.getActualType())) { + + List possibleValues = getPossibleEnumValues((Class) property.getActualType()); + + return schemaProperty + .items(createSchemaObject(computeTargetType(property.getActualType(), possibleValues), possibleValues)); + } + + return schemaProperty.items(JsonSchemaObject.of(property.getActualType())); + } + + private boolean isSpecificType(MongoPersistentProperty property) { + return !TypeInformation.OBJECT.equals(property.getTypeInformation().getActualType()); + } + + private JsonSchemaProperty applyEncryptionDataIfNecessary(MongoPersistentProperty property, + JsonSchemaProperty schemaProperty) { + + Encrypted encrypted = property.findAnnotation(Encrypted.class); + if (encrypted == null) { + return schemaProperty; + } + + EncryptedJsonSchemaProperty enc = new EncryptedJsonSchemaProperty(schemaProperty); + if (StringUtils.hasText(encrypted.algorithm())) { + enc = enc.algorithm(encrypted.algorithm()); + } + if (!ObjectUtils.isEmpty(encrypted.keyId())) { + enc = enc.keys(property.getEncryptionKeyIds()); + } + + Queryable queryable = property.findAnnotation(Queryable.class); + if (queryable == null || !StringUtils.hasText(queryable.queryType())) { + return enc; + } + + QueryCharacteristic characteristic = new QueryCharacteristic() { + + @Override + public String queryType() { + return queryable.queryType(); + } + + @Override + public Document toDocument() { + + Document options = QueryCharacteristic.super.toDocument(); + + if (queryable.contentionFactor() >= 0) { + options.put("contention", queryable.contentionFactor()); + } + + if (StringUtils.hasText(queryable.queryAttributes())) { + options.putAll(Document.parse(queryable.queryAttributes())); + } + + return options; + } + }; + return new QueryableJsonSchemaProperty(enc, QueryCharacteristics.of(characteristic)); + } + + private JsonSchemaProperty createObjectSchemaPropertyForEntity(List path, + MongoPersistentProperty property, boolean required) { + + ObjectJsonSchemaProperty target = 
JsonSchemaProperty.object(property.getName()); + List nestedProperties = computePropertiesForEntity(path, + mappingContext.getRequiredPersistentEntity(property)); + + return createPotentiallyRequiredSchemaProperty( + target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private JsonSchemaProperty createEnumSchemaProperty(String fieldName, Class targetType, boolean required) { + + List possibleValues = getPossibleEnumValues((Class) targetType); + + targetType = computeTargetType(targetType, possibleValues); + return createSchemaProperty(fieldName, targetType, required, possibleValues); + } + + JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required) { + return createSchemaProperty(fieldName, type, required, Collections.emptyList()); + } + + JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required, + Collection possibleValues) { + + TypedJsonSchemaObject schemaObject = createSchemaObject(type, possibleValues); + + return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required); + } + + private TypedJsonSchemaObject createSchemaObject(Object type, Collection possibleValues) { + + TypedJsonSchemaObject schemaObject = type instanceof Type typeObject ? JsonSchemaObject.of(typeObject) + : JsonSchemaObject.of(Class.class.cast(type)); + + if (!CollectionUtils.isEmpty(possibleValues)) { + schemaObject = schemaObject.possibleValues(possibleValues); + } + return schemaObject; + } + + private String computePropertyFieldName(PersistentProperty property) { + + return property instanceof MongoPersistentProperty mongoPersistentProperty ? 
mongoPersistentProperty.getFieldName() + : property.getName(); + } + + private boolean isRequiredProperty(PersistentProperty property) { + return property.getType().isPrimitive(); + } + + private Class computeTargetType(PersistentProperty property) { + + if (!(property instanceof MongoPersistentProperty mongoProperty)) { + return property.getType(); + } + + if (!property.getOwner().isIdProperty(property)) { + return mongoProperty.getFieldType(); + } + + if (mongoProperty.hasExplicitWriteTarget()) { + return mongoProperty.getRequiredAnnotation(Field.class).targetType().getJavaClass(); + } + + return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType(); + } + + private static Class computeTargetType(Class fallback, List possibleValues) { + return possibleValues.isEmpty() ? fallback : possibleValues.iterator().next().getClass(); + } + + private > List getPossibleEnumValues(Class targetType) { + + EnumSet enumSet = EnumSet.allOf(targetType); + List possibleValues = new ArrayList<>(enumSet.size()); + + for (Object enumValue : enumSet) { + possibleValues.add(converter.convertToMongoType(enumValue)); + } + + return possibleValues; + } + + private static boolean isCollection(MongoPersistentProperty property) { + return property.isCollectionLike() && !property.getType().equals(byte[].class); + } + + static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) { + return required ? 
JsonSchemaProperty.required(property) : property; + } + + class PropertyContext implements JsonSchemaPropertyContext { + + private final String path; + private final MongoPersistentProperty property; + + public PropertyContext(String path, MongoPersistentProperty property) { + this.path = path; + this.property = property; + } + + @Override + public String getPath() { + return path; + } + + @Override + public MongoPersistentProperty getProperty() { + return property; + } + + @Override + public MongoPersistentEntity resolveEntity(MongoPersistentProperty property) { + return (MongoPersistentEntity) mappingContext.getPersistentEntity(property); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java index a9d277261c..fdfeaa81ad 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -57,8 +57,8 @@ public class MongoAction { public MongoAction(@Nullable WriteConcern defaultWriteConcern, MongoActionOperation mongoActionOperation, String collectionName, @Nullable Class entityType, @Nullable Document document, @Nullable Document query) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(mongoActionOperation, "MongoActionOperation must not be null!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(mongoActionOperation, "MongoActionOperation must not be null"); this.defaultWriteConcern = defaultWriteConcern; this.mongoActionOperation = mongoActionOperation; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java index 9d722018af..509d10887b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,9 +21,10 @@ * * @author Mark Pollack * @author Oliver Gierke + * @author Christoph Strobl * @see MongoAction */ public enum MongoActionOperation { - REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK; + REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK, REPLACE } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java index 6b697c01c7..5fcc6c9599 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,7 +20,7 @@ import org.springframework.jmx.export.annotation.ManagedResource; import org.springframework.util.Assert; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoDatabase; /** @@ -30,37 +30,34 @@ * @author Thomas Darimont * @author Mark Paluch * @author Christoph Strobl + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) @ManagedResource(description = "Mongo Admin Operations") public class MongoAdmin implements MongoAdminOperations { private final MongoClient mongoClient; - public MongoAdmin(MongoClient mongoClient) { + /** + * @param client the underlying {@link com.mongodb.client.MongoClient} used for data access. + * @since 2.2 + */ + public MongoAdmin(MongoClient client) { - Assert.notNull(mongoClient, "MongoClient must not be null!"); - this.mongoClient = mongoClient; + Assert.notNull(client, "Client must not be null"); + this.mongoClient = client; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#dropDatabase(java.lang.String) - */ @ManagedOperation public void dropDatabase(String databaseName) { getDB(databaseName).drop(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#createDatabase(java.lang.String) - */ @ManagedOperation public void createDatabase(String databaseName) { getDB(databaseName); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#getDatabaseStats(java.lang.String) - */ @ManagedOperation public String getDatabaseStats(String databaseName) { return getDB(databaseName).runCommand(new Document("dbStats", 1).append("scale", 1024)).toJson(); diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java index 70ef0f443f..ec03302f7e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,6 +21,7 @@ * @author Mark Pollack * @author Oliver Gierke */ +@Deprecated(since = "4.5", forRemoval = true) public interface MongoAdminOperations { @ManagedOperation diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java index 078ab8cc67..c5fee9cf54 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,66 +16,70 @@ package org.springframework.data.mongodb.core; import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Collections; +import java.util.Arrays; import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; +import org.bson.UuidRepresentation; import org.springframework.beans.factory.config.AbstractFactoryBean; import org.springframework.dao.DataAccessException; import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.SpringDataMongoDB; import org.springframework.lang.Nullable; import org.springframework.util.CollectionUtils; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientOptions; +import com.mongodb.ConnectionString; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; import com.mongodb.MongoCredential; import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import com.mongodb.connection.ClusterSettings; +import com.mongodb.connection.ConnectionPoolSettings; +import com.mongodb.connection.ServerSettings; +import com.mongodb.connection.SocketSettings; +import com.mongodb.connection.SslSettings; +import com.mongodb.event.ClusterListener; /** * Convenient factory for configuring MongoDB. 
* * @author Christoph Strobl * @author Mark Paluch - * @since 1.7 */ public class MongoClientFactoryBean extends AbstractFactoryBean implements PersistenceExceptionTranslator { - private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator(); - - private @Nullable MongoClientOptions mongoClientOptions; + private @Nullable MongoClientSettings mongoClientSettings; private @Nullable String host; private @Nullable Integer port; - private List replicaSetSeeds = Collections.emptyList(); - private List credentials = Collections.emptyList(); + private @Nullable List credential = null; + private @Nullable ConnectionString connectionString; + private @Nullable String replicaSet = null; - private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR; + private PersistenceExceptionTranslator exceptionTranslator = MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR; /** - * Set the {@link MongoClientOptions} to be used when creating {@link MongoClient}. + * Set the {@link MongoClientSettings} to be used when creating {@link MongoClient}. * * @param mongoClientOptions */ - public void setMongoClientOptions(@Nullable MongoClientOptions mongoClientOptions) { - this.mongoClientOptions = mongoClientOptions; + public void setMongoClientSettings(@Nullable MongoClientSettings mongoClientOptions) { + this.mongoClientSettings = mongoClientOptions; } /** * Set the list of credentials to be used when creating {@link MongoClient}. * - * @param credentials can be {@literal null}. + * @param credential can be {@literal null}. */ - public void setCredentials(@Nullable MongoCredential[] credentials) { - this.credentials = filterNonNullElementsAsList(credentials); - } - - /** - * Set the list of {@link ServerAddress} to build up a replica set for. - * - * @param replicaSetSeeds can be {@literal null}. 
- */ - public void setReplicaSetSeeds(@Nullable ServerAddress[] replicaSetSeeds) { - this.replicaSetSeeds = filterNonNullElementsAsList(replicaSetSeeds); + public void setCredential(@Nullable MongoCredential[] credential) { + this.credential = Arrays.asList(credential); } /** @@ -96,50 +100,40 @@ public void setPort(int port) { this.port = port; } + public void setConnectionString(@Nullable ConnectionString connectionString) { + this.connectionString = connectionString; + } + + public void setReplicaSet(@Nullable String replicaSet) { + this.replicaSet = replicaSet; + } + /** * Configures the {@link PersistenceExceptionTranslator} to use. * * @param exceptionTranslator */ public void setExceptionTranslator(@Nullable PersistenceExceptionTranslator exceptionTranslator) { - this.exceptionTranslator = exceptionTranslator == null ? DEFAULT_EXCEPTION_TRANSLATOR : exceptionTranslator; + this.exceptionTranslator = exceptionTranslator == null ? MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR + : exceptionTranslator; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - public Class getObjectType() { - return MongoClient.class; - } - - /* - * (non-Javadoc) - * @see org.springframework.dao.support.PersistenceExceptionTranslator#translateExceptionIfPossible(java.lang.RuntimeException) - */ + @Override @Nullable public DataAccessException translateExceptionIfPossible(RuntimeException ex) { return exceptionTranslator.translateExceptionIfPossible(ex); } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ @Override - protected MongoClient createInstance() throws Exception { - - if (mongoClientOptions == null) { - mongoClientOptions = MongoClientOptions.builder().build(); - } + public Class getObjectType() { + return MongoClient.class; + } - return createMongoClient(); + @Override + protected MongoClient createInstance() throws Exception { + return 
createMongoClient(computeClientSetting()); } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#destroyInstance(java.lang.Object) - */ @Override protected void destroyInstance(@Nullable MongoClient instance) throws Exception { @@ -148,43 +142,207 @@ protected void destroyInstance(@Nullable MongoClient instance) throws Exception } } - private MongoClient createMongoClient() throws UnknownHostException { + /** + * Create {@link MongoClientSettings} based on configuration and priority (lower is better). + *
    + *
  1. {@link MongoClientFactoryBean#mongoClientSettings}
  2. + *
  3. {@link MongoClientFactoryBean#connectionString}
  4. + *
  5. default {@link MongoClientSettings}
  6. + *
+ * + * @since 3.0 + */ + protected MongoClientSettings computeClientSetting() { - if (!CollectionUtils.isEmpty(replicaSetSeeds)) { - return new MongoClient(replicaSetSeeds, credentials, mongoClientOptions); + if (connectionString != null && (StringUtils.hasText(host) || port != null)) { + throw new IllegalStateException("ConnectionString and host/port configuration exclude one another"); } - return new MongoClient(createConfiguredOrDefaultServerAddress(), credentials, mongoClientOptions); - } + ConnectionString connectionString = this.connectionString != null ? this.connectionString + : new ConnectionString(String.format("mongodb://%s:%s", getOrDefault(host, ServerAddress.defaultHost()), + getOrDefault(port, "" + ServerAddress.defaultPort()))); + + Builder builder = MongoClientSettings.builder().applyConnectionString(connectionString); + builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY); + + if (mongoClientSettings != null) { + + MongoClientSettings defaultSettings = MongoClientSettings.builder().build(); + + SslSettings sslSettings = mongoClientSettings.getSslSettings(); + ClusterSettings clusterSettings = mongoClientSettings.getClusterSettings(); + ConnectionPoolSettings connectionPoolSettings = mongoClientSettings.getConnectionPoolSettings(); + SocketSettings socketSettings = mongoClientSettings.getSocketSettings(); + ServerSettings serverSettings = mongoClientSettings.getServerSettings(); - private ServerAddress createConfiguredOrDefaultServerAddress() throws UnknownHostException { + builder = builder // + .applicationName(computeSettingsValue(defaultSettings.getApplicationName(), + mongoClientSettings.getApplicationName(), connectionString.getApplicationName())) // + .applyToSslSettings(settings -> { - ServerAddress defaultAddress = new ServerAddress(); + applySettings(settings::enabled, computeSettingsValue(SslSettings::isEnabled, + defaultSettings.getSslSettings(), sslSettings, connectionString.getSslEnabled())); + 
applySettings(settings::invalidHostNameAllowed, (computeSettingsValue(SslSettings::isInvalidHostNameAllowed, + defaultSettings.getSslSettings(), sslSettings, connectionString.getSslInvalidHostnameAllowed()))); + settings.context(sslSettings.getContext()); + }).applyToClusterSettings(settings -> { - return new ServerAddress(StringUtils.hasText(host) ? host : defaultAddress.getHost(), - port != null ? port.intValue() : defaultAddress.getPort()); + applySettings(settings::hosts, + computeSettingsValue(ClusterSettings::getHosts, defaultSettings.getClusterSettings(), clusterSettings, + connectionString.getHosts().stream().map(ServerAddress::new).collect(Collectors.toList()))); + + applySettings(settings::requiredReplicaSetName, + computeSettingsValue(ClusterSettings::getRequiredReplicaSetName, defaultSettings.getClusterSettings(), + clusterSettings, connectionString.getRequiredReplicaSetName())); + + applySettings(settings::srvHost, computeSettingsValue(ClusterSettings::getSrvHost, + defaultSettings.getClusterSettings(), clusterSettings, null)); + + applySettings(settings::mode, computeSettingsValue(ClusterSettings::getMode, + defaultSettings.getClusterSettings(), clusterSettings, null)); + + applySettings(it -> settings.localThreshold(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ClusterSettings it) -> it.getLocalThreshold(TimeUnit.MILLISECONDS), + defaultSettings.getClusterSettings(), clusterSettings, connectionString.getLocalThreshold())); + + applySettings(settings::requiredClusterType, computeSettingsValue(ClusterSettings::getRequiredClusterType, + defaultSettings.getClusterSettings(), clusterSettings, null)); + applySettings(it -> settings.serverSelectionTimeout(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ClusterSettings it) -> it.getServerSelectionTimeout(TimeUnit.MILLISECONDS), + defaultSettings.getClusterSettings(), clusterSettings, + connectionString.getServerSelectionTimeout())); + + 
applySettings(settings::serverSelector, computeSettingsValue(ClusterSettings::getServerSelector, + defaultSettings.getClusterSettings(), clusterSettings, null)); + List clusterListeners = computeSettingsValue(ClusterSettings::getClusterListeners, + defaultSettings.getClusterSettings(), clusterSettings, null); + if (clusterListeners != null) { + clusterListeners.forEach(settings::addClusterListener); + } + }) // + .applyToConnectionPoolSettings(settings -> { + + applySettings(it -> settings.maintenanceFrequency(it, TimeUnit.MILLISECONDS), + computeSettingsValue((ConnectionPoolSettings it) -> it.getMaintenanceFrequency(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null)); + + applySettings(it -> settings.maxConnectionIdleTime(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionIdleTime(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, + connectionString.getMaxConnectionIdleTime())); + + applySettings(it -> settings.maxConnectionLifeTime(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionLifeTime(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, + connectionString.getMaxConnectionLifeTime())); + + applySettings(it -> settings.maxWaitTime(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxWaitTime(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, + connectionString.getMaxWaitTime())); + + applySettings(it -> settings.maintenanceInitialDelay(it, TimeUnit.MILLISECONDS), + computeSettingsValue( + (ConnectionPoolSettings it) -> it.getMaintenanceInitialDelay(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null)); + + applySettings(settings::minSize, + 
computeSettingsValue(ConnectionPoolSettings::getMinSize, defaultSettings.getConnectionPoolSettings(), + connectionPoolSettings, connectionString.getMinConnectionPoolSize())); + applySettings(settings::maxSize, + computeSettingsValue(ConnectionPoolSettings::getMaxSize, defaultSettings.getConnectionPoolSettings(), + connectionPoolSettings, connectionString.getMaxConnectionPoolSize())); + }) // + .applyToSocketSettings(settings -> { + + applySettings(it -> settings.connectTimeout(it, TimeUnit.MILLISECONDS), + computeSettingsValue((SocketSettings it) -> it.getConnectTimeout(TimeUnit.MILLISECONDS), + defaultSettings.getSocketSettings(), socketSettings, connectionString.getConnectTimeout())); + + applySettings(it -> settings.readTimeout(it, TimeUnit.MILLISECONDS), + computeSettingsValue((SocketSettings it) -> it.getReadTimeout(TimeUnit.MILLISECONDS), + defaultSettings.getSocketSettings(), socketSettings, connectionString.getSocketTimeout())); + applySettings(settings::receiveBufferSize, computeSettingsValue(SocketSettings::getReceiveBufferSize, + defaultSettings.getSocketSettings(), socketSettings, null)); + applySettings(settings::sendBufferSize, computeSettingsValue(SocketSettings::getSendBufferSize, + defaultSettings.getSocketSettings(), socketSettings, null)); + }) // + .applyToServerSettings(settings -> { + + applySettings(it -> settings.minHeartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ServerSettings it) -> it.getMinHeartbeatFrequency(TimeUnit.MILLISECONDS), + defaultSettings.getServerSettings(), serverSettings, null)); + + applySettings(it -> settings.heartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ServerSettings it) -> it.getHeartbeatFrequency(TimeUnit.MILLISECONDS), + defaultSettings.getServerSettings(), serverSettings, connectionString.getHeartbeatFrequency())); + settings.applySettings(serverSettings); + }) // + .autoEncryptionSettings(mongoClientSettings.getAutoEncryptionSettings()) // + 
.codecRegistry(mongoClientSettings.getCodecRegistry()); // + + applySettings(builder::readConcern, computeSettingsValue(defaultSettings.getReadConcern(), + mongoClientSettings.getReadConcern(), connectionString.getReadConcern())); + applySettings(builder::writeConcern, computeSettingsValue(defaultSettings.getWriteConcern(), + mongoClientSettings.getWriteConcern(), connectionString.getWriteConcern())); + applySettings(builder::readPreference, computeSettingsValue(defaultSettings.getReadPreference(), + mongoClientSettings.getReadPreference(), connectionString.getReadPreference())); + applySettings(builder::retryReads, computeSettingsValue(defaultSettings.getRetryReads(), + mongoClientSettings.getRetryReads(), connectionString.getRetryReads())); + applySettings(builder::retryWrites, computeSettingsValue(defaultSettings.getRetryWrites(), + mongoClientSettings.getRetryWrites(), connectionString.getRetryWritesValue())); + applySettings(builder::uuidRepresentation, + computeSettingsValue(null, mongoClientSettings.getUuidRepresentation(), UuidRepresentation.JAVA_LEGACY)); + } + + if (!CollectionUtils.isEmpty(credential)) { + builder = builder.credential(credential.iterator().next()); + } + + if (StringUtils.hasText(replicaSet)) { + builder.applyToClusterSettings((settings) -> { + settings.requiredReplicaSetName(replicaSet); + }); + } + + return builder.build(); } - /** - * Returns the given array as {@link List} with all {@literal null} elements removed. - * - * @param elements the elements to filter , can be {@literal null}. - * @return a new unmodifiable {@link List#} from the given elements without {@literal null}s. 
- */ - private static List filterNonNullElementsAsList(@Nullable T[] elements) { + private void applySettings(Consumer settingsBuilder, @Nullable T value) { - if (elements == null) { - return Collections.emptyList(); + if (ObjectUtils.isEmpty(value)) { + return; } + settingsBuilder.accept(value); + } - List candidateElements = new ArrayList(); + private T computeSettingsValue(Function function, S defaultValueHolder, S settingsValueHolder, + @Nullable T connectionStringValue) { + return computeSettingsValue(function.apply(defaultValueHolder), function.apply(settingsValueHolder), + connectionStringValue); + } + + private T computeSettingsValue(T defaultValue, T fromSettings, T fromConnectionString) { - for (T element : elements) { - if (element != null) { - candidateElements.add(element); - } + boolean fromSettingsIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromSettings); + boolean fromConnectionStringIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromConnectionString); + + if (!fromSettingsIsDefault) { + return fromSettings; } + return !fromConnectionStringIsDefault ? fromConnectionString : defaultValue; + } - return Collections.unmodifiableList(candidateElements); + private MongoClient createMongoClient(MongoClientSettings settings) throws UnknownHostException { + return MongoClients.create(settings, SpringDataMongoDB.driverInformation()); + } + + private String getOrDefault(Object value, String defaultValue) { + + if(value == null) { + return defaultValue; + } + String sValue = value.toString(); + return StringUtils.hasText(sValue) ? 
sValue : defaultValue; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBean.java deleted file mode 100644 index 3481a9f80e..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBean.java +++ /dev/null @@ -1,314 +0,0 @@ -/* - * Copyright 2015-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import javax.net.SocketFactory; -import javax.net.ssl.SSLSocketFactory; - -import org.springframework.beans.factory.config.AbstractFactoryBean; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.lang.Nullable; - -import com.mongodb.DBDecoderFactory; -import com.mongodb.DBEncoderFactory; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientOptions; -import com.mongodb.ReadPreference; -import com.mongodb.WriteConcern; - -/** - * A factory bean for construction of a {@link MongoClientOptions} instance. 
- * - * @author Christoph Strobl - * @author Oliver Gierke - * @author Mark Paluch - * @since 1.7 - */ -public class MongoClientOptionsFactoryBean extends AbstractFactoryBean { - - private static final MongoClientOptions DEFAULT_MONGO_OPTIONS = MongoClientOptions.builder().build(); - - private @Nullable String description = DEFAULT_MONGO_OPTIONS.getDescription(); - private int minConnectionsPerHost = DEFAULT_MONGO_OPTIONS.getMinConnectionsPerHost(); - private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.getConnectionsPerHost(); - private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS - .getThreadsAllowedToBlockForConnectionMultiplier(); - private int maxWaitTime = DEFAULT_MONGO_OPTIONS.getMaxWaitTime(); - private int maxConnectionIdleTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionIdleTime(); - private int maxConnectionLifeTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionLifeTime(); - private int connectTimeout = DEFAULT_MONGO_OPTIONS.getConnectTimeout(); - private int socketTimeout = DEFAULT_MONGO_OPTIONS.getSocketTimeout(); - private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.isSocketKeepAlive(); - private @Nullable ReadPreference readPreference = DEFAULT_MONGO_OPTIONS.getReadPreference(); - private DBDecoderFactory dbDecoderFactory = DEFAULT_MONGO_OPTIONS.getDbDecoderFactory(); - private DBEncoderFactory dbEncoderFactory = DEFAULT_MONGO_OPTIONS.getDbEncoderFactory(); - private @Nullable WriteConcern writeConcern = DEFAULT_MONGO_OPTIONS.getWriteConcern(); - private @Nullable SocketFactory socketFactory = DEFAULT_MONGO_OPTIONS.getSocketFactory(); - private boolean cursorFinalizerEnabled = DEFAULT_MONGO_OPTIONS.isCursorFinalizerEnabled(); - private boolean alwaysUseMBeans = DEFAULT_MONGO_OPTIONS.isAlwaysUseMBeans(); - private int heartbeatFrequency = DEFAULT_MONGO_OPTIONS.getHeartbeatFrequency(); - private int minHeartbeatFrequency = DEFAULT_MONGO_OPTIONS.getMinHeartbeatFrequency(); - private int heartbeatConnectTimeout = 
DEFAULT_MONGO_OPTIONS.getHeartbeatConnectTimeout(); - private int heartbeatSocketTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatSocketTimeout(); - private String requiredReplicaSetName = DEFAULT_MONGO_OPTIONS.getRequiredReplicaSetName(); - private int serverSelectionTimeout = DEFAULT_MONGO_OPTIONS.getServerSelectionTimeout(); - - private boolean ssl; - private @Nullable SSLSocketFactory sslSocketFactory; - - /** - * Set the {@link MongoClient} description. - * - * @param description - */ - public void setDescription(@Nullable String description) { - this.description = description; - } - - /** - * Set the minimum number of connections per host. - * - * @param minConnectionsPerHost - */ - public void setMinConnectionsPerHost(int minConnectionsPerHost) { - this.minConnectionsPerHost = minConnectionsPerHost; - } - - /** - * Set the number of connections allowed per host. Will block if run out. Default is 10. System property - * {@code MONGO.POOLSIZE} can override - * - * @param connectionsPerHost - */ - public void setConnectionsPerHost(int connectionsPerHost) { - this.connectionsPerHost = connectionsPerHost; - } - - /** - * Set the multiplier for connectionsPerHost for # of threads that can block. Default is 5. If connectionsPerHost is - * 10, and threadsAllowedToBlockForConnectionMultiplier is 5, then 50 threads can block more than that and an - * exception will be thrown. - * - * @param threadsAllowedToBlockForConnectionMultiplier - */ - public void setThreadsAllowedToBlockForConnectionMultiplier(int threadsAllowedToBlockForConnectionMultiplier) { - this.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier; - } - - /** - * Set the max wait time of a blocking thread for a connection. Default is 12000 ms (2 minutes) - * - * @param maxWaitTime - */ - public void setMaxWaitTime(int maxWaitTime) { - this.maxWaitTime = maxWaitTime; - } - - /** - * The maximum idle time for a pooled connection. 
- * - * @param maxConnectionIdleTime - */ - public void setMaxConnectionIdleTime(int maxConnectionIdleTime) { - this.maxConnectionIdleTime = maxConnectionIdleTime; - } - - /** - * Set the maximum life time for a pooled connection. - * - * @param maxConnectionLifeTime - */ - public void setMaxConnectionLifeTime(int maxConnectionLifeTime) { - this.maxConnectionLifeTime = maxConnectionLifeTime; - } - - /** - * Set the connect timeout in milliseconds. 0 is default and infinite. - * - * @param connectTimeout - */ - public void setConnectTimeout(int connectTimeout) { - this.connectTimeout = connectTimeout; - } - - /** - * Set the socket timeout. 0 is default and infinite. - * - * @param socketTimeout - */ - public void setSocketTimeout(int socketTimeout) { - this.socketTimeout = socketTimeout; - } - - /** - * Set the keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false. - * - * @param socketKeepAlive - */ - public void setSocketKeepAlive(boolean socketKeepAlive) { - this.socketKeepAlive = socketKeepAlive; - } - - /** - * Set the {@link ReadPreference}. - * - * @param readPreference - */ - public void setReadPreference(@Nullable ReadPreference readPreference) { - this.readPreference = readPreference; - } - - /** - * Set the {@link WriteConcern} that will be the default value used when asking the {@link MongoDbFactory} for a DB - * object. - * - * @param writeConcern - */ - public void setWriteConcern(@Nullable WriteConcern writeConcern) { - this.writeConcern = writeConcern; - } - - /** - * @param socketFactory - */ - public void setSocketFactory(@Nullable SocketFactory socketFactory) { - this.socketFactory = socketFactory; - } - - /** - * Set the frequency that the driver will attempt to determine the current state of each server in the cluster. 
- * - * @param heartbeatFrequency - */ - public void setHeartbeatFrequency(int heartbeatFrequency) { - this.heartbeatFrequency = heartbeatFrequency; - } - - /** - * In the event that the driver has to frequently re-check a server's availability, it will wait at least this long - * since the previous check to avoid wasted effort. - * - * @param minHeartbeatFrequency - */ - public void setMinHeartbeatFrequency(int minHeartbeatFrequency) { - this.minHeartbeatFrequency = minHeartbeatFrequency; - } - - /** - * Set the connect timeout for connections used for the cluster heartbeat. - * - * @param heartbeatConnectTimeout - */ - public void setHeartbeatConnectTimeout(int heartbeatConnectTimeout) { - this.heartbeatConnectTimeout = heartbeatConnectTimeout; - } - - /** - * Set the socket timeout for connections used for the cluster heartbeat. - * - * @param heartbeatSocketTimeout - */ - public void setHeartbeatSocketTimeout(int heartbeatSocketTimeout) { - this.heartbeatSocketTimeout = heartbeatSocketTimeout; - } - - /** - * Configures the name of the replica set. - * - * @param requiredReplicaSetName - */ - public void setRequiredReplicaSetName(String requiredReplicaSetName) { - this.requiredReplicaSetName = requiredReplicaSetName; - } - - /** - * This controls if the driver should us an SSL connection. Defaults to |@literal false}. - * - * @param ssl - */ - public void setSsl(boolean ssl) { - this.ssl = ssl; - } - - /** - * Set the {@link SSLSocketFactory} to use for the {@literal SSL} connection. If none is configured here, - * {@link SSLSocketFactory#getDefault()} will be used. - * - * @param sslSocketFactory - */ - public void setSslSocketFactory(@Nullable SSLSocketFactory sslSocketFactory) { - - this.sslSocketFactory = sslSocketFactory; - this.ssl = sslSocketFactory != null; - } - - /** - * Set the {@literal server selection timeout} in msec for a 3.x MongoDB Java driver. If not set the default value of - * 30 sec will be used. 
A value of 0 means that it will timeout immediately if no server is available. A negative - * value means to wait indefinitely. - * - * @param serverSelectionTimeout in msec. - */ - public void setServerSelectionTimeout(int serverSelectionTimeout) { - this.serverSelectionTimeout = serverSelectionTimeout; - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ - @SuppressWarnings("ConstantConditions") - @Override - protected MongoClientOptions createInstance() throws Exception { - - SocketFactory socketFactoryToUse = ssl - ? (sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault()) - : this.socketFactory; - - return MongoClientOptions.builder() // - .alwaysUseMBeans(this.alwaysUseMBeans) // - .connectionsPerHost(this.connectionsPerHost) // - .connectTimeout(connectTimeout) // - .cursorFinalizerEnabled(cursorFinalizerEnabled) // - .dbDecoderFactory(dbDecoderFactory) // - .dbEncoderFactory(dbEncoderFactory) // - .description(description) // - .heartbeatConnectTimeout(heartbeatConnectTimeout) // - .heartbeatFrequency(heartbeatFrequency) // - .heartbeatSocketTimeout(heartbeatSocketTimeout) // - .maxConnectionIdleTime(maxConnectionIdleTime) // - .maxConnectionLifeTime(maxConnectionLifeTime) // - .maxWaitTime(maxWaitTime) // - .minConnectionsPerHost(minConnectionsPerHost) // - .minHeartbeatFrequency(minHeartbeatFrequency) // - .readPreference(readPreference) // - .requiredReplicaSetName(requiredReplicaSetName) // - .serverSelectionTimeout(serverSelectionTimeout) // - .socketFactory(socketFactoryToUse) // - .socketKeepAlive(socketKeepAlive) // - .socketTimeout(socketTimeout) // - .threadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier) // - .writeConcern(writeConcern).build(); - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - public Class getObjectType() { - return MongoClientOptions.class; - } -} 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java new file mode 100644 index 0000000000..02913b4303 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java @@ -0,0 +1,515 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import javax.net.ssl.SSLContext; + +import org.bson.UuidRepresentation; +import org.bson.codecs.configuration.CodecRegistry; + +import org.springframework.beans.factory.config.AbstractFactoryBean; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; +import org.springframework.lang.Nullable; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.AutoEncryptionSettings; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.ServerAddress; +import com.mongodb.ServerApi; +import com.mongodb.WriteConcern; +import com.mongodb.connection.ClusterConnectionMode; +import com.mongodb.connection.ClusterType; +import com.mongodb.connection.TransportSettings; + +/** + * A factory bean for construction of a {@link MongoClientSettings} instance to be used with a MongoDB driver. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +public class MongoClientSettingsFactoryBean extends AbstractFactoryBean { + + private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build(); + + private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry(); + + @Nullable private Object streamFactoryFactory = MongoCompatibilityAdapter + .clientSettingsAdapter(DEFAULT_MONGO_SETTINGS).getStreamFactoryFactory(); + @Nullable private TransportSettings transportSettings; + + private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference(); + private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern(); + private @Nullable Boolean retryReads = null; + + private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern(); + private @Nullable Boolean retryWrites = null; + + private @Nullable String applicationName = null; + + private @Nullable UuidRepresentation uUidRepresentation = null; + + // --> Socket Settings + + private int socketConnectTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings() + .getConnectTimeout(TimeUnit.MILLISECONDS); + private int socketReadTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReadTimeout(TimeUnit.MILLISECONDS); + private int socketReceiveBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReceiveBufferSize(); + private int socketSendBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getSendBufferSize(); + + // --> Cluster Settings + + private @Nullable String clusterSrvHost = DEFAULT_MONGO_SETTINGS.getClusterSettings().getSrvHost(); + private List clusterHosts = Collections.emptyList(); + private @Nullable ClusterConnectionMode clusterConnectionMode = null; + private ClusterType custerRequiredClusterType = DEFAULT_MONGO_SETTINGS.getClusterSettings().getRequiredClusterType(); + private String clusterRequiredReplicaSetName = DEFAULT_MONGO_SETTINGS.getClusterSettings() + 
.getRequiredReplicaSetName(); + private long clusterLocalThresholdMS = DEFAULT_MONGO_SETTINGS.getClusterSettings() + .getLocalThreshold(TimeUnit.MILLISECONDS); + private long clusterServerSelectionTimeoutMS = DEFAULT_MONGO_SETTINGS.getClusterSettings() + .getServerSelectionTimeout(TimeUnit.MILLISECONDS); + + // --> ConnectionPoolSettings + + private int poolMaxSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMaxSize(); + private int poolMinSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMinSize(); + private long poolMaxWaitTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaxWaitTime(TimeUnit.MILLISECONDS); + private long poolMaxConnectionLifeTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaxConnectionLifeTime(TimeUnit.MILLISECONDS); + private long poolMaxConnectionIdleTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaxConnectionIdleTime(TimeUnit.MILLISECONDS); + private long poolMaintenanceInitialDelayMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaintenanceInitialDelay(TimeUnit.MILLISECONDS); + private long poolMaintenanceFrequencyMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaintenanceFrequency(TimeUnit.MILLISECONDS); + + // --> SSL Settings + + private boolean sslEnabled = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled(); + private boolean sslInvalidHostNameAllowed = DEFAULT_MONGO_SETTINGS.getSslSettings().isInvalidHostNameAllowed(); + private String sslProvider = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled() + ? 
DEFAULT_MONGO_SETTINGS.getSslSettings().getContext().getProvider().getName() + : ""; + + // encryption and retry + + private @Nullable AutoEncryptionSettings autoEncryptionSettings; + private @Nullable ServerApi serverApi; + + /** + * @param socketConnectTimeoutMS in msec + * @see com.mongodb.connection.SocketSettings.Builder#connectTimeout(int, TimeUnit) + */ + public void setSocketConnectTimeoutMS(int socketConnectTimeoutMS) { + this.socketConnectTimeoutMS = socketConnectTimeoutMS; + } + + /** + * @param socketReadTimeoutMS in msec + * @see com.mongodb.connection.SocketSettings.Builder#readTimeout(int, TimeUnit) + */ + public void setSocketReadTimeoutMS(int socketReadTimeoutMS) { + this.socketReadTimeoutMS = socketReadTimeoutMS; + } + + /** + * @param socketReceiveBufferSize + * @see com.mongodb.connection.SocketSettings.Builder#receiveBufferSize(int) + */ + public void setSocketReceiveBufferSize(int socketReceiveBufferSize) { + this.socketReceiveBufferSize = socketReceiveBufferSize; + } + + /** + * @param socketSendBufferSize + * @see com.mongodb.connection.SocketSettings.Builder#sendBufferSize(int) + */ + public void setSocketSendBufferSize(int socketSendBufferSize) { + this.socketSendBufferSize = socketSendBufferSize; + } + + // --> Server Settings + + private long serverHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings() + .getHeartbeatFrequency(TimeUnit.MILLISECONDS); + private long serverMinHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings() + .getMinHeartbeatFrequency(TimeUnit.MILLISECONDS); + + /** + * @param serverHeartbeatFrequencyMS in msec + * @see com.mongodb.connection.ServerSettings.Builder#heartbeatFrequency(long, TimeUnit) + */ + public void setServerHeartbeatFrequencyMS(long serverHeartbeatFrequencyMS) { + this.serverHeartbeatFrequencyMS = serverHeartbeatFrequencyMS; + } + + /** + * @param serverMinHeartbeatFrequencyMS in msec + * @see com.mongodb.connection.ServerSettings.Builder#minHeartbeatFrequency(long, TimeUnit) 
+ */ + public void setServerMinHeartbeatFrequencyMS(long serverMinHeartbeatFrequencyMS) { + this.serverMinHeartbeatFrequencyMS = serverMinHeartbeatFrequencyMS; + } + + // --> Cluster Settings + + /** + * @param clusterSrvHost + * @see com.mongodb.connection.ClusterSettings.Builder#srvHost(String) + */ + public void setClusterSrvHost(String clusterSrvHost) { + this.clusterSrvHost = clusterSrvHost; + } + + /** + * @param clusterHosts + * @see com.mongodb.connection.ClusterSettings.Builder#hosts(List) + */ + public void setClusterHosts(ServerAddress[] clusterHosts) { + this.clusterHosts = Arrays.asList(clusterHosts); + } + + /** + * Set the {@link ClusterConnectionMode} the driver uses to connect to the cluster. + * + * @param clusterConnectionMode + * @see com.mongodb.connection.ClusterSettings.Builder#mode(ClusterConnectionMode) + */ + public void setClusterConnectionMode(ClusterConnectionMode clusterConnectionMode) { + this.clusterConnectionMode = clusterConnectionMode; + } + + /** + * @param custerRequiredClusterType + * @see com.mongodb.connection.ClusterSettings.Builder#requiredClusterType(ClusterType) + */ + public void setCusterRequiredClusterType(ClusterType custerRequiredClusterType) { + this.custerRequiredClusterType = custerRequiredClusterType; + } + + /** + * @param clusterRequiredReplicaSetName + * @see com.mongodb.connection.ClusterSettings.Builder#requiredReplicaSetName(String) + */ + public void setClusterRequiredReplicaSetName(String clusterRequiredReplicaSetName) { + this.clusterRequiredReplicaSetName = clusterRequiredReplicaSetName; + } + + /** + * @param clusterLocalThresholdMS in msec + * @see com.mongodb.connection.ClusterSettings.Builder#localThreshold(long, TimeUnit) + */ + public void setClusterLocalThresholdMS(long clusterLocalThresholdMS) { + this.clusterLocalThresholdMS = clusterLocalThresholdMS; + } + + /** + * @param clusterServerSelectionTimeoutMS in msec + * @see com.mongodb.connection.ClusterSettings.Builder#serverSelectionTimeout(long, TimeUnit) + */ + public void setClusterServerSelectionTimeoutMS(long
clusterServerSelectionTimeoutMS) { + this.clusterServerSelectionTimeoutMS = clusterServerSelectionTimeoutMS; + } + + // --> ConnectionPoolSettings + + /** + * @param poolMaxSize + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxSize(int) + */ + public void setPoolMaxSize(int poolMaxSize) { + this.poolMaxSize = poolMaxSize; + } + + /** + * @param poolMinSize + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#minSize(int) + */ + public void setPoolMinSize(int poolMinSize) { + this.poolMinSize = poolMinSize; + } + + /** + * @param poolMaxWaitTimeMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxWaitTime(long, TimeUnit) + */ + public void setPoolMaxWaitTimeMS(long poolMaxWaitTimeMS) { + this.poolMaxWaitTimeMS = poolMaxWaitTimeMS; + } + + /** + * @param poolMaxConnectionLifeTimeMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionLifeTime(long, TimeUnit) + */ + public void setPoolMaxConnectionLifeTimeMS(long poolMaxConnectionLifeTimeMS) { + this.poolMaxConnectionLifeTimeMS = poolMaxConnectionLifeTimeMS; + } + + /** + * @param poolMaxConnectionIdleTimeMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionIdleTime(long, TimeUnit) + */ + public void setPoolMaxConnectionIdleTimeMS(long poolMaxConnectionIdleTimeMS) { + this.poolMaxConnectionIdleTimeMS = poolMaxConnectionIdleTimeMS; + } + + /** + * @param poolMaintenanceInitialDelayMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceInitialDelay(long, TimeUnit) + */ + public void setPoolMaintenanceInitialDelayMS(long poolMaintenanceInitialDelayMS) { + this.poolMaintenanceInitialDelayMS = poolMaintenanceInitialDelayMS; + } + + /** + * @param poolMaintenanceFrequencyMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceFrequency(long, TimeUnit) + */ + public void setPoolMaintenanceFrequencyMS(long poolMaintenanceFrequencyMS) { +
this.poolMaintenanceFrequencyMS = poolMaintenanceFrequencyMS; + } + + // --> SSL Settings + + /** + * @param sslEnabled + * @see com.mongodb.connection.SslSettings.Builder#enabled(boolean) + */ + public void setSslEnabled(Boolean sslEnabled) { + this.sslEnabled = sslEnabled; + } + + /** + * @param sslInvalidHostNameAllowed + * @see com.mongodb.connection.SslSettings.Builder#invalidHostNameAllowed(boolean) + */ + public void setSslInvalidHostNameAllowed(Boolean sslInvalidHostNameAllowed) { + this.sslInvalidHostNameAllowed = sslInvalidHostNameAllowed; + } + + /** + * @param sslProvider + * @see com.mongodb.connection.SslSettings.Builder#context(SSLContext) + * @see SSLContext#getInstance(String) + */ + public void setSslProvider(String sslProvider) { + this.sslProvider = sslProvider; + } + + // encryption and retry + + /** + * @param applicationName + * @see MongoClientSettings.Builder#applicationName(String) + */ + public void setApplicationName(@Nullable String applicationName) { + this.applicationName = applicationName; + } + + /** + * @param retryReads + * @see MongoClientSettings.Builder#retryReads(boolean) + */ + public void setRetryReads(@Nullable Boolean retryReads) { + this.retryReads = retryReads; + } + + /** + * @param readConcern + * @see MongoClientSettings.Builder#readConcern(ReadConcern) + */ + public void setReadConcern(ReadConcern readConcern) { + this.readConcern = readConcern; + } + + /** + * @param writeConcern + * @see MongoClientSettings.Builder#writeConcern(WriteConcern) + */ + public void setWriteConcern(WriteConcern writeConcern) { + this.writeConcern = writeConcern; + } + + /** + * @param retryWrites + * @see MongoClientSettings.Builder#retryWrites(boolean) + */ + public void setRetryWrites(@Nullable Boolean retryWrites) { + this.retryWrites = retryWrites; + } + + /** + * @param readPreference + * @see MongoClientSettings.Builder#readPreference(ReadPreference) + */ + public void setReadPreference(ReadPreference readPreference) { + 
this.readPreference = readPreference; + } + + /** + * @param streamFactoryFactory + * @deprecated since 4.3, will be removed in the MongoDB 5.0 driver in favor of + * {@code com.mongodb.connection.TransportSettings}. + */ + @Deprecated(since = "4.3") + public void setStreamFactoryFactory(Object streamFactoryFactory) { + this.streamFactoryFactory = streamFactoryFactory; + } + + public void setTransportSettings(@Nullable TransportSettings transportSettings) { + this.transportSettings = transportSettings; + } + + /** + * @param codecRegistry + * @see MongoClientSettings.Builder#codecRegistry(CodecRegistry) + */ + public void setCodecRegistry(CodecRegistry codecRegistry) { + this.codecRegistry = codecRegistry; + } + + /** + * @param uUidRepresentation + */ + public void setuUidRepresentation(@Nullable UuidRepresentation uUidRepresentation) { + this.uUidRepresentation = uUidRepresentation; + } + + /** + * @param autoEncryptionSettings can be {@literal null}. + * @see MongoClientSettings.Builder#autoEncryptionSettings(AutoEncryptionSettings) + */ + public void setAutoEncryptionSettings(@Nullable AutoEncryptionSettings autoEncryptionSettings) { + this.autoEncryptionSettings = autoEncryptionSettings; + } + + /** + * @param serverApi can be {@literal null}. 
+ * @see MongoClientSettings.Builder#serverApi(ServerApi) + * @since 3.3 + */ + public void setServerApi(@Nullable ServerApi serverApi) { + this.serverApi = serverApi; + } + + @Override + public Class getObjectType() { + return MongoClientSettings.class; + } + + @Override + protected MongoClientSettings createInstance() { + + Builder builder = MongoClientSettings.builder() // + .readPreference(readPreference) // + .writeConcern(writeConcern) // + .readConcern(readConcern) // + .codecRegistry(codecRegistry) // + .applicationName(applicationName) // + .autoEncryptionSettings(autoEncryptionSettings) // + .applyToClusterSettings((settings) -> { + + settings.serverSelectionTimeout(clusterServerSelectionTimeoutMS, TimeUnit.MILLISECONDS); + if (clusterConnectionMode != null) { + settings.mode(clusterConnectionMode); + } + settings.requiredReplicaSetName(clusterRequiredReplicaSetName); + + if (!CollectionUtils.isEmpty(clusterHosts)) { + settings.hosts(clusterHosts); + } + settings.localThreshold(clusterLocalThresholdMS, TimeUnit.MILLISECONDS); + settings.requiredClusterType(custerRequiredClusterType); + + if (StringUtils.hasText(clusterSrvHost)) { + settings.srvHost(clusterSrvHost); + } + }) // + .applyToConnectionPoolSettings((settings) -> { + + settings.minSize(poolMinSize); + settings.maxSize(poolMaxSize); + settings.maxConnectionIdleTime(poolMaxConnectionIdleTimeMS, TimeUnit.MILLISECONDS); + settings.maxWaitTime(poolMaxWaitTimeMS, TimeUnit.MILLISECONDS); + settings.maxConnectionLifeTime(poolMaxConnectionLifeTimeMS, TimeUnit.MILLISECONDS); + // settings.maxWaitQueueSize(poolMaxWaitQueueSize); + settings.maintenanceFrequency(poolMaintenanceFrequencyMS, TimeUnit.MILLISECONDS); + settings.maintenanceInitialDelay(poolMaintenanceInitialDelayMS, TimeUnit.MILLISECONDS); + }) // + .applyToServerSettings((settings) -> { + + settings.minHeartbeatFrequency(serverMinHeartbeatFrequencyMS, TimeUnit.MILLISECONDS); + settings.heartbeatFrequency(serverHeartbeatFrequencyMS, 
TimeUnit.MILLISECONDS); + }) // + .applyToSocketSettings((settings) -> { + + settings.connectTimeout(socketConnectTimeoutMS, TimeUnit.MILLISECONDS); + settings.readTimeout(socketReadTimeoutMS, TimeUnit.MILLISECONDS); + settings.receiveBufferSize(socketReceiveBufferSize); + settings.sendBufferSize(socketSendBufferSize); + }) // + .applyToSslSettings((settings) -> { + + settings.enabled(sslEnabled); + if (sslEnabled) { + + settings.invalidHostNameAllowed(sslInvalidHostNameAllowed); + try { + settings.context( + StringUtils.hasText(sslProvider) ? SSLContext.getInstance(sslProvider) : SSLContext.getDefault()); + } catch (NoSuchAlgorithmException e) { + throw new IllegalArgumentException(e.getMessage(), e); + } + } + }); + + if (transportSettings != null) { + builder.transportSettings(transportSettings); + } + + if (streamFactoryFactory != null) { + MongoCompatibilityAdapter.clientSettingsBuilderAdapter(builder).setStreamFactoryFactory(streamFactoryFactory); + } + + if (retryReads != null) { + builder = builder.retryReads(retryReads); + } + + if (retryWrites != null) { + builder = builder.retryWrites(retryWrites); + } + if (uUidRepresentation != null) { + builder = builder.uuidRepresentation(uUidRepresentation); + } + if (serverApi != null) { + builder = builder.serverApi(serverApi); + } + + return builder.build(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java index dbebc02c31..df58a36770 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,10 +16,9 @@ package org.springframework.data.mongodb.core; import org.springframework.dao.DataIntegrityViolationException; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.WriteResult; +import com.mongodb.WriteConcernResult; /** * Mongo-specific {@link DataIntegrityViolationException}. @@ -30,39 +29,39 @@ public class MongoDataIntegrityViolationException extends DataIntegrityViolation private static final long serialVersionUID = -186980521176764046L; - private final WriteResult writeResult; + private final WriteConcernResult writeResult; private final MongoActionOperation actionOperation; /** - * Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteResult}. + * Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteConcernResult}. * * @param message the exception message - * @param writeResult the {@link WriteResult} that causes the exception, must not be {@literal null}. + * @param writeResult the {@link WriteConcernResult} that causes the exception, must not be {@literal null}. * @param actionOperation the {@link MongoActionOperation} that caused the exception, must not be {@literal null}. 
*/ - public MongoDataIntegrityViolationException(String message, WriteResult writeResult, - MongoActionOperation actionOperation) { + public MongoDataIntegrityViolationException(String message, WriteConcernResult writeResult, + MongoActionOperation actionOperation) { super(message); - Assert.notNull(writeResult, "WriteResult must not be null!"); - Assert.notNull(actionOperation, "MongoActionOperation must not be null!"); + Assert.notNull(writeResult, "WriteResult must not be null"); + Assert.notNull(actionOperation, "MongoActionOperation must not be null"); this.writeResult = writeResult; this.actionOperation = actionOperation; } /** - * Returns the {@link WriteResult} that caused the exception. + * Returns the {@link WriteConcernResult} that caused the exception. * * @return the writeResult */ - public WriteResult getWriteResult() { + public WriteConcernResult getWriteResult() { return writeResult; } /** - * Returns the {@link MongoActionOperation} in which the current exception occured. + * Returns the {@link MongoActionOperation} in which the current exception occurred. * * @return the actionOperation */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java new file mode 100644 index 0000000000..eab6b5d7f4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java @@ -0,0 +1,266 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.SessionAwareMethodInterceptor; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.WriteConcern; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; + +/** + * Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as + * database name and exception translator.
+ * Not intended to be used directly. + * + * @author Christoph Strobl + * @author Mark Paluch + * @param Client type. + * @since 3.0 + * @see SimpleMongoClientDatabaseFactory + */ +public abstract class MongoDatabaseFactorySupport implements MongoDatabaseFactory { + + private final C mongoClient; + private final String databaseName; + private final boolean mongoInstanceCreated; + + private PersistenceExceptionTranslator exceptionTranslator; + private @Nullable WriteConcern writeConcern; + + /** + * Create a new {@link MongoDatabaseFactorySupport} object given {@code mongoClient}, {@code databaseName}, + * {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}. + * + * @param mongoClient must not be {@literal null}. + * @param databaseName must not be {@literal null} or empty. + * @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of + * {@link MongoDatabaseFactorySupport} to close the client on {@link #destroy()}. + * @param exceptionTranslator must not be {@literal null}. + */ + protected MongoDatabaseFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated, + PersistenceExceptionTranslator exceptionTranslator) { + + Assert.notNull(mongoClient, "MongoClient must not be null"); + Assert.hasText(databaseName, "Database name must not be empty"); + Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"), + "Database name must not contain slashes, dots, spaces, quotes, or dollar signs"); + + this.mongoClient = mongoClient; + this.databaseName = databaseName; + this.mongoInstanceCreated = mongoInstanceCreated; + this.exceptionTranslator = exceptionTranslator; + } + + /** + * Configures the {@link PersistenceExceptionTranslator} to be used. + * + * @param exceptionTranslator the exception translator to set. 
+ * @since 4.4 + */ + public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator; + } + + @Override + public PersistenceExceptionTranslator getExceptionTranslator() { + return this.exceptionTranslator; + } + + /** + * Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created. + * + * @param writeConcern the writeConcern to set. + */ + public void setWriteConcern(WriteConcern writeConcern) { + this.writeConcern = writeConcern; + } + + @Override + public MongoDatabase getMongoDatabase() throws DataAccessException { + return getMongoDatabase(getDefaultDatabaseName()); + } + + @Override + public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException { + + Assert.hasText(dbName, "Database name must not be empty"); + + MongoDatabase db = doGetMongoDatabase(dbName); + + if (writeConcern == null) { + return db; + } + + return db.withWriteConcern(writeConcern); + } + + /** + * Get the actual {@link MongoDatabase} from the client. + * + * @param dbName must not be {@literal null} or empty. + * @return + */ + protected abstract MongoDatabase doGetMongoDatabase(String dbName); + + public void destroy() throws Exception { + if (mongoInstanceCreated) { + closeClient(); + } + } + + @Override + public MongoDatabaseFactory withSession(ClientSession session) { + return new MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session, this); + } + + /** + * Close the client instance. + */ + protected abstract void closeClient(); + + /** + * @return the Mongo client object. + */ + protected C getMongoClient() { + return mongoClient; + } + + /** + * @return the database name. + */ + protected String getDefaultDatabaseName() { + return databaseName; + } + + /** + * {@link ClientSession} bound {@link MongoDatabaseFactory} decorating the database with a + * {@link SessionAwareMethodInterceptor}. 
+ * + * @author Christoph Strobl + * @since 2.1 + */ + static final class ClientSessionBoundMongoDbFactory implements MongoDatabaseFactory { + + private final ClientSession session; + private final MongoDatabaseFactory delegate; + + public ClientSessionBoundMongoDbFactory(ClientSession session, MongoDatabaseFactory delegate) { + this.session = session; + this.delegate = delegate; + } + + @Override + public MongoDatabase getMongoDatabase() throws DataAccessException { + return proxyMongoDatabase(delegate.getMongoDatabase()); + } + + @Override + public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException { + return proxyMongoDatabase(delegate.getMongoDatabase(dbName)); + } + + @Override + public PersistenceExceptionTranslator getExceptionTranslator() { + return delegate.getExceptionTranslator(); + } + + @Override + public ClientSession getSession(ClientSessionOptions options) { + return delegate.getSession(options); + } + + @Override + public MongoDatabaseFactory withSession(ClientSession session) { + return delegate.withSession(session); + } + + @Override + public boolean isTransactionActive() { + return session != null && session.hasActiveTransaction(); + } + + private MongoDatabase proxyMongoDatabase(MongoDatabase database) { + return createProxyInstance(session, database, MongoDatabase.class); + } + + private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) { + return createProxyInstance(session, database, MongoDatabase.class); + } + + private MongoCollection proxyCollection(com.mongodb.session.ClientSession session, + MongoCollection collection) { + return createProxyInstance(session, collection, MongoCollection.class); + } + + private T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class targetType) { + + ProxyFactory factory = new ProxyFactory(); + factory.setTarget(target); + factory.setInterfaces(targetType); + factory.setOpaque(true); + + factory.addAdvice(new 
SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class, + this::proxyDatabase, MongoCollection.class, this::proxyCollection)); + + return targetType.cast(factory.getProxy(target.getClass().getClassLoader())); + } + + public ClientSession getSession() { + return this.session; + } + + public MongoDatabaseFactory getDelegate() { + return this.delegate; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ClientSessionBoundMongoDbFactory that = (ClientSessionBoundMongoDbFactory) o; + + if (!ObjectUtils.nullSafeEquals(this.session, that.session)) { + return false; + } + return ObjectUtils.nullSafeEquals(this.delegate, that.delegate); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(this.session); + result = 31 * result + ObjectUtils.nullSafeHashCode(this.delegate); + return result; + } + + public String toString() { + return "MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session=" + this.getSession() + ", delegate=" + + this.getDelegate() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBean.java new file mode 100644 index 0000000000..7aef5a3a82 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBean.java @@ -0,0 +1,112 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Collections; +import java.util.Map; + +import org.bson.BsonDocument; +import org.springframework.beans.factory.FactoryBean; +import org.springframework.lang.Nullable; + +import com.mongodb.AutoEncryptionSettings; +import com.mongodb.MongoClientSettings; + +/** + * {@link FactoryBean} for creating {@link AutoEncryptionSettings} using the {@link AutoEncryptionSettings.Builder}. + * + * @author Christoph Strobl + * @since 2.2 + */ +public class MongoEncryptionSettingsFactoryBean implements FactoryBean { + + private boolean bypassAutoEncryption; + private String keyVaultNamespace; + private Map extraOptions; + private MongoClientSettings keyVaultClientSettings; + private Map> kmsProviders; + private Map schemaMap; + + /** + * @param bypassAutoEncryption + * @see AutoEncryptionSettings.Builder#bypassAutoEncryption(boolean) + */ + public void setBypassAutoEncryption(boolean bypassAutoEncryption) { + this.bypassAutoEncryption = bypassAutoEncryption; + } + + /** + * @param extraOptions + * @see AutoEncryptionSettings.Builder#extraOptions(Map) + */ + public void setExtraOptions(Map extraOptions) { + this.extraOptions = extraOptions; + } + + /** + * @param keyVaultNamespace + * @see AutoEncryptionSettings.Builder#keyVaultNamespace(String) + */ + public void setKeyVaultNamespace(String keyVaultNamespace) { + this.keyVaultNamespace = keyVaultNamespace; + } + + /** + * @param keyVaultClientSettings + * @see AutoEncryptionSettings.Builder#keyVaultMongoClientSettings(MongoClientSettings) + 
*/ + public void setKeyVaultClientSettings(MongoClientSettings keyVaultClientSettings) { + this.keyVaultClientSettings = keyVaultClientSettings; + } + + /** + * @param kmsProviders + * @see AutoEncryptionSettings.Builder#kmsProviders(Map) + */ + public void setKmsProviders(Map> kmsProviders) { + this.kmsProviders = kmsProviders; + } + + /** + * @param schemaMap + * @see AutoEncryptionSettings.Builder#schemaMap(Map) + */ + public void setSchemaMap(Map schemaMap) { + this.schemaMap = schemaMap; + } + + @Override + public AutoEncryptionSettings getObject() { + + return AutoEncryptionSettings.builder() // + .bypassAutoEncryption(bypassAutoEncryption) // + .keyVaultNamespace(keyVaultNamespace) // + .keyVaultMongoClientSettings(keyVaultClientSettings) // + .kmsProviders(orEmpty(kmsProviders)) // + .extraOptions(orEmpty(extraOptions)) // + .schemaMap(orEmpty(schemaMap)) // + .build(); + } + + private Map orEmpty(@Nullable Map source) { + return source != null ? source : Collections.emptyMap(); + } + + @Override + public Class getObjectType() { + return AutoEncryptionSettings.class; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java index b6bcdb589b..1ec7d3ffc0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,10 @@ */ package org.springframework.data.mongodb.core; -import java.util.Arrays; -import java.util.HashSet; import java.util.Set; import org.bson.BsonInvalidOperationException; + import org.springframework.dao.DataAccessException; import org.springframework.dao.DataAccessResourceFailureException; import org.springframework.dao.DataIntegrityViolationException; @@ -28,16 +27,17 @@ import org.springframework.dao.InvalidDataAccessResourceUsageException; import org.springframework.dao.PermissionDeniedDataAccessException; import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.mongodb.BulkOperationException; +import org.springframework.data.mongodb.ClientSessionException; +import org.springframework.data.mongodb.TransientClientSessionException; import org.springframework.data.mongodb.UncategorizedMongoDbException; import org.springframework.data.mongodb.util.MongoDbErrorCodes; import org.springframework.lang.Nullable; import org.springframework.util.ClassUtils; -import com.mongodb.BulkWriteException; import com.mongodb.MongoBulkWriteException; import com.mongodb.MongoException; import com.mongodb.MongoServerException; +import com.mongodb.MongoSocketException; import com.mongodb.bulk.BulkWriteError; /** @@ -48,28 +48,34 @@ * @author Oliver Gierke * @author Michal Vich * @author Christoph Strobl + * @author Brice Vandeputte */ public class MongoExceptionTranslator implements PersistenceExceptionTranslator { - private static final Set DULICATE_KEY_EXCEPTIONS = new HashSet( - Arrays.asList("MongoException.DuplicateKey", "DuplicateKeyException")); + public static final MongoExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new 
MongoExceptionTranslator(); - private static final Set RESOURCE_FAILURE_EXCEPTIONS = new HashSet( - Arrays.asList("MongoException.Network", "MongoSocketException", "MongoException.CursorNotFound", - "MongoCursorNotFoundException", "MongoServerSelectionException", "MongoTimeoutException")); + private static final Set DUPLICATE_KEY_EXCEPTIONS = Set.of("MongoException.DuplicateKey", + "DuplicateKeyException"); - private static final Set RESOURCE_USAGE_EXCEPTIONS = new HashSet( - Arrays.asList("MongoInternalException")); + private static final Set RESOURCE_FAILURE_EXCEPTIONS = Set.of("MongoException.Network", + "MongoSocketException", "MongoException.CursorNotFound", "MongoCursorNotFoundException", + "MongoServerSelectionException", "MongoTimeoutException"); - private static final Set DATA_INTEGRETY_EXCEPTIONS = new HashSet( - Arrays.asList("WriteConcernException", "MongoWriteException", "MongoBulkWriteException")); + private static final Set RESOURCE_USAGE_EXCEPTIONS = Set.of("MongoInternalException"); - /* - * (non-Javadoc) - * @see org.springframework.dao.support.PersistenceExceptionTranslator#translateExceptionIfPossible(java.lang.RuntimeException) - */ + private static final Set DATA_INTEGRITY_EXCEPTIONS = Set.of("WriteConcernException", "MongoWriteException", + "MongoBulkWriteException"); + + private static final Set SECURITY_EXCEPTIONS = Set.of("MongoCryptException"); + + @Override @Nullable public DataAccessException translateExceptionIfPossible(RuntimeException ex) { + return doTranslateException(ex); + } + + @Nullable + DataAccessException doTranslateException(RuntimeException ex) { // Check for well-known MongoException subclasses. 
@@ -77,9 +83,13 @@ public DataAccessException translateExceptionIfPossible(RuntimeException ex) { throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex); } + if (ex instanceof MongoSocketException) { + return new DataAccessResourceFailureException(ex.getMessage(), ex); + } + String exception = ClassUtils.getShortName(ClassUtils.getUserClass(ex.getClass())); - if (DULICATE_KEY_EXCEPTIONS.contains(exception)) { + if (DUPLICATE_KEY_EXCEPTIONS.contains(exception)) { return new DuplicateKeyException(ex.getMessage(), ex); } @@ -91,15 +101,15 @@ public DataAccessException translateExceptionIfPossible(RuntimeException ex) { return new InvalidDataAccessResourceUsageException(ex.getMessage(), ex); } - if (DATA_INTEGRETY_EXCEPTIONS.contains(exception)) { + if (DATA_INTEGRITY_EXCEPTIONS.contains(exception)) { if (ex instanceof MongoServerException) { - if (((MongoServerException) ex).getCode() == 11000) { + if (MongoDbErrorCodes.isDataDuplicateKeyError(ex)) { return new DuplicateKeyException(ex.getMessage(), ex); } - if (ex instanceof MongoBulkWriteException) { - for (BulkWriteError x : ((MongoBulkWriteException) ex).getWriteErrors()) { - if (x.getCode() == 11000) { + if (ex instanceof MongoBulkWriteException bulkException) { + for (BulkWriteError writeError : bulkException.getWriteErrors()) { + if (MongoDbErrorCodes.isDuplicateKeyCode(writeError.getCode())) { return new DuplicateKeyException(ex.getMessage(), ex); } } @@ -109,31 +119,74 @@ public DataAccessException translateExceptionIfPossible(RuntimeException ex) { return new DataIntegrityViolationException(ex.getMessage(), ex); } - if (ex instanceof BulkWriteException) { - return new BulkOperationException(ex.getMessage(), (BulkWriteException) ex); - } - // All other MongoExceptions - if (ex instanceof MongoException) { - - int code = ((MongoException) ex).getCode(); - - if (MongoDbErrorCodes.isDuplicateKeyCode(code)) { - throw new DuplicateKeyException(ex.getMessage(), ex); - } else if 
(MongoDbErrorCodes.isDataAccessResourceFailureCode(code)) { - throw new DataAccessResourceFailureException(ex.getMessage(), ex); - } else if (MongoDbErrorCodes.isInvalidDataAccessApiUsageCode(code) || code == 10003 || code == 12001 - || code == 12010 || code == 12011 || code == 12012) { - throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex); - } else if (MongoDbErrorCodes.isPermissionDeniedCode(code)) { - throw new PermissionDeniedDataAccessException(ex.getMessage(), ex); + if (ex instanceof MongoException mongoException) { + + int code = mongoException.getCode(); + + if (MongoDbErrorCodes.isDuplicateKeyError(mongoException)) { + return new DuplicateKeyException(ex.getMessage(), ex); + } + if (MongoDbErrorCodes.isDataAccessResourceError(mongoException)) { + return new DataAccessResourceFailureException(ex.getMessage(), ex); + } + if (MongoDbErrorCodes.isInvalidDataAccessApiUsageError(mongoException) || code == 12001 || code == 12010 + || code == 12011 || code == 12012) { + return new InvalidDataAccessApiUsageException(ex.getMessage(), ex); + } + if (MongoDbErrorCodes.isPermissionDeniedError(mongoException)) { + return new PermissionDeniedDataAccessException(ex.getMessage(), ex); } + if (MongoDbErrorCodes.isDataIntegrityViolationError(mongoException)) { + return new DataIntegrityViolationException(mongoException.getMessage(), mongoException); + } + if (MongoDbErrorCodes.isClientSessionFailure(mongoException)) { + return isTransientFailure(mongoException) ? new TransientClientSessionException(ex.getMessage(), ex) + : new ClientSessionException(ex.getMessage(), ex); + } + if (ex.getCause() != null && SECURITY_EXCEPTIONS.contains(ClassUtils.getShortName(ex.getCause().getClass()))) { + return new PermissionDeniedDataAccessException(ex.getMessage(), ex); + } + return new UncategorizedMongoDbException(ex.getMessage(), ex); } + // may interfere with OmitStackTraceInFastThrow (enabled by default). 
+ // see https://jira.spring.io/browse/DATAMONGO-1905 + if (ex instanceof IllegalStateException) { + for (StackTraceElement elm : ex.getStackTrace()) { + if (elm.getClassName().contains("ClientSession")) { + return new ClientSessionException(ex.getMessage(), ex); + } + } + } + // If we get here, we have an exception that resulted from user code, // rather than the persistence provider, so we return null to indicate // that translation should not occur. return null; } + + /** + * Check if a given exception holds an error label indicating a transient failure. + * + * @param e the exception to inspect. + * @return {@literal true} if the given {@link Exception} is a {@link MongoException} holding one of the transient + * exception error labels. + * @see MongoException#hasErrorLabel(String) + * @since 4.4 + */ + public boolean isTransientFailure(Exception e) { + + if (e instanceof MongoException mongoException) { + return mongoException.hasErrorLabel(MongoException.TRANSIENT_TRANSACTION_ERROR_LABEL) + || mongoException.hasErrorLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL); + } + + if (e.getCause() != e && e.getCause() instanceof Exception ex) { + return isTransientFailure(ex); + } + + return false; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java new file mode 100644 index 0000000000..66b1cf209e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java @@ -0,0 +1,250 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Predicate; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.Unwrapped.Nullable; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.util.Assert; + +/** + * {@link MongoJsonSchemaCreator} extracts the {@link MongoJsonSchema} for a given {@link Class} by applying the + * following mapping rules. + *

+ * <p>
+ * <strong>Required Properties</strong>
+ * <ul>
+ * <li>Properties of primitive type</li>
+ * </ul>
+ * <strong>Ignored Properties</strong>
+ * <ul>
+ * <li>All properties annotated with {@link org.springframework.data.annotation.Transient}</li>
+ * </ul>
+ * <strong>Property Type Mapping</strong>
+ * <ul>
+ * <li>{@link java.lang.Object} -> {@code type : 'object'}</li>
+ * <li>{@link java.util.Arrays} -> {@code type : 'array'}</li>
+ * <li>{@link java.util.Collection} -> {@code type : 'array'}</li>
+ * <li>{@link java.util.Map} -> {@code type : 'object'}</li>
+ * <li>{@link java.lang.Enum} -> {@code type : 'string', enum : [the enum values]}</li>
+ * <li>Simple Types -> {@code type : 'the corresponding bson type' }</li>
+ * <li>Domain Types -> {@code type : 'object', properties : {the types properties} }</li>
+ * </ul>
+ *
+ * {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into + * {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more + * specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation. + * {@link Encrypted} properties will contain {@literal encrypt} information. + * + * @author Christoph Strobl + * @since 2.2 + */ +public interface MongoJsonSchemaCreator { + + /** + * Create the {@link MongoJsonSchema} for the given {@link Class type}. + * + * @param type must not be {@literal null}. + * @return never {@literal null}. + */ + MongoJsonSchema createSchemaFor(Class type); + + /** + * Create a merged {@link MongoJsonSchema} out of the individual schemas of the given types by merging their + * properties into one large {@link MongoJsonSchema schema}. + * + * @param types must not be {@literal null} nor contain {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + default MongoJsonSchema mergedSchemaFor(Class... types) { + + MongoJsonSchema[] schemas = Arrays.stream(types).map(this::createSchemaFor).toArray(MongoJsonSchema[]::new); + return MongoJsonSchema.merge(schemas); + } + + /** + * Filter matching {@link JsonSchemaProperty properties}. + * + * @param filter the {@link Predicate} to evaluate for inclusion. Must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + MongoJsonSchemaCreator filter(Predicate filter); + + /** + * Entry point to specify additional behavior for a given path. + * + * @param path the path using {@literal dot '.'} notation. + * @return new instance of {@link PropertySpecifier}. + * @since 3.4 + */ + PropertySpecifier property(String path); + + /** + * The context in which a specific {@link #getProperty()} is encountered during schema creation. 
+ * + * @since 3.3 + */ + interface JsonSchemaPropertyContext { + + /** + * The path to a given field/property in dot notation. + * + * @return never {@literal null}. + */ + String getPath(); + + /** + * The current property. + * + * @return never {@literal null}. + */ + MongoPersistentProperty getProperty(); + + /** + * Obtain the {@link MongoPersistentEntity} for a given property. + * + * @param property must not be {@literal null}. + * @param + * @return {@literal null} if the property is not an entity. It is nevertheless recommend to check + * {@link PersistentProperty#isEntity()} first. + */ + @Nullable + MongoPersistentEntity resolveEntity(MongoPersistentProperty property); + + } + + /** + * A filter {@link Predicate} that matches {@link Encrypted encrypted properties} and those having nested ones. + * + * @return new instance of {@link Predicate}. + * @since 3.3 + */ + static Predicate encryptedOnly() { + + return new Predicate() { + + // cycle guard + private final Set seen = new HashSet<>(); + + @Override + public boolean test(JsonSchemaPropertyContext context) { + return extracted(context.getProperty(), context); + } + + private boolean extracted(MongoPersistentProperty property, JsonSchemaPropertyContext context) { + if (property.isAnnotationPresent(Encrypted.class)) { + return true; + } + + if (!property.isEntity() || seen.contains(property)) { + return false; + } + + seen.add(property); + + for (MongoPersistentProperty nested : context.resolveEntity(property)) { + if (extracted(nested, context)) { + return true; + } + } + return false; + } + }; + } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given + * {@link MongoConverter}. + * + * @param mongoConverter must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. 
+ */ + static MongoJsonSchemaCreator create(MongoConverter mongoConverter) { + + Assert.notNull(mongoConverter, "MongoConverter must not be null"); + return new MappingMongoJsonSchemaCreator(mongoConverter); + } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that is aware of type mappings and potential + * {@link org.springframework.data.spel.spi.EvaluationContextExtension extensions}. + * + * @param mappingContext must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + static MongoJsonSchemaCreator create(MappingContext mappingContext) { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(config -> {})); + converter.afterPropertiesSet(); + + return create(converter); + } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that does not consider potential extensions - suitable for testing. We + * recommend to use {@link #create(MappingContext)}. + * + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + static MongoJsonSchemaCreator create() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(MongoSimpleTypes.HOLDER); + mappingContext.afterPropertiesSet(); + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(config -> {})); + converter.afterPropertiesSet(); + + return create(converter); + } + + /** + * @author Christoph Strobl + * @since 3.4 + */ + interface PropertySpecifier { + + /** + * Set additional type parameters for polymorphic ones. + * + * @param types must not be {@literal null}. + * @return the source + */ + MongoJsonSchemaCreator withTypes(Class... 
types); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java index 038e998e45..65396bc7fe 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,30 +18,44 @@ import java.util.Collection; import java.util.List; import java.util.Set; +import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Consumer; +import java.util.function.Supplier; +import java.util.stream.Stream; import org.bson.Document; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; import org.springframework.data.geo.GeoResults; import org.springframework.data.mongodb.core.BulkOperations.BulkMode; import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperation; import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import 
org.springframework.data.mongodb.core.aggregation.AggregationUpdate; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.index.IndexOperations; -import org.springframework.data.mongodb.core.mapreduce.GroupBy; -import org.springframework.data.mongodb.core.mapreduce.GroupByResults; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.mapreduce.MapReduceResults; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; -import org.springframework.data.util.CloseableIterator; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.util.Lock; import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; -import com.mongodb.Cursor; +import com.mongodb.ClientSessionOptions; import com.mongodb.ReadPreference; +import com.mongodb.client.ClientSession; import com.mongodb.client.MongoCollection; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.UpdateResult; @@ -49,7 +63,10 @@ /** * Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but * a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK - * proxy). + * proxy).
+ * NOTE: Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB + * specific documentation to learn more about Multi + * Document Transactions. * * @author Thomas Risberg * @author Mark Pollack @@ -60,6 +77,7 @@ * @author Thomas Darimont * @author Maninder Singh * @author Mark Paluch + * @author Woojin Shin */ public interface MongoOperations extends FluentMongoOperations { @@ -67,12 +85,13 @@ public interface MongoOperations extends FluentMongoOperations { * The collection name used for the specified class by this template. * * @param entityClass must not be {@literal null}. - * @return + * @return never {@literal null}. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be derived from the type. */ String getCollectionName(Class entityClass); /** - * Execute the a MongoDB command expressed as a JSON string. Parsing is delegated to {@link Document#parse(String)} to + * Execute a MongoDB command expressed as a JSON string. Parsing is delegated to {@link Document#parse(String)} to * obtain the {@link Document} holding the actual command. Any errors that result from executing this command will be * converted into Spring's DAO exception hierarchy. * @@ -104,7 +123,7 @@ public interface MongoOperations extends FluentMongoOperations { /** * Execute a MongoDB query and iterate over the query results on a per-document basis with a DocumentCallbackHandler. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param collectionName name of the collection to retrieve the objects from. * @param dch the handler that will extract results, one document at a time. 
@@ -112,8 +131,7 @@ public interface MongoOperations extends FluentMongoOperations { void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch); /** - * Executes a {@link DbCallback} translating any exceptions as necessary. - *

+ * Executes a {@link DbCallback} translating any exceptions as necessary.
* Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not @@ -125,8 +143,7 @@ public interface MongoOperations extends FluentMongoOperations { T execute(DbCallback action); /** - * Executes the given {@link CollectionCallback} on the entity collection of the specified class. - *

+ * Executes the given {@link CollectionCallback} on the entity collection of the specified class.
* Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param entityClass class that determines the collection to use. Must not be {@literal null}. @@ -138,8 +155,7 @@ public interface MongoOperations extends FluentMongoOperations { T execute(Class entityClass, CollectionCallback action); /** - * Executes the given {@link CollectionCallback} on the collection of the given name. - *

+ * Executes the given {@link CollectionCallback} on the collection of the given name.
* Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be @@ -151,36 +167,95 @@ public interface MongoOperations extends FluentMongoOperations { @Nullable T execute(String collectionName, CollectionCallback action); + /** + * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding a new {@link ClientSession} + * with given {@literal sessionOptions} to each and every command issued against MongoDB. + * + * @param sessionOptions must not be {@literal null}. + * @return new instance of {@link SessionScoped}. Never {@literal null}. + * @since 2.1 + */ + SessionScoped withSession(ClientSessionOptions sessionOptions); + + /** + * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession} + * provided by the given {@link Supplier} to each and every command issued against MongoDB.
+ * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use the + * {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}. + * + * @param sessionProvider must not be {@literal null}. + * @since 2.1 + */ + default SessionScoped withSession(Supplier sessionProvider) { + + Assert.notNull(sessionProvider, "SessionProvider must not be null"); + + return new SessionScoped() { + + private final Lock lock = Lock.of(new ReentrantLock()); + private @Nullable ClientSession session; + + @Override + public T execute(SessionCallback action, Consumer onComplete) { + + lock.executeWithoutResult(() -> { + + if (session == null) { + session = sessionProvider.get(); + } + }); + + try { + return action.doInSession(MongoOperations.this.withSession(session)); + } finally { + onComplete.accept(session); + } + } + }; + } + + /** + * Obtain a {@link ClientSession} bound instance of {@link MongoOperations}.
+ * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. + * + * @param session must not be {@literal null}. + * @return {@link ClientSession} bound instance of {@link MongoOperations}. + * @since 2.1 + */ + MongoOperations withSession(ClientSession session); + /** * Executes the given {@link Query} on the entity collection of the specified {@code entityType} backed by a Mongo DB - * {@link Cursor}. + * {@link com.mongodb.client.FindIterable}. *

- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityType must not be {@literal null}. * @param element return type - * @return will never be {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 1.7 */ - CloseableIterator stream(Query query, Class entityType); + Stream stream(Query query, Class entityType); /** * Executes the given {@link Query} on the entity collection of the specified {@code entityType} and collection backed - * by a Mongo DB {@link Cursor}. + * by a Mongo DB {@link com.mongodb.client.FindIterable}. *

- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityType must not be {@literal null}. * @param collectionName must not be {@literal null} or empty. * @param element return type - * @return will never be {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 1.10 */ - CloseableIterator stream(Query query, Class entityType, String collectionName); + Stream stream(Query query, Class entityType, String collectionName); /** * Create an uncapped collection with a name based on the provided entity class. @@ -216,6 +291,58 @@ public interface MongoOperations extends FluentMongoOperations { */ MongoCollection createCollection(String collectionName, @Nullable CollectionOptions collectionOptions); + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationOperation pipeline + * stages} on another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param stages the {@link AggregationOperation aggregation pipeline stages} defining the view content. + * @since 4.0 + */ + default MongoCollection createView(String name, Class source, AggregationOperation... stages) { + return createView(name, source, AggregationPipeline.of(stages)); + } + + /** + * Create a view with the provided name. 
The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @since 4.0 + */ + default MongoCollection createView(String name, Class source, AggregationPipeline pipeline) { + return createView(name, source, pipeline, null); + } + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. + * @since 4.0 + */ + MongoCollection createView(String name, Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options); + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given source. + * + * @param name the name of the view to create. + * @param source the name of the collection or view defining the to be created views source. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. + * @since 4.0 + */ + MongoCollection createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options); + /** * A set of collection names. 
* @@ -224,18 +351,19 @@ public interface MongoOperations extends FluentMongoOperations { Set getCollectionNames(); /** - * Get a collection by name, creating it if it doesn't exist. - *

+ * Get a {@link MongoCollection} by its name. The returned collection may not exist yet (except in local memory) and + * is created on first interaction with the server. Collections can be explicitly created via + * {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class) + * exists} first.
* Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. - * @return an existing collection or a newly created one. + * @return an existing collection or one created on first server interaction. */ MongoCollection getCollection(String collectionName); /** - * Check to see if a collection with a name indicated by the entity class exists. - *

+ * Check to see if a collection with a name indicated by the entity class exists.
* Translate any exceptions as necessary. * * @param entityClass class that determines the name of the collection. Must not be {@literal null}. @@ -244,8 +372,7 @@ public interface MongoOperations extends FluentMongoOperations { boolean collectionExists(Class entityClass); /** - * Check to see if a collection with a given name exists. - *

+ * Check to see if a collection with a given name exists.
* Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. @@ -254,8 +381,7 @@ public interface MongoOperations extends FluentMongoOperations { boolean collectionExists(String collectionName); /** - * Drop the collection with the name indicated by the entity class. - *

+ * Drop the collection with the name indicated by the entity class.
* Translate any exceptions as necessary. * * @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}. @@ -263,8 +389,7 @@ public interface MongoOperations extends FluentMongoOperations { void dropCollection(Class entityClass); /** - * Drop the collection with the given name. - *

+ * Drop the collection with the given name.
* Translate any exceptions as necessary. * * @param collectionName name of the collection to drop/delete. @@ -286,11 +411,13 @@ public interface MongoOperations extends FluentMongoOperations { IndexOperations indexOps(Class entityClass); /** - * Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.DB} level. + * Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.client.MongoDatabase} level. * - * @return + * @return never {@literal null}. * @since 1.7 + * @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0. */ + @Deprecated ScriptOperations scriptOps(); /** @@ -318,18 +445,16 @@ public interface MongoOperations extends FluentMongoOperations { * Returns a new {@link BulkOperations} for the given entity type and collection name. * * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}. - * @param entityClass the name of the entity class. Can be {@literal null}. + * @param entityType the name of the entity class. Can be {@literal null}. * @param collectionName the name of the collection to work on, must not be {@literal null} or empty. * @return {@link BulkOperations} on the named collection associated with the given entity class. */ BulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName); /** - * Query for a list of objects of type T from the collection used by the entity class. - *

+ * Query for a list of objects of type T from the collection used by the entity class.
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -339,11 +464,9 @@ public interface MongoOperations extends FluentMongoOperations { List findAll(Class entityClass); /** - * Query for a list of objects of type T from the specified collection. - *

+ * Query for a list of objects of type T from the specified collection.
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -353,36 +476,6 @@ public interface MongoOperations extends FluentMongoOperations { */ List findAll(Class entityClass, String collectionName); - /** - * Execute a group operation over the entire collection. The group operation entity class should match the 'shape' of - * the returned object that takes int account the initial document structure as well as any finalize functions. - * - * @param criteria The criteria that restricts the row that are considered for grouping. If not specified all rows are - * considered. - * @param inputCollectionName the collection where the group operation will read from - * @param groupBy the conditions under which the group operation will be performed, e.g. keys, initial document, - * reduce function. - * @param entityClass The parametrized type of the returned list - * @return The results of the group operation - */ - GroupByResults group(String inputCollectionName, GroupBy groupBy, Class entityClass); - - /** - * Execute a group operation restricting the rows to those which match the provided Criteria. The group operation - * entity class should match the 'shape' of the returned object that takes int account the initial document structure - * as well as any finalize functions. - * - * @param criteria The criteria that restricts the row that are considered for grouping. If not specified all rows are - * considered. - * @param inputCollectionName the collection where the group operation will read from - * @param groupBy the conditions under which the group operation will be performed, e.g. keys, initial document, - * reduce function. 
- * @param entityClass The parametrized type of the returned list - * @return The results of the group operation - */ - GroupByResults group(@Nullable Criteria criteria, String inputCollectionName, GroupBy groupBy, - Class entityClass); - /** * Execute an aggregation operation. The raw results will be mapped to the given entity class. The name of the * inputCollection is derived from the inputType of the aggregation. @@ -435,11 +528,11 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN AggregationResults aggregate(Aggregation aggregation, String collectionName, Class outputType); /** - * Execute an aggregation operation backed by a Mongo DB {@link Cursor}. + * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. *

- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw - * results will be mapped to the given entity class. The name of the inputCollection is derived from the inputType of - * the aggregation. + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class. The name of the inputCollection is derived from + * the inputType of the aggregation. *

* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. @@ -448,35 +541,37 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN * {@literal null}. * @param collectionName The name of the input collection to use for the aggreation. * @param outputType The parametrized type of the returned list, must not be {@literal null}. - * @return The results of the aggregation operation. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 2.0 */ - CloseableIterator aggregateStream(TypedAggregation aggregation, String collectionName, Class outputType); + Stream aggregateStream(TypedAggregation aggregation, String collectionName, Class outputType); /** - * Execute an aggregation operation backed by a Mongo DB {@link Cursor}. - *

- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw - * results will be mapped to the given entity class and are returned as stream. The name of the inputCollection is - * derived from the inputType of the aggregation. - *

+ * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. + *

+ * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class and are returned as stream. The name of the + * inputCollection is derived from the inputType of the aggregation. + *

* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * * @param aggregation The {@link TypedAggregation} specification holding the aggregation operations, must not be * {@literal null}. * @param outputType The parametrized type of the returned list, must not be {@literal null}. - * @return The results of the aggregation operation. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 2.0 */ - CloseableIterator aggregateStream(TypedAggregation aggregation, Class outputType); + Stream aggregateStream(TypedAggregation aggregation, Class outputType); /** - * Execute an aggregation operation backed by a Mongo DB {@link Cursor}. - *

- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw - * results will be mapped to the given entity class. - *

+ * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. + *

+ * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class. + *

* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * @@ -485,17 +580,18 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN * @param inputType the inputType where the aggregation operation will read from, must not be {@literal null} or * empty. * @param outputType The parametrized type of the returned list, must not be {@literal null}. - * @return The results of the aggregation operation. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 2.0 */ - CloseableIterator aggregateStream(Aggregation aggregation, Class inputType, Class outputType); + Stream aggregateStream(Aggregation aggregation, Class inputType, Class outputType); /** - * Execute an aggregation operation backed by a Mongo DB {@link Cursor}. - *

- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw - * results will be mapped to the given entity class. - *

+ * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. + *

+ * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class. + *

* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * @@ -504,10 +600,11 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN * @param collectionName the collection where the aggregation operation will read from, must not be {@literal null} or * empty. * @param outputType The parametrized type of the returned list, must not be {@literal null}. - * @return The results of the aggregation operation. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 2.0 */ - CloseableIterator aggregateStream(Aggregation aggregation, String collectionName, Class outputType); + Stream aggregateStream(Aggregation aggregation, String collectionName, Class outputType); /** * Execute a map-reduce operation. The map-reduce operation will be formed with an output type of INLINE @@ -517,7 +614,9 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN * @param reduceFunction The JavaScript reduce function * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated MapReduceResults mapReduce(String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass); @@ -530,7 +629,9 @@ MapReduceResults mapReduce(String inputCollectionName, String mapFunction * @param mapReduceOptions Options that specify detailed map-reduce behavior. * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. 
*/ + @Deprecated MapReduceResults mapReduce(String inputCollectionName, String mapFunction, String reduceFunction, @Nullable MapReduceOptions mapReduceOptions, Class entityClass); @@ -544,7 +645,9 @@ MapReduceResults mapReduce(String inputCollectionName, String mapFunction * @param reduceFunction The JavaScript reduce function * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated MapReduceResults mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass); @@ -558,7 +661,9 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin * @param mapReduceOptions Options that specify detailed map-reduce behavior * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated MapReduceResults mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction, @Nullable MapReduceOptions mapReduceOptions, Class entityClass); @@ -567,37 +672,63 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin * information to determine the collection the query is ran against. Note, that MongoDB limits the number of results * by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of * results. + *

+ * MongoDB 4.2 has removed the {@code geoNear} command. This method uses since version 2.2 aggregations and the + * {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using + * aggregations directly: + *

+ * + *
+	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
+	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
+	 * AggregationResults<Document> results = aggregate(geoNear, Document.class);
+	 * 
* * @param near must not be {@literal null}. * @param entityClass must not be {@literal null}. * @return + * @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with + * {@link Aggregation#geoNear(NearQuery, String)} instead. */ + @Deprecated GeoResults geoNear(NearQuery near, Class entityClass); /** * Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the * number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a * particular number of results. + *

+ * MongoDB 4.2 has removed the {@code geoNear} command. This method uses since version 2.2 aggregations and the + * {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using + * aggregations directly: + *

+ * + *
+	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
+	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
+	 * AggregationResults<Document> results = aggregate(geoNear, Document.class);
+	 * 
* * @param near must not be {@literal null}. * @param entityClass must not be {@literal null}. * @param collectionName the collection to trigger the query against. If no collection name is given the entity class * will be inspected. Must not be {@literal null} nor empty. * @return + * @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with + * {@link Aggregation#geoNear(NearQuery, String)} instead. */ + @Deprecated GeoResults geoNear(NearQuery near, Class entityClass, String collectionName); /** * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the - * specified type. - *

+ * specified type.
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned list. * @return the converted object. @@ -607,15 +738,13 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified - * type. - *

+ * type.
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned list. * @param collectionName name of the collection to retrieve the objects from. @@ -629,7 +758,7 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin * NOTE: Any additional support for query/field mapping, etc. is not available due to the lack of * domain type information. Use {@link #exists(Query, Class, String)} to get full type specific support. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param collectionName name of the collection to check for objects. * @return {@literal true} if the query yields a result. */ @@ -638,7 +767,7 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Determine result of given {@link Query} contains at least one element. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param entityClass the parametrized type. * @return {@literal true} if the query yields a result. */ @@ -647,7 +776,7 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Determine result of given {@link Query} contains at least one element. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. 
* @param entityClass the parametrized type. Can be {@literal null}. * @param collectionName name of the collection to check for objects. * @return {@literal true} if the query yields a result. @@ -655,15 +784,13 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin boolean exists(Query query, @Nullable Class entityClass, String collectionName); /** - * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type. - *

+ * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type.
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityClass the parametrized type of the returned list. Must not be {@literal null}. * @return the List of converted objects. @@ -671,15 +798,13 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin List find(Query query, Class entityClass); /** - * Map the results of an ad-hoc query on the specified collection to a List of the specified type. - *

+ * Map the results of an ad-hoc query on the specified collection to a List of the specified type.
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityClass the parametrized type of the returned list. Must not be {@literal null}. * @param collectionName name of the collection to retrieve the objects from. Must not be {@literal null}. @@ -687,6 +812,57 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin */ List find(Query query, Class entityClass, String collectionName); + /** + * Query for a window of objects of type T from the specified collection.
+ * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
+ * Result objects are converted from the MongoDB native representation using an instance of {@link MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+ * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

+ * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@code null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned window. + * @return the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + Window scroll(Query query, Class entityType); + + /** + * Query for a window of objects of type T from the specified collection.
+ * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
+ * Result objects are converted from the MongoDB native representation using an instance of {@link MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+ * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

+ * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@code null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned window. + * @param collectionName name of the collection to retrieve the objects from. + * @return the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + Window scroll(Query query, Class entityType, String collectionName); + /** * Returns a document with the given id mapped onto the given class. The collection the query is ran against will be * derived from the given target class as well. @@ -769,79 +945,268 @@ default List findDistinct(Query query, String field, String collection, C } /** - * Triggers findAndModify - * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. - * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @return the converted object that was updated before it was updated or {@literal null}, if not found. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ @Nullable - T findAndModify(Query query, Update update, Class entityClass); + T findAndModify(Query query, UpdateDefinition update, Class entityClass); /** - * Triggers findAndModify - * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. - * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. * @return the converted object that was updated before it was updated or {@literal null}, if not found. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ @Nullable - T findAndModify(Query query, Update update, Class entityClass, String collectionName); + T findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Triggers findAndModify - * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. - * @param update the {@link Update} to apply on matching documents. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. + * @param update the {@link UpdateDefinition} to apply on matching documents. * @param options the {@link FindAndModifyOptions} holding additional information. * @param entityClass the parametrized type. * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of * {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as * it is after the update. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ @Nullable - T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass); + T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass); /** - * Triggers findAndModify - * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking + * Triggers findAndModify + * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. - * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of * {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as * it is after the update. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ @Nullable - T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, + T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass, String collectionName); + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
+ * The collection name is derived from the {@literal replacement} type.
+ * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
+ * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the converted object that was updated or {@literal null}, if not found. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 2.1 + */ + @Nullable + default T findAndReplace(Query query, T replacement) { + return findAndReplace(query, replacement, FindAndReplaceOptions.empty()); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
+ * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
+ * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the converted object that was updated or {@literal null}, if not found. + * @since 2.1 + */ + @Nullable + default T findAndReplace(Query query, T replacement, String collectionName) { + return findAndReplace(query, replacement, FindAndReplaceOptions.empty(), collectionName); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
+ * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 2.1 + */ + @Nullable + default T findAndReplace(Query query, T replacement, FindAndReplaceOptions options) { + return findAndReplace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement))); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
+ * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @since 2.1 + */ + @Nullable + default T findAndReplace(Query query, T replacement, FindAndReplaceOptions options, String collectionName) { + + Assert.notNull(replacement, "Replacement must not be null"); + return findAndReplace(query, replacement, options, (Class) ClassUtils.getUserClass(replacement), collectionName); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
+ * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}. + * @param entityType the parametrized type. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @since 2.1 + */ + @Nullable + default T findAndReplace(Query query, T replacement, FindAndReplaceOptions options, Class entityType, + String collectionName) { + + return findAndReplace(query, replacement, options, entityType, collectionName, entityType); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
+ * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}. + * @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection + * from. Must not be {@literal null}. + * @param resultType the parametrized type projection return type. Must not be {@literal null}, use the domain type or + * {@code Object.class} instead. + * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 2.1 + */ + @Nullable + default T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, + Class resultType) { + + return findAndReplace(query, replacement, options, entityType, + getCollectionName(ClassUtils.getUserClass(entityType)), resultType); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
+ * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}. + * @param entityType the type used for mapping the {@link Query} to domain type fields. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @param resultType the parametrized type projection return type. Must not be {@literal null}, use the domain type or + * {@code Object.class} instead. + * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @since 2.1 + */ + @Nullable + T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, + String collectionName, Class resultType); + /** * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the * specified type. The first document that matches the query is returned and also removed from the collection in the - * database. - *

- * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. - *

+ * database.
+ * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}.
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned list. * @return the converted object @@ -852,14 +1217,13 @@ T findAndModify(Query query, Update update, FindAndModifyOptions options, Cl /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. The first document that matches the query is returned and also removed from the collection in the database. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned list. * @param collectionName name of the collection to retrieve the objects from. @@ -870,289 +1234,541 @@ T findAndModify(Query query, Update update, FindAndModifyOptions options, Cl /** * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + *
+ * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+ * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be * {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @see #exactCount(Query, Class) + * @see #estimatedCount(Class) */ long count(Query query, Class entityClass); /** * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} * must solely consist of document field references as we lack type information to map potential property references - * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. + * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
+ * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+ * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. * @param collectionName must not be {@literal null} or empty. * @return the count of matching documents. * @see #count(Query, Class, String) + * @see #exactCount(Query, String) + * @see #estimatedCount(String) */ long count(Query query, String collectionName); /** * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity - * class to map the given {@link Query}. + * class to map the given {@link Query}.
+ * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+ * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be * {@literal null}. * @param entityClass the parametrized type. Can be {@literal null}. * @param collectionName must not be {@literal null} or empty. * @return the count of matching documents. + * @see #count(Query, Class, String) + * @see #estimatedCount(String) */ long count(Query query, @Nullable Class entityClass, String collectionName); /** - * Insert the object into the collection for the entity type of the object to save. - *

- * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

- * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a - * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's Type - * Conversion" for more details. - *

- *

- * Insert is used to initially store the object into the database. To update an existing object use the save method. + * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type}, + * based on collection statistics.
+ * Please make sure to read the MongoDB reference documentation about limitations on e.g. sharded cluster or inside + * transactions. + * + * @param entityClass must not be {@literal null}. + * @return the estimated number of documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.1 + */ + default long estimatedCount(Class entityClass) { + + Assert.notNull(entityClass, "Entity class must not be null"); + return estimatedCount(getCollectionName(entityClass)); + } + + /** + * Estimate the number of documents in the given collection based on collection statistics.
+ * Please make sure to read the MongoDB reference documentation about limitations on e.g. sharded cluster or inside + * transactions. + * + * @param collectionName must not be {@literal null}. + * @return the estimated number of documents. + * @since 3.1 + */ + long estimatedCount(String collectionName); + + /** + * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + *
+ * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+ * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(Class)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.4 + */ + default long exactCount(Query query, Class entityClass) { + return exactCount(query, entityClass, getCollectionName(entityClass)); + } + + /** + * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} + * must solely consist of document field references as we lack type information to map potential property references + * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
+ * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+ * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(String)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @see #count(Query, Class, String) + * @since 3.4 + */ + default long exactCount(Query query, String collectionName) { + return exactCount(query, null, collectionName); + } + + /** + * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity + * class to map the given {@link Query}.
+ * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+ * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(String)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass the parametrized type. Can be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @since 3.4 + */ + long exactCount(Query query, @Nullable Class entityClass, String collectionName); + + /** + * Insert the object into the collection for the entity type of the object to save.
+ * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}.
+ * If your object has an {@literal Id} property which holds a {@literal null} value, it will be set with the generated + * Id from MongoDB. If your Id property is a String then MongoDB ObjectId will be used to populate that string. + * Otherwise, the conversion from ObjectId to your property type will be handled by Spring's BeanWrapper class that + * leverages Type Conversion API. See + * Spring's + * Type Conversion" for more details.
+ * Insert is used to initially store the object into the database. To update an existing object use the + * {@link #save(Object)} method. + *

+ * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

+ * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @return the inserted object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ - void insert(Object objectToSave); + T insert(T objectToSave); /** - * Insert the object into the specified collection. - *

+ * Insert the object into the specified collection.
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* Insert is used to initially store the object into the database. To update an existing object use the save method. + *

+ * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

+ * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. + * @return the inserted object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. */ - void insert(Object objectToSave, String collectionName); + T insert(T objectToSave, String collectionName); /** * Insert a Collection of objects into a collection in a single batch write to the database. + *

+ * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

+ * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the batch of objects to save. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @return the inserted objects that. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ - void insert(Collection batchToSave, Class entityClass); + Collection insert(Collection batchToSave, Class entityClass); /** * Insert a batch of objects into the specified collection in a single batch write to the database. + *

+ * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

+ * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the list of objects to save. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. + * @return the inserted objects that. */ - void insert(Collection batchToSave, String collectionName); + Collection insert(Collection batchToSave, String collectionName); /** * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the * class. + *

+ * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

+ * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * - * @param collectionToSave the list of objects to save. Must not be {@literal null}. + * @param objectsToSave the list of objects to save. Must not be {@literal null}. + * @return the inserted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} for the given objects. */ - void insertAll(Collection objectsToSave); + Collection insertAll(Collection objectsToSave); /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the - * object is not already present, that is an 'upsert'. - *

+ * object is not already present, that is an 'upsert'.
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

- * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+ * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's Type - * Conversion" for more details. + * Spring's + * Type Conversion" for more details. + *

+ * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. + *

+ * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @return the saved object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ - void save(Object objectToSave); + T save(T objectToSave); /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that - * is an 'upsert'. - *

+ * is an 'upsert'.
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

- * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+ * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's Type + * Conversion for more details. + *

+ * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. + *

+ * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. + * @return the saved object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ - void save(Object objectToSave, String collectionName); + T save(T objectToSave, String collectionName); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + *

+ * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. + * Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @see Update + * @see AggregationUpdate + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 */ - UpdateResult upsert(Query query, Update update, Class entityClass); + UpdateResult upsert(Query query, UpdateDefinition update, Class entityClass); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document.
* NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of - * domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support. + * domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific + * support.
+ * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. + * Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - UpdateResult upsert(Query query, Update update, String collectionName); + UpdateResult upsert(Query query, UpdateDefinition update, String collectionName); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - UpdateResult upsert(Query query, Update update, Class entityClass, String collectionName); + UpdateResult upsert(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** * Updates the first object that is found in the collection of the entity class that matches the query document with * the provided update document. - * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class that determines the collection to use. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @see Update + * @see AggregationUpdate + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 */ - UpdateResult updateFirst(Query query, Update update, Class entityClass); + UpdateResult updateFirst(Query query, UpdateDefinition update, Class entityClass); /** * Updates the first object that is found in the specified collection that matches the query document criteria with * the provided updated document.
* NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of - * domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support. - * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific + * support. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - UpdateResult updateFirst(Query query, Update update, String collectionName); + UpdateResult updateFirst(Query query, UpdateDefinition update, String collectionName); /** * Updates the first object that is found in the specified collection that matches the query document criteria with - * the provided updated document.
- * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * the provided updated document. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be auto + * incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - UpdateResult updateFirst(Query query, Update update, Class entityClass, String collectionName); + UpdateResult updateFirst(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be auto + * incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @see Update + * @see AggregationUpdate + * @since 3.0 */ - UpdateResult updateMulti(Query query, Update update, Class entityClass); + UpdateResult updateMulti(Query query, UpdateDefinition update, Class entityClass); /** * Updates all objects that are found in the specified collection that matches the query document criteria with the * provided updated document.
* NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of - * domain type information. Use {@link #updateMulti(Query, Update, Class, String)} to get full type specific support. + * domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific + * support. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - UpdateResult updateMulti(Query query, Update update, String collectionName); + UpdateResult updateMulti(Query query, UpdateDefinition update, String collectionName); /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be auto + * incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - UpdateResult updateMulti(Query query, Update update, Class entityClass, String collectionName); + UpdateResult updateMulti(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Remove the given object from the collection by id. + * Remove the given object from the collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}.
+ * Use {@link DeleteResult#getDeletedCount()} for insight whether an {@link DeleteResult#wasAcknowledged() + * acknowledged} remove operation was successful or not. * * @param object must not be {@literal null}. * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ DeleteResult remove(Object object); /** - * Removes the given object from the given collection. + * Removes the given object from the given collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}.
+ * Use {@link DeleteResult#getDeletedCount()} for insight whether an {@link DeleteResult#wasAcknowledged() + * acknowledged} remove operation was successful or not. * * @param object must not be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ DeleteResult remove(Object object, String collectionName); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. * - * @param query the query document that specifies the criteria used to remove a record. + * @param query the query document that specifies the criteria used to remove a document. * @param entityClass class that determines the collection to use. * @return the {@link DeleteResult} which lets you access the results of the previous delete. * @throws IllegalArgumentException when {@literal query} or {@literal entityClass} is {@literal null}. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ DeleteResult remove(Query query, Class entityClass); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. 
* - * @param query the query document that specifies the criteria used to remove a record. + * @param query the query document that specifies the criteria used to remove a document. * @param entityClass class of the pojo to be operated on. Can be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. * @throws IllegalArgumentException when {@literal query}, {@literal entityClass} or {@literal collectionName} is * {@literal null}. @@ -1165,8 +1781,9 @@ T findAndModify(Query query, Update update, FindAndModifyOptions options, Cl * NOTE: Any additional support for field mapping is not available due to the lack of domain type * information. Use {@link #remove(Query, Class, String)} to get full type specific support. * - * @param query the query document that specifies the criteria used to remove a record. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param query the query document that specifies the criteria used to remove a document. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. * @throws IllegalArgumentException when {@literal query} or {@literal collectionName} is {@literal null}. */ @@ -1178,7 +1795,8 @@ T findAndModify(Query query, Update update, FindAndModifyOptions options, Cl * information. Use {@link #findAllAndRemove(Query, Class, String)} to get full type specific support. * * @param query the query document that specifies the criteria used to find and remove documents. 
- * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link List} converted objects deleted by this operation. * @since 1.5 */ @@ -1190,27 +1808,101 @@ T findAndModify(Query query, Update update, FindAndModifyOptions options, Cl * @param query the query document that specifies the criteria used to find and remove documents. * @param entityClass class of the pojo to be operated on. * @return the {@link List} converted objects deleted by this operation. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. * @since 1.5 */ List findAllAndRemove(Query query, Class entityClass); /** - * Returns and removes all documents that match the provided query document criteria from the the collection used to - * store the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in - * the query. + * Returns and removes all documents that match the provided query document criteria from the collection used to store + * the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the + * query. * * @param query the query document that specifies the criteria used to find and remove documents. * @param entityClass class of the pojo to be operated on. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link List} converted objects deleted by this operation. 
* @since 1.5 */ List findAllAndRemove(Query query, Class entityClass, String collectionName); + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
+ * The collection name is derived from the {@literal replacement} type.
+ * Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default UpdateResult replace(Query query, T replacement) { + return replace(query, replacement, ReplaceOptions.none()); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document. Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. 
+ * @since 4.2 + */ + default UpdateResult replace(Query query, T replacement, String collectionName) { + return replace(query, replacement, ReplaceOptions.none(), collectionName); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document.The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default UpdateResult replace(Query query, T replacement, ReplaceOptions options) { + return replace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement))); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may * + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. 
The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @since 4.2 + */ + UpdateResult replace(Query query, T replacement, ReplaceOptions options, String collectionName); + /** * Returns the underlying {@link MongoConverter}. * - * @return + * @return never {@literal null}. */ MongoConverter getConverter(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java new file mode 100644 index 0000000000..37001faa4e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java @@ -0,0 +1,92 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.springframework.beans.factory.FactoryBean; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ServerApi; +import com.mongodb.ServerApi.Builder; +import com.mongodb.ServerApiVersion; + +/** + * {@link FactoryBean} for creating {@link ServerApi} using the {@link ServerApi.Builder}. + * + * @author Christoph Strobl + * @since 3.3 + */ +public class MongoServerApiFactoryBean implements FactoryBean { + + private String version; + private @Nullable Boolean deprecationErrors; + private @Nullable Boolean strict; + + /** + * @param version the version string either as the enum name or the server version value. + * @see ServerApiVersion + */ + public void setVersion(String version) { + this.version = version; + } + + /** + * @param deprecationErrors + * @see ServerApi.Builder#deprecationErrors(boolean) + */ + public void setDeprecationErrors(@Nullable Boolean deprecationErrors) { + this.deprecationErrors = deprecationErrors; + } + + /** + * @param strict + * @see ServerApi.Builder#strict(boolean) + */ + public void setStrict(@Nullable Boolean strict) { + this.strict = strict; + } + + @Nullable + @Override + public ServerApi getObject() throws Exception { + + Builder builder = ServerApi.builder().version(version()); + + if (deprecationErrors != null) { + builder = builder.deprecationErrors(deprecationErrors); + } + if (strict != null) { + builder = builder.strict(strict); + } + return builder.build(); + } + + @Nullable + @Override + public Class getObjectType() { + return ServerApi.class; + } + + private ServerApiVersion version() { + try { + // lookup by name eg. 'V1' + return ObjectUtils.caseInsensitiveValueOf(ServerApiVersion.values(), version); + } catch (IllegalArgumentException e) { + // or just the version number, eg. 
just '1' + return ServerApiVersion.findByValue(version); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index 8362a13416..fd547c61a0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,22 @@ */ package org.springframework.data.mongodb.core; -import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.SerializationUtils.*; -import lombok.AccessLevel; -import lombok.AllArgsConstructor; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - import java.io.IOException; +import java.math.BigDecimal; +import java.math.RoundingMode; import java.util.*; -import java.util.Map.Entry; import java.util.concurrent.TimeUnit; +import java.util.function.BiPredicate; import java.util.stream.Collectors; +import java.util.stream.Stream; -import org.bson.BsonValue; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.bson.Document; -import org.bson.codecs.Codec; import org.bson.conversions.Bson; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; + import org.springframework.beans.BeansException; 
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; @@ -42,34 +38,42 @@ import org.springframework.context.ApplicationEventPublisherAware; import org.springframework.context.ApplicationListener; import org.springframework.context.ConfigurableApplicationContext; -import org.springframework.core.convert.ConversionService; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; import org.springframework.dao.DataAccessException; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.annotation.Id; import org.springframework.data.convert.EntityReader; +import org.springframework.data.domain.OffsetScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Metric; import org.springframework.data.mapping.MappingException; -import org.springframework.data.mapping.PersistentPropertyAccessor; -import org.springframework.data.mapping.PropertyPath; -import org.springframework.data.mapping.PropertyReferenceException; +import org.springframework.data.mapping.callback.EntityCallbacks; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.model.ConvertingPropertyAccessor; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoDatabaseUtils; +import org.springframework.data.mongodb.SessionSynchronization; import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import 
org.springframework.data.mongodb.core.CollectionPreparerSupport.CollectionPreparerDelegate; import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext; +import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity; +import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition; +import org.springframework.data.mongodb.core.QueryOperations.CountContext; +import org.springframework.data.mongodb.core.QueryOperations.DeleteContext; +import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext; +import org.springframework.data.mongodb.core.QueryOperations.QueryContext; +import org.springframework.data.mongodb.core.QueryOperations.UpdateContext; +import org.springframework.data.mongodb.core.ScrollUtils.KeysetScrollQuery; import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions.Builder; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; import org.springframework.data.mongodb.core.aggregation.AggregationResults; -import org.springframework.data.mongodb.core.aggregation.Fields; -import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; @@ -81,81 +85,77 @@ import org.springframework.data.mongodb.core.convert.MongoWriter; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.index.DefaultSearchIndexOperations; import 
org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.index.IndexOperationsProvider; import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator; +import org.springframework.data.mongodb.core.index.SearchIndexOperations; +import org.springframework.data.mongodb.core.index.SearchIndexOperationsProvider; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; -import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; -import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; -import org.springframework.data.mongodb.core.mapreduce.GroupBy; -import org.springframework.data.mongodb.core.mapreduce.GroupByResults; +import org.springframework.data.mongodb.core.mapping.event.*; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.mapreduce.MapReduceResults; +import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Collation; -import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Meta; import 
org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.data.mongodb.core.validation.Validator; -import org.springframework.data.mongodb.util.MongoClientVersion; -import org.springframework.data.projection.ProjectionInformation; -import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; +import org.springframework.data.projection.EntityProjection; import org.springframework.data.util.CloseableIterator; import org.springframework.data.util.Optionals; -import org.springframework.data.util.Pair; -import org.springframework.data.util.StreamUtils; -import org.springframework.jca.cci.core.ConnectionCallback; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.util.CollectionUtils; +import org.springframework.util.NumberUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.ResourceUtils; import org.springframework.util.StringUtils; -import com.mongodb.Cursor; -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; +import com.mongodb.ClientSessionOptions; import com.mongodb.MongoException; import com.mongodb.ReadPreference; import com.mongodb.WriteConcern; import com.mongodb.client.AggregateIterable; +import com.mongodb.client.ClientSession; import com.mongodb.client.DistinctIterable; import com.mongodb.client.FindIterable; import com.mongodb.client.MapReduceIterable; +import com.mongodb.client.MongoClient; import 
com.mongodb.client.MongoCollection; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoDatabase; import com.mongodb.client.MongoIterable; -import com.mongodb.client.model.CountOptions; -import com.mongodb.client.model.CreateCollectionOptions; -import com.mongodb.client.model.DeleteOptions; -import com.mongodb.client.model.Filters; -import com.mongodb.client.model.FindOneAndDeleteOptions; -import com.mongodb.client.model.FindOneAndUpdateOptions; -import com.mongodb.client.model.ReturnDocument; -import com.mongodb.client.model.UpdateOptions; -import com.mongodb.client.model.ValidationAction; -import com.mongodb.client.model.ValidationLevel; +import com.mongodb.client.model.*; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.UpdateResult; -import com.mongodb.util.JSONParseException; /** - * Primary implementation of {@link MongoOperations}. + * Primary implementation of {@link MongoOperations}. It simplifies the use of imperative MongoDB usage and helps to + * avoid common errors. It executes core MongoDB workflow, leaving application code to provide {@link Document} and + * extract results. This class executes BSON queries or updates, initiating iteration over {@link FindIterable} and + * catching MongoDB exceptions and translating them to the generic, more informative exception hierarchy defined in the + * org.springframework.dao package. Can be used within a service implementation via direct instantiation with a + * {@link MongoDatabaseFactory} reference, or get prepared in an application context and given to services as bean + * reference. + *

+ * Note: The {@link MongoDatabaseFactory} should always be configured as a bean in the application context, in the first + * case given to the service directly, in the second case to the prepared template. + *

{@link ReadPreference} and {@link com.mongodb.ReadConcern}

+ *

+ * {@code ReadPreference} and {@code ReadConcern} are generally considered from {@link Query} and + * {@link AggregationOptions} objects for the action to be executed on a particular {@link MongoCollection}. + *

+ * You can also set the default {@link #setReadPreference(ReadPreference) ReadPreference} on the template level to + * generally apply a {@link ReadPreference}. * * @author Thomas Risberg * @author Graeme Rocher @@ -176,50 +176,56 @@ * @author Borislav Rangelov * @author duozhilin * @author Andreas Zink + * @author Cimon Lucas + * @author Michael J. Simons + * @author Roman Puchkovskiy + * @author Yadhukrishna S Pai + * @author Anton Barkan + * @author Bartłomiej Mazur + * @author Michael Krog + * @author Jakub Zurawa + * @author Florian Lüdiger */ -@SuppressWarnings("deprecation") -public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider { +public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider, + SearchIndexOperationsProvider, ReadPreferenceAware { - private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class); - private static final String ID_FIELD = "_id"; + private static final Log LOGGER = LogFactory.getLog(MongoTemplate.class); private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE; - private static final Collection ITERABLE_CLASSES; - - static { - - Set iterableClasses = new HashSet(); - iterableClasses.add(List.class.getName()); - iterableClasses.add(Collection.class.getName()); - iterableClasses.add(Iterator.class.getName()); - - ITERABLE_CLASSES = Collections.unmodifiableCollection(iterableClasses); - } private final MongoConverter mongoConverter; private final MappingContext, MongoPersistentProperty> mappingContext; - private final MongoDbFactory mongoDbFactory; + private final MongoDatabaseFactory mongoDbFactory; private final PersistenceExceptionTranslator exceptionTranslator; private final QueryMapper queryMapper; private final UpdateMapper updateMapper; private final JsonSchemaMapper schemaMapper; - private final SpelAwareProxyProjectionFactory projectionFactory; + private final 
EntityOperations operations; + private final PropertyOperations propertyOperations; + private final QueryOperations queryOperations; + private final EntityLifecycleEventDelegate eventDelegate; private @Nullable WriteConcern writeConcern; private WriteConcernResolver writeConcernResolver = DefaultWriteConcernResolver.INSTANCE; private WriteResultChecking writeResultChecking = WriteResultChecking.NONE; private @Nullable ReadPreference readPreference; private @Nullable ApplicationEventPublisher eventPublisher; + private @Nullable EntityCallbacks entityCallbacks; private @Nullable ResourceLoader resourceLoader; private @Nullable MongoPersistentEntityIndexCreator indexCreator; + private SessionSynchronization sessionSynchronization = SessionSynchronization.ON_ACTUAL_TRANSACTION; + + private CountExecution countExecution = this::doExactCount; + /** - * Constructor used for a basic template configuration + * Constructor used for a basic template configuration. * * @param mongoClient must not be {@literal null}. * @param databaseName must not be {@literal null} or empty. + * @since 2.1 */ public MongoTemplate(MongoClient mongoClient, String databaseName) { - this(new SimpleMongoDbFactory(mongoClient, databaseName), null); + this(new SimpleMongoClientDatabaseFactory(mongoClient, databaseName), (MongoConverter) null); } /** @@ -227,8 +233,8 @@ public MongoTemplate(MongoClient mongoClient, String databaseName) { * * @param mongoDbFactory must not be {@literal null}. */ - public MongoTemplate(MongoDbFactory mongoDbFactory) { - this(mongoDbFactory, null); + public MongoTemplate(MongoDatabaseFactory mongoDbFactory) { + this(mongoDbFactory, (MongoConverter) null); } /** @@ -237,9 +243,9 @@ public MongoTemplate(MongoDbFactory mongoDbFactory) { * @param mongoDbFactory must not be {@literal null}. 
* @param mongoConverter */ - public MongoTemplate(MongoDbFactory mongoDbFactory, @Nullable MongoConverter mongoConverter) { + public MongoTemplate(MongoDatabaseFactory mongoDbFactory, @Nullable MongoConverter mongoConverter) { - Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!"); + Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null"); this.mongoDbFactory = mongoDbFactory; this.exceptionTranslator = mongoDbFactory.getExceptionTranslator(); @@ -247,20 +253,50 @@ public MongoTemplate(MongoDbFactory mongoDbFactory, @Nullable MongoConverter mon this.queryMapper = new QueryMapper(this.mongoConverter); this.updateMapper = new UpdateMapper(this.mongoConverter); this.schemaMapper = new MongoJsonSchemaMapper(this.mongoConverter); - this.projectionFactory = new SpelAwareProxyProjectionFactory(); + this.operations = new EntityOperations(this.mongoConverter, this.queryMapper); + this.propertyOperations = new PropertyOperations(this.mongoConverter.getMappingContext()); + this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, propertyOperations, + mongoDbFactory); + this.eventDelegate = new EntityLifecycleEventDelegate(); // We always have a mapping context in the converter, whether it's a simple one or not mappingContext = this.mongoConverter.getMappingContext(); // We create indexes based on mapping events - if (mappingContext instanceof MongoMappingContext) { - indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, this); - eventPublisher = new MongoMappingEventPublisher(indexCreator); - if (mappingContext instanceof ApplicationEventPublisherAware) { - ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + if (mappingContext instanceof MongoMappingContext mappingContext) { + + if (mappingContext.isAutoIndexCreation()) { + + indexCreator = new MongoPersistentEntityIndexCreator(mappingContext, this); + eventPublisher = new 
MongoMappingEventPublisher(indexCreator); + mappingContext.setApplicationEventPublisher(eventPublisher); } } } + private MongoTemplate(MongoDatabaseFactory dbFactory, MongoTemplate that) { + + this.mongoDbFactory = dbFactory; + this.exceptionTranslator = that.exceptionTranslator; + this.sessionSynchronization = that.sessionSynchronization; + + // we need to (re)create the MappingMongoConverter as we need to have it use a DbRefResolver that operates within + // the sames session. Otherwise loading referenced objects would happen outside of it. + if (that.mongoConverter instanceof MappingMongoConverter mappingMongoConverter) { + this.mongoConverter = mappingMongoConverter.with(dbFactory); + } else { + this.mongoConverter = that.mongoConverter; + } + + this.queryMapper = that.queryMapper; + this.updateMapper = that.updateMapper; + this.schemaMapper = that.schemaMapper; + this.mappingContext = that.mappingContext; + this.operations = that.operations; + this.propertyOperations = that.propertyOperations; + this.queryOperations = that.queryOperations; + this.eventDelegate = that.eventDelegate; + } + /** * Configures the {@link WriteResultChecking} to be used with the template. Setting {@literal null} will reset the * default of {@link #DEFAULT_WRITE_RESULT_CHECKING}. @@ -273,8 +309,7 @@ public void setWriteResultChecking(@Nullable WriteResultChecking resultChecking) /** * Configures the {@link WriteConcern} to be used with the template. If none is configured the {@link WriteConcern} - * configured on the {@link MongoDbFactory} will apply. If you configured a {@link Mongo} instance no - * {@link WriteConcern} will be used. + * configured on the {@link MongoDatabaseFactory} will apply. 
* * @param writeConcern */ @@ -302,24 +337,102 @@ public void setReadPreference(@Nullable ReadPreference readPreference) { this.readPreference = readPreference; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) + @Override + public boolean hasReadPreference() { + return this.readPreference != null; + } + + @Override + public ReadPreference getReadPreference() { + return this.readPreference; + } + + /** + * Configure whether lifecycle events such as {@link AfterLoadEvent}, {@link BeforeSaveEvent}, etc. should be + * published or whether emission should be suppressed. Enabled by default. + * + * @param enabled {@code true} to enable entity lifecycle events; {@code false} to disable entity lifecycle events. + * @since 4.0 + * @see MongoMappingEvent */ + public void setEntityLifecycleEventsEnabled(boolean enabled) { + this.eventDelegate.setEventsEnabled(enabled); + } + + @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { prepareIndexCreator(applicationContext); eventPublisher = applicationContext; + eventDelegate.setPublisher(eventPublisher); - if (mappingContext instanceof ApplicationEventPublisherAware) { - ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + if (entityCallbacks == null) { + setEntityCallbacks(EntityCallbacks.create(applicationContext)); + } + + if (mappingContext instanceof ApplicationEventPublisherAware applicationEventPublisherAware) { + applicationEventPublisherAware.setApplicationEventPublisher(eventPublisher); } resourceLoader = applicationContext; + } + + /** + * Set the {@link EntityCallbacks} instance to use when invoking + * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link BeforeSaveCallback}. + *
+ * Overrides potentially existing {@link EntityCallbacks}. + * + * @param entityCallbacks must not be {@literal null}. + * @throws IllegalArgumentException if the given instance is {@literal null}. + * @since 2.2 + */ + public void setEntityCallbacks(EntityCallbacks entityCallbacks) { + + Assert.notNull(entityCallbacks, "EntityCallbacks must not be null"); + this.entityCallbacks = entityCallbacks; + } + + /** + * Configure whether to use estimated count. Defaults to exact counting. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @since 3.4 + */ + public void useEstimatedCount(boolean enabled) { + useEstimatedCount(enabled, this::countCanBeEstimated); + } + + /** + * Configure whether to use estimated count based on the given {@link BiPredicate estimationFilter}. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @param estimationFilter the {@link BiPredicate filter}. 
+ * @since 3.4 + */ + private void useEstimatedCount(boolean enabled, BiPredicate estimationFilter) { + + if (enabled) { + + this.countExecution = (collectionPreparer, collectionName, filter, options) -> { + + if (!estimationFilter.test(filter, options)) { + return doExactCount(collectionPreparer, collectionName, filter, options); + } + + EstimatedDocumentCountOptions estimatedDocumentCountOptions = new EstimatedDocumentCountOptions(); + if (options.getMaxTime(TimeUnit.MILLISECONDS) > 0) { + estimatedDocumentCountOptions.maxTime(options.getMaxTime(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS); + } - projectionFactory.setBeanFactory(applicationContext); - projectionFactory.setBeanClassLoader(applicationContext.getClassLoader()); + return doEstimatedCount(collectionPreparer, collectionName, estimatedDocumentCountOptions); + }; + } else { + this.countExecution = this::doExactCount; + } } /** @@ -341,8 +454,8 @@ private void prepareIndexCreator(ApplicationContext context) { } } - if (context instanceof ConfigurableApplicationContext && indexCreator != null) { - ((ConfigurableApplicationContext) context).addApplicationListener(indexCreator); + if (context instanceof ConfigurableApplicationContext configurableApplicationContext && indexCreator != null) { + configurableApplicationContext.addApplicationListener(indexCreator); } } @@ -351,119 +464,82 @@ private void prepareIndexCreator(ApplicationContext context) { * * @return */ + @Override public MongoConverter getConverter() { return this.mongoConverter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeAsStream(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override - public CloseableIterator stream(final Query query, final Class entityType) { - - return stream(query, entityType, determineCollectionName(entityType)); + public Stream stream(Query query, Class entityType) { + return stream(query, entityType, getCollectionName(entityType)); } - /* - * 
(non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#stream(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override - public CloseableIterator stream(final Query query, final Class entityType, final String collectionName) { + public Stream stream(Query query, Class entityType, String collectionName) { return doStream(query, entityType, collectionName, entityType); } - protected CloseableIterator doStream(final Query query, final Class entityType, final String collectionName, - Class returnType) { + @SuppressWarnings("ConstantConditions") + protected Stream doStream(Query query, Class entityType, String collectionName, Class returnType) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(entityType, "Entity type must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(returnType, "ReturnType must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(entityType, "Entity type must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(returnType, "ReturnType must not be null"); - return execute(collectionName, new CollectionCallback>() { + return execute(collectionName, (CollectionCallback>) collection -> { - @Override - public CloseableIterator doInCollection(MongoCollection collection) - throws MongoException, DataAccessException { + MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entityType); - MongoPersistentEntity persistentEntity = mappingContext.getRequiredPersistentEntity(entityType); + QueryContext queryContext = queryOperations.createQueryContext(query); + EntityProjection projection = operations.introspectProjection(returnType, entityType); - Document mappedFields = getMappedFieldsObject(query.getFieldsObject(), persistentEntity, returnType); - Document mappedQuery = 
queryMapper.getMappedObject(query.getQueryObject(), persistentEntity); + Document mappedQuery = queryContext.getMappedQuery(persistentEntity); + Document mappedFields = queryContext.getMappedFields(persistentEntity, projection); - FindIterable cursor = new QueryCursorPreparer(query, entityType) - .prepare(collection.find(mappedQuery).projection(mappedFields)); + CollectionPreparerDelegate readPreference = createDelegate(query); + FindIterable cursor = new QueryCursorPreparer(query, entityType).initiateFind(collection, + col -> readPreference.prepare(col).find(mappedQuery, Document.class).projection(mappedFields)); - return new CloseableIterableCursorAdapter(cursor, exceptionTranslator, - new ProjectingReadCallback<>(mongoConverter, entityType, returnType, collectionName)); - } + return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, + new ProjectingReadCallback<>(mongoConverter, projection, collectionName)).stream(); }); } @Override public String getCollectionName(Class entityClass) { - return this.determineCollectionName(entityClass); + return this.operations.determineCollectionName(entityClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeCommand(java.lang.String) - */ @Override - public Document executeCommand(final String jsonCommand) { + @SuppressWarnings("ConstantConditions") + public Document executeCommand(String jsonCommand) { - Assert.hasText(jsonCommand, "JsonCommand must not be null nor empty!"); + Assert.hasText(jsonCommand, "JsonCommand must not be null nor empty"); - return execute(new DbCallback() { - public Document doInDB(MongoDatabase db) throws MongoException, DataAccessException { - return db.runCommand(Document.parse(jsonCommand), Document.class); - } - }); + return execute(db -> db.runCommand(Document.parse(jsonCommand), Document.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeCommand(org.bson.Document) - */ @Override - 
public Document executeCommand(final Document command) { - - Assert.notNull(command, "Command must not be null!"); + @SuppressWarnings("ConstantConditions") + public Document executeCommand(Document command) { - Document result = execute(new DbCallback() { - public Document doInDB(MongoDatabase db) throws MongoException, DataAccessException { - return db.runCommand(command, Document.class); - } - }); + Assert.notNull(command, "Command must not be null"); - return result; + return execute(db -> db.runCommand(command, Document.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeCommand(org.bson.Document, com.mongodb.ReadPreference) - */ @Override + @SuppressWarnings("ConstantConditions") public Document executeCommand(Document command, @Nullable ReadPreference readPreference) { - Assert.notNull(command, "Command must not be null!"); - - Document result = execute(new DbCallback() { - public Document doInDB(MongoDatabase db) throws MongoException, DataAccessException { - return readPreference != null ? db.runCommand(command, readPreference, Document.class) - : db.runCommand(command, Document.class); - } - }); + Assert.notNull(command, "Command must not be null"); - return result; + return execute(db -> readPreference != null // + ? db.runCommand(command, readPreference, Document.class) // + : db.runCommand(command, Document.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeQuery(org.springframework.data.mongodb.core.query.Query, java.lang.String, org.springframework.data.mongodb.core.DocumentCallbackHandler) - */ @Override public void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch) { executeQuery(query, collectionName, dch, new QueryCursorPreparer(query, null)); @@ -477,238 +553,265 @@ public void executeQuery(Query query, String collectionName, DocumentCallbackHan * specification, must not be {@literal null}. 
* @param collectionName name of the collection to retrieve the objects from * @param documentCallbackHandler the handler that will extract results, one document at a time - * @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply - * limits, skips and so on). + * @param preparer allows for customization of the {@link FindIterable} used when iterating over the result set, + * (apply limits, skips and so on). */ protected void executeQuery(Query query, String collectionName, DocumentCallbackHandler documentCallbackHandler, @Nullable CursorPreparer preparer) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(documentCallbackHandler, "DocumentCallbackHandler must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(documentCallbackHandler, "DocumentCallbackHandler must not be null"); Document queryObject = queryMapper.getMappedObject(query.getQueryObject(), Optional.empty()); Document sortObject = query.getSortObject(); Document fieldsObject = query.getFieldsObject(); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing query: {} sort: {} fields: {} in collection: {}", serializeToJsonSafely(queryObject), - sortObject, fieldsObject, collectionName); + LOGGER.debug(String.format("Executing query: %s fields: %s sort: %s in collection: %s", + serializeToJsonSafely(queryObject), fieldsObject, serializeToJsonSafely(sortObject), collectionName)); } - this.executeQueryInternal(new FindCallback(queryObject, fieldsObject), preparer, documentCallbackHandler, - collectionName); + this.executeQueryInternal(new FindCallback(createDelegate(query), queryObject, fieldsObject, null), + preparer != null ? 
preparer : CursorPreparer.NO_OP_PREPARER, documentCallbackHandler, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#execute(org.springframework.data.mongodb.core.DbCallback) - */ + @Override public T execute(DbCallback action) { - Assert.notNull(action, "DbCallbackmust not be null!"); + Assert.notNull(action, "DbCallback must not be null"); try { - MongoDatabase db = this.getDb(); + MongoDatabase db = prepareDatabase(this.doGetDatabase()); return action.doInDB(db); } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#execute(java.lang.Class, org.springframework.data.mongodb.core.DbCallback) - */ + @Override public T execute(Class entityClass, CollectionCallback callback) { - Assert.notNull(entityClass, "EntityClass must not be null!"); - return execute(determineCollectionName(entityClass), callback); + Assert.notNull(entityClass, "EntityClass must not be null"); + return execute(getCollectionName(entityClass), callback); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#execute(java.lang.String, org.springframework.data.mongodb.core.DbCallback) - */ + @Override public T execute(String collectionName, CollectionCallback callback) { - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(callback, "CollectionCallback must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(callback, "CollectionCallback must not be null"); try { - MongoCollection collection = getAndPrepareCollection(getDb(), collectionName); + MongoCollection collection = getAndPrepareCollection(doGetDatabase(), collectionName); return callback.doInCollection(collection); } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } } - /* - * (non-Javadoc) - * 
@see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.Class) + @Override + public SessionScoped withSession(ClientSessionOptions options) { + + Assert.notNull(options, "ClientSessionOptions must not be null"); + + return withSession(() -> mongoDbFactory.getSession(options)); + } + + @Override + public MongoTemplate withSession(ClientSession session) { + + Assert.notNull(session, "ClientSession must not be null"); + + return new SessionBoundMongoTemplate(session, MongoTemplate.this); + } + + /** + * Define if {@link MongoTemplate} should participate in transactions. Default is set to + * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION}.
+ * NOTE: MongoDB transactions require at least MongoDB 4.0. + * + * @since 2.1 */ + public void setSessionSynchronization(SessionSynchronization sessionSynchronization) { + this.sessionSynchronization = sessionSynchronization; + } + + @Override public MongoCollection createCollection(Class entityClass) { - return createCollection(determineCollectionName(entityClass)); + return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.Class, org.springframework.data.mongodb.core.CollectionOptions) - */ + @Override public MongoCollection createCollection(Class entityClass, @Nullable CollectionOptions collectionOptions) { - Assert.notNull(entityClass, "EntityClass must not be null!"); - return doCreateCollection(determineCollectionName(entityClass), convertToDocument(collectionOptions, entityClass)); + Assert.notNull(entityClass, "EntityClass must not be null"); + + return doCreateCollection(getCollectionName(entityClass), + operations.convertToCreateCollectionOptions(collectionOptions, entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.String) - */ - public MongoCollection createCollection(final String collectionName) { + @Override + public MongoCollection createCollection(String collectionName) { - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); return doCreateCollection(collectionName, new Document()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.String, org.springframework.data.mongodb.core.CollectionOptions) - */ - public MongoCollection createCollection(final String collectionName, - final @Nullable CollectionOptions collectionOptions) { + @Override + public MongoCollection 
createCollection(String collectionName, + @Nullable CollectionOptions collectionOptions) { - Assert.notNull(collectionName, "CollectionName must not be null!"); - return doCreateCollection(collectionName, convertToDocument(collectionOptions)); + Assert.notNull(collectionName, "CollectionName must not be null"); + return doCreateCollection(collectionName, + operations.convertToCreateCollectionOptions(collectionOptions, Object.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#getCollection(java.lang.String) - */ - public MongoCollection getCollection(final String collectionName) { + @Override + public MongoCollection createView(String name, Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { - Assert.notNull(collectionName, "CollectionName must not be null!"); + return createView(name, getCollectionName(source), + queryOperations.createAggregation(Aggregation.newAggregation(source, pipeline.getOperations()), source), + options); + } - return execute(new DbCallback>() { - public MongoCollection doInDB(MongoDatabase db) throws MongoException, DataAccessException { - return db.getCollection(collectionName, Document.class); - } + @Override + public MongoCollection createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, source, + queryOperations.createAggregation(Aggregation.newAggregation(pipeline.getOperations()), (Class) null), + options); + } + + private MongoCollection createView(String name, String source, AggregationDefinition aggregation, + @Nullable ViewOptions options) { + return doCreateView(name, source, aggregation.getAggregationPipeline(), options); + } + + protected MongoCollection doCreateView(String name, String source, List pipeline, + @Nullable ViewOptions options) { + + CreateViewOptions viewOptions = new CreateViewOptions(); + if (options != null) { + 
options.getCollation().map(Collation::toMongoCollation).ifPresent(viewOptions::collation); + } + + return execute(db -> { + db.createView(name, source, pipeline, viewOptions); + return db.getCollection(name); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#getCollection(java.lang.Class) - */ + @Override + @SuppressWarnings("ConstantConditions") + public MongoCollection getCollection(String collectionName) { + + Assert.notNull(collectionName, "CollectionName must not be null"); + + return execute(db -> db.getCollection(collectionName, Document.class)); + } + + @Override public boolean collectionExists(Class entityClass) { - return collectionExists(determineCollectionName(entityClass)); + return collectionExists(getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#getCollection(java.lang.String) - */ - public boolean collectionExists(final String collectionName) { + @Override + @SuppressWarnings("ConstantConditions") + public boolean collectionExists(String collectionName) { - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); - return execute(new DbCallback() { - public Boolean doInDB(MongoDatabase db) throws MongoException, DataAccessException { - for (String name : db.listCollectionNames()) { - if (name.equals(collectionName)) { - return true; - } + return execute(db -> { + + for (String name : MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(db).listCollectionNames()) { + if (name.equals(collectionName)) { + return true; } - return false; } + return false; }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#dropCollection(java.lang.Class) - */ + @Override public void dropCollection(Class entityClass) { - dropCollection(determineCollectionName(entityClass)); + 
dropCollection(getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#dropCollection(java.lang.String) - */ + @Override public void dropCollection(String collectionName) { - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); - execute(collectionName, new CollectionCallback() { - public Void doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - collection.drop(); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Dropped collection [{}]", collection.getNamespace().getCollectionName()); - } - return null; + execute(collectionName, (CollectionCallback) collection -> { + collection.drop(); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Dropped collection [%s]", + collection.getNamespace() != null ? collection.getNamespace().getCollectionName() : collectionName)); } + return null; }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#indexOps(java.lang.String) - */ + @Override public IndexOperations indexOps(String collectionName) { - return new DefaultIndexOperations(getMongoDbFactory(), collectionName, queryMapper); + return indexOps(collectionName, null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#indexOps(java.lang.Class) - */ + @Override + public IndexOperations indexOps(String collectionName, @Nullable Class type) { + return new DefaultIndexOperations(this, collectionName, type); + } + + @Override public IndexOperations indexOps(Class entityClass) { - return new DefaultIndexOperations(getMongoDbFactory(), determineCollectionName(entityClass), queryMapper, - entityClass); + return indexOps(getCollectionName(entityClass), entityClass); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ExecutableInsertOperation#bulkOps(org.springframework.data.mongodb.core.BulkMode, java.lang.String) - */ - public BulkOperations bulkOps(BulkMode bulkMode, String collectionName) { - return bulkOps(bulkMode, null, collectionName); + @Override + public SearchIndexOperations searchIndexOps(String collectionName) { + return searchIndexOps(null, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#bulkOps(org.springframework.data.mongodb.core.BulkMode, java.lang.Class) - */ + @Override + public SearchIndexOperations searchIndexOps(Class type) { + return new DefaultSearchIndexOperations(this, type); + } + + @Override + public SearchIndexOperations searchIndexOps(@Nullable Class type, String collectionName) { + return new DefaultSearchIndexOperations(this, collectionName, type); + } + + @Override + public BulkOperations bulkOps(BulkMode mode, String collectionName) { + return bulkOps(mode, null, collectionName); + } + + @Override public BulkOperations bulkOps(BulkMode bulkMode, Class entityClass) { - return bulkOps(bulkMode, entityClass, determineCollectionName(entityClass)); + return bulkOps(bulkMode, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#bulkOps(org.springframework.data.mongodb.core.BulkMode, java.lang.Class, java.lang.String) - */ + @Override public BulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName) { - Assert.notNull(mode, "BulkMode must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(mode, "BulkMode must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - DefaultBulkOperations operations = new DefaultBulkOperations(this, collectionName, new BulkOperationContext(mode, - 
Optional.ofNullable(getPersistentEntity(entityType)), queryMapper, updateMapper)); + DefaultBulkOperations operations = new DefaultBulkOperations(this, collectionName, + new BulkOperationContext(mode, Optional.ofNullable(getPersistentEntity(entityType)), queryMapper, updateMapper, + eventPublisher, entityCallbacks)); - operations.setExceptionTranslator(exceptionTranslator); operations.setDefaultWriteConcern(writeConcern); return operations; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#scriptOps() - */ @Override public ScriptOperations scriptOps() { return new DefaultScriptOperations(this); @@ -719,19 +822,21 @@ public ScriptOperations scriptOps() { @Nullable @Override public T findOne(Query query, Class entityClass) { - return findOne(query, entityClass, determineCollectionName(entityClass)); + return findOne(query, entityClass, getCollectionName(entityClass)); } @Nullable @Override public T findOne(Query query, Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); - if (ObjectUtils.isEmpty(query.getSortObject()) && !query.getCollation().isPresent()) { - return doFindOne(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass); + if (ObjectUtils.isEmpty(query.getSortObject())) { + + return doFindOne(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), + new QueryCursorPreparer(query, entityClass), entityClass); } else { query.limit(1); List results = find(query, entityClass, collectionName); @@ -741,7 +846,7 @@ public T findOne(Query query, Class entityClass, String collectionName) { @Override public boolean 
exists(Query query, Class entityClass) { - return exists(query, entityClass, determineCollectionName(entityClass)); + return exists(query, entityClass, getCollectionName(entityClass)); } @Override @@ -750,109 +855,138 @@ public boolean exists(Query query, String collectionName) { } @Override + @SuppressWarnings("ConstantConditions") public boolean exists(Query query, @Nullable Class entityClass, String collectionName) { if (query == null) { throw new InvalidDataAccessApiUsageException("Query passed in to exist can't be null"); } - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); - Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), getPersistentEntity(entityClass)); + QueryContext queryContext = queryOperations.createQueryContext(query); + Document mappedQuery = queryContext.getMappedQuery(entityClass, this::getPersistentEntity); return execute(collectionName, - new ExistsCallback(mappedQuery, query.getCollation().map(Collation::toMongoCollation).orElse(null))); + new ExistsCallback(createDelegate(query), mappedQuery, queryContext.getCollation(entityClass).orElse(null))); } // Find methods that take a Query to express the query and that return a List of objects. 
- /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override public List find(Query query, Class entityClass) { - return find(query, entityClass, determineCollectionName(entityClass)); + return find(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override public List find(Query query, Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); - return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, + return doFind(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), entityClass, new QueryCursorPreparer(query, entityClass)); } - @Nullable @Override - public T findById(Object id, Class entityClass) { - return findById(id, entityClass, determineCollectionName(entityClass)); + public Window scroll(Query query, Class entityType) { + + Assert.notNull(entityType, "Entity type must not be null"); + + return scroll(query, entityType, getCollectionName(entityType)); } - @Nullable @Override - public T findById(Object id, Class entityClass, String collectionName) { + public Window scroll(Query query, Class entityType, String collectionName) { + return doScroll(query, entityType, entityType, collectionName); + } - Assert.notNull(id, "Id must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(collectionName, 
"CollectionName must not be null!"); + Window doScroll(Query query, Class sourceClass, Class targetClass, String collectionName) { - MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entityClass); - String idKey = ID_FIELD; + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(sourceClass, "Entity type must not be null"); + Assert.notNull(targetClass, "Target type must not be null"); - if (persistentEntity != null) { - if (persistentEntity.getIdProperty() != null) { - idKey = persistentEntity.getIdProperty().getName(); - } + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); + ProjectingReadCallback callback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName); + int limit = query.isLimited() ? query.getLimit() + 1 : Integer.MAX_VALUE; + + if (query.hasKeyset()) { + + KeysetScrollQuery keysetPaginationQuery = ScrollUtils.createKeysetPaginationQuery(query, + operations.getIdPropertyName(sourceClass)); + + List result = doFind(collectionName, createDelegate(query), keysetPaginationQuery.query(), + keysetPaginationQuery.fields(), sourceClass, + new QueryCursorPreparer(query, keysetPaginationQuery.sort(), limit, 0, sourceClass), callback); + + return ScrollUtils.createWindow(query, result, sourceClass, operations); } - return doFindOne(collectionName, new Document(idKey, id), new Document(), entityClass); + List result = doFind(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), + sourceClass, new QueryCursorPreparer(query, query.getSortObject(), limit, query.getSkip(), sourceClass), + callback); + + return ScrollUtils.createWindow(result, query.getLimit(), OffsetScrollPosition.positionFunction(query.getSkip())); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findDistinct(org.springframework.data.mongodb.core.query.Query, 
java.lang.String, java.lang.Class, java.lang.Class) - */ + @Nullable @Override - public List findDistinct(Query query, String field, Class entityClass, Class resultClass) { - return findDistinct(query, field, determineCollectionName(entityClass), entityClass, resultClass); + public T findById(Object id, Class entityClass) { + return findById(id, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findDistinct(org.springframework.data.mongodb.core.query.Query, java.lang.String, java.lang.String, java.lang.Class, java.lang.Class) - */ + @Nullable @Override - @SuppressWarnings("unchecked") - public List findDistinct(Query query, String field, String collectionName, Class entityClass, - Class resultClass) { + public T findById(Object id, Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(field, "Field must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(resultClass, "ResultClass must not be null!"); + Assert.notNull(id, "Id must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); - MongoPersistentEntity entity = entityClass != Object.class ? 
getPersistentEntity(entityClass) : null; + String idKey = operations.getIdPropertyName(entityClass); - Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), entity); - String mappedFieldName = queryMapper.getMappedFields(new Document(field, 1), entity).keySet().iterator().next(); + return doFindOne(collectionName, CollectionPreparer.identity(), new Document(idKey, id), new Document(), + entityClass); + } + + @Override + public List findDistinct(Query query, String field, Class entityClass, Class resultClass) { + return findDistinct(query, field, getCollectionName(entityClass), entityClass, resultClass); + } + + @Override + @SuppressWarnings("unchecked") + public List findDistinct(Query query, String field, String collectionName, Class entityClass, + Class resultClass) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(field, "Field must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(resultClass, "ResultClass must not be null"); + + MongoPersistentEntity entity = entityClass != Object.class ? 
getPersistentEntity(entityClass) : null; + DistinctQueryContext distinctQueryContext = queryOperations.distinctQueryContext(query, field); + + Document mappedQuery = distinctQueryContext.getMappedQuery(entity); + String mappedFieldName = distinctQueryContext.getMappedFieldName(entity); + Class mongoDriverCompatibleType = distinctQueryContext.getDriverCompatibleClass(resultClass); - Class mongoDriverCompatibleType = getMongoDbFactory().getCodecFor(resultClass).map(Codec::getEncoderClass) - .orElse((Class) BsonValue.class); + MongoIterable result = execute(collectionName, (collection) -> { - MongoIterable result = execute((db) -> { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Executing findDistinct using query %s for field: %s in collection: %s", + serializeToJsonSafely(mappedQuery), field, collectionName)); + } - DistinctIterable iterable = db.getCollection(collectionName).distinct(mappedFieldName, mappedQuery, - mongoDriverCompatibleType); + collection = createDelegate(query).prepare(collection); - return query.getCollation().map(Collation::toMongoCollation).map(iterable::collation).orElse(iterable); + DistinctIterable iterable = collection.distinct(mappedFieldName, mappedQuery, mongoDriverCompatibleType); + distinctQueryContext.applyCollation(entityClass, iterable::collation); + + return iterable; }); if (resultClass == Object.class || mongoDriverCompatibleType != resultClass) { @@ -861,7 +995,7 @@ public List findDistinct(Query query, String field, String collectionName DefaultDbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory); result = result.map((source) -> converter.mapValueToTargetType(source, - getMostSpecificConversionTargetType(resultClass, entityClass, field), dbRefResolver)); + distinctQueryContext.getMostSpecificConversionTargetType(resultClass, entityClass), dbRefResolver)); } try { @@ -871,39 +1005,12 @@ public List findDistinct(Query query, String field, String collectionName } } - /** - * @param userType must 
not be {@literal null}. - * @param domainType must not be {@literal null}. - * @param field must not be {@literal null}. - * @return the most specific conversion target type depending on user preference and domain type property. - * @since 2.1 - */ - private static Class getMostSpecificConversionTargetType(Class userType, Class domainType, String field) { - - Class conversionTargetType = userType; - try { - - Class propertyType = PropertyPath.from(field, domainType).getLeafProperty().getLeafType(); - - // use the more specific type but favor UserType over property one - if (ClassUtils.isAssignable(userType, propertyType)) { - conversionTargetType = propertyType; - } - - } catch (PropertyReferenceException e) { - // just don't care about it as we default to Object.class anyway. - } - - return conversionTargetType; - } - @Override public GeoResults geoNear(NearQuery near, Class entityClass) { - return geoNear(near, entityClass, determineCollectionName(entityClass)); + return geoNear(near, entityClass, getCollectionName(entityClass)); } @Override - @SuppressWarnings("unchecked") public GeoResults geoNear(NearQuery near, Class domainType, String collectionName) { return geoNear(near, domainType, collectionName, domainType); } @@ -911,138 +1018,170 @@ public GeoResults geoNear(NearQuery near, Class domainType, String col public GeoResults geoNear(NearQuery near, Class domainType, String collectionName, Class returnType) { if (near == null) { - throw new InvalidDataAccessApiUsageException("NearQuery must not be null!"); + throw new InvalidDataAccessApiUsageException("NearQuery must not be null"); } if (domainType == null) { - throw new InvalidDataAccessApiUsageException("Entity class must not be null!"); + throw new InvalidDataAccessApiUsageException("Entity class must not be null"); } - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(returnType, "ReturnType must not be null!"); + Assert.notNull(collectionName, "CollectionName must 
not be null"); + Assert.notNull(returnType, "ReturnType must not be null"); - String collection = StringUtils.hasText(collectionName) ? collectionName : determineCollectionName(domainType); - Document nearDocument = near.toDocument(); + String collection = StringUtils.hasText(collectionName) ? collectionName : getCollectionName(domainType); + String distanceField = operations.nearQueryDistanceFieldName(domainType); - Document command = new Document("geoNear", collection); - command.putAll(nearDocument); + Builder optionsBuilder = AggregationOptions.builder().collation(near.getCollation()); - if (nearDocument.containsKey("query")) { - Document query = (Document) nearDocument.get("query"); - command.put("query", queryMapper.getMappedObject(query, getPersistentEntity(domainType))); + if (near.hasReadPreference()) { + optionsBuilder.readPreference(near.getReadPreference()); } - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing geoNear using: {} for class: {} in collection: {}", serializeToJsonSafely(command), - domainType, collectionName); + if (near.hasReadConcern()) { + optionsBuilder.readConcern(near.getReadConcern()); } - Document commandResult = executeCommand(command, this.readPreference); - List results = (List) commandResult.get("results"); - results = results == null ? Collections.emptyList() : results; + Aggregation $geoNear = TypedAggregation.newAggregation(domainType, Aggregation.geoNear(near, distanceField)) + .withOptions(optionsBuilder.build()); - DocumentCallback> callback = new GeoNearResultDocumentCallback( - new ProjectingReadCallback<>(mongoConverter, domainType, returnType, collectionName), near.getMetric()); - List> result = new ArrayList>(results.size()); + AggregationResults results = aggregate($geoNear, collection, Document.class); + EntityProjection projection = operations.introspectProjection(returnType, domainType); - int index = 0; - long elementsToSkip = near.getSkip() != null ? 
near.getSkip() : 0; + DocumentCallback> callback = new GeoNearResultDocumentCallback<>(distanceField, + new ProjectingReadCallback<>(mongoConverter, projection, collection), near.getMetric()); - for (Object element : results) { + List> result = new ArrayList<>(results.getMappedResults().size()); - /* - * As MongoDB currently (2.4.4) doesn't support the skipping of elements in near queries - * we skip the elements ourselves to avoid at least the document 2 object mapping overhead. - * - * @see MongoDB Jira: SERVER-3925 - */ - if (index >= elementsToSkip) { - result.add(callback.doWith((Document) element)); - } - index++; - } + BigDecimal aggregate = BigDecimal.ZERO; + for (Document element : results) { - if (elementsToSkip > 0) { - // as we skipped some elements we have to calculate the averageDistance ourselves: - return new GeoResults(result, near.getMetric()); + GeoResult geoResult = callback.doWith(element); + aggregate = aggregate.add(BigDecimal.valueOf(geoResult.getDistance().getValue())); + result.add(geoResult); } - GeoCommandStatistics stats = GeoCommandStatistics.from(commandResult); - return new GeoResults(result, new Distance(stats.getAverageDistance(), near.getMetric())); + Distance avgDistance = new Distance( + result.size() == 0 ? 
0 : aggregate.divide(new BigDecimal(result.size()), RoundingMode.HALF_UP).doubleValue(), + near.getMetric()); + + return new GeoResults<>(result, avgDistance); } @Nullable @Override - public T findAndModify(Query query, Update update, Class entityClass) { - return findAndModify(query, update, new FindAndModifyOptions(), entityClass, determineCollectionName(entityClass)); + public T findAndModify(Query query, UpdateDefinition update, Class entityClass) { + return findAndModify(query, update, new FindAndModifyOptions(), entityClass, getCollectionName(entityClass)); } @Nullable @Override - public T findAndModify(Query query, Update update, Class entityClass, String collectionName) { + public T findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName) { return findAndModify(query, update, new FindAndModifyOptions(), entityClass, collectionName); } @Nullable @Override - public T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass) { - return findAndModify(query, update, options, entityClass, determineCollectionName(entityClass)); + public T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass) { + return findAndModify(query, update, options, entityClass, getCollectionName(entityClass)); } @Nullable @Override - public T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, + public T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); - Assert.notNull(options, "Options must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + 
Assert.notNull(options, "Options must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); FindAndModifyOptions optionsToUse = FindAndModifyOptions.of(options); Optionals.ifAllPresent(query.getCollation(), optionsToUse.getCollation(), (l, r) -> { throw new IllegalArgumentException( - "Both Query and FindAndModifyOptions define a collation. Please provide the collation only via one of the two."); + "Both Query and FindAndModifyOptions define a collation; Please provide the collation only via one of the two"); }); - query.getCollation().ifPresent(optionsToUse::collation); + if (!options.getCollation().isPresent()) { + operations.forType(entityClass).getCollation(query).ifPresent(optionsToUse::collation); + } - return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(), + return doFindAndModify(createDelegate(query), collectionName, query.getQueryObject(), query.getFieldsObject(), getMappedSortObject(query, entityClass), entityClass, update, optionsToUse); } + @Override + public T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, + String collectionName, Class resultType) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null Use FindAndReplaceOptions#empty() instead"); + Assert.notNull(entityType, "EntityType must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(resultType, "ResultType must not be null Use Object.class instead"); + + Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 ore none"); + Assert.isTrue(query.getSkip() <= 0, "Query must not define skip"); + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityType); + QueryContext queryContext = 
queryOperations.createQueryContext(query); + + EntityProjection projection = operations.introspectProjection(resultType, entityType); + CollectionPreparerDelegate collectionPreparer = createDelegate(query); + Document mappedQuery = queryContext.getMappedQuery(entity); + Document mappedFields = queryContext.getMappedFields(entity, projection); + Document mappedSort = queryContext.getMappedSort(entity); + + replacement = maybeCallBeforeConvert(replacement, collectionName); + Document mappedReplacement = operations.forEntity(replacement).toMappedDocument(this.mongoConverter).getDocument(); + + maybeEmitEvent(new BeforeSaveEvent<>(replacement, mappedReplacement, collectionName)); + maybeCallBeforeSave(replacement, mappedReplacement, collectionName); + + T saved = doFindAndReplace(collectionPreparer, collectionName, mappedQuery, mappedFields, mappedSort, + queryContext.getCollation(entityType).orElse(null), entityType, mappedReplacement, options, projection); + + if (saved != null) { + maybeEmitEvent(new AfterSaveEvent<>(saved, mappedReplacement, collectionName)); + return maybeCallAfterSave(saved, mappedReplacement, collectionName); + } + + return saved; + } + // Find methods that take a Query to express the query and that return a single object that is also removed from the // collection in the database. 
@Nullable @Override public T findAndRemove(Query query, Class entityClass) { - return findAndRemove(query, entityClass, determineCollectionName(entityClass)); + return findAndRemove(query, entityClass, getCollectionName(entityClass)); } @Nullable @Override public T findAndRemove(Query query, Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); - return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(), - getMappedSortObject(query, entityClass), query.getCollation().orElse(null), entityClass); + return doFindAndRemove(createDelegate(query), collectionName, query.getQueryObject(), query.getFieldsObject(), + getMappedSortObject(query, entityClass), operations.forType(entityClass).getCollation(query).orElse(null), + entityClass); } @Override public long count(Query query, Class entityClass) { - Assert.notNull(entityClass, "Entity class must not be null!"); - return count(query, entityClass, determineCollectionName(entityClass)); + Assert.notNull(entityClass, "Entity class must not be null"); + return count(query, entityClass, getCollectionName(entityClass)); } @Override - public long count(final Query query, String collectionName) { + public long count(Query query, String collectionName) { return count(query, null, collectionName); } @@ -1050,63 +1189,125 @@ public long count(final Query query, String collectionName) { * (non-Javadoc) * @see org.springframework.data.mongodb.core.MongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) */ + @Override public long count(Query query, @Nullable Class entityClass, String collectionName) { - 
Assert.notNull(query, "Query must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(query, "Query must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + + CountContext countContext = queryOperations.countQueryContext(query); - Document document = queryMapper.getMappedObject(query.getQueryObject(), - Optional.ofNullable(entityClass).map(it -> mappingContext.getPersistentEntity(entityClass))); + CountOptions options = countContext.getCountOptions(entityClass); + Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); - return execute(collectionName, collection -> collection.count(document)); + CollectionPreparerDelegate readPreference = createDelegate(query); + return doCount(readPreference, collectionName, mappedQuery, options); + } + + protected long doCount(CollectionPreparer collectionPreparer, String collectionName, Document filter, + CountOptions options) { + + if (LOGGER.isDebugEnabled()) { + LOGGER + .debug(String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); + } + + return countExecution.countDocuments(collectionPreparer, collectionName, filter, options); } /* * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#insert(java.lang.Object) + * @see org.springframework.data.mongodb.core.MongoOperations#estimatedCount(java.lang.String) */ @Override - public void insert(Object objectToSave) { + public long estimatedCount(String collectionName) { + return doEstimatedCount(CollectionPreparerDelegate.of(this), collectionName, new EstimatedDocumentCountOptions()); + } - Assert.notNull(objectToSave, "ObjectToSave must not be null!"); + protected long doEstimatedCount(CollectionPreparer> collectionPreparer, + String collectionName, EstimatedDocumentCountOptions options) { + return execute(collectionName, + collection -> 
collectionPreparer.prepare(collection).estimatedDocumentCount(options)); + } + + @Override + public long exactCount(Query query, @Nullable Class entityClass, String collectionName) { - ensureNotIterable(objectToSave); - insert(objectToSave, determineEntityCollectionName(objectToSave)); + CountContext countContext = queryOperations.countQueryContext(query); + + CountOptions options = countContext.getCountOptions(entityClass); + Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); + + return doExactCount(createDelegate(query), collectionName, mappedQuery, options); + } + + protected long doExactCount(CollectionPreparer> collectionPreparer, String collectionName, + Document filter, CountOptions options) { + return execute(collectionName, collection -> collectionPreparer.prepare(collection) + .countDocuments(CountQuery.of(filter).toQueryDocument(), options)); + } + + protected boolean countCanBeEstimated(Document filter, CountOptions options) { + + return + // only empty filter for estimatedCount + filter.isEmpty() && + // no skip, no limit,... + isEmptyOptions(options) && + // transaction active? 
+ !MongoDatabaseUtils.isTransactionActive(getMongoDatabaseFactory()); + } + + private boolean isEmptyOptions(CountOptions options) { + return options.getLimit() <= 0 && options.getSkip() <= 0; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#insert(java.lang.Object, java.lang.String) - */ @Override - public void insert(Object objectToSave, String collectionName) { + public T insert(T objectToSave) { - Assert.notNull(objectToSave, "ObjectToSave must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(objectToSave, "ObjectToSave must not be null"); - ensureNotIterable(objectToSave); - doInsert(collectionName, objectToSave, this.mongoConverter); + ensureNotCollectionLike(objectToSave); + return insert(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); } - protected void ensureNotIterable(@Nullable Object o) { - if (null != o) { - if (o.getClass().isArray() || ITERABLE_CLASSES.contains(o.getClass().getName())) { - throw new IllegalArgumentException("Cannot use a collection here."); - } + @Override + @SuppressWarnings("unchecked") + public T insert(T objectToSave, String collectionName) { + + Assert.notNull(objectToSave, "ObjectToSave must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + + ensureNotCollectionLike(objectToSave); + return (T) doInsert(collectionName, objectToSave, this.mongoConverter); + } + + /** + * Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or + * {@link Iterator}. + * + * @param source can be {@literal null}. + * @since 3.2. + */ + protected void ensureNotCollectionLike(@Nullable Object source) { + + if (EntityOperations.isCollectionLike(source)) { + throw new IllegalArgumentException("Cannot use a collection here"); } } /** * Prepare the collection before any processing is done using it. 
This allows a convenient way to apply settings like - * slaveOk() etc. Can be overridden in sub-classes. + * withCodecRegistry() etc. Can be overridden in sub-classes. * * @param collection */ protected MongoCollection prepareCollection(MongoCollection collection) { - if (this.readPreference != null) { + if (this.readPreference != null && this.readPreference != collection.getReadPreference()) { return collection.withReadPreference(readPreference); } + return collection; } @@ -1116,7 +1317,7 @@ protected MongoCollection prepareCollection(MongoCollection * In case of using MongoDB Java driver version 3 the returned {@link WriteConcern} will be defaulted to * {@link WriteConcern#ACKNOWLEDGED} when {@link WriteResultChecking} is set to {@link WriteResultChecking#EXCEPTION}. * - * @param writeConcern any WriteConcern already configured or null + * @param mongoAction any MongoAction already configured or null * @return The prepared WriteConcern or null */ @Nullable @@ -1129,102 +1330,68 @@ protected WriteConcern prepareWriteConcern(MongoAction mongoAction) { @Nullable private WriteConcern potentiallyForceAcknowledgedWrite(@Nullable WriteConcern wc) { - if (ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking) - && MongoClientVersion.isMongo3Driver()) { + if (ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking)) { if (wc == null || wc.getWObject() == null - || (wc.getWObject() instanceof Number && ((Number) wc.getWObject()).intValue() < 1)) { + || (wc.getWObject() instanceof Number concern && concern.intValue() < 1)) { return WriteConcern.ACKNOWLEDGED; } } return wc; } - protected void doInsert(String collectionName, T objectToSave, MongoWriter writer) { + protected T doInsert(String collectionName, T objectToSave, MongoWriter writer) { - initializeVersionProperty(objectToSave); - maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)); - assertUpdateableIdIfNotSet(objectToSave); + BeforeConvertEvent event = 
new BeforeConvertEvent<>(objectToSave, collectionName); + T toConvert = maybeEmitEvent(event).getSource(); + toConvert = maybeCallBeforeConvert(toConvert, collectionName); - Document dbDoc = toDocument(objectToSave, writer); + AdaptibleEntity entity = operations.forEntity(toConvert, mongoConverter.getConversionService()); + entity.assertUpdateableIdIfNotSet(); - maybeEmitEvent(new BeforeSaveEvent(objectToSave, dbDoc, collectionName)); - Object id = insertDocument(collectionName, dbDoc, objectToSave.getClass()); + T initialized = entity.initializeVersionProperty(); + Document dbDoc = entity.toMappedDocument(writer).getDocument(); - populateIdIfNecessary(objectToSave, id); - maybeEmitEvent(new AfterSaveEvent(objectToSave, dbDoc, collectionName)); - } + maybeEmitEvent(new BeforeSaveEvent<>(initialized, dbDoc, collectionName)); + initialized = maybeCallBeforeSave(initialized, dbDoc, collectionName); + Object id = insertDocument(collectionName, dbDoc, initialized.getClass()); - /** - * @param objectToSave - * @param writer - * @return - */ - private Document toDocument(T objectToSave, MongoWriter writer) { - - if (objectToSave instanceof Document) { - return (Document) objectToSave; - } - - if (!(objectToSave instanceof String)) { - Document dbDoc = new Document(); - writer.write(objectToSave, dbDoc); - - if (dbDoc.containsKey(ID_FIELD) && dbDoc.get(ID_FIELD) == null) { - dbDoc.remove(ID_FIELD); - } - return dbDoc; - } else { - try { - return Document.parse((String) objectToSave); - } catch (JSONParseException e) { - throw new MappingException("Could not parse given String to save into a JSON document!", e); - } catch (org.bson.json.JsonParseException e) { - throw new MappingException("Could not parse given String to save into a JSON document!", e); - } - } - } - - private void initializeVersionProperty(Object entity) { - - MongoPersistentEntity persistentEntity = getPersistentEntity(entity.getClass()); - - if (persistentEntity != null && 
persistentEntity.hasVersionProperty()) { - - MongoPersistentProperty versionProperty = persistentEntity.getRequiredVersionProperty(); - - ConvertingPropertyAccessor accessor = new ConvertingPropertyAccessor(persistentEntity.getPropertyAccessor(entity), - mongoConverter.getConversionService()); - accessor.setProperty(versionProperty, 0); - } + T saved = populateIdIfNecessary(initialized, id); + maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName)); + return maybeCallAfterSave(saved, dbDoc, collectionName); } @Override - public void insert(Collection batchToSave, Class entityClass) { + @SuppressWarnings("unchecked") + public Collection insert(Collection batchToSave, Class entityClass) { - Assert.notNull(batchToSave, "BatchToSave must not be null!"); + Assert.notNull(batchToSave, "BatchToSave must not be null"); - doInsertBatch(determineCollectionName(entityClass), batchToSave, this.mongoConverter); + return (Collection) doInsertBatch(getCollectionName(entityClass), batchToSave, this.mongoConverter); } @Override - public void insert(Collection batchToSave, String collectionName) { + @SuppressWarnings("unchecked") + public Collection insert(Collection batchToSave, String collectionName) { - Assert.notNull(batchToSave, "BatchToSave must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(batchToSave, "BatchToSave must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); - doInsertBatch(collectionName, batchToSave, this.mongoConverter); + return (Collection) doInsertBatch(collectionName, batchToSave, this.mongoConverter); } @Override - public void insertAll(Collection objectsToSave) { + @SuppressWarnings("unchecked") + public Collection insertAll(Collection objectsToSave) { - Assert.notNull(objectsToSave, "ObjectsToSave must not be null!"); - doInsertAll(objectsToSave, this.mongoConverter); + Assert.notNull(objectsToSave, "ObjectsToSave must not be null"); + return (Collection) 
doInsertAll(objectsToSave, this.mongoConverter); } - protected void doInsertAll(Collection listToSave, MongoWriter writer) { + @SuppressWarnings("unchecked") + protected Collection doInsertAll(Collection listToSave, MongoWriter writer) { - Map> elementsByCollection = new HashMap>(); + Map> elementsByCollection = new HashMap<>(); + List savedObjects = new ArrayList<>(listToSave.size()); for (T element : listToSave) { @@ -1232,164 +1399,185 @@ protected void doInsertAll(Collection listToSave, MongoWriter entity = mappingContext.getRequiredPersistentEntity(element.getClass()); - - String collection = entity.getCollection(); - List collectionElements = elementsByCollection.get(collection); - - if (null == collectionElements) { - collectionElements = new ArrayList(); - elementsByCollection.put(collection, collectionElements); - } + String collection = getCollectionName(ClassUtils.getUserClass(element)); + List collectionElements = elementsByCollection.computeIfAbsent(collection, k -> new ArrayList<>()); collectionElements.add(element); } for (Map.Entry> entry : elementsByCollection.entrySet()) { - doInsertBatch(entry.getKey(), entry.getValue(), this.mongoConverter); + savedObjects.addAll((Collection) doInsertBatch(entry.getKey(), entry.getValue(), this.mongoConverter)); } + + return savedObjects; } - protected void doInsertBatch(String collectionName, Collection batchToSave, MongoWriter writer) { + protected Collection doInsertBatch(String collectionName, Collection batchToSave, + MongoWriter writer) { - Assert.notNull(writer, "MongoWriter must not be null!"); + Assert.notNull(writer, "MongoWriter must not be null"); - List documentList = new ArrayList(); - for (T o : batchToSave) { + List documentList = new ArrayList<>(batchToSave.size()); + List initializedBatchToSave = new ArrayList<>(batchToSave.size()); + for (T uninitialized : batchToSave) { - initializeVersionProperty(o); - maybeEmitEvent(new BeforeConvertEvent(o, collectionName)); + BeforeConvertEvent event = 
new BeforeConvertEvent<>(uninitialized, collectionName); + T toConvert = maybeEmitEvent(event).getSource(); + toConvert = maybeCallBeforeConvert(toConvert, collectionName); - Document document = toDocument(o, writer); + AdaptibleEntity entity = operations.forEntity(toConvert, mongoConverter.getConversionService()); + entity.assertUpdateableIdIfNotSet(); - maybeEmitEvent(new BeforeSaveEvent(o, document, collectionName)); - documentList.add(document); + T initialized = entity.initializeVersionProperty(); + Document document = entity.toMappedDocument(writer).getDocument(); + maybeEmitEvent(new BeforeSaveEvent<>(initialized, document, collectionName)); + initialized = maybeCallBeforeSave(initialized, document, collectionName); + + MappedDocument mappedDocument = queryOperations.createInsertContext(MappedDocument.of(document)) + .prepareId(uninitialized.getClass()); + + documentList.add(mappedDocument.getDocument()); + initializedBatchToSave.add(initialized); } List ids = insertDocumentList(collectionName, documentList); + List savedObjects = new ArrayList<>(documentList.size()); int i = 0; - for (T obj : batchToSave) { + for (T obj : initializedBatchToSave) { + if (i < ids.size()) { - populateIdIfNecessary(obj, ids.get(i)); - maybeEmitEvent(new AfterSaveEvent(obj, documentList.get(i), collectionName)); + T saved = populateIdIfNecessary(obj, ids.get(i)); + Document doc = documentList.get(i); + maybeEmitEvent(new AfterSaveEvent<>(saved, doc, collectionName)); + savedObjects.add(maybeCallAfterSave(saved, doc, collectionName)); + } else { + savedObjects.add(obj); } i++; } + + return savedObjects; } @Override - public void save(Object objectToSave) { + public T save(T objectToSave) { - Assert.notNull(objectToSave, "Object to save must not be null!"); - save(objectToSave, determineEntityCollectionName(objectToSave)); + Assert.notNull(objectToSave, "Object to save must not be null"); + return save(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); } 
@Override - public void save(Object objectToSave, String collectionName) { - - Assert.notNull(objectToSave, "Object to save must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + @SuppressWarnings("unchecked") + public T save(T objectToSave, String collectionName) { - MongoPersistentEntity entity = getPersistentEntity(objectToSave.getClass()); + Assert.notNull(objectToSave, "Object to save must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + ensureNotCollectionLike(objectToSave); - if (entity != null && entity.hasVersionProperty()) { - doSaveVersioned(objectToSave, entity, collectionName); - return; - } + AdaptibleEntity source = operations.forEntity(objectToSave, mongoConverter.getConversionService()); - doSave(collectionName, objectToSave, this.mongoConverter); + return source.isVersionedEntity() // + ? doSaveVersioned(source, collectionName) // + : (T) doSave(collectionName, objectToSave, this.mongoConverter); } - private T doSaveVersioned(T objectToSave, MongoPersistentEntity entity, String collectionName) { - - ConvertingPropertyAccessor convertingAccessor = new ConvertingPropertyAccessor( - entity.getPropertyAccessor(objectToSave), mongoConverter.getConversionService()); + @SuppressWarnings("unchecked") + private T doSaveVersioned(AdaptibleEntity source, String collectionName) { - MongoPersistentProperty property = entity.getRequiredVersionProperty(); - Number number = convertingAccessor.getProperty(property, Number.class); + if (source.isNew()) { + return (T) doInsert(collectionName, source.getBean(), this.mongoConverter); + } - if (number != null) { + // Create query for entity with the id and old version + Query query = source.getQueryForVersion(); - // Bump version number - convertingAccessor.setProperty(property, number.longValue() + 1); + // Bump version number + T toSave = source.incrementVersion(); - maybeEmitEvent(new BeforeConvertEvent(objectToSave, 
collectionName)); - assertUpdateableIdIfNotSet(objectToSave); + toSave = maybeEmitEvent(new BeforeConvertEvent(toSave, collectionName)).getSource(); + toSave = maybeCallBeforeConvert(toSave, collectionName); - Document document = new Document(); + if (source.getBean() != toSave) { + source = operations.forEntity(toSave, mongoConverter.getConversionService()); + } - this.mongoConverter.write(objectToSave, document); + source.assertUpdateableIdIfNotSet(); - maybeEmitEvent(new BeforeSaveEvent(objectToSave, document, collectionName)); - Update update = Update.fromDocument(document, ID_FIELD); + MappedDocument mapped = source.toMappedDocument(mongoConverter); - // Create query for entity with the id and old version - MongoPersistentProperty idProperty = entity.getRequiredIdProperty(); - Object id = entity.getIdentifierAccessor(objectToSave).getRequiredIdentifier(); - Query query = new Query(Criteria.where(idProperty.getName()).is(id).and(property.getName()).is(number)); + maybeEmitEvent(new BeforeSaveEvent<>(toSave, mapped.getDocument(), collectionName)); + toSave = maybeCallBeforeSave(toSave, mapped.getDocument(), collectionName); + UpdateDefinition update = mapped.updateWithoutId(); - UpdateResult result = doUpdate(collectionName, query, update, objectToSave.getClass(), false, false); + UpdateResult result = doUpdate(collectionName, query, update, toSave.getClass(), false, false); - if (result.getModifiedCount() == 0) { - throw new OptimisticLockingFailureException( - String.format("Cannot save entity %s with version %s to collection %s. 
Has it been modified meanwhile?", id, - number, collectionName)); - } - maybeEmitEvent(new AfterSaveEvent(objectToSave, document, collectionName)); + if (result.getModifiedCount() == 0) { - return objectToSave; + throw new OptimisticLockingFailureException( + String.format("Cannot save entity %s with version %s to collection %s; Has it been modified meanwhile", + source.getId(), source.getVersion(), collectionName)); } + maybeEmitEvent(new AfterSaveEvent<>(toSave, mapped.getDocument(), collectionName)); - doInsert(collectionName, objectToSave, this.mongoConverter); - return objectToSave; + return maybeCallAfterSave(toSave, mapped.getDocument(), collectionName); } protected T doSave(String collectionName, T objectToSave, MongoWriter writer) { - maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)); - assertUpdateableIdIfNotSet(objectToSave); + objectToSave = maybeEmitEvent(new BeforeConvertEvent<>(objectToSave, collectionName)).getSource(); + objectToSave = maybeCallBeforeConvert(objectToSave, collectionName); - Document dbDoc = toDocument(objectToSave, writer); + AdaptibleEntity entity = operations.forEntity(objectToSave, mongoConverter.getConversionService()); + entity.assertUpdateableIdIfNotSet(); - maybeEmitEvent(new BeforeSaveEvent(objectToSave, dbDoc, collectionName)); + MappedDocument mapped = entity.toMappedDocument(writer); + Document dbDoc = mapped.getDocument(); + + maybeEmitEvent(new BeforeSaveEvent<>(objectToSave, dbDoc, collectionName)); + objectToSave = maybeCallBeforeSave(objectToSave, dbDoc, collectionName); Object id = saveDocument(collectionName, dbDoc, objectToSave.getClass()); - populateIdIfNecessary(objectToSave, id); - maybeEmitEvent(new AfterSaveEvent(objectToSave, dbDoc, collectionName)); + T saved = populateIdIfNecessary(objectToSave, id); + maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName)); - return objectToSave; + return maybeCallAfterSave(saved, dbDoc, collectionName); } - protected Object 
insertDocument(final String collectionName, final Document document, final Class entityClass) { + @SuppressWarnings("ConstantConditions") + protected Object insertDocument(String collectionName, Document document, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Inserting Document containing fields: {} in collection: {}", document.keySet(), collectionName); + LOGGER.debug(String.format("Inserting Document containing fields: %s in collection: %s", document.keySet(), + collectionName)); } - return execute(collectionName, new CollectionCallback() { - public Object doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName, - entityClass, document, null); - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - if (writeConcernToUse == null) { - collection.insertOne(document); - } else { - collection.withWriteConcern(writeConcernToUse).insertOne(document); - } - return document.get(ID_FIELD); + MappedDocument mappedDocument = queryOperations.createInsertContext(MappedDocument.of(document)) + .prepareId(entityClass); + + return execute(collectionName, collection -> { + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName, entityClass, + mappedDocument.getDocument(), null); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + if (writeConcernToUse == null) { + collection.insertOne(mappedDocument.getDocument()); + } else { + collection.withWriteConcern(writeConcernToUse).insertOne(mappedDocument.getDocument()); } + + return operations.forEntity(mappedDocument.getDocument()).getId(); }); } - protected List insertDocumentList(final String collectionName, final List documents) { + protected List insertDocumentList(String collectionName, List documents) { if (documents.isEmpty()) { return Collections.emptyList(); } if (LOGGER.isDebugEnabled()) { - 
LOGGER.debug("Inserting list of Documents containing {} items", documents.size()); + LOGGER.debug(String.format("Inserting list of Documents containing %s items", documents.size())); } execute(collectionName, collection -> { @@ -1407,353 +1595,293 @@ protected List insertDocumentList(final String collectionName, final Lis return null; }); - return documents.stream()// - .map(it -> it.get(ID_FIELD))// - .collect(StreamUtils.toUnmodifiableList()); + return MappedDocument.toIds(documents); } - protected Object saveDocument(final String collectionName, final Document dbDoc, final Class entityClass) { + protected Object saveDocument(String collectionName, Document dbDoc, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Saving Document containing fields: {}", dbDoc.keySet()); + LOGGER.debug(String.format("Saving Document containing fields: %s", dbDoc.keySet())); } - return execute(collectionName, new CollectionCallback() { - public Object doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass, - dbDoc, null); - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + return execute(collectionName, collection -> { + + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass, + dbDoc, null); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + MappedDocument mapped = MappedDocument.of(dbDoc); + + MongoCollection collectionToUse = writeConcernToUse == null // + ? 
collection // + : collection.withWriteConcern(writeConcernToUse); + + if (!mapped.hasId()) { + + mapped = queryOperations.createInsertContext(mapped).prepareId(mappingContext.getPersistentEntity(entityClass)); + collectionToUse.insertOne(mapped.getDocument()); + } else { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + UpdateContext updateContext = queryOperations.replaceSingleContext(mapped, true); + Document replacement = updateContext.getMappedUpdate(entity); + Document filter = updateContext.getReplacementQuery(); + if (updateContext.requiresShardKey(filter, entity)) { - if (!dbDoc.containsKey(ID_FIELD)) { - if (writeConcernToUse == null) { - collection.insertOne(dbDoc); + if (entity.getShardKey().isImmutable()) { + filter = updateContext.applyShardKey(entity, filter, null); } else { - collection.withWriteConcern(writeConcernToUse).insertOne(dbDoc); + filter = updateContext.applyShardKey(entity, filter, + collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first()); } - } else if (writeConcernToUse == null) { - collection.replaceOne(Filters.eq(ID_FIELD, dbDoc.get(ID_FIELD)), dbDoc, new UpdateOptions().upsert(true)); - } else { - collection.withWriteConcern(writeConcernToUse).replaceOne(Filters.eq(ID_FIELD, dbDoc.get(ID_FIELD)), dbDoc, - new UpdateOptions().upsert(true)); } - return dbDoc.get(ID_FIELD); + + collectionToUse.replaceOne(filter, replacement, new com.mongodb.client.model.ReplaceOptions().upsert(true)); } + return mapped.getId(); }); } @Override - public UpdateResult upsert(Query query, Update update, Class entityClass) { - return doUpdate(determineCollectionName(entityClass), query, update, entityClass, true, false); + public UpdateResult upsert(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, true, false); } @Override - public UpdateResult upsert(Query query, Update update, String 
collectionName) { + public UpdateResult upsert(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, true, false); } @Override - public UpdateResult upsert(Query query, Update update, Class entityClass, String collectionName) { + public UpdateResult upsert(Query query, UpdateDefinition update, Class entityClass, String collectionName) { - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(entityClass, "EntityClass must not be null"); return doUpdate(collectionName, query, update, entityClass, true, false); } @Override - public UpdateResult updateFirst(Query query, Update update, Class entityClass) { - return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, false); + public UpdateResult updateFirst(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, false); } @Override - public UpdateResult updateFirst(final Query query, final Update update, final String collectionName) { + public UpdateResult updateFirst(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, false, false); } @Override - public UpdateResult updateFirst(Query query, Update update, Class entityClass, String collectionName) { + public UpdateResult updateFirst(Query query, UpdateDefinition update, Class entityClass, String collectionName) { - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(entityClass, "EntityClass must not be null"); return doUpdate(collectionName, query, update, entityClass, false, false); } @Override - public UpdateResult updateMulti(Query query, Update update, Class entityClass) { - return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, true); + public UpdateResult updateMulti(Query query, UpdateDefinition update, Class entityClass) { + return 
doUpdate(getCollectionName(entityClass), query, update, entityClass, false, true); } @Override - public UpdateResult updateMulti(final Query query, final Update update, String collectionName) { + public UpdateResult updateMulti(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, false, true); } @Override - public UpdateResult updateMulti(final Query query, final Update update, Class entityClass, String collectionName) { + public UpdateResult updateMulti(Query query, UpdateDefinition update, Class entityClass, String collectionName) { - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(entityClass, "EntityClass must not be null"); return doUpdate(collectionName, query, update, entityClass, false, true); } - protected UpdateResult doUpdate(final String collectionName, final Query query, final Update update, - @Nullable final Class entityClass, final boolean upsert, final boolean multi) { + @SuppressWarnings("ConstantConditions") + protected UpdateResult doUpdate(String collectionName, Query query, UpdateDefinition update, + @Nullable Class entityClass, boolean upsert, boolean multi) { - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); - return execute(collectionName, new CollectionCallback() { - public UpdateResult doInCollection(MongoCollection collection) - throws MongoException, DataAccessException { + MongoPersistentEntity entity = entityClass == null ? null : getPersistentEntity(entityClass); - MongoPersistentEntity entity = entityClass == null ? null : getPersistentEntity(entityClass); + UpdateContext updateContext = multi ? 
queryOperations.updateContext(update, query, upsert) + : queryOperations.updateSingleContext(update, query, upsert); + updateContext.increaseVersionForUpdateIfNecessary(entity); - increaseVersionForUpdateIfNecessary(entity, update); + Document queryObj = updateContext.getMappedQuery(entity); + UpdateOptions opts = updateContext.getUpdateOptions(entityClass, query); - UpdateOptions opts = new UpdateOptions(); - opts.upsert(upsert); + if (updateContext.isAggregationUpdate()) { - Document queryObj = new Document(); + List pipeline = updateContext.getUpdatePipeline(entityClass); + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass, + update.getUpdateObject(), queryObj); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - if (query != null) { + return execute(collectionName, collection -> { - queryObj.putAll(queryMapper.getMappedObject(query.getQueryObject(), entity)); - query.getCollation().map(Collation::toMongoCollation).ifPresent(opts::collation); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(pipeline), collectionName)); } - Document updateObj = update == null ? new Document() - : updateMapper.getMappedObject(update.getUpdateObject(), entity); + collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection; - if (multi && update.isIsolated() && !queryObj.containsKey("$isolated")) { - queryObj.put("$isolated", 1); - } + return multi ? 
collection.updateMany(queryObj, pipeline, opts) : collection.updateOne(queryObj, pipeline, opts); + }); + } - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Calling update using query: {} and update: {} in collection: {}", - serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName); - } + Document updateObj = updateContext.getMappedUpdate(entity); + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass, + updateObj, queryObj); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, - entityClass, updateObj, queryObj); - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + return execute(collectionName, collection -> { - collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection; + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName)); + } - if (!UpdateMapper.isUpdateObject(updateObj)) { - return collection.replaceOne(queryObj, updateObj, opts); - } else { - if (multi) { - return collection.updateMany(queryObj, updateObj, opts); + collection = writeConcernToUse != null ? 
collection.withWriteConcern(writeConcernToUse) : collection; + + if (!UpdateMapper.isUpdateObject(updateObj)) { + + Document filter = new Document(queryObj); + + if (updateContext.requiresShardKey(filter, entity)) { + + if (entity.getShardKey().isImmutable()) { + filter = updateContext.applyShardKey(entity, filter, null); } else { - return collection.updateOne(queryObj, updateObj, opts); + filter = updateContext.applyShardKey(entity, filter, + collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first()); } } - } - }); - } - private void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity persistentEntity, Update update) { - - if (persistentEntity != null && persistentEntity.hasVersionProperty()) { - String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName(); - if (!update.modifies(versionFieldName)) { - update.inc(versionFieldName, 1L); + com.mongodb.client.model.ReplaceOptions replaceOptions = updateContext.getReplaceOptions(entityClass); + return collection.replaceOne(filter, updateObj, replaceOptions); + } else { + return multi ? 
collection.updateMany(queryObj, updateObj, opts) + : collection.updateOne(queryObj, updateObj, opts); } - } + }); } @Override public DeleteResult remove(Object object) { - Assert.notNull(object, "Object must not be null!"); + Assert.notNull(object, "Object must not be null"); - return remove(getIdQueryFor(object), object.getClass()); + return remove(object, getCollectionName(object.getClass())); } @Override public DeleteResult remove(Object object, String collectionName) { - Assert.notNull(object, "Object must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(object, "Object must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - return doRemove(collectionName, getIdQueryFor(object), object.getClass()); - } + Query query = operations.forEntity(object).getRemoveByQuery(); - /** - * Returns {@link Entry} containing the field name of the id property as {@link Entry#getKey()} and the {@link Id}s - * property value as its {@link Entry#getValue()}. - * - * @param object - * @return - */ - private Pair extractIdPropertyAndValue(Object object) { - - Assert.notNull(object, "Id cannot be extracted from 'null'."); - - Class objectType = object.getClass(); - - if (object instanceof Document) { - return Pair.of(ID_FIELD, ((Document) object).get(ID_FIELD)); - } - - MongoPersistentEntity entity = mappingContext.getPersistentEntity(objectType); - - if (entity != null && entity.hasIdProperty()) { - - MongoPersistentProperty idProperty = entity.getIdProperty(); - return Pair.of(idProperty.getFieldName(), entity.getPropertyAccessor(object).getProperty(idProperty)); - } - - throw new MappingException("No id property found for object of type " + objectType); - } - - /** - * Returns a {@link Query} for the given entity by its id. - * - * @param object must not be {@literal null}. 
- * @return - */ - private Query getIdQueryFor(Object object) { - - Pair id = extractIdPropertyAndValue(object); - return new Query(where(id.getFirst()).is(id.getSecond())); - } - - /** - * Returns a {@link Query} for the given entities by their ids. - * - * @param objects must not be {@literal null} or {@literal empty}. - * @return - */ - private Query getIdInQueryFor(Collection objects) { - - Assert.notEmpty(objects, "Cannot create Query for empty collection."); - - Iterator it = objects.iterator(); - Pair pair = extractIdPropertyAndValue(it.next()); - - ArrayList ids = new ArrayList(objects.size()); - ids.add(pair.getSecond()); - - while (it.hasNext()) { - ids.add(extractIdPropertyAndValue(it.next()).getSecond()); - } - - return new Query(where(pair.getFirst()).in(ids)); - } - - private void assertUpdateableIdIfNotSet(Object value) { - - MongoPersistentEntity entity = mappingContext.getPersistentEntity(value.getClass()); - - if (entity != null && entity.hasIdProperty()) { - - MongoPersistentProperty property = entity.getRequiredIdProperty(); - Object propertyValue = entity.getPropertyAccessor(value).getProperty(property); - - if (propertyValue != null) { - return; - } - - if (!MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(property.getType())) { - throw new InvalidDataAccessApiUsageException( - String.format("Cannot autogenerate id of type %s for entity of type %s!", property.getType().getName(), - value.getClass().getName())); - } - } + return doRemove(collectionName, query, object.getClass(), false); } @Override public DeleteResult remove(Query query, String collectionName) { - return doRemove(collectionName, query, null); + return doRemove(collectionName, query, null, true); } @Override public DeleteResult remove(Query query, Class entityClass) { - return remove(query, entityClass, determineCollectionName(entityClass)); + return remove(query, entityClass, getCollectionName(entityClass)); } @Override public DeleteResult remove(Query query, Class entityClass, 
String collectionName) { - Assert.notNull(entityClass, "EntityClass must not be null!"); - return doRemove(collectionName, query, entityClass); + Assert.notNull(entityClass, "EntityClass must not be null"); + return doRemove(collectionName, query, entityClass, true); } - protected DeleteResult doRemove(final String collectionName, final Query query, - @Nullable final Class entityClass) { + @SuppressWarnings("ConstantConditions") + protected DeleteResult doRemove(String collectionName, Query query, @Nullable Class entityClass, + boolean multi) { - Assert.notNull(query, "Query must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(query, "Query must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - final MongoPersistentEntity entity = getPersistentEntity(entityClass); - final Document queryObject = queryMapper.getMappedObject(query.getQueryObject(), entity); + MongoPersistentEntity entity = getPersistentEntity(entityClass); - return execute(collectionName, new CollectionCallback() { + DeleteContext deleteContext = multi ? 
queryOperations.deleteQueryContext(query) + : queryOperations.deleteSingleContext(query); + Document queryObject = deleteContext.getMappedQuery(entity); + DeleteOptions options = deleteContext.getDeleteOptions(entityClass); - public DeleteResult doInCollection(MongoCollection collection) - throws MongoException, DataAccessException { + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass, + null, queryObject); - maybeEmitEvent(new BeforeDeleteEvent(queryObject, entityClass, collectionName)); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - Document removeQuery = queryObject; + return execute(collectionName, collection -> { - DeleteOptions options = new DeleteOptions(); - query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation); + maybeEmitEvent(new BeforeDeleteEvent<>(queryObject, entityClass, collectionName)); - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, - entityClass, null, queryObject); + Document removeQuery = queryObject; - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Remove using query: %s in collection: %s.", serializeToJsonSafely(removeQuery), + collectionName)); + } - DeleteResult dr = null; - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Remove using query: {} in collection: {}.", - new Object[] { serializeToJsonSafely(removeQuery), collectionName }); - } + if (query.getLimit() > 0 || query.getSkip() > 0) { - if (query.getLimit() > 0 || query.getSkip() > 0) { + MongoCursor cursor = new QueryCursorPreparer(query, entityClass) + .prepare(collection.find(removeQuery).projection(MappedDocument.getIdOnlyProjection())) // + .iterator(); - MongoCursor cursor = new QueryCursorPreparer(query, entityClass) - .prepare(collection.find(removeQuery).projection(new Document(ID_FIELD, 1))).iterator(); + Set ids = new 
LinkedHashSet<>(); + while (cursor.hasNext()) { + ids.add(MappedDocument.of(cursor.next()).getId()); + } - Set ids = new LinkedHashSet<>(); - while (cursor.hasNext()) { - ids.add(cursor.next().get(ID_FIELD)); - } + removeQuery = MappedDocument.getIdIn(ids); + } - removeQuery = new Document(ID_FIELD, new Document("$in", ids)); - } + MongoCollection collectionToUse = writeConcernToUse != null + ? collection.withWriteConcern(writeConcernToUse) + : collection; - if (writeConcernToUse == null) { - dr = collection.deleteMany(removeQuery, options); - } else { - dr = collection.withWriteConcern(writeConcernToUse).deleteMany(removeQuery, options); - } + DeleteResult result = multi ? collectionToUse.deleteMany(removeQuery, options) + : collectionToUse.deleteOne(removeQuery, options); - maybeEmitEvent(new AfterDeleteEvent(queryObject, entityClass, collectionName)); + maybeEmitEvent(new AfterDeleteEvent<>(queryObject, entityClass, collectionName)); - return dr; - } + return result; }); } @Override public List findAll(Class entityClass) { - return findAll(entityClass, determineCollectionName(entityClass)); + return findAll(entityClass, getCollectionName(entityClass)); } @Override public List findAll(Class entityClass, String collectionName) { - return executeFindMultiInternal(new FindCallback(new Document(), new Document()), null, - new ReadDocumentCallback(mongoConverter, entityClass, collectionName), collectionName); + return executeFindMultiInternal( + new FindCallback(CollectionPreparer.identity(), new Document(), new Document(), + operations.forType(entityClass).getCollation().map(Collation::toMongoCollation).orElse(null)), + CursorPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), + collectionName); } @Override public MapReduceResults mapReduce(String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass) { - return mapReduce(new Query(), inputCollectionName, mapFunction, reduceFunction, - new 
MapReduceOptions().outputTypeInline(), entityClass); + return mapReduce(new Query(), inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions(), + entityClass); } @Override @@ -1765,46 +1893,71 @@ public MapReduceResults mapReduce(String inputCollectionName, String mapF @Override public MapReduceResults mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass) { - return mapReduce(query, inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions().outputTypeInline(), - entityClass); + return mapReduce(query, inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions(), entityClass); } @Override public MapReduceResults mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction, @Nullable MapReduceOptions mapReduceOptions, Class entityClass) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(inputCollectionName, "InputCollectionName must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(reduceFunction, "ReduceFunction must not be null!"); - Assert.notNull(mapFunction, "MapFunction must not be null!"); + return new MapReduceResults<>( + mapReduce(query, entityClass, inputCollectionName, mapFunction, reduceFunction, mapReduceOptions, entityClass), + new Document()); + } + + /** + * @param query + * @param domainType + * @param inputCollectionName + * @param mapFunction + * @param reduceFunction + * @param mapReduceOptions + * @param resultType + * @return + * @since 2.1 + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. 
+ */ + @Deprecated + public List mapReduce(Query query, Class domainType, String inputCollectionName, String mapFunction, + String reduceFunction, @Nullable MapReduceOptions mapReduceOptions, Class resultType) { + + Assert.notNull(domainType, "Domain type must not be null"); + Assert.notNull(inputCollectionName, "Input collection name must not be null"); + Assert.notNull(resultType, "Result type must not be null"); + Assert.notNull(mapFunction, "Map function must not be null"); + Assert.notNull(reduceFunction, "Reduce function must not be null"); String mapFunc = replaceWithResourceIfNecessary(mapFunction); String reduceFunc = replaceWithResourceIfNecessary(reduceFunction); - MongoCollection inputCollection = getCollection(inputCollectionName); + CollectionPreparerDelegate readPreference = createDelegate(query); + MongoCollection inputCollection = readPreference + .prepare(getAndPrepareCollection(doGetDatabase(), inputCollectionName)); // MapReduceOp - MapReduceIterable result = inputCollection.mapReduce(mapFunc, reduceFunc); - if (query != null && result != null) { + MapReduceIterable mapReduce = inputCollection.mapReduce(mapFunc, reduceFunc, Document.class); - if (query.getLimit() > 0 && mapReduceOptions.getLimit() == null) { - result = result.limit(query.getLimit()); - } - if (query.getMeta() != null && query.getMeta().getMaxTimeMsec() != null) { - result = result.maxTime(query.getMeta().getMaxTimeMsec(), TimeUnit.MILLISECONDS); - } - result = result.sort(getMappedSortObject(query, entityClass)); + if (query.getLimit() > 0 && mapReduceOptions != null && mapReduceOptions.getLimit() == null) { + mapReduce = mapReduce.limit(query.getLimit()); + } + if (query.getMeta().hasMaxTime()) { + mapReduce = mapReduce.maxTime(query.getMeta().getMaxTimeMsec(), TimeUnit.MILLISECONDS); + } - result = result.filter(queryMapper.getMappedObject(query.getQueryObject(), Optional.empty())); + Document mappedSort = getMappedSortObject(query, domainType); + if (mappedSort != null && 
!mappedSort.isEmpty()) { + mapReduce = mapReduce.sort(mappedSort); } + mapReduce = mapReduce + .filter(queryMapper.getMappedObject(query.getQueryObject(), mappingContext.getPersistentEntity(domainType))); + Optional collation = query.getCollation(); if (mapReduceOptions != null) { Optionals.ifAllPresent(collation, mapReduceOptions.getCollation(), (l, r) -> { throw new IllegalArgumentException( - "Both Query and MapReduceOptions define a collation. Please provide the collation only via one of the two."); + "Both Query and MapReduceOptions define a collation; Please provide the collation only via one of the two."); }); if (mapReduceOptions.getCollation().isPresent()) { @@ -1812,198 +1965,163 @@ public MapReduceResults mapReduce(Query query, String inputCollectionName } if (!CollectionUtils.isEmpty(mapReduceOptions.getScopeVariables())) { - result = result.scope(new Document(mapReduceOptions.getScopeVariables())); + mapReduce = mapReduce.scope(new Document(mapReduceOptions.getScopeVariables())); } - if (mapReduceOptions.getLimit() != null && mapReduceOptions.getLimit().intValue() > 0) { - result = result.limit(mapReduceOptions.getLimit()); + + if (mapReduceOptions.getLimit() != null && mapReduceOptions.getLimit() > 0) { + mapReduce = mapReduce.limit(mapReduceOptions.getLimit()); } + if (mapReduceOptions.getFinalizeFunction().filter(StringUtils::hasText).isPresent()) { - result = result.finalizeFunction(mapReduceOptions.getFinalizeFunction().get()); + mapReduce = mapReduce.finalizeFunction(mapReduceOptions.getFinalizeFunction().get()); } + if (mapReduceOptions.getJavaScriptMode() != null) { - result = result.jsMode(mapReduceOptions.getJavaScriptMode()); + mapReduce = mapReduce.jsMode(mapReduceOptions.getJavaScriptMode()); } + if (mapReduceOptions.getOutputSharded().isPresent()) { - result = result.sharded(mapReduceOptions.getOutputSharded().get()); + MongoCompatibilityAdapter.mapReduceIterableAdapter(mapReduce) + .sharded(mapReduceOptions.getOutputSharded().get()); } - 
} - - result = collation.map(Collation::toMongoCollation).map(result::collation).orElse(result); - - List mappedResults = new ArrayList(); - DocumentCallback callback = new ReadDocumentCallback(mongoConverter, entityClass, inputCollectionName); - - for (Document document : result) { - mappedResults.add(callback.doWith(document)); - } - - return new MapReduceResults(mappedResults, new Document()); - } - - public GroupByResults group(String inputCollectionName, GroupBy groupBy, Class entityClass) { - return group(null, inputCollectionName, groupBy, entityClass); - } - - public GroupByResults group(@Nullable Criteria criteria, String inputCollectionName, GroupBy groupBy, - Class entityClass) { - Document document = groupBy.getGroupByObject(); - document.put("ns", inputCollectionName); + if (StringUtils.hasText(mapReduceOptions.getOutputCollection()) && !mapReduceOptions.usesInlineOutput()) { - if (criteria == null) { - document.put("cond", null); - } else { - document.put("cond", queryMapper.getMappedObject(criteria.getCriteriaObject(), Optional.empty())); - } - // If initial document was a JavaScript string, potentially loaded by Spring's Resource abstraction, load it and - // convert to Document + mapReduce = mapReduce.collectionName(mapReduceOptions.getOutputCollection()) + .action(mapReduceOptions.getMapReduceAction()); - if (document.containsKey("initial")) { - Object initialObj = document.get("initial"); - if (initialObj instanceof String) { - String initialAsString = replaceWithResourceIfNecessary((String) initialObj); - document.put("initial", Document.parse(initialAsString)); + if (mapReduceOptions.getOutputDatabase().isPresent()) { + mapReduce = mapReduce.databaseName(mapReduceOptions.getOutputDatabase().get()); + } } } - if (document.containsKey("$reduce")) { - document.put("$reduce", replaceWithResourceIfNecessary(document.get("$reduce").toString())); - } - if (document.containsKey("$keyf")) { - document.put("$keyf", 
replaceWithResourceIfNecessary(document.get("$keyf").toString())); + if (!collation.isPresent()) { + collation = operations.forType(domainType).getCollation(); } - if (document.containsKey("finalize")) { - document.put("finalize", replaceWithResourceIfNecessary(document.get("finalize").toString())); - } - - Document commandObject = new Document("group", document); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing Group with Document [{}]", serializeToJsonSafely(commandObject)); - } + mapReduce = collation.map(Collation::toMongoCollation).map(mapReduce::collation).orElse(mapReduce); - Document commandResult = executeCommand(commandObject); + List mappedResults = new ArrayList<>(); + DocumentCallback callback = new ReadDocumentCallback<>(mongoConverter, resultType, inputCollectionName); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Group command result = [{}]", commandResult); - } - - @SuppressWarnings("unchecked") - Iterable resultSet = (Iterable) commandResult.get("retval"); - List mappedResults = new ArrayList(); - DocumentCallback callback = new ReadDocumentCallback(mongoConverter, entityClass, inputCollectionName); - - for (Document resultDocument : resultSet) { - mappedResults.add(callback.doWith(resultDocument)); + for (Document document : mapReduce) { + mappedResults.add(callback.doWith(document)); } - return new GroupByResults(mappedResults, commandResult); + return mappedResults; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.Class) - */ @Override public AggregationResults aggregate(TypedAggregation aggregation, Class outputType) { - return aggregate(aggregation, determineCollectionName(aggregation.getInputType()), outputType); + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregate(aggregation, getCollectionName(aggregation.getInputType()), outputType); } - /* (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.MongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.String, java.lang.Class) - */ @Override public AggregationResults aggregate(TypedAggregation aggregation, String inputCollectionName, Class outputType) { - - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - - AggregationOperationContext context = new TypeBasedAggregationOperationContext(aggregation.getInputType(), - mappingContext, queryMapper); - return aggregate(aggregation, inputCollectionName, outputType, context); + return aggregate(aggregation, inputCollectionName, outputType, null); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.Class, java.lang.Class) - */ @Override public AggregationResults aggregate(Aggregation aggregation, Class inputType, Class outputType) { - return aggregate(aggregation, determineCollectionName(inputType), outputType, - new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper)); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregate(aggregation, getCollectionName(inputType), outputType, + queryOperations.createAggregation(aggregation, inputType).getAggregationOperationContext()); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.String, java.lang.Class) - */ @Override public AggregationResults aggregate(Aggregation aggregation, String collectionName, Class outputType) { return aggregate(aggregation, collectionName, outputType, null); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregateStream(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.String, java.lang.Class) - */ @Override - public CloseableIterator 
aggregateStream(TypedAggregation aggregation, String inputCollectionName, + public Stream aggregateStream(TypedAggregation aggregation, String inputCollectionName, Class outputType) { - - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - - AggregationOperationContext context = new TypeBasedAggregationOperationContext(aggregation.getInputType(), - mappingContext, queryMapper); - return aggregateStream(aggregation, inputCollectionName, outputType, context); + return aggregateStream(aggregation, inputCollectionName, outputType, null); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregateStream(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.Class) - */ @Override - public CloseableIterator aggregateStream(TypedAggregation aggregation, Class outputType) { - return aggregateStream(aggregation, determineCollectionName(aggregation.getInputType()), outputType); + public Stream aggregateStream(TypedAggregation aggregation, Class outputType) { + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregateStream(aggregation, getCollectionName(aggregation.getInputType()), outputType); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregateStream(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.Class, java.lang.Class) - */ @Override - public CloseableIterator aggregateStream(Aggregation aggregation, Class inputType, Class outputType) { + public Stream aggregateStream(Aggregation aggregation, Class inputType, Class outputType) { - return aggregateStream(aggregation, determineCollectionName(inputType), outputType, - new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper)); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregateStream(aggregation, getCollectionName(inputType), outputType, + queryOperations.createAggregation(aggregation, 
inputType).getAggregationOperationContext()); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregateStream(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.String, java.lang.Class) - */ @Override - public CloseableIterator aggregateStream(Aggregation aggregation, String collectionName, Class outputType) { + public Stream aggregateStream(Aggregation aggregation, String collectionName, Class outputType) { return aggregateStream(aggregation, collectionName, outputType, null); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ @Override + @SuppressWarnings("unchecked") public List findAllAndRemove(Query query, String collectionName) { return (List) findAllAndRemove(query, Object.class, collectionName); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override public List findAllAndRemove(Query query, Class entityClass) { - return findAllAndRemove(query, entityClass, determineCollectionName(entityClass)); + return findAllAndRemove(query, entityClass, getCollectionName(entityClass)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override public List findAllAndRemove(Query query, Class entityClass, String collectionName) { return doFindAndDelete(collectionName, query, entityClass); } + @Override + public UpdateResult replace(Query query, T replacement, ReplaceOptions options, String collectionName) { + + Assert.notNull(replacement, "Replacement must not be null"); + return replace(query, (Class) ClassUtils.getUserClass(replacement), replacement, options, collectionName); + } + + protected UpdateResult replace(Query 
query, Class entityType, T replacement, ReplaceOptions options, + String collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null Use ReplaceOptions#none() instead"); + Assert.notNull(entityType, "EntityType must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + + Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 ore none"); + Assert.isTrue(query.getSkip() <= 0, "Query must not define skip"); + + UpdateContext updateContext = queryOperations.replaceSingleContext(query, + operations.forEntity(replacement).toMappedDocument(this.mongoConverter), options.isUpsert()); + + replacement = maybeCallBeforeConvert(replacement, collectionName); + Document mappedReplacement = updateContext.getMappedUpdate(mappingContext.getPersistentEntity(entityType)); + maybeEmitEvent(new BeforeSaveEvent<>(replacement, mappedReplacement, collectionName)); + replacement = maybeCallBeforeSave(replacement, mappedReplacement, collectionName); + + MongoAction action = new MongoAction(writeConcern, MongoActionOperation.REPLACE, collectionName, entityType, + mappedReplacement, updateContext.getQueryObject()); + + UpdateResult result = doReplace(options, entityType, collectionName, updateContext, + createCollectionPreparer(query, action), mappedReplacement); + + if (result.wasAcknowledged()) { + + maybeEmitEvent(new AfterSaveEvent<>(replacement, mappedReplacement, collectionName)); + maybeCallAfterSave(replacement, mappedReplacement, collectionName); + } + + return result; + } + /** * Retrieve and remove all documents matching the given {@code query} by calling {@link #find(Query, Class, String)} * and {@link #remove(Query, Class, String)}, whereas the {@link Query} for {@link #remove(Query, Class, String)} is @@ -2019,7 +2137,13 @@ protected List doFindAndDelete(String collectionName, Query query, Class< 
List result = find(query, entityClass, collectionName); if (!CollectionUtils.isEmpty(result)) { - remove(getIdInQueryFor(result), entityClass, collectionName); + + Query byIdInQuery = operations.getByIdInQuery(result); + if (query.hasReadPreference()) { + byIdInQuery.withReadPreference(query.getReadPreference()); + } + + remove(byIdInQuery, entityClass, collectionName); } return result; @@ -2028,28 +2152,34 @@ protected List doFindAndDelete(String collectionName, Query query, Class< protected AggregationResults aggregate(Aggregation aggregation, String collectionName, Class outputType, @Nullable AggregationOperationContext context) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - Assert.notNull(outputType, "Output type must not be null!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + Assert.notNull(outputType, "Output type must not be null"); - AggregationOperationContext rootContext = context == null ? 
Aggregation.DEFAULT_CONTEXT : context; + return doAggregate(aggregation, collectionName, outputType, + queryOperations.createAggregation(aggregation, context)); + } - return doAggregate(aggregation, collectionName, outputType, rootContext); + private AggregationResults doAggregate(Aggregation aggregation, String collectionName, Class outputType, + AggregationDefinition context) { + return doAggregate(aggregation, collectionName, outputType, context.getAggregationOperationContext()); } @SuppressWarnings("ConstantConditions") protected AggregationResults doAggregate(Aggregation aggregation, String collectionName, Class outputType, AggregationOperationContext context) { - DocumentCallback callback = new UnwrapAndReadDocumentCallback<>(mongoConverter, outputType, collectionName); + ReadDocumentCallback callback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); AggregationOptions options = aggregation.getOptions(); + AggregationUtil aggregationUtil = new AggregationUtil(queryMapper, mappingContext); + if (options.isExplain()) { - Document command = aggregation.toDocument(collectionName, context); + Document command = aggregationUtil.createCommand(collectionName, aggregation, context); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command)); + LOGGER.debug(String.format("Executing aggregation: %s", serializeToJsonSafely(command))); } Document commandResult = executeCommand(command); @@ -2057,24 +2187,56 @@ protected AggregationResults doAggregate(Aggregation aggregation, String .map(callback::doWith).collect(Collectors.toList()), commandResult); } - List pipeline = aggregation.toPipeline(context); + List pipeline = aggregationUtil.createPipeline(aggregation, context); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName); + LOGGER.debug( + String.format("Executing aggregation: %s in collection %s", 
serializeToJsonSafely(pipeline), collectionName)); } return execute(collectionName, collection -> { List rawResult = new ArrayList<>(); + CollectionPreparerDelegate delegate = CollectionPreparerDelegate.of(options); + Class domainType = aggregation instanceof TypedAggregation ? ((TypedAggregation) aggregation).getInputType() + : null; + + Optional collation = Optionals.firstNonEmpty(options::getCollation, + () -> operations.forType(domainType) // + .getCollation()); - AggregateIterable aggregateIterable = collection.aggregate(pipeline, Document.class) // - .collation(options.getCollation().map(Collation::toMongoCollation).orElse(null)) // - .allowDiskUse(options.isAllowDiskUse()); + AggregateIterable aggregateIterable = delegate.prepare(collection).aggregate(pipeline, Document.class) // + .collation(collation.map(Collation::toMongoCollation).orElse(null)); + + if (options.isAllowDiskUseSet()) { + aggregateIterable = aggregateIterable.allowDiskUse(options.isAllowDiskUse()); + } if (options.getCursorBatchSize() != null) { aggregateIterable = aggregateIterable.batchSize(options.getCursorBatchSize()); } + options.getComment().ifPresent(aggregateIterable::comment); + HintFunction hintFunction = options.getHintObject().map(HintFunction::from).orElseGet(HintFunction::empty); + if (hintFunction.isPresent()) { + aggregateIterable = hintFunction.apply(mongoDbFactory, aggregateIterable::hintString, aggregateIterable::hint); + } + + if (options.hasExecutionTimeLimit()) { + aggregateIterable = aggregateIterable.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS); + } + + if (options.isSkipResults()) { + + // toCollection only allowed for $out and $merge if those are the last stages + if (aggregation.getPipeline().isOutOrMerge()) { + aggregateIterable.toCollection(); + } else { + aggregateIterable.first(); + } + return new AggregationResults<>(Collections.emptyList(), new Document()); + } + MongoIterable iterable = aggregateIterable.map(val -> { rawResult.add(val); 
@@ -2087,82 +2249,87 @@ protected AggregationResults doAggregate(Aggregation aggregation, String } @SuppressWarnings("ConstantConditions") - protected CloseableIterator aggregateStream(Aggregation aggregation, String collectionName, - Class outputType, @Nullable AggregationOperationContext context) { + protected Stream aggregateStream(Aggregation aggregation, String collectionName, Class outputType, + @Nullable AggregationOperationContext context) { + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(outputType, "Output type must not be null"); + Assert.isTrue(!aggregation.getOptions().isExplain(), "Can't use explain option with streaming"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - Assert.notNull(outputType, "Output type must not be null!"); - Assert.isTrue(!aggregation.getOptions().isExplain(), "Can't use explain option with streaming!"); + AggregationDefinition aggregationDefinition = queryOperations.createAggregation(aggregation, context); - AggregationOperationContext rootContext = context == null ? 
Aggregation.DEFAULT_CONTEXT : context; AggregationOptions options = aggregation.getOptions(); - List pipeline = aggregation.toPipeline(rootContext); + List pipeline = aggregationDefinition.getAggregationPipeline(); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName); + LOGGER.debug( + String.format("Streaming aggregation: %s in collection %s", serializeToJsonSafely(pipeline), collectionName)); } ReadDocumentCallback readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); - return execute(collectionName, (CollectionCallback>) collection -> { + return execute(collectionName, (CollectionCallback>) collection -> { + + CollectionPreparerDelegate delegate = CollectionPreparerDelegate.of(options); - AggregateIterable cursor = collection.aggregate(pipeline) // - .allowDiskUse(options.isAllowDiskUse()) // - .useCursor(true); + AggregateIterable cursor = delegate.prepare(collection).aggregate(pipeline, Document.class); + + if (options.isAllowDiskUseSet()) { + cursor = cursor.allowDiskUse(options.isAllowDiskUse()); + } if (options.getCursorBatchSize() != null) { cursor = cursor.batchSize(options.getCursorBatchSize()); } - if (options.getCollation().isPresent()) { - cursor = cursor.collation(options.getCollation().map(Collation::toMongoCollation).get()); + options.getComment().ifPresent(cursor::comment); + HintFunction hintFunction = options.getHintObject().map(HintFunction::from).orElseGet(HintFunction::empty); + if (options.getHintObject().isPresent()) { + cursor = hintFunction.apply(mongoDbFactory, cursor::hintString, cursor::hint); } - return new CloseableIterableCursorAdapter<>(cursor.iterator(), exceptionTranslator, readCallback); + if (options.hasExecutionTimeLimit()) { + cursor = cursor.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS); + } + + Class domainType = aggregation instanceof TypedAggregation typedAggregation ? 
typedAggregation.getInputType() + : null; + + Optionals.firstNonEmpty(options::getCollation, // + () -> operations.forType(domainType).getCollation()) // + .map(Collation::toMongoCollation) // + .ifPresent(cursor::collation); + + return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, readCallback).stream(); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation#query(java.lang.Class) - */ @Override public ExecutableFind query(Class domainType) { return new ExecutableFindOperationSupport(this).query(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation#update(java.lang.Class) - */ @Override public ExecutableUpdate update(Class domainType) { return new ExecutableUpdateOperationSupport(this).update(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation#remove(java.lang.Class) - */ @Override public ExecutableRemove remove(Class domainType) { return new ExecutableRemoveOperationSupport(this).remove(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ExecutableAggregation aggregateAndReturn(Class domainType) { return new ExecutableAggregationOperationSupport(this).aggregateAndReturn(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#insert(java.lang.Class) - */ + @Override + public ExecutableMapReduce mapReduce(Class domainType) { + return new ExecutableMapReduceOperationSupport(this).mapReduce(domainType); + } + @Override public ExecutableInsert insert(Class domainType) { return new ExecutableInsertOperationSupport(this).insert(domainType); @@ -2170,14 +2337,12 @@ public ExecutableInsert insert(Class domainType) { protected String replaceWithResourceIfNecessary(String function) { - String func = function; - if 
(this.resourceLoader != null && ResourceUtils.isUrl(function)) { - Resource functionResource = resourceLoader.getResource(func); + Resource functionResource = resourceLoader.getResource(function); if (!functionResource.exists()) { - throw new InvalidDataAccessApiUsageException(String.format("Resource %s not found!", function)); + throw new InvalidDataAccessApiUsageException(String.format("Resource %s not found", function)); } Scanner scanner = null; @@ -2186,7 +2351,7 @@ protected String replaceWithResourceIfNecessary(String function) { scanner = new Scanner(functionResource.getInputStream()); return scanner.useDelimiter("\\A").next(); } catch (IOException e) { - throw new InvalidDataAccessApiUsageException(String.format("Cannot read map-reduce file %s!", function), e); + throw new InvalidDataAccessApiUsageException(String.format("Cannot read map-reduce file %s", function), e); } finally { if (scanner != null) { scanner.close(); @@ -2194,33 +2359,72 @@ protected String replaceWithResourceIfNecessary(String function) { } } - return func; + return function; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#getCollectionNames() - */ + @Override + @SuppressWarnings("ConstantConditions") public Set getCollectionNames() { - return execute(new DbCallback>() { - public Set doInDB(MongoDatabase db) throws MongoException, DataAccessException { - Set result = new LinkedHashSet(); - for (String name : db.listCollectionNames()) { - result.add(name); - } - return result; + return execute(db -> { + Set result = new LinkedHashSet<>(); + for (String name : MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(db).listCollectionNames()) { + result.add(name); } + return result; }); } public MongoDatabase getDb() { - return mongoDbFactory.getDb(); + return doGetDatabase(); + } + + protected MongoDatabase doGetDatabase() { + return MongoDatabaseUtils.getDatabase(mongoDbFactory, sessionSynchronization); + } + + protected MongoDatabase 
prepareDatabase(MongoDatabase database) { + return database; + } + + protected , T> E maybeEmitEvent(E event) { + eventDelegate.publishEvent(event); + return event; } - protected void maybeEmitEvent(MongoMappingEvent event) { - if (null != eventPublisher) { - eventPublisher.publishEvent(event); + protected T maybeCallBeforeConvert(T object, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(BeforeConvertCallback.class, object, collection); } + + return object; + } + + protected T maybeCallBeforeSave(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(BeforeSaveCallback.class, object, document, collection); + } + + return object; + } + + protected T maybeCallAfterSave(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(AfterSaveCallback.class, object, document, collection); + } + + return object; + } + + protected T maybeCallAfterConvert(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(AfterConvertCallback.class, object, document, collection); + } + + return object; } /** @@ -2230,53 +2434,84 @@ protected void maybeEmitEvent(MongoMappingEvent event) { * @param collectionOptions * @return the collection that was created */ - protected MongoCollection doCreateCollection(final String collectionName, - final Document collectionOptions) { - return execute(new DbCallback>() { - public MongoCollection doInDB(MongoDatabase db) throws MongoException, DataAccessException { + @SuppressWarnings("ConstantConditions") + protected MongoCollection doCreateCollection(String collectionName, Document collectionOptions) { + return doCreateCollection(collectionName, getCreateCollectionOptions(collectionOptions)); + } - CreateCollectionOptions co = new CreateCollectionOptions(); + /** + * Create the specified collection using the provided options + 
* + * @param collectionName + * @param collectionOptions + * @return the collection that was created + * @since 3.3.3 + */ + @SuppressWarnings("ConstantConditions") + protected MongoCollection doCreateCollection(String collectionName, + CreateCollectionOptions collectionOptions) { - if (collectionOptions.containsKey("capped")) { - co.capped((Boolean) collectionOptions.get("capped")); - } - if (collectionOptions.containsKey("size")) { - co.sizeInBytes(((Number) collectionOptions.get("size")).longValue()); - } - if (collectionOptions.containsKey("max")) { - co.maxDocuments(((Number) collectionOptions.get("max")).longValue()); - } + return execute(db -> { - if (collectionOptions.containsKey("collation")) { - co.collation(IndexConverters.fromDocument(collectionOptions.get("collation", Document.class))); - } + db.createCollection(collectionName, collectionOptions); - if (collectionOptions.containsKey("validator")) { + MongoCollection coll = db.getCollection(collectionName, Document.class); - com.mongodb.client.model.ValidationOptions options = new com.mongodb.client.model.ValidationOptions(); + // TODO: Emit a collection created event + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Created collection [%s]", + coll.getNamespace() != null ? 
coll.getNamespace().getCollectionName() : collectionName)); + } + return coll; + }); + } - if (collectionOptions.containsKey("validationLevel")) { - options.validationLevel(ValidationLevel.fromString(collectionOptions.getString("validationLevel"))); - } - if (collectionOptions.containsKey("validationAction")) { - options.validationAction(ValidationAction.fromString(collectionOptions.getString("validationAction"))); - } + private CreateCollectionOptions getCreateCollectionOptions(Document document) { - options.validator(collectionOptions.get("validator", Document.class)); - co.validationOptions(options); - } + CreateCollectionOptions options = new CreateCollectionOptions(); + + if (document.containsKey("capped")) { + options.capped((Boolean) document.get("capped")); + } + if (document.containsKey("size")) { + options.sizeInBytes(((Number) document.get("size")).longValue()); + } + if (document.containsKey("max")) { + options.maxDocuments(((Number) document.get("max")).longValue()); + } - db.createCollection(collectionName, co); + if (document.containsKey("collation")) { + options.collation(IndexConverters.fromDocument(document.get("collation", Document.class))); + } - MongoCollection coll = db.getCollection(collectionName, Document.class); + if (document.containsKey("validator")) { - // TODO: Emit a collection created event - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Created collection [{}]", coll.getNamespace().getCollectionName()); - } - return coll; + ValidationOptions validation = new ValidationOptions(); + + if (document.containsKey("validationLevel")) { + validation.validationLevel(ValidationLevel.fromString(document.getString("validationLevel"))); } - }); + if (document.containsKey("validationAction")) { + validation.validationAction(ValidationAction.fromString(document.getString("validationAction"))); + } + + validation.validator(document.get("validator", Document.class)); + options.validationOptions(validation); + } + + if 
(document.containsKey("timeseries")) { + + Document timeSeries = document.get("timeseries", Document.class); + TimeSeriesOptions timeseries = new TimeSeriesOptions(timeSeries.getString("timeField")); + if (timeSeries.containsKey("metaField")) { + timeseries.metaField(timeSeries.getString("metaField")); + } + if (timeSeries.containsKey("granularity")) { + timeseries.granularity(TimeSeriesGranularity.valueOf(timeSeries.getString("granularity").toUpperCase())); + } + options.timeSeriesOptions(timeseries); + } + return options; } /** @@ -2284,24 +2519,49 @@ public MongoCollection doInDB(MongoDatabase db) throws MongoException, * The query document is specified as a standard {@link Document} and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. - * @return the {@link List} of converted objects. + * @return the converted object or {@literal null} if none exists. */ - protected T doFindOne(String collectionName, Document query, Document fields, Class entityClass) { + @Nullable + protected T doFindOne(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass) { + return doFindOne(collectionName, collectionPreparer, query, fields, CursorPreparer.NO_OP_PREPARER, entityClass); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. + * The query document is specified as a standard {@link Document} and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from. 
+ * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param query the query document that specifies the criteria used to find a record. + * @param fields the document that specifies the fields to be returned. + * @param preparer the preparer used to modify the cursor on execution. + * @param entityClass the parameterized type of the returned list. + * @return the converted object or {@literal null} if none exists. + * @since 2.2 + */ + @Nullable + @SuppressWarnings("ConstantConditions") + protected T doFindOne(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, CursorPreparer preparer, Class entityClass) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - Document mappedQuery = queryMapper.getMappedObject(query, entity); - Document mappedFields = queryMapper.getMappedObject(fields, entity); + + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); + Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("findOne using query: {} fields: {} for class: {} in collection: {}", serializeToJsonSafely(query), - mappedFields, entityClass, collectionName); + LOGGER.debug(String.format("findOne using query: %s fields: %s for class: %s in collection: %s", + serializeToJsonSafely(query), mappedFields, entityClass, collectionName)); } - return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields), - new ReadDocumentCallback(this.mongoConverter, entityClass, collectionName), collectionName); + return executeFindOneInternal(new FindOneCallback(collectionPreparer, mappedQuery, mappedFields, preparer), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } /** @@ -2309,14 +2569,16 @@ protected T 
doFindOne(String collectionName, Document query, Document fields * query document is specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record * @param fields the document that specifies the fields to be returned * @param entityClass the parameterized type of the returned list. * @return the List of converted objects. */ - protected List doFind(String collectionName, Document query, Document fields, Class entityClass) { - return doFind(collectionName, query, fields, entityClass, null, - new ReadDocumentCallback(this.mongoConverter, entityClass, collectionName)); + protected List doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, null, + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName)); } /** @@ -2325,34 +2587,40 @@ protected List doFind(String collectionName, Document query, Document fie * specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. - * @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply - * limits, skips and so on). 
+ * @param preparer allows for customization of the {@link FindIterable} used when iterating over the result set, + * (apply limits, skips and so on). * @return the {@link List} of converted objects. */ - protected List doFind(String collectionName, Document query, Document fields, Class entityClass, - CursorPreparer preparer) { - return doFind(collectionName, query, fields, entityClass, preparer, - new ReadDocumentCallback(mongoConverter, entityClass, collectionName)); + protected List doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass, CursorPreparer preparer) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, preparer, + new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName)); } - protected List doFind(String collectionName, Document query, Document fields, Class entityClass, - @Nullable CursorPreparer preparer, DocumentCallback objectCallback) { + protected List doFind(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, + Class entityClass, @Nullable CursorPreparer preparer, DocumentCallback objectCallback) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - Document mappedFields = queryMapper.getMappedFields(fields, entity); - Document mappedQuery = queryMapper.getMappedObject(query, entity); + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); + Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}", - serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName); + + Document mappedSort = preparer instanceof SortingQueryCursorPreparer sqcp ? 
getMappedSortObject(sqcp.getSortObject(), entity) : null; + LOGGER.debug(String.format("find using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, serializeToJsonSafely(mappedSort), entityClass, + collectionName)); } - return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback, - collectionName); + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields, null), + preparer != null ? preparer : CursorPreparer.NO_OP_PREPARER, objectCallback, collectionName); } /** @@ -2361,29 +2629,33 @@ protected List doFind(String collectionName, Document query, Document * * @since 2.0 */ - List doFind(String collectionName, Document query, Document fields, Class sourceClass, - Class targetClass, CursorPreparer preparer) { + List doFind(CollectionPreparer> collectionPreparer, String collectionName, + Document query, Document fields, Class sourceClass, Class targetClass, CursorPreparer preparer) { - MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(sourceClass); + MongoPersistentEntity entity = mappingContext.getPersistentEntity(sourceClass); + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); - Document mappedFields = getMappedFieldsObject(fields, entity, targetClass); - Document mappedQuery = queryMapper.getMappedObject(query, entity); + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, projection); + Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}", - serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName); + + Document mappedSort = preparer instanceof SortingQueryCursorPreparer sqcp + ? 
getMappedSortObject(sqcp.getSortObject(), entity) + : null; + LOGGER.debug(String.format("find using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, serializeToJsonSafely(mappedSort), sourceClass, + collectionName)); } - return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, - new ProjectingReadCallback<>(mongoConverter, sourceClass, targetClass, collectionName), collectionName); + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields, null), preparer, + new ProjectingReadCallback<>(mongoConverter, projection, collectionName), collectionName); } /** * Convert given {@link CollectionOptions} to a document and take the domain type information into account when * creating a mapped schema for validation.
- * This method calls {@link #convertToDocument(CollectionOptions)} for backwards compatibility and potentially - * overwrites the validator with the mapped validator document. In the long run - * {@link #convertToDocument(CollectionOptions)} will be removed so that this one becomes the only source of truth. * * @param collectionOptions can be {@literal null}. * @param targetType must not be {@literal null}. Use {@link Object} type instead. @@ -2392,43 +2664,41 @@ List doFind(String collectionName, Document query, Document fields, Cl */ protected Document convertToDocument(@Nullable CollectionOptions collectionOptions, Class targetType) { - Document doc = convertToDocument(collectionOptions); - - if (collectionOptions != null) { - - collectionOptions.getValidationOptions().ifPresent(it -> it.getValidator() // - .ifPresent(val -> doc.put("validator", getMappedValidator(val, targetType)))); + if (collectionOptions == null) { + return new Document(); } - return doc; - } + Document doc = new Document(); + collectionOptions.getCapped().ifPresent(val -> doc.put("capped", val)); + collectionOptions.getSize().ifPresent(val -> doc.put("size", val)); + collectionOptions.getMaxDocuments().ifPresent(val -> doc.put("max", val)); + collectionOptions.getCollation().ifPresent(val -> doc.append("collation", val.toDocument())); - /** - * @param collectionOptions can be {@literal null}. - * @return never {@literal null}. - * @deprecated since 2.1 in favor of {@link #convertToDocument(CollectionOptions, Class)}. 
- */ - @Deprecated - protected Document convertToDocument(@Nullable CollectionOptions collectionOptions) { - - Document document = new Document(); + collectionOptions.getValidationOptions().ifPresent(it -> { - if (collectionOptions != null) { + it.getValidationLevel().ifPresent(val -> doc.append("validationLevel", val.getValue())); + it.getValidationAction().ifPresent(val -> doc.append("validationAction", val.getValue())); + it.getValidator().ifPresent(val -> doc.append("validator", getMappedValidator(val, targetType))); + }); - collectionOptions.getCapped().ifPresent(val -> document.put("capped", val)); - collectionOptions.getSize().ifPresent(val -> document.put("size", val)); - collectionOptions.getMaxDocuments().ifPresent(val -> document.put("max", val)); - collectionOptions.getCollation().ifPresent(val -> document.append("collation", val.toDocument())); + collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions).ifPresent(it -> { - collectionOptions.getValidationOptions().ifPresent(it -> { + Document timeseries = new Document("timeField", it.getTimeField()); + if (StringUtils.hasText(it.getMetaField())) { + timeseries.append("metaField", it.getMetaField()); + } + if (!Granularity.DEFAULT.equals(it.getGranularity())) { + timeseries.append("granularity", it.getGranularity().name().toLowerCase()); + } + doc.put("timeseries", timeseries); + }); - it.getValidationLevel().ifPresent(val -> document.append("validationLevel", val.getValue())); - it.getValidationAction().ifPresent(val -> document.append("validationAction", val.getValue())); - it.getValidator().ifPresent(val -> document.append("validator", getMappedValidator(val, Object.class))); - }); - } + collectionOptions.getChangeStreamOptions().map(it -> new Document("enabled", it.getPreAndPostImages())) + .ifPresent(it -> { + doc.put("changeStreamPreAndPostImages", it); + }); - return document; + return doc; } Document getMappedValidator(Validator validator, Class 
domainType) { @@ -2444,8 +2714,7 @@ Document getMappedValidator(Validator validator, Class domainType) { /** * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. - * The first document that matches the query is returned and also removed from the collection in the database. - *

+ * The first document that matches the query is returned and also removed from the collection in the database.
* The query document is specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from @@ -2453,27 +2722,26 @@ Document getMappedValidator(Validator validator, Class domainType) { * @param entityClass the parameterized type of the returned list. * @return the List of converted objects. */ - protected T doFindAndRemove(String collectionName, Document query, Document fields, Document sort, - @Nullable Collation collation, Class entityClass) { - - EntityReader readerToUse = this.mongoConverter; + @SuppressWarnings("ConstantConditions") + protected T doFindAndRemove(CollectionPreparer collectionPreparer, String collectionName, Document query, + Document fields, Document sort, @Nullable Collation collation, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("findAndRemove using query: {} fields: {} sort: {} for class: {} in collection: {}", - serializeToJsonSafely(query), fields, sort, entityClass, collectionName); + LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(query), fields, serializeToJsonSafely(sort), entityClass, collectionName)); } MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - return executeFindOneInternal( - new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort, collation), - new ReadDocumentCallback(readerToUse, entityClass, collectionName), collectionName); + return executeFindOneInternal(new FindAndRemoveCallback(collectionPreparer, + queryMapper.getMappedObject(query, entity), fields, sort, collation), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } - protected T doFindAndModify(String collectionName, Document query, Document fields, Document sort, - Class entityClass, Update update, @Nullable FindAndModifyOptions options) { - - EntityReader readerToUse = 
this.mongoConverter; + @SuppressWarnings("ConstantConditions") + protected T doFindAndModify(CollectionPreparer collectionPreparer, String collectionName, Document query, + Document fields, Document sort, Class entityClass, UpdateDefinition update, + @Nullable FindAndModifyOptions options) { if (options == null) { options = new FindAndModifyOptions(); @@ -2481,53 +2749,130 @@ protected T doFindAndModify(String collectionName, Document query, Document MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - increaseVersionForUpdateIfNecessary(entity, update); + UpdateContext updateContext = queryOperations.updateSingleContext(update, query, false); + updateContext.increaseVersionForUpdateIfNecessary(entity); - Document mappedQuery = queryMapper.getMappedObject(query, entity); - Document mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity); + Document mappedQuery = updateContext.getMappedQuery(entity); + Object mappedUpdate = updateContext.isAggregationUpdate() ? 
updateContext.getUpdatePipeline(entityClass) + : updateContext.getMappedUpdate(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug( - "findAndModify using query: {} fields: {} sort: {} for class: {} and update: {} " + "in collection: {}", - serializeToJsonSafely(mappedQuery), fields, sort, entityClass, serializeToJsonSafely(mappedUpdate), - collectionName); + LOGGER.debug(String.format( + "findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s in collection: %s", + serializeToJsonSafely(mappedQuery), fields, serializeToJsonSafely(sort), entityClass, + serializeToJsonSafely(mappedUpdate), collectionName)); } - return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options), - new ReadDocumentCallback(readerToUse, entityClass, collectionName), collectionName); + return executeFindOneInternal( + new FindAndModifyCallback(collectionPreparer, mappedQuery, fields, sort, mappedUpdate, + update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()), options), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } /** - * Populates the id property of the saved object, if it's not set already. + * Customize this part for findAndReplace. * - * @param savedObject - * @param id + * @param collectionName The name of the collection to perform the operation in. + * @param mappedQuery the query to look up documents. + * @param mappedFields the fields to project the result to. + * @param mappedSort the sort to be applied when executing the query. + * @param collation collation settings for the query. Can be {@literal null}. + * @param entityType the source domain type. + * @param replacement the replacement {@link Document}. + * @param options applicable options. + * @param resultType the target domain type. 
+ * @return {@literal null} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is + * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. */ - protected void populateIdIfNecessary(Object savedObject, Object id) { + @Nullable + protected T doFindAndReplace(CollectionPreparer collectionPreparer, String collectionName, Document mappedQuery, + Document mappedFields, Document mappedSort, @Nullable com.mongodb.client.model.Collation collation, + Class entityType, Document replacement, FindAndReplaceOptions options, Class resultType) { + + EntityProjection projection = operations.introspectProjection(resultType, entityType); + + return doFindAndReplace(collectionPreparer, collectionName, mappedQuery, mappedFields, mappedSort, collation, + entityType, replacement, options, projection); + } + + CollectionPreparerDelegate createDelegate(Query query) { + return CollectionPreparerDelegate.of(query); + } - if (id == null) { - return; + CollectionPreparer> createCollectionPreparer(Query query, @Nullable MongoAction action) { + CollectionPreparer> collectionPreparer = createDelegate(query); + if (action == null) { + return collectionPreparer; } + return collectionPreparer.andThen(collection -> { + WriteConcern writeConcern = prepareWriteConcern(action); + return writeConcern != null ? collection.withWriteConcern(writeConcern) : collection; + }); + } + + /** + * Customize this part for findAndReplace. + * + * @param collectionName The name of the collection to perform the operation in. + * @param mappedQuery the query to look up documents. + * @param mappedFields the fields to project the result to. + * @param mappedSort the sort to be applied when executing the query. + * @param collation collation settings for the query. Can be {@literal null}. + * @param entityType the source domain type. + * @param replacement the replacement {@link Document}. + * @param options applicable options. 
+ * @param projection the projection descriptor. + * @return {@literal null} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is + * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. + * @since 3.4 + */ + @Nullable + private T doFindAndReplace(CollectionPreparer collectionPreparer, String collectionName, Document mappedQuery, + Document mappedFields, Document mappedSort, @Nullable com.mongodb.client.model.Collation collation, + Class entityType, Document replacement, FindAndReplaceOptions options, EntityProjection projection) { - if (savedObject instanceof Document) { - Document document = (Document) savedObject; - document.put(ID_FIELD, id); - return; + if (LOGGER.isDebugEnabled()) { + LOGGER + .debug(String.format( + "findAndReplace using query: %s fields: %s sort: %s for class: %s and replacement: %s " + + "in collection: %s", + serializeToJsonSafely(mappedQuery), serializeToJsonSafely(mappedFields), + serializeToJsonSafely(mappedSort), entityType, serializeToJsonSafely(replacement), collectionName)); } - MongoPersistentProperty idProperty = getIdPropertyFor(savedObject.getClass()); + return executeFindOneInternal(new FindAndReplaceCallback(collectionPreparer, mappedQuery, mappedFields, mappedSort, + replacement, collation, options), new ProjectingReadCallback<>(mongoConverter, projection, collectionName), + collectionName); + } - if (idProperty != null) { + private UpdateResult doReplace(ReplaceOptions options, Class entityType, String collectionName, + UpdateContext updateContext, CollectionPreparer> collectionPreparer, + Document replacement) { - ConversionService conversionService = mongoConverter.getConversionService(); - MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(savedObject.getClass()); - PersistentPropertyAccessor accessor = entity.getPropertyAccessor(savedObject); + MongoPersistentEntity persistentEntity = 
mappingContext.getPersistentEntity(entityType); - Object value = accessor.getProperty(idProperty); - if (value == null) { - new ConvertingPropertyAccessor(accessor, conversionService).setProperty(idProperty, id); - } + ReplaceCallback replaceCallback = new ReplaceCallback(collectionPreparer, + updateContext.getMappedQuery(persistentEntity), replacement, updateContext.getReplaceOptions(entityType, it -> { + it.upsert(options.isUpsert()); + })); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("replace one using query: %s for class: %s in collection: %s", + serializeToJsonSafely(updateContext.getMappedQuery(persistentEntity)), entityType, collectionName)); } + + return execute(collectionName, replaceCallback); + } + + /** + * Populates the id property of the saved object, if it's not set already. + * + * @param savedObject + * @param id + */ + protected T populateIdIfNecessary(T savedObject, Object id) { + + return operations.forEntity(savedObject, mongoConverter.getConversionService()) // + .populateIdIfNecessary(id); } private MongoCollection getAndPrepareCollection(MongoDatabase db, String collectionName) { @@ -2544,23 +2889,24 @@ private MongoCollection getAndPrepareCollection(MongoDatabase db, Stri * Internal method using callbacks to do queries against the datastore that requires reading a single object from a * collection of objects. It will take the following steps *

    - *
  1. Execute the given {@link ConnectionCallback} for a {@link Document}.
  2. + *
  3. Execute the given {@link CollectionCallback} for a {@link Document}.
  4. *
  5. Apply the given {@link DocumentCallback} to each of the {@link Document}s to obtain the result.
  6. *
      * * @param * @param collectionCallback the callback to retrieve the {@link Document} with - * @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type + * @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type * @param collectionName the collection to be queried * @return */ + @Nullable private T executeFindOneInternal(CollectionCallback collectionCallback, - DocumentCallback objectCallback, String collectionName) { + DocumentCallback documentCallback, String collectionName) { try { - T result = objectCallback - .doWith(collectionCallback.doInCollection(getAndPrepareCollection(getDb(), collectionName))); - return result; + + Document document = collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName)); + return document != null ? documentCallback.doWith(document) : null; } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } @@ -2570,51 +2916,38 @@ private T executeFindOneInternal(CollectionCallback collectionCall * Internal method using callback to do queries against the datastore that requires reading a collection of objects. * It will take the following steps *
        - *
      1. Execute the given {@link ConnectionCallback} for a {@link DBCursor}.
      2. - *
      3. Prepare that {@link DBCursor} with the given {@link CursorPreparer} (will be skipped if {@link CursorPreparer} - * is {@literal null}
      4. - *
      5. Iterate over the {@link DBCursor} and applies the given {@link DocumentCallback} to each of the + *
      6. Execute the given {@link CollectionCallback} for a {@link FindIterable}.
      7. + *
      8. Prepare that {@link FindIterable} with the given {@link CursorPreparer} (will be skipped if + * {@link CursorPreparer} is {@literal null}
      9. + *
      10. Iterate over the {@link FindIterable} and applies the given {@link DocumentCallback} to each of the * {@link Document}s collecting the actual result {@link List}.
      11. *
          * * @param - * @param collectionCallback the callback to retrieve the {@link DBCursor} with - * @param preparer the {@link CursorPreparer} to potentially modify the {@link DBCursor} before iterating over it - * @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type + * @param collectionCallback the callback to retrieve the {@link FindIterable} with + * @param preparer the {@link CursorPreparer} to potentially modify the {@link FindIterable} before iterating over it + * @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type * @param collectionName the collection to be queried * @return */ private List executeFindMultiInternal(CollectionCallback> collectionCallback, - @Nullable CursorPreparer preparer, DocumentCallback objectCallback, String collectionName) { + CursorPreparer preparer, DocumentCallback documentCallback, String collectionName) { try { - MongoCursor cursor = null; - - try { - - FindIterable iterable = collectionCallback - .doInCollection(getAndPrepareCollection(getDb(), collectionName)); - - if (preparer != null) { - iterable = preparer.prepare(iterable); - } - - cursor = iterable.iterator(); + try (MongoCursor cursor = preparer + .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection) + .iterator()) { - List result = new ArrayList(); + int available = cursor.available(); + List result = available > 0 ? 
new ArrayList<>(available) : new ArrayList<>(); while (cursor.hasNext()) { Document object = cursor.next(); - result.add(objectCallback.doWith(object)); + result.add(documentCallback.doWith(object)); } return result; - } finally { - - if (cursor != null) { - cursor.close(); - } } } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); @@ -2622,29 +2955,14 @@ private List executeFindMultiInternal(CollectionCallback> collectionCallback, - @Nullable CursorPreparer preparer, DocumentCallbackHandler callbackHandler, String collectionName) { - - try { - - MongoCursor cursor = null; + CursorPreparer preparer, DocumentCallbackHandler callbackHandler, String collectionName) { - try { - FindIterable iterable = collectionCallback - .doInCollection(getAndPrepareCollection(getDb(), collectionName)); - - if (preparer != null) { - iterable = preparer.prepare(iterable); - } - - cursor = iterable.iterator(); + try (MongoCursor cursor = preparer + .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection) + .iterator()) { - while (cursor.hasNext()) { - callbackHandler.processDocument(cursor.next()); - } - } finally { - if (cursor != null) { - cursor.close(); - } + while (cursor.hasNext()) { + callbackHandler.processDocument(cursor.next()); } } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); @@ -2660,33 +2978,7 @@ private MongoPersistentEntity getPersistentEntity(@Nullable Class type) { return type != null ? mappingContext.getPersistentEntity(type) : null; } - @Nullable - private MongoPersistentProperty getIdPropertyFor(Class type) { - - MongoPersistentEntity persistentEntity = getPersistentEntity(type); - return persistentEntity != null ? 
persistentEntity.getIdProperty() : null; - } - - @Nullable - private String determineEntityCollectionName(@Nullable T obj) { - if (null != obj) { - return determineCollectionName(obj.getClass()); - } - - return null; - } - - String determineCollectionName(@Nullable Class entityClass) { - - if (entityClass == null) { - throw new InvalidDataAccessApiUsageException( - "No class parameter provided, entity collection can't be determined!"); - } - - return mappingContext.getRequiredPersistentEntity(entityClass).getCollection(); - } - - private static MongoConverter getDefaultMongoConverter(MongoDbFactory factory) { + private static MongoConverter getDefaultMongoConverter(MongoDatabaseFactory factory) { DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory); MongoCustomConversions conversions = new MongoCustomConversions(Collections.emptyList()); @@ -2697,46 +2989,35 @@ private static MongoConverter getDefaultMongoConverter(MongoDbFactory factory) { MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext); converter.setCustomConversions(conversions); + converter.setCodecRegistryProvider(factory); converter.afterPropertiesSet(); return converter; } - private Document getMappedSortObject(Query query, Class type) { + @Nullable + private Document getMappedSortObject(@Nullable Query query, Class type) { - if (query == null || ObjectUtils.isEmpty(query.getSortObject())) { + if (query == null) { return null; } - return queryMapper.getMappedSort(query.getSortObject(), mappingContext.getPersistentEntity(type)); + return getMappedSortObject(query.getSortObject(), type); } - private Document getMappedFieldsObject(Document fields, MongoPersistentEntity entity, Class targetType) { - return queryMapper.getMappedFields(addFieldsForProjection(fields, entity.getType(), targetType), entity); + @Nullable + private Document getMappedSortObject(Document sortObject, Class type) { + return getMappedSortObject(sortObject, 
mappingContext.getPersistentEntity(type)); } - /** - * For cases where {@code fields} is {@literal null} or {@literal empty} add fields required for creating the - * projection (target) type if the {@code targetType} is a {@literal closed interface projection}. - * - * @param fields can be {@literal null}. - * @param domainType must not be {@literal null}. - * @param targetType must not be {@literal null}. - * @return {@link Document} with fields to be included. - */ - private Document addFieldsForProjection(Document fields, Class domainType, Class targetType) { - - if (!fields.isEmpty() || !targetType.isInterface() || ClassUtils.isAssignable(domainType, targetType)) { - return fields; - } - - ProjectionInformation projectionInformation = projectionFactory.getProjectionInformation(targetType); + @Nullable + private Document getMappedSortObject(Document sortObject, @Nullable MongoPersistentEntity entity) { - if (projectionInformation.isClosed()) { - projectionInformation.getInputProperties().forEach(it -> fields.append(it.getName(), 1)); + if (ObjectUtils.isEmpty(sortObject)) { + return null; } - return fields; + return queryMapper.getMappedSort(sortObject, entity); } /** @@ -2757,7 +3038,7 @@ static RuntimeException potentiallyConvertRuntimeException(RuntimeException ex, /** * Simple {@link CollectionCallback} that takes a query {@link Document} plus an optional fields specification - * {@link Document} and executes that against the {@link DBCollection}. + * {@link Document} and executes that against the {@link MongoCollection}. 
* * @author Oliver Gierke * @author Thomas Risberg @@ -2765,23 +3046,25 @@ static RuntimeException potentiallyConvertRuntimeException(RuntimeException ex, */ private static class FindOneCallback implements CollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Optional fields; + private final CursorPreparer cursorPreparer; + + FindOneCallback(CollectionPreparer> collectionPreparer, Document query, Document fields, + CursorPreparer preparer) { - public FindOneCallback(Document query, Document fields) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = Optional.of(fields).filter(it -> !ObjectUtils.isEmpty(fields)); + this.cursorPreparer = preparer; } + @Override public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - FindIterable iterable = collection.find(query); - - if (LOGGER.isDebugEnabled()) { - - LOGGER.debug("findOne using query: {} fields: {} in db.collection: {}", serializeToJsonSafely(query), - serializeToJsonSafely(fields.orElseGet(Document::new)), collection.getNamespace().getFullName()); - } + FindIterable iterable = cursorPreparer.initiateFind(collection, + col -> collectionPreparer.prepare(col).find(query, Document.class)); if (fields.isPresent()) { iterable = iterable.projection(fields.get()); @@ -2793,7 +3076,7 @@ public Document doInCollection(MongoCollection collection) throws Mong /** * Simple {@link CollectionCallback} that takes a query {@link Document} plus an optional fields specification - * {@link Document} and executes that against the {@link DBCollection}. + * {@link Document} and executes that against the {@link MongoCollection}. 
* * @author Oliver Gierke * @author Thomas Risberg @@ -2801,62 +3084,83 @@ public Document doInCollection(MongoCollection collection) throws Mong */ private static class FindCallback implements CollectionCallback> { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; + private final @Nullable com.mongodb.client.model.Collation collation; - public FindCallback(Document query) { - this(query, new Document()); - } - - public FindCallback(Document query, Document fields) { + public FindCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, @Nullable com.mongodb.client.model.Collation collation) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(fields, "Fields must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(fields, "Fields must not be null"); + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; + this.collation = collation; } + @Override public FindIterable doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - return collection.find(query).projection(fields); + FindIterable findIterable = collectionPreparer.prepare(collection).find(query, Document.class) + .projection(fields); + + if (collation != null) { + findIterable = findIterable.collation(collation); + } + return findIterable; } } /** - * Optimized {@link CollectionCallback} that takes an already mappend query and a nullable + * Optimized {@link CollectionCallback} that takes an already mapped query and a nullable * {@link com.mongodb.client.model.Collation} to execute a count query limited to one element. 
* * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - private static class ExistsCallback implements CollectionCallback { + private class ExistsCallback implements CollectionCallback { + private final CollectionPreparer collectionPreparer; private final Document mappedQuery; private final com.mongodb.client.model.Collation collation; + ExistsCallback(CollectionPreparer collectionPreparer, Document mappedQuery, + com.mongodb.client.model.Collation collation) { + + this.collectionPreparer = collectionPreparer; + this.mappedQuery = mappedQuery; + this.collation = collation; + } + @Override public Boolean doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - return collection.count(mappedQuery, new CountOptions().limit(1).collation(collation)) > 0; + + return doCount(collectionPreparer, collection.getNamespace().getCollectionName(), mappedQuery, + new CountOptions().limit(1).collation(collation)) > 0; } } /** * Simple {@link CollectionCallback} that takes a query {@link Document} plus an optional fields specification - * {@link Document} and executes that against the {@link DBCollection}. + * {@link Document} and executes that against the {@link MongoCollection}. 
* * @author Thomas Risberg */ private static class FindAndRemoveCallback implements CollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; private final Document sort; private final Optional collation; - public FindAndRemoveCallback(Document query, Document fields, Document sort, @Nullable Collation collation) { + FindAndRemoveCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, @Nullable Collation collation) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; @@ -2864,32 +3168,39 @@ public FindAndRemoveCallback(Document query, Document fields, Document sort, @Nu this.collation = Optional.ofNullable(collation); } + @Override public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException { FindOneAndDeleteOptions opts = new FindOneAndDeleteOptions().sort(sort).projection(fields); collation.map(Collation::toMongoCollation).ifPresent(opts::collation); - return collection.findOneAndDelete(query, opts); + return collectionPreparer.prepare(collection).findOneAndDelete(query, opts); } } private static class FindAndModifyCallback implements CollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; private final Document sort; - private final Document update; + private final Object update; + private final List arrayFilters; private final FindAndModifyOptions options; - public FindAndModifyCallback(Document query, Document fields, Document sort, Document update, - FindAndModifyOptions options) { + FindAndModifyCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, Object update, List arrayFilters, FindAndModifyOptions options) { + + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.sort = sort; this.update 
= update; + this.arrayFilters = arrayFilters; this.options = options; } + @Override public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException { FindOneAndUpdateOptions opts = new FindOneAndUpdateOptions(); @@ -2904,7 +3215,66 @@ public Document doInCollection(MongoCollection collection) throws Mong options.getCollation().map(Collation::toMongoCollation).ifPresent(opts::collation); - return collection.findOneAndUpdate(query, update, opts); + if (!arrayFilters.isEmpty()) { + opts.arrayFilters(arrayFilters); + } + + if (update instanceof Document document) { + return collectionPreparer.prepare(collection).findOneAndUpdate(query, document, opts); + } else if (update instanceof List) { + return collectionPreparer.prepare(collection).findOneAndUpdate(query, (List) update, opts); + } + + throw new IllegalArgumentException(String.format("Using %s is not supported in findOneAndUpdate", update)); + } + } + + /** + * {@link CollectionCallback} specific for find and remove operation. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ + private static class FindAndReplaceCallback implements CollectionCallback { + + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Document fields; + private final Document sort; + private final Document update; + private final @Nullable com.mongodb.client.model.Collation collation; + private final FindAndReplaceOptions options; + + FindAndReplaceCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, Document update, @Nullable com.mongodb.client.model.Collation collation, + FindAndReplaceOptions options) { + this.collectionPreparer = collectionPreparer; + this.query = query; + this.fields = fields; + this.sort = sort; + this.update = update; + this.options = options; + this.collation = collation; + } + + @Override + public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + + FindOneAndReplaceOptions opts = new FindOneAndReplaceOptions(); + opts.sort(sort); + opts.collation(collation); + opts.projection(fields); + + if (options.isUpsert()) { + opts.upsert(true); + } + + if (options.isReturnNew()) { + opts.returnDocument(ReturnDocument.AFTER); + } + + return collectionPreparer.prepare(collection).findOneAndReplace(query, update, opts); } } @@ -2915,177 +3285,162 @@ public Document doInCollection(MongoCollection collection) throws Mong * @author Thomas Darimont */ - interface DocumentCallback { + protected interface DocumentCallback { - @Nullable - T doWith(@Nullable Document object); + T doWith(Document object); } /** * Simple {@link DocumentCallback} that will transform {@link Document} into the given target type using the given - * {@link MongoReader}. + * {@link EntityReader}. 
* * @author Oliver Gierke * @author Christoph Strobl + * @author Roman Puchkovskiy */ - @RequiredArgsConstructor private class ReadDocumentCallback implements DocumentCallback { - private final @NonNull EntityReader reader; - private final @NonNull Class type; + private final EntityReader reader; + private final Class type; private final String collectionName; - @Nullable - public T doWith(@Nullable Document object) { + ReadDocumentCallback(EntityReader reader, Class type, String collectionName) { - if (null != object) { - maybeEmitEvent(new AfterLoadEvent(object, type, collectionName)); - } + this.reader = reader; + this.type = type; + this.collectionName = collectionName; + } - T source = reader.read(type, object); + @Override + public T doWith(Document document) { + + maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); + T entity = reader.read(type, document); - if (null != source) { - maybeEmitEvent(new AfterConvertEvent(object, source, collectionName)); + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); } - return source; + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + entity = maybeCallAfterConvert(entity, document, collectionName); + + return entity; } } /** * {@link DocumentCallback} transforming {@link Document} into the given {@code targetType} or decorating the - * {@code sourceType} with a {@literal projection} in case the {@code targetType} is an {@litera interface}. + * {@code sourceType} with a {@literal projection} in case the {@code targetType} is an {@literal interface}. 
* * @param * @param * @since 2.0 */ - @RequiredArgsConstructor private class ProjectingReadCallback implements DocumentCallback { - private final @NonNull EntityReader reader; - private final @NonNull Class entityType; - private final @NonNull Class targetType; - private final @NonNull String collectionName; - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoTemplate.DocumentCallback#doWith(org.bson.Document) - */ - @SuppressWarnings("unchecked") - @Nullable - public T doWith(@Nullable Document object) { - - if (object == null) { - return null; - } - - Class typeToRead = targetType.isInterface() || targetType.isAssignableFrom(entityType) ? entityType - : targetType; - - if (null != object) { - maybeEmitEvent(new AfterLoadEvent(object, targetType, collectionName)); - } - - Object source = reader.read(typeToRead, object); - Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source; - - if (null != result) { - maybeEmitEvent(new AfterConvertEvent<>(object, result, collectionName)); - } - - return (T) result; - } - } + private final MongoConverter mongoConverter; + private final EntityProjection projection; + private final String collectionName; - class UnwrapAndReadDocumentCallback extends ReadDocumentCallback { + ProjectingReadCallback(MongoConverter mongoConverter, EntityProjection projection, String collectionName) { - public UnwrapAndReadDocumentCallback(EntityReader reader, Class type, String collectionName) { - super(reader, type, collectionName); + this.mongoConverter = mongoConverter; + this.projection = projection; + this.collectionName = collectionName; } @Override - public T doWith(@Nullable Document object) { + @SuppressWarnings("unchecked") + public T doWith(Document document) { - if (object == null) { + if (document == null) { return null; } - Object idField = object.get(Fields.UNDERSCORE_ID); - - if (!(idField instanceof Document)) { - return super.doWith(object); - } + 
maybeEmitEvent(new AfterLoadEvent<>(document, projection.getMappedType().getType(), collectionName)); - Document toMap = new Document(); - Document nested = (Document) idField; - toMap.putAll(nested); + Object entity = mongoConverter.project(projection, document); - for (String key : object.keySet()) { - if (!Fields.UNDERSCORE_ID.equals(key)) { - toMap.put(key, object.get(key)); - } + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", mongoConverter)); } - return super.doWith(toMap); + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + return (T) maybeCallAfterConvert(entity, document, collectionName); } } - class QueryCursorPreparer implements CursorPreparer { + class QueryCursorPreparer implements SortingQueryCursorPreparer { - private final @Nullable Query query; + private final Query query; + private final Document sortObject; + private final int limit; + private final long skip; private final @Nullable Class type; - public QueryCursorPreparer(@Nullable Query query, @Nullable Class type) { + QueryCursorPreparer(Query query, @Nullable Class type) { + this(query, query.getSortObject(), query.getLimit(), query.getSkip(), type); + } + QueryCursorPreparer(Query query, Document sortObject, int limit, long skip, @Nullable Class type) { this.query = query; + this.sortObject = sortObject; + this.limit = limit; + this.skip = skip; this.type = type; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.CursorPreparer#prepare(com.mongodb.DBCursor) - */ - public FindIterable prepare(FindIterable cursor) { - - if (query == null) { - return cursor; - } + @Override + public FindIterable prepare(FindIterable iterable) { - if (query.getSkip() <= 0 && query.getLimit() <= 0 && ObjectUtils.isEmpty(query.getSortObject()) - && !StringUtils.hasText(query.getHint()) && !query.getMeta().hasValues() - && !query.getCollation().isPresent()) { - return cursor; - } + FindIterable cursorToUse = 
iterable; - FindIterable cursorToUse; + operations.forType(type).getCollation(query) // + .map(Collation::toMongoCollation) // + .ifPresent(cursorToUse::collation); - cursorToUse = query.getCollation().map(Collation::toMongoCollation).map(cursor::collation).orElse(cursor); + Meta meta = query.getMeta(); + HintFunction hintFunction = HintFunction.from(query.getHint()); + if (skip <= 0 && limit <= 0 && ObjectUtils.isEmpty(sortObject) && hintFunction.isEmpty() && !meta.hasValues() + && query.getCollation().isEmpty()) { + return cursorToUse; + } try { - if (query.getSkip() > 0) { - cursorToUse = cursorToUse.skip((int) query.getSkip()); + if (skip > 0) { + cursorToUse = cursorToUse.skip((int) skip); } - if (query.getLimit() > 0) { - cursorToUse = cursorToUse.limit(query.getLimit()); + if (limit > 0) { + cursorToUse = cursorToUse.limit(limit); } - if (!ObjectUtils.isEmpty(query.getSortObject())) { - Document sort = type != null ? getMappedSortObject(query, type) : query.getSortObject(); + if (!ObjectUtils.isEmpty(sortObject)) { + Document sort = type != null ? 
getMappedSortObject(sortObject, type) : sortObject; cursorToUse = cursorToUse.sort(sort); } - Document meta = new Document(); - if (StringUtils.hasText(query.getHint())) { - meta.put("$hint", query.getHint()); + if (hintFunction.isPresent()) { + cursorToUse = hintFunction.apply(mongoDbFactory, cursorToUse::hintString, cursorToUse::hint); } - if (query.getMeta().hasValues()) { + if (meta.hasValues()) { + + if (meta.hasComment()) { + cursorToUse = cursorToUse.comment(meta.getRequiredComment()); + } + + if (meta.hasMaxTime()) { + cursorToUse = cursorToUse.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); + } + + if (meta.getCursorBatchSize() != null) { + cursorToUse = cursorToUse.batchSize(meta.getCursorBatchSize()); + } - for (Entry entry : query.getMeta().values()) { - meta.put(entry.getKey(), entry.getValue()); + if (meta.getAllowDiskUse() != null) { + cursorToUse = cursorToUse.allowDiskUse(meta.getAllowDiskUse()); } - for (Meta.CursorOption option : query.getMeta().getFlags()) { + for (Meta.CursorOption option : meta.getFlags()) { switch (option) { @@ -3095,19 +3450,26 @@ public FindIterable prepare(FindIterable cursor) { case PARTIAL: cursorToUse = cursorToUse.partial(true); break; + case SECONDARY_READS: + break; default: throw new IllegalArgumentException(String.format("%s is no supported flag.", option)); } } } - cursorToUse = cursorToUse.modifiers(meta); } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } return cursorToUse; } + + @Nullable + @Override + public Document getSortObject() { + return sortObject; + } } /** @@ -3115,9 +3477,11 @@ public FindIterable prepare(FindIterable cursor) { * a delegate and creates a {@link GeoResult} from the result. 
* * @author Oliver Gierke + * @author Christoph Strobl */ static class GeoNearResultDocumentCallback implements DocumentCallback> { + private final String distanceField; private final DocumentCallback delegate; private final Metric metric; @@ -3125,35 +3489,47 @@ static class GeoNearResultDocumentCallback implements DocumentCallback delegate, Metric metric) { + GeoNearResultDocumentCallback(String distanceField, DocumentCallback delegate, Metric metric) { - Assert.notNull(delegate, "DocumentCallback must not be null!"); + Assert.notNull(delegate, "DocumentCallback must not be null"); + this.distanceField = distanceField; this.delegate = delegate; this.metric = metric; } - @Nullable - public GeoResult doWith(@Nullable Document object) { + @Override + public GeoResult doWith(Document object) { - double distance = ((Double) object.get("dis")).doubleValue(); - Document content = (Document) object.get("obj"); + double distance = Double.NaN; + if (object.containsKey(distanceField)) { + distance = NumberUtils.convertNumberToTargetClass(object.get(distanceField, Number.class), Double.class); + } - T doWith = delegate.doWith(content); + T doWith = delegate.doWith(object); return new GeoResult<>(doWith, new Distance(distance, metric)); } } /** - * A {@link CloseableIterator} that is backed by a MongoDB {@link Cursor}. + * @return the {@link MongoDatabaseFactory} in use. + * @since 3.1.4 + */ + public MongoDatabaseFactory getMongoDatabaseFactory() { + return mongoDbFactory; + } + + /** + * A {@link CloseableIterator} that is backed by a MongoDB {@link MongoCollection}. 
* * @author Thomas Darimont * @since 1.7 */ - @AllArgsConstructor(access = AccessLevel.PACKAGE) static class CloseableIterableCursorAdapter implements CloseableIterator { private volatile @Nullable MongoCursor cursor; @@ -3161,20 +3537,24 @@ static class CloseableIterableCursorAdapter implements CloseableIterator { private DocumentCallback objectReadCallback; /** - * Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link Cursor}. - * - * @param cursor - * @param exceptionTranslator - * @param objectReadCallback + * Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link MongoCollection}. */ - public CloseableIterableCursorAdapter(FindIterable cursor, - PersistenceExceptionTranslator exceptionTranslator, DocumentCallback objectReadCallback) { + CloseableIterableCursorAdapter(MongoIterable cursor, PersistenceExceptionTranslator exceptionTranslator, + DocumentCallback objectReadCallback) { this.cursor = cursor.iterator(); this.exceptionTranslator = exceptionTranslator; this.objectReadCallback = objectReadCallback; } + CloseableIterableCursorAdapter(MongoCursor cursor, PersistenceExceptionTranslator exceptionTranslator, + DocumentCallback objectReadCallback) { + + this.cursor = cursor; + this.exceptionTranslator = exceptionTranslator; + this.objectReadCallback = objectReadCallback; + } + @Override public boolean hasNext() { @@ -3201,8 +3581,7 @@ public T next() { try { Document item = cursor.next(); - T converted = objectReadCallback.doWith(item); - return converted; + return objectReadCallback.doWith(item); } catch (RuntimeException ex) { throw potentiallyConvertRuntimeException(ex, exceptionTranslator); } @@ -3228,150 +3607,77 @@ public void close() { } } - public MongoDbFactory getMongoDbFactory() { - return mongoDbFactory; - } - /** - * {@link BatchAggregationLoader} is a little helper that can process cursor results returned by an aggregation - * command execution. 
On presence of a {@literal nextBatch} indicated by presence of an {@code id} field in the - * {@code cursor} another {@code getMore} command gets executed reading the next batch of documents until all results - * are loaded. + * {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the + * server through the driver API.
          + * The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired + * target method matching the actual arguments plus a {@link ClientSession}. * * @author Christoph Strobl - * @since 1.10 + * @since 2.1 */ - static class BatchAggregationLoader { - - private static final String CURSOR_FIELD = "cursor"; - private static final String RESULT_FIELD = "result"; - private static final String BATCH_SIZE_FIELD = "batchSize"; - private static final String FIRST_BATCH = "firstBatch"; - private static final String NEXT_BATCH = "nextBatch"; - private static final String SERVER_USED = "serverUsed"; - private static final String OK = "ok"; - - private final MongoTemplate template; - private final ReadPreference readPreference; - private final int batchSize; - - BatchAggregationLoader(MongoTemplate template, ReadPreference readPreference, int batchSize) { + static class SessionBoundMongoTemplate extends MongoTemplate { - this.template = template; - this.readPreference = readPreference; - this.batchSize = batchSize; - } + private final MongoTemplate delegate; + private final ClientSession session; /** - * Run aggregation command and fetch all results. + * @param session must not be {@literal null}. + * @param that must not be {@literal null}. 
*/ - Document aggregate(String collectionName, Aggregation aggregation, AggregationOperationContext context) { - - Document command = prepareAggregationCommand(collectionName, aggregation, context, batchSize); + SessionBoundMongoTemplate(ClientSession session, MongoTemplate that) { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command)); - } + super(that.getMongoDatabaseFactory().withSession(session), that); - return mergeAggregationResults(aggregateBatched(command, collectionName, batchSize)); + this.delegate = that; + this.session = session; } - /** - * Pre process the aggregation command sent to the server by adding {@code cursor} options to match execution on - * different server versions. - */ - private static Document prepareAggregationCommand(String collectionName, Aggregation aggregation, - @Nullable AggregationOperationContext context, int batchSize) { - - AggregationOperationContext rootContext = context == null ? Aggregation.DEFAULT_CONTEXT : context; - Document command = aggregation.toDocument(collectionName, rootContext); - - if (!aggregation.getOptions().isExplain()) { - command.put(CURSOR_FIELD, new Document(BATCH_SIZE_FIELD, batchSize)); - } + @Override + public MongoCollection getCollection(String collectionName) { - return command; + // native MongoDB objects that offer methods with ClientSession must not be proxied. 
+ return delegate.getCollection(collectionName); } - private List aggregateBatched(Document command, String collectionName, int batchSize) { - - List results = new ArrayList<>(); - - Document commandResult = template.executeCommand(command, readPreference); - results.add(postProcessResult(commandResult)); - - while (hasNext(commandResult)) { - - Document getMore = new Document("getMore", getNextBatchId(commandResult)) // - .append("collection", collectionName) // - .append(BATCH_SIZE_FIELD, batchSize); - - commandResult = template.executeCommand(getMore, this.readPreference); - results.add(postProcessResult(commandResult)); - } + @Override + public MongoDatabase getDb() { - return results; + // native MongoDB objects that offer methods with ClientSession must not be proxied. + return delegate.getDb(); } - private static Document postProcessResult(Document commandResult) { - - if (!commandResult.containsKey(CURSOR_FIELD)) { - return commandResult; - } - - Document resultObject = new Document(SERVER_USED, commandResult.get(SERVER_USED)); - resultObject.put(OK, commandResult.get(OK)); - - Document cursor = (Document) commandResult.get(CURSOR_FIELD); - if (cursor.containsKey(FIRST_BATCH)) { - resultObject.put(RESULT_FIELD, cursor.get(FIRST_BATCH)); - } else { - resultObject.put(RESULT_FIELD, cursor.get(NEXT_BATCH)); - } - - return resultObject; + @Override + protected boolean countCanBeEstimated(Document filter, CountOptions options) { + return false; } + } - private static Document mergeAggregationResults(List batchResults) { - - if (batchResults.size() == 1) { - return batchResults.iterator().next(); - } - - Document commandResult = new Document(); - List allResults = new ArrayList<>(); - - for (Document batchResult : batchResults) { - - Collection documents = (Collection) batchResult.get(RESULT_FIELD); - if (!CollectionUtils.isEmpty(documents)) { - allResults.addAll(documents); - } - } - - // take general info from first batch - commandResult.put(SERVER_USED, 
batchResults.iterator().next().get(SERVER_USED)); - commandResult.put(OK, batchResults.iterator().next().get(OK)); - - // and append the merged batchResults - commandResult.put(RESULT_FIELD, allResults); - - return commandResult; - } + @FunctionalInterface + interface CountExecution { + long countDocuments(CollectionPreparer collectionPreparer, String collection, Document filter, + CountOptions options); + } - private static boolean hasNext(Document commandResult) { + private static class ReplaceCallback implements CollectionCallback { - if (!commandResult.containsKey(CURSOR_FIELD)) { - return false; - } + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Document update; + private final com.mongodb.client.model.ReplaceOptions options; - Object next = getNextBatchId(commandResult); - return next != null && ((Number) next).longValue() != 0L; + ReplaceCallback(CollectionPreparer> collectionPreparer, Document query, Document update, + com.mongodb.client.model.ReplaceOptions options) { + this.collectionPreparer = collectionPreparer; + this.query = query; + this.update = update; + this.options = options; } - @Nullable - private static Object getNextBatchId(Document commandResult) { - return ((Document) commandResult.get(CURSOR_FIELD)).get("id"); + @Override + public UpdateResult doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + return collectionPreparer.prepare(collection).replaceOne(query, update, options); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java new file mode 100644 index 0000000000..583b243aa8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java @@ -0,0 +1,80 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; + +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.PersistentPropertyTranslator; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.util.Predicates; + +/** + * Common operations performed on properties of an entity like extracting fields information for projection creation. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +class PropertyOperations { + + private final MappingContext, MongoPersistentProperty> mappingContext; + + PropertyOperations(MappingContext, MongoPersistentProperty> mappingContext) { + this.mappingContext = mappingContext; + } + + /** + * For cases where {@code fields} is {@link Document#isEmpty() empty} include only fields that are required for + * creating the projection (target) type if the {@code EntityProjection} is a {@literal DTO projection} or a + * {@literal closed interface projection}. + * + * @param projection must not be {@literal null}. + * @param fields must not be {@literal null}. + * @return {@link Document} with fields to be included. 
+ */ + Document computeMappedFieldsForProjection(EntityProjection projection, + Document fields) { + + if (!projection.isClosedProjection()) { + return fields; + } + + Document projectedFields = new Document(); + + if (projection.getMappedType().getType().isInterface()) { + projection.forEach(it -> { + projectedFields.put(it.getPropertyPath().getSegment(), 1); + }); + } else { + + // DTO projections use merged metadata between domain type and result type + PersistentPropertyTranslator translator = PersistentPropertyTranslator.create( + mappingContext.getRequiredPersistentEntity(projection.getDomainType()), + Predicates.negate(MongoPersistentProperty::hasExplicitFieldName)); + + MongoPersistentEntity persistentEntity = mappingContext + .getRequiredPersistentEntity(projection.getMappedType()); + for (MongoPersistentProperty property : persistentEntity) { + projectedFields.put(translator.translate(property).getFieldName(), 1); + } + } + + return projectedFields; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java new file mode 100644 index 0000000000..28ca85fbd7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java @@ -0,0 +1,1045 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.bson.BsonValue; +import org.bson.Document; +import org.bson.codecs.Codec; +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; +import org.springframework.data.mapping.PropertyPath; +import org.springframework.data.mapping.PropertyReferenceException; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.MongoExpression; +import org.springframework.data.mongodb.core.MappedDocument.MappedUpdate; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationExpression; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoId; +import 
org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.ShardKey; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Meta; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.UpdateOptions; + +/** + * {@link QueryOperations} centralizes common operations required before an operation is actually ready to be executed. + * This involves mapping {@link Query queries} into their respective MongoDB representation, computing execution options + * for {@literal count}, {@literal remove}, and other methods. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Florian Lüdiger + * @since 3.0 + */ +class QueryOperations { + + private final QueryMapper queryMapper; + private final UpdateMapper updateMapper; + private final EntityOperations entityOperations; + private final PropertyOperations propertyOperations; + private final CodecRegistryProvider codecRegistryProvider; + private final MappingContext, MongoPersistentProperty> mappingContext; + private final AggregationUtil aggregationUtil; + private final Map, Document> mappedShardKey = new ConcurrentHashMap<>(1); + + /** + * Create a new instance of {@link QueryOperations}. + * + * @param queryMapper must not be {@literal null}. + * @param updateMapper must not be {@literal null}. + * @param entityOperations must not be {@literal null}. + * @param propertyOperations must not be {@literal null}. + * @param codecRegistryProvider must not be {@literal null}. + */ + QueryOperations(QueryMapper queryMapper, UpdateMapper updateMapper, EntityOperations entityOperations, + PropertyOperations propertyOperations, CodecRegistryProvider codecRegistryProvider) { + + this.queryMapper = queryMapper; + this.updateMapper = updateMapper; + this.entityOperations = entityOperations; + this.propertyOperations = propertyOperations; + this.codecRegistryProvider = codecRegistryProvider; + this.mappingContext = queryMapper.getMappingContext(); + this.aggregationUtil = new AggregationUtil(queryMapper, mappingContext); + } + + InsertContext createInsertContext(Document source) { + return createInsertContext(MappedDocument.of(source)); + } + + InsertContext createInsertContext(MappedDocument mappedDocument) { + return new InsertContext(mappedDocument); + } + + /** + * Create a new {@link QueryContext} instance. + * + * @param query must not be {@literal null}. + * @return new instance of {@link QueryContext}. 
+ */ + QueryContext createQueryContext(Query query) { + return new QueryContext(query); + } + + /** + * Create a new {@link DistinctQueryContext} instance. + * + * @param query must not be {@literal null}. + * @return new instance of {@link DistinctQueryContext}. + */ + DistinctQueryContext distinctQueryContext(Query query, String fieldName) { + return new DistinctQueryContext(query, fieldName); + } + + /** + * Create a new {@link CountContext} instance. + * + * @param query must not be {@literal null}. + * @return new instance of {@link CountContext}. + */ + CountContext countQueryContext(Query query) { + return new CountContext(query); + } + + /** + * Create a new {@link UpdateContext} instance affecting multiple documents. + * + * @param updateDefinition must not be {@literal null}. + * @param query must not be {@literal null}. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. + */ + UpdateContext updateContext(UpdateDefinition updateDefinition, Query query, boolean upsert) { + return new UpdateContext(updateDefinition, query, true, upsert); + } + + /** + * Create a new {@link UpdateContext} instance affecting a single document. + * + * @param updateDefinition must not be {@literal null}. + * @param query must not be {@literal null}. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. + */ + UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Query query, boolean upsert) { + return new UpdateContext(updateDefinition, query, false, upsert); + } + + /** + * Create a new {@link UpdateContext} instance affecting a single document. + * + * @param updateDefinition must not be {@literal null}. + * @param query must not be {@literal null}. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. 
+ */ + UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Document query, boolean upsert) { + return new UpdateContext(updateDefinition, query, false, upsert); + } + + /** + * @param replacement the {@link MappedDocument mapped replacement} document. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. + */ + UpdateContext replaceSingleContext(MappedDocument replacement, boolean upsert) { + return new UpdateContext(replacement, upsert); + } + + /** + * @param replacement the {@link MappedDocument mapped replacement} document. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. + */ + UpdateContext replaceSingleContext(Query query, MappedDocument replacement, boolean upsert) { + return new UpdateContext(query, replacement, upsert); + } + + /** + * Create a new {@link DeleteContext} instance removing all matching documents. + * + * @param query must not be {@literal null}. + * @return new instance of {@link QueryContext}. + */ + DeleteContext deleteQueryContext(Query query) { + return new DeleteContext(query, true); + } + + /** + * Create a new {@link DeleteContext} instance only the first matching document. + * + * @param query must not be {@literal null}. + * @return new instance of {@link QueryContext}. + */ + DeleteContext deleteSingleContext(Query query) { + return new DeleteContext(query, false); + } + + /** + * Create a new {@link AggregationDefinition} for the given {@link Aggregation}. + * + * @param aggregation must not be {@literal null}. + * @param inputType fallback mapping type in case of untyped aggregation. Can be {@literal null}. + * @return new instance of {@link AggregationDefinition}. 
+ * @since 3.2 + */ + AggregationDefinition createAggregation(Aggregation aggregation, @Nullable Class inputType) { + return new AggregationDefinition(aggregation, inputType); + } + + /** + * Create a new {@link AggregationDefinition} for the given {@link Aggregation}. + * + * @param aggregation must not be {@literal null}. + * @param aggregationOperationContext the {@link AggregationOperationContext} to use. Can be {@literal null}. + * @return new instance of {@link AggregationDefinition}. + * @since 3.2 + */ + AggregationDefinition createAggregation(Aggregation aggregation, + @Nullable AggregationOperationContext aggregationOperationContext) { + return new AggregationDefinition(aggregation, aggregationOperationContext); + } + + /** + * {@link InsertContext} encapsulates common tasks required to interact with {@link Document} to be inserted. + * + * @since 3.4.3 + */ + class InsertContext { + + private final MappedDocument source; + + private InsertContext(MappedDocument source) { + this.source = source; + } + + /** + * Prepare the {@literal _id} field. May generate a new {@literal id} value and convert it to the id properties + * {@link MongoPersistentProperty#getFieldType() target type}. + * + * @param type must not be {@literal null}. + * @param + * @return the {@link MappedDocument} containing the changes. + * @see #prepareId(MongoPersistentEntity) + */ + MappedDocument prepareId(Class type) { + return prepareId(mappingContext.getPersistentEntity(type)); + } + + /** + * Prepare the {@literal _id} field. May generate a new {@literal id} value and convert it to the id properties + * {@link MongoPersistentProperty#getFieldType() target type}. + * + * @param entity can be {@literal null}. + * @param + * @return the {@link MappedDocument} containing the changes. 
+ */ + MappedDocument prepareId(@Nullable MongoPersistentEntity entity) { + + if (entity == null || source.hasId()) { + return source; + } + + MongoPersistentProperty idProperty = entity.getIdProperty(); + if (idProperty != null + && (idProperty.hasExplicitWriteTarget() || idProperty.isAnnotationPresent(MongoId.class))) { + if (!ClassUtils.isAssignable(ObjectId.class, idProperty.getFieldType())) { + source.updateId(queryMapper.convertId(new ObjectId(), idProperty.getFieldType())); + } + } + return source; + } + } + + /** + * {@link QueryContext} encapsulates common tasks required to convert a {@link Query} into its MongoDB document + * representation, mapping field names, as well as determining and applying {@link Collation collations}. + * + * @author Christoph Strobl + */ + class QueryContext { + + private final Query query; + + /** + * Create new a {@link QueryContext} instance from the given {@literal query} (can be either a {@link Query} or a + * plain {@link Document}. + * + * @param query can be {@literal null}. + */ + private QueryContext(@Nullable Query query) { + this.query = query != null ? query : new Query(); + } + + /** + * @return never {@literal null}. + */ + Query getQuery() { + return query; + } + + /** + * Extract the raw {@link Query#getQueryObject() unmapped document} from the {@link Query}. + * + * @return + */ + Document getQueryObject() { + return query.getQueryObject(); + } + + /** + * Get the already mapped MongoDB query representation. + * + * @param domainType can be {@literal null}. + * @param entityLookup the {@link Function lookup} used to provide the {@link MongoPersistentEntity} for the + * given{@literal domainType} + * @param + * @return never {@literal null}. + */ + Document getMappedQuery(@Nullable Class domainType, + Function, MongoPersistentEntity> entityLookup) { + return getMappedQuery(domainType == null ? null : entityLookup.apply(domainType)); + } + + /** + * Get the already mapped MongoDB query representation. 
+ * + * @param entity the Entity to map field names to. Can be {@literal null}. + * @param + * @return never {@literal null}. + */ + Document getMappedQuery(@Nullable MongoPersistentEntity entity) { + return queryMapper.getMappedObject(getQueryObject(), entity); + } + + Document getMappedFields(@Nullable MongoPersistentEntity entity, EntityProjection projection) { + + Document fields = evaluateFields(entity); + + if (entity == null) { + return fields; + } + + Document mappedFields; + if (!fields.isEmpty()) { + mappedFields = queryMapper.getMappedFields(fields, entity); + } else { + mappedFields = propertyOperations.computeMappedFieldsForProjection(projection, fields); + mappedFields = queryMapper.addMetaAttributes(mappedFields, entity); + } + + if (entity.hasTextScoreProperty() && mappedFields.containsKey(entity.getTextScoreProperty().getFieldName()) + && !query.getQueryObject().containsKey("$text")) { + mappedFields.remove(entity.getTextScoreProperty().getFieldName()); + } + + if (mappedFields.isEmpty()) { + return BsonUtils.EMPTY_DOCUMENT; + } + + return mappedFields; + } + + private Document evaluateFields(@Nullable MongoPersistentEntity entity) { + + Document fields = query.getFieldsObject(); + + if (fields.isEmpty()) { + return BsonUtils.EMPTY_DOCUMENT; + } + + Document evaluated = new Document(); + + for (Entry entry : fields.entrySet()) { + + if (entry.getValue() instanceof MongoExpression mongoExpression) { + + AggregationOperationContext ctx = entity == null ? Aggregation.DEFAULT_CONTEXT + : new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper); + + evaluated.put(entry.getKey(), AggregationExpression.from(mongoExpression).toDocument(ctx)); + } else { + evaluated.put(entry.getKey(), entry.getValue()); + } + } + + return evaluated; + } + + /** + * Get the already mapped {@link Query#getSortObject() sort} option. + * + * @param entity the Entity to map field names to. Can be {@literal null}. 
+ * @return never {@literal null}. + */ + Document getMappedSort(@Nullable MongoPersistentEntity entity) { + return queryMapper.getMappedSort(query.getSortObject(), entity); + } + + /** + * Apply the {@link com.mongodb.client.model.Collation} if present extracted from the {@link Query} or fall back to + * the {@literal domain types} default {@link org.springframework.data.mongodb.core.mapping.Document#collation() + * collation}. + * + * @param domainType can be {@literal null}. + * @param consumer must not be {@literal null}. + */ + void applyCollation(@Nullable Class domainType, Consumer consumer) { + getCollation(domainType).ifPresent(consumer); + } + + /** + * Get the {@link com.mongodb.client.model.Collation} extracted from the {@link Query} if present or fall back to + * the {@literal domain types} default {@link org.springframework.data.mongodb.core.mapping.Document#collation() + * collation}. + * + * @param domainType can be {@literal null}. + * @return never {@literal null}. + */ + Optional getCollation(@Nullable Class domainType) { + + return entityOperations.forType(domainType).getCollation(query) // + .map(Collation::toMongoCollation); + } + + /** + * Get the {@link HintFunction} reading the actual hint form the {@link Query}. + * + * @return new instance of {@link HintFunction}. + * @since 4.2 + */ + HintFunction getHintFunction() { + return HintFunction.from(query.getHint()); + } + + /** + * Read and apply the hint from the {@link Query}. + * + * @since 4.2 + */ + void applyHint(Function stringConsumer, Function bsonConsumer) { + getHintFunction().ifPresent(codecRegistryProvider, stringConsumer, bsonConsumer); + } + } + + /** + * A {@link QueryContext} that encapsulates common tasks required when running {@literal distinct} queries. + * + * @author Christoph Strobl + */ + class DistinctQueryContext extends QueryContext { + + private final String fieldName; + + /** + * Create a new {@link DistinctQueryContext} instance. 
+ * + * @param query can be {@literal null}. + * @param fieldName must not be {@literal null}. + */ + private DistinctQueryContext(@Nullable Object query, String fieldName) { + + super(query instanceof Document document ? new BasicQuery(document) : (Query) query); + this.fieldName = fieldName; + } + + @Override + Document getMappedFields(@Nullable MongoPersistentEntity entity, EntityProjection projection) { + return getMappedFields(entity); + } + + Document getMappedFields(@Nullable MongoPersistentEntity entity) { + return queryMapper.getMappedFields(new Document(fieldName, 1), entity); + } + + /** + * Get the mapped field name to project to. + * + * @param entity can be {@literal null}. + * @return never {@literal null}. + */ + String getMappedFieldName(@Nullable MongoPersistentEntity entity) { + return getMappedFields(entity).keySet().iterator().next(); + } + + /** + * Get the MongoDB native representation of the given {@literal type}. + * + * @param type must not be {@literal null}. + * @param + * @return never {@literal null}. + */ + @SuppressWarnings("unchecked") + Class getDriverCompatibleClass(Class type) { + + return codecRegistryProvider.getCodecFor(type) // + .map(Codec::getEncoderClass) // + .orElse((Class) BsonValue.class); + } + + /** + * Get the most specific read target type based on the user {@literal requestedTargetType} an the property type + * based on meta information extracted from the {@literal domainType}. + * + * @param requestedTargetType must not be {@literal null}. + * @param domainType must not be {@literal null}. + * @return never {@literal null}. 
+ */ + Class getMostSpecificConversionTargetType(Class requestedTargetType, Class domainType) { + + Class conversionTargetType = requestedTargetType; + try { + + Class propertyType = PropertyPath.from(fieldName, domainType).getLeafProperty().getLeafType(); + + // use the more specific type but favor UserType over property one + if (ClassUtils.isAssignable(requestedTargetType, propertyType)) { + conversionTargetType = propertyType; + } + } catch (PropertyReferenceException e) { + // just don't care about it as we default to Object.class anyway. + } + + return conversionTargetType; + } + } + + /** + * A {@link QueryContext} that encapsulates common tasks required when running {@literal count} queries. + * + * @author Christoph Strobl + */ + class CountContext extends QueryContext { + + /** + * Creates a new {@link CountContext} instance. + * + * @param query can be {@literal null}. + */ + CountContext(@Nullable Query query) { + super(query); + } + + /** + * Get the {@link CountOptions} applicable for the {@link Query}. + * + * @param domainType must not be {@literal null}. + * @return never {@literal null}. + */ + CountOptions getCountOptions(@Nullable Class domainType) { + return getCountOptions(domainType, null); + } + + /** + * Get the {@link CountOptions} applicable for the {@link Query}. + * + * @param domainType can be {@literal null}. + * @param callback a callback to modify the generated options. Can be {@literal null}. 
+ * @return + */ + CountOptions getCountOptions(@Nullable Class domainType, @Nullable Consumer callback) { + + CountOptions options = new CountOptions(); + Query query = getQuery(); + + applyCollation(domainType, options::collation); + + if (query.getLimit() > 0) { + options.limit(query.getLimit()); + } + + if (query.getSkip() > 0) { + options.skip((int) query.getSkip()); + } + + Meta meta = query.getMeta(); + if (meta.hasValues()) { + + if (meta.hasMaxTime()) { + options.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); + } + + if (meta.hasComment()) { + options.comment(meta.getComment()); + } + } + + HintFunction hintFunction = HintFunction.from(query.getHint()); + + if (hintFunction.isPresent()) { + options = hintFunction.apply(codecRegistryProvider, options::hintString, options::hint); + } + + if (callback != null) { + callback.accept(options); + } + + return options; + } + } + + /** + * A {@link QueryContext} that encapsulates common tasks required when running {@literal delete} queries. + * + * @author Christoph Strobl + */ + class DeleteContext extends QueryContext { + + private final boolean multi; + + /** + * Crate a new {@link DeleteContext} instance. + * + * @param query can be {@literal null}. + * @param multi use {@literal true} to remove all matching documents, {@literal false} for just the first one. + */ + DeleteContext(@Nullable Query query, boolean multi) { + + super(query); + this.multi = multi; + } + + /** + * Get the {@link DeleteOptions} applicable for the {@link Query}. + * + * @param domainType must not be {@literal null}. + * @return never {@literal null}. + */ + DeleteOptions getDeleteOptions(@Nullable Class domainType) { + return getDeleteOptions(domainType, null); + } + + /** + * Get the {@link DeleteOptions} applicable for the {@link Query}. + * + * @param domainType can be {@literal null}. + * @param callback a callback to modify the generated options. Can be {@literal null}. 
+ * @return + */ + DeleteOptions getDeleteOptions(@Nullable Class domainType, @Nullable Consumer callback) { + + DeleteOptions options = new DeleteOptions(); + applyCollation(domainType, options::collation); + + if (callback != null) { + callback.accept(options); + } + + return options; + } + + /** + * @return {@literal true} if all matching documents shall be deleted. + */ + boolean isMulti() { + return multi; + } + } + + /** + * A {@link QueryContext} that encapsulates common tasks required when running {@literal updates}. + */ + class UpdateContext extends QueryContext { + + private final boolean multi; + private final boolean upsert; + private final @Nullable UpdateDefinition update; + private final @Nullable MappedDocument mappedDocument; + + /** + * Create a new {@link UpdateContext} instance. + * + * @param update must not be {@literal null}. + * @param query must not be {@literal null}. + * @param multi use {@literal true} to update all matching documents. + * @param upsert use {@literal true} to insert a new document if none match. + */ + UpdateContext(UpdateDefinition update, Document query, boolean multi, boolean upsert) { + this(update, new BasicQuery(query), multi, upsert); + } + + /** + * Create a new {@link UpdateContext} instance. + * + * @param update must not be {@literal null}. + * @param query can be {@literal null}. + * @param multi use {@literal true} to update all matching documents. + * @param upsert use {@literal true} to insert a new document if none match. 
+ */ + UpdateContext(UpdateDefinition update, @Nullable Query query, boolean multi, boolean upsert) { + + super(query); + + this.multi = multi; + this.upsert = upsert; + this.update = update; + this.mappedDocument = null; + } + + UpdateContext(MappedDocument update, boolean upsert) { + this(new BasicQuery(BsonUtils.asDocument(update.getIdFilter())), update, upsert); + } + + UpdateContext(Query query, MappedDocument update, boolean upsert) { + + super(query); + this.multi = false; + this.upsert = upsert; + this.mappedDocument = update; + this.update = null; + } + + /** + * Get the {@link UpdateOptions} applicable for the {@link Query}. + * + * @param domainType can be {@literal null}. + * @return never {@literal null}. + */ + UpdateOptions getUpdateOptions(@Nullable Class domainType) { + return getUpdateOptions(domainType, null); + } + + /** + * Get the {@link UpdateOptions} applicable for the {@link Query}. + * + * @param domainType can be {@literal null}. + * @param query can be {@literal null} + * @return never {@literal null}. + */ + UpdateOptions getUpdateOptions(@Nullable Class domainType, @Nullable Query query) { + UpdateOptions options = new UpdateOptions(); + options.upsert(upsert); + + if (update != null && update.hasArrayFilters()) { + options + .arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList())); + } + + if (query != null && query.isSorted()) { + options.sort(getMappedSort(domainType != null ? mappingContext.getPersistentEntity(domainType) : null)); + } + + HintFunction.from(getQuery().getHint()).ifPresent(codecRegistryProvider, options::hintString, options::hint); + applyCollation(domainType, options::collation); + + return options; + } + + /** + * Get the {@link ReplaceOptions} applicable for the {@link Query}. + * + * @param domainType must not be {@literal null}. + * @return never {@literal null}. 
+ */ + ReplaceOptions getReplaceOptions(@Nullable Class domainType) { + return getReplaceOptions(domainType, null); + } + + /** + * Get the {@link ReplaceOptions} applicable for the {@link Query}. + * + * @param domainType can be {@literal null}. + * @param callback a callback to modify the generated options. Can be {@literal null}. + * @return + */ + ReplaceOptions getReplaceOptions(@Nullable Class domainType, @Nullable Consumer callback) { + + UpdateOptions updateOptions = getUpdateOptions(domainType); + + ReplaceOptions options = new ReplaceOptions(); + options.collation(updateOptions.getCollation()); + options.upsert(updateOptions.isUpsert()); + applyHint(options::hintString, options::hint); + if (!isMulti() && getQuery().isSorted()) { + options.sort(getMappedSort(domainType != null ? mappingContext.getPersistentEntity(domainType) : null)); + } + + if (callback != null) { + callback.accept(options); + } + + return options; + } + + @Override + Document getMappedQuery(@Nullable MongoPersistentEntity domainType) { + return applyIsolation(super.getMappedQuery(domainType)); + } + + /** + * A replacement query that is derived from the already {@link MappedDocument}. + * + * @return + */ + Document getReplacementQuery() { + return applyIsolation(getQueryObject()); + } + + private Document applyIsolation(Document mappedQuery) { + if (multi && update != null && update.isIsolated() && !mappedQuery.containsKey("$isolated")) { + mappedQuery = new Document(mappedQuery); + mappedQuery.put("$isolated", 1); + } + return mappedQuery; + } + + Document applyShardKey(MongoPersistentEntity domainType, Document filter, @Nullable Document existing) { + + Document shardKeySource = existing != null ? existing + : mappedDocument != null ? 
mappedDocument.getDocument() : getMappedUpdate(domainType); + + Document filterWithShardKey = new Document(filter); + getMappedShardKeyFields(domainType) + .forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue((Bson) shardKeySource, key))); + + return filterWithShardKey; + } + + boolean requiresShardKey(Document filter, @Nullable MongoPersistentEntity domainType) { + + return !multi && domainType != null && domainType.isSharded() && !shardedById(domainType) + && !filter.keySet().containsAll(getMappedShardKeyFields(domainType)); + } + + /** + * @return {@literal true} if the {@link MongoPersistentEntity#getShardKey() shard key} is the entities + * {@literal id} property. + * @since 3.0 + */ + private boolean shardedById(MongoPersistentEntity domainType) { + + ShardKey shardKey = domainType.getShardKey(); + if (shardKey.size() != 1) { + return false; + } + + String key = shardKey.getPropertyNames().iterator().next(); + if (FieldName.ID.name().equals(key)) { + return true; + } + + MongoPersistentProperty idProperty = domainType.getIdProperty(); + return idProperty != null && idProperty.getName().equals(key); + } + + Set getMappedShardKeyFields(MongoPersistentEntity entity) { + return getMappedShardKey(entity).keySet(); + } + + Document getMappedShardKey(MongoPersistentEntity entity) { + return mappedShardKey.computeIfAbsent(entity.getType(), + key -> queryMapper.getMappedFields(entity.getShardKey().getDocument(), entity)); + } + + /** + * Get the already mapped aggregation pipeline to use with an {@link #isAggregationUpdate()}. + * + * @param domainType must not be {@literal null}. + * @return never {@literal null}. + */ + List getUpdatePipeline(@Nullable Class domainType) { + + Class type = domainType != null ? 
domainType : Object.class; + + AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, mappingContext, + queryMapper); + return aggregationUtil.createPipeline((AggregationUpdate) update, context); + } + + /** + * Get the already mapped update {@link Document}. + * + * @param entity + * @return + */ + Document getMappedUpdate(@Nullable MongoPersistentEntity entity) { + + if (update != null) { + return update instanceof MappedUpdate ? update.getUpdateObject() + : updateMapper.getMappedObject(update.getUpdateObject(), entity); + } + return mappedDocument.getDocument(); + } + + /** + * Increase a potential {@link MongoPersistentEntity#getVersionProperty() version property} prior to update if not + * already done in the actual {@link UpdateDefinition} + * + * @param persistentEntity can be {@literal null}. + */ + void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity persistentEntity) { + + if (persistentEntity != null && persistentEntity.hasVersionProperty()) { + + String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName(); + if (update != null && !update.modifies(versionFieldName)) { + update.inc(versionFieldName); + } + } + } + + /** + * @return {@literal true} if the update holds an aggregation pipeline. + */ + boolean isAggregationUpdate() { + return update instanceof AggregationUpdate; + } + + /** + * @return {@literal true} if all matching documents should be updated. + */ + boolean isMulti() { + return multi; + } + } + + /** + * A value object that encapsulates common tasks required when running {@literal aggregations}. 
+ * + * @since 3.2 + */ + class AggregationDefinition { + + private final Aggregation aggregation; + private final Lazy aggregationOperationContext; + private final Lazy> pipeline; + private final @Nullable Class inputType; + + /** + * Creates new instance of {@link AggregationDefinition} extracting the input type from either the + * {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or + * the given {@literal aggregationOperationContext} if present.
          + * Creates a new {@link AggregationOperationContext} if none given, based on the {@link Aggregation} input type and + * the desired {@link AggregationOptions#getDomainTypeMapping() domain type mapping}.
          + * Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse. + * + * @param aggregation the source aggregation. + * @param aggregationOperationContext can be {@literal null}. + */ + AggregationDefinition(Aggregation aggregation, @Nullable AggregationOperationContext aggregationOperationContext) { + + this.aggregation = aggregation; + + if (aggregation instanceof TypedAggregation typedAggregation) { + this.inputType = typedAggregation.getInputType(); + } else if (aggregationOperationContext instanceof TypeBasedAggregationOperationContext typeBasedAggregationOperationContext) { + this.inputType = typeBasedAggregationOperationContext.getType(); + } else { + this.inputType = null; + } + + this.aggregationOperationContext = Lazy.of(() -> aggregationOperationContext != null ? aggregationOperationContext + : aggregationUtil.createAggregationContext(aggregation, getInputType())); + this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext())); + } + + /** + * Creates new instance of {@link AggregationDefinition} extracting the input type from either the + * {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or + * the given {@literal aggregationOperationContext} if present.
          + * Creates a new {@link AggregationOperationContext} based on the {@link Aggregation} input type and the desired + * {@link AggregationOptions#getDomainTypeMapping() domain type mapping}.
          + * Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse. + * + * @param aggregation the source aggregation. + * @param inputType can be {@literal null}. + */ + AggregationDefinition(Aggregation aggregation, @Nullable Class inputType) { + + this.aggregation = aggregation; + + if (aggregation instanceof TypedAggregation typedAggregation) { + this.inputType = typedAggregation.getInputType(); + } else { + this.inputType = inputType; + } + + this.aggregationOperationContext = Lazy + .of(() -> aggregationUtil.createAggregationContext(aggregation, getInputType())); + this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext())); + } + + /** + * Obtain the already mapped pipeline. + * + * @return never {@literal null}. + */ + List getAggregationPipeline() { + return pipeline.get(); + } + + /** + * @return {@literal true} if the last aggregation stage is either {@literal $out} or {@literal $merge}. + * @see AggregationPipeline#isOutOrMerge() + */ + boolean isOutOrMerge() { + return aggregation.getPipeline().isOutOrMerge(); + } + + /** + * Obtain the {@link AggregationOperationContext} used for mapping the pipeline. + * + * @return never {@literal null}. + */ + AggregationOperationContext getAggregationOperationContext() { + return aggregationOperationContext.get(); + } + + /** + * @return the input type to map the pipeline against. Can be {@literal null}. 
+ */ + @Nullable + Class getInputType() { + return inputType; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java index 2adb19bf85..54129e6b5d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java index bdcc96949a..954fd61716 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,6 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; import reactor.core.publisher.Flux; import org.springframework.data.mongodb.core.aggregation.Aggregation; @@ -45,61 +41,52 @@ class ReactiveAggregationOperationSupport implements ReactiveAggregationOperatio */ ReactiveAggregationOperationSupport(ReactiveMongoTemplate template) { - Assert.notNull(template, "Template must not be null!"); + Assert.notNull(template, "Template must not be null"); this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ReactiveAggregation aggregateAndReturn(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ReactiveAggregationSupport<>(template, domainType, null, null); } - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) static class ReactiveAggregationSupport implements AggregationOperationWithAggregation, ReactiveAggregation, TerminatingAggregationOperation { - @NonNull ReactiveMongoTemplate template; - @NonNull Class domainType; - Aggregation aggregation; - String collection; + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Aggregation aggregation; + private final String collection; + + ReactiveAggregationSupport(ReactiveMongoTemplate template, Class domainType, Aggregation aggregation, + String collection) { + + this.template = template; + 
this.domainType = domainType; + this.aggregation = aggregation; + this.collection = collection; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation.AggregationOperationWithCollection#inCollection(java.lang.String) - */ @Override public AggregationOperationWithAggregation inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ReactiveAggregationSupport<>(template, domainType, aggregation, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation.AggregationOperationWithAggregation#by(org.springframework.data.mongodb.core.Aggregation) - */ @Override public TerminatingAggregationOperation by(Aggregation aggregation) { - Assert.notNull(aggregation, "Aggregation must not be null!"); + Assert.notNull(aggregation, "Aggregation must not be null"); return new ReactiveAggregationSupport<>(template, domainType, aggregation, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation.TerminatingAggregationOperation#all() - */ @Override public Flux all() { return template.aggregate(aggregation, getCollectionName(aggregation), domainType); @@ -111,16 +98,14 @@ private String getCollectionName(Aggregation aggregation) { return collection; } - if (aggregation instanceof TypedAggregation) { - - TypedAggregation typedAggregation = (TypedAggregation) aggregation; + if (aggregation instanceof TypedAggregation typedAggregation) { if (typedAggregation.getInputType() != null) { - return template.determineCollectionName(typedAggregation.getInputType()); + return template.getCollectionName(typedAggregation.getInputType()); } } - return template.determineCollectionName(domainType); + return template.getCollectionName(domainType); } } } diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveBulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveBulkOperations.java new file mode 100644 index 0000000000..7f88b63f28 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveBulkOperations.java @@ -0,0 +1,144 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Mono; + +import java.util.List; + +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; + +import com.mongodb.bulk.BulkWriteResult; + +/** + * Bulk operations for insert/update/remove actions on a collection. Bulk operations are available since MongoDB 2.6 and + * make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple single + * operations or list of similar operations in sequence which can then eventually be executed by calling + * {@link #execute()}. + * + *
          + * ReactiveMongoOperations ops = …;
          + *
          + * ops.bulkOps(BulkMode.UNORDERED, Person.class)
          + * 				.insert(newPerson)
          + * 				.updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
          + * 				.execute();
          + * 
          + *

          + * Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations + * that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and + * the version field remains not populated. + * + * @author Christoph Strobl + * @since 4.1 + */ +public interface ReactiveBulkOperations { + + /** + * Add a single insert to the bulk operation. + * + * @param documents the document to insert, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}. + */ + ReactiveBulkOperations insert(Object documents); + + /** + * Add a list of inserts to the bulk operation. + * + * @param documents List of documents to insert, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}. + */ + ReactiveBulkOperations insert(List documents); + + /** + * Add a single update to the bulk operation. For the update request, only the first matching document is updated. + * + * @param query update criteria, must not be {@literal null}. The {@link Query} may define a {@link Query#with(Sort) + * sort order} to influence which document to update when potentially matching multiple candidates. + * @param update {@link UpdateDefinition} operation to perform, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}. + */ + ReactiveBulkOperations updateOne(Query query, UpdateDefinition update); + + /** + * Add a single update to the bulk operation. For the update request, all matching documents are updated. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}. 
+ */ + ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update); + + /** + * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty, + * else an insert. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}. + */ + ReactiveBulkOperations upsert(Query query, UpdateDefinition update); + + /** + * Add a single remove operation to the bulk operation. + * + * @param remove the {@link Query} to select the documents to be removed, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}. + */ + ReactiveBulkOperations remove(Query remove); + + /** + * Add a list of remove operations to the bulk operation. + * + * @param removes the remove operations to perform, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}. + */ + ReactiveBulkOperations remove(List removes); + + /** + * Add a single replace operation to the bulk operation. + * + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. + * @param replacement the replacement document. Must not be {@literal null}. The {@link Query} may define a + * {@link Query#with(Sort) sort order} to influence which document to replace when potentially matching + * multiple candidates. + * @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}. 
+ */ + default ReactiveBulkOperations replaceOne(Query query, Object replacement) { + return replaceOne(query, replacement, FindAndReplaceOptions.empty()); + } + + /** + * Add a single replace operation to the bulk operation. + * + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}. + */ + ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options); + + /** + * Execute all bulk operations using the default write concern. + * + * @return a {@link Mono} emitting the result of the bulk operation providing counters for inserts/updates etc. + */ + Mono execute(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java new file mode 100644 index 0000000000..4f936e0ffa --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java @@ -0,0 +1,200 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; + +import java.time.Instant; +import java.util.function.Consumer; + +import org.bson.BsonTimestamp; +import org.bson.BsonValue; +import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; + +/** + * {@link ReactiveChangeStreamOperation} allows creation and execution of reactive MongoDB + * Change Stream operations in a fluent API style.
          + * The starting {@literal domainType} is used for mapping a potentially given + * {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} used for filtering. By default, the + * originating {@literal domainType} is also used for mapping back the result from the {@link org.bson.Document}. + * However, it is possible to define a different {@literal returnType} via {@code as}.
          + * The collection to operate on is optional in which case call collection with the actual database are watched, use + * {@literal watchCollection} to define a fixed collection. + * + *

          + *     
          + *         changeStream(Jedi.class)
          + *             .watchCollection("star-wars")
          + *             .filter(where("operationType").is("insert"))
          + *             .resumeAt(Instant.now())
          + *             .listen();
          + *     
          + * 
          + * + * @author Christoph Strobl + * @since 2.2 + */ +public interface ReactiveChangeStreamOperation { + + /** + * Start creating a change stream operation for the given {@literal domainType} watching all collections within the + * database.
          + * Consider limiting events by defining a {@link ChangeStreamWithCollection#watchCollection(String) collection} and/or + * {@link ChangeStreamWithFilterAndProjection#filter(CriteriaDefinition) filter}. + * + * @param domainType must not be {@literal null}. Use {@link org.bson.Document} to obtain raw elements. + * @return new instance of {@link ReactiveChangeStream}. Never {@literal null}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + ReactiveChangeStream changeStream(Class domainType); + + /** + * Compose change stream execution by calling one of the terminating methods. + */ + interface TerminatingChangeStream { + + /** + * Start listening to changes. The stream will not be completed unless the {@link org.reactivestreams.Subscription} + * is {@link org.reactivestreams.Subscription#cancel() canceled}. + *
          + * However, the stream may become dead, or invalid, if all watched collections, databases are dropped. + */ + Flux> listen(); + } + + /** + * Collection override (optional). + */ + interface ChangeStreamWithCollection { + + /** + * Explicitly set the name of the collection to watch.
          + * Skip this step to watch all collections within the database. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link ChangeStreamWithFilterAndProjection}. + * @throws IllegalArgumentException if {@code collection} is {@literal null}. + */ + ChangeStreamWithFilterAndProjection watchCollection(String collection); + + /** + * Set the collection to watch. Collection name is derived from the {@link Class entityClass}.
          + * Skip this step to watch all collections within the database. + * + * @param entityClass must not be {@literal null}. + * @return new instance of {@link ChangeStreamWithFilterAndProjection}. + * @throws IllegalArgumentException if {@code entityClass} is {@literal null}. + */ + ChangeStreamWithFilterAndProjection watchCollection(Class entityClass); + } + + /** + * Provide a filter for limiting results (optional). + */ + interface ChangeStreamWithFilterAndProjection extends ResumingChangeStream, TerminatingChangeStream { + + /** + * Use an {@link Aggregation} to filter matching events. + * + * @param by must not be {@literal null}. + * @return new instance of {@link ChangeStreamWithFilterAndProjection}. + * @throws IllegalArgumentException if the given {@link Aggregation} is {@literal null}. + */ + ChangeStreamWithFilterAndProjection filter(Aggregation by); + + /** + * Use a {@link CriteriaDefinition critera} to filter matching events via an + * {@link org.springframework.data.mongodb.core.aggregation.MatchOperation}. + * + * @param by must not be {@literal null}. + * @return new instance of {@link ChangeStreamWithFilterAndProjection}. + * @throws IllegalArgumentException if the given {@link CriteriaDefinition} is {@literal null}. + */ + ChangeStreamWithFilterAndProjection filter(CriteriaDefinition by); + + /** + * Define the target type fields should be mapped to. + * + * @param resultType must not be {@literal null}. + * @param result type. + * @return new instance of {@link ChangeStreamWithFilterAndProjection}. + * @throws IllegalArgumentException if resultType is {@literal null}. + */ + ChangeStreamWithFilterAndProjection as(Class resultType); + } + + /** + * Resume a change stream. (optional). + */ + interface ResumingChangeStream extends TerminatingChangeStream { + + /** + * Resume the change stream at a given point. + * + * @param token an {@link Instant} or {@link BsonTimestamp} + * @return new instance of {@link TerminatingChangeStream}. 
+ * @see ChangeStreamOptionsBuilder#resumeAt(Instant) + * @see ChangeStreamOptionsBuilder#resumeAt(BsonTimestamp) + * @throws IllegalArgumentException if the given beacon is neither {@link Instant} nor {@link BsonTimestamp}. + */ + TerminatingChangeStream resumeAt(Object token); + + /** + * Resume the change stream after a given point. + * + * @param token an {@link Instant} or {@link BsonTimestamp} + * @return new instance of {@link TerminatingChangeStream}. + * @see ChangeStreamOptionsBuilder#resumeAfter(BsonValue) + * @see ChangeStreamOptionsBuilder#resumeToken(BsonValue) + * @throws IllegalArgumentException if the given beacon not a {@link BsonValue}. + */ + TerminatingChangeStream resumeAfter(Object token); + + /** + * Start the change stream after a given point. + * + * @param token an {@link Instant} or {@link BsonTimestamp} + * @return new instance of {@link TerminatingChangeStream}. + * @see ChangeStreamOptionsBuilder#startAfter(BsonValue) (BsonValue) + * @throws IllegalArgumentException if the given beacon not a {@link BsonValue}. + */ + TerminatingChangeStream startAfter(Object token); + } + + /** + * Provide some options. + */ + interface ChangeStreamWithOptions { + + /** + * Provide some options via the callback by modifying the given {@link ChangeStreamOptionsBuilder}. Previously + * defined options like a {@link ResumingChangeStream#resumeAfter(Object) resumeToken} are carried over to the + * builder and can be overwritten via eg. {@link ChangeStreamOptionsBuilder#resumeToken(BsonValue)}. + * + * @param optionsConsumer never {@literal null}. + * @return new instance of {@link ReactiveChangeStream}. + */ + ReactiveChangeStream withOptions(Consumer optionsConsumer); + } + + /** + * {@link ReactiveChangeStream} provides methods for constructing change stream operations in a fluent way. 
+ */ + interface ReactiveChangeStream extends ChangeStreamWithOptions, ChangeStreamWithCollection, + TerminatingChangeStream, ResumingChangeStream, ChangeStreamWithFilterAndProjection {} +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java new file mode 100644 index 0000000000..afeb6c5e0e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java @@ -0,0 +1,187 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; + +import java.time.Instant; +import java.util.List; +import java.util.function.Consumer; + +import org.bson.BsonTimestamp; +import org.bson.BsonValue; +import org.bson.Document; +import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.MatchOperation; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * @author Christoph Strobl + * @since 2.2 + */ +class ReactiveChangeStreamOperationSupport implements ReactiveChangeStreamOperation { + + private final ReactiveMongoTemplate template; + + /** + * @param template must not be {@literal null}. + */ + ReactiveChangeStreamOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } + + @Override + public ReactiveChangeStream changeStream(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + return new ReactiveChangeStreamSupport<>(template, domainType, domainType, null, null); + } + + static class ReactiveChangeStreamSupport + implements ReactiveChangeStream, ChangeStreamWithFilterAndProjection { + + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Class returnType; + private final @Nullable String collection; + private final @Nullable ChangeStreamOptions options; + + private ReactiveChangeStreamSupport(ReactiveMongoTemplate template, Class domainType, Class returnType, + @Nullable String collection, @Nullable ChangeStreamOptions options) { + + this.template = template; + this.domainType = domainType; + this.returnType = returnType; + this.collection = collection; + this.options = options; + } + + @Override + public ChangeStreamWithFilterAndProjection 
watchCollection(String collection) { + + Assert.hasText(collection, "Collection name must not be null nor empty"); + + return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, options); + } + + @Override + public ChangeStreamWithFilterAndProjection watchCollection(Class entityClass) { + + Assert.notNull(entityClass, "Collection type not be null"); + + return watchCollection(template.getCollectionName(entityClass)); + } + + @Override + public TerminatingChangeStream resumeAt(Object token) { + + return withOptions(builder -> { + + if (token instanceof Instant instant) { + builder.resumeAt(instant); + } else if (token instanceof BsonTimestamp bsonTimestamp) { + builder.resumeAt(bsonTimestamp); + } + }); + } + + @Override + public TerminatingChangeStream resumeAfter(Object token) { + + Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue"); + + return withOptions(builder -> builder.resumeAfter((BsonValue) token)); + } + + @Override + public TerminatingChangeStream startAfter(Object token) { + + Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue"); + + return withOptions(builder -> builder.startAfter((BsonValue) token)); + } + + @Override + public ReactiveChangeStreamSupport withOptions(Consumer optionsConsumer) { + + ChangeStreamOptionsBuilder builder = initOptionsBuilder(); + optionsConsumer.accept(builder); + + return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, builder.build()); + } + + @Override + public ChangeStreamWithFilterAndProjection as(Class resultType) { + + Assert.notNull(resultType, "ResultType must not be null"); + + return new ReactiveChangeStreamSupport<>(template, domainType, resultType, collection, options); + } + + @Override + public ChangeStreamWithFilterAndProjection filter(Aggregation filter) { + return withOptions(builder -> builder.filter(filter)); + } + + @Override + public ChangeStreamWithFilterAndProjection filter(CriteriaDefinition by) { + 
+ MatchOperation $match = Aggregation.match(by); + Aggregation aggregation = !Document.class.equals(domainType) ? Aggregation.newAggregation(domainType, $match) + : Aggregation.newAggregation($match); + return filter(aggregation); + } + + @Override + public Flux> listen() { + return template.changeStream(collection, options != null ? options : ChangeStreamOptions.empty(), returnType); + } + + private ChangeStreamOptionsBuilder initOptionsBuilder() { + + ChangeStreamOptionsBuilder builder = ChangeStreamOptions.builder(); + if (options == null) { + return builder; + } + + options.getFilter().ifPresent(it -> { + if (it instanceof Aggregation aggregation) { + builder.filter(aggregation); + } else { + builder.filter(((List) it).toArray(new Document[0])); + } + }); + options.getFullDocumentLookup().ifPresent(builder::fullDocumentLookup); + options.getFullDocumentBeforeChangeLookup().ifPresent(builder::fullDocumentBeforeChangeLookup); + options.getCollation().ifPresent(builder::collation); + + if (options.isResumeAfter()) { + options.getResumeToken().ifPresent(builder::resumeAfter); + options.getResumeBsonTimestamp().ifPresent(builder::resumeAfter); + } else if (options.isStartAfter()) { + options.getResumeToken().ifPresent(builder::startAfter); + } else { + options.getResumeTimestamp().ifPresent(builder::resumeAt); + options.getResumeBsonTimestamp().ifPresent(builder::resumeAt); + } + + return builder; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java index 86f22ac8ae..dda6bf1b96 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. 
+ * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -30,5 +30,4 @@ public interface ReactiveCollectionCallback { Publisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException; - } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java index bb1c946383..470fd05ef7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java index 8dbc357d3b..cba827ffed 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,7 +18,11 @@ import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.geo.GeoResult; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; @@ -38,13 +42,14 @@ * query(Human.class) * .inCollection("star-wars") * .as(Jedi.class) - * .matching(query(where("firstname").is("luke"))) + * .matching(where("firstname").is("luke")) * .all(); * * * * @author Mark Paluch * @author Christoph Strobl + * @author Juergen Zimmermann * @since 2.0 */ public interface ReactiveFindOperation { @@ -86,7 +91,42 @@ interface TerminatingFind { Flux all(); /** - * Get the number of matching elements. + * Return a scroll of elements either starting or resuming at {@link ScrollPosition}. + *

          + * When using {@link KeysetScrollPosition}, make sure to use non-nullable + * {@link org.springframework.data.domain.Sort sort properties} as MongoDB does not support criteria to reconstruct + * a query result from absent document fields or {@code null} values through {@code $gt/$lt} operators. + * + * @param scrollPosition the scroll position. + * @return a scroll of the resulting elements. + * @since 4.1 + * @see org.springframework.data.domain.OffsetScrollPosition + * @see org.springframework.data.domain.KeysetScrollPosition + */ + Mono> scroll(ScrollPosition scrollPosition); + + /** + * Get all matching elements using a {@link com.mongodb.CursorType#TailableAwait tailable cursor}. The stream will + * not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link org.reactivestreams.Subscription#cancel() canceled}.
          + * However, the stream may become dead, or invalid, if either the query returns no match or the cursor returns the + * document at the "end" of the collection and then the application deletes that document.
          + * A stream that is no longer in use must be {@link reactor.core.Disposable#dispose()} disposed} otherwise the + * streams will linger and exhaust resources.
          + * NOTE: Requires a capped collection. + * + * @return the {@link Flux} emitting converted objects. + * @since 2.1 + */ + Flux tail(); + + /** + * Get the number of matching elements.
          + * This method uses an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but + * guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the applications + * needs use {@link ReactiveMongoOperations#estimatedCount(String)} for empty queries instead. * * @return {@link Mono} emitting total number of matching elements. Never {@literal null}. */ @@ -127,6 +167,18 @@ interface FindWithQuery extends TerminatingFind { */ TerminatingFind matching(Query query); + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingFind}. + * @throws IllegalArgumentException if criteria is {@literal null}. + * @since 3.0 + */ + default TerminatingFind matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + /** * Set the filter query for the geoNear execution. * @@ -242,9 +294,21 @@ interface DistinctWithQuery extends DistinctWithProjection { * * @param query must not be {@literal null}. * @return new instance of {@link TerminatingDistinct}. - * @throws IllegalArgumentException if resultType is {@literal null}. + * @throws IllegalArgumentException if query is {@literal null}. */ TerminatingDistinct matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingDistinct}. + * @throws IllegalArgumentException if criteria is {@literal null}. 
+ * @since 3.0 + */ + default TerminatingDistinct matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java index 90aa9f2de2..d1aec8af36 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,14 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import org.bson.Document; import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.domain.Window; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.mongodb.core.CollectionPreparerSupport.ReactiveCollectionPreparerDelegate; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.SerializationUtils; @@ -31,8 +30,6 @@ import 
org.springframework.util.Assert; import org.springframework.util.StringUtils; -import com.mongodb.reactivestreams.client.FindPublisher; - /** * Implementation of {@link ReactiveFindOperation}. * @@ -40,21 +37,20 @@ * @author Christoph Strobl * @since 2.0 */ -@RequiredArgsConstructor class ReactiveFindOperationSupport implements ReactiveFindOperation { private static final Query ALL_QUERY = new Query(); - private final @NonNull ReactiveMongoTemplate template; + private final ReactiveMongoTemplate template; + + ReactiveFindOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation#query(java.lang.Class) - */ @Override public ReactiveFind query(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ReactiveFindSupport<>(template, domainType, domainType, null, ALL_QUERY); } @@ -65,85 +61,63 @@ public ReactiveFind query(Class domainType) { * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) static class ReactiveFindSupport implements ReactiveFind, FindWithCollection, FindWithProjection, FindWithQuery { - @NonNull ReactiveMongoTemplate template; - @NonNull Class domainType; - Class returnType; - String collection; - Query query; + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Class returnType; + private final String collection; + private final Query query; + + ReactiveFindSupport(ReactiveMongoTemplate template, Class domainType, Class returnType, String collection, + Query query) { + + this.template = template; + this.domainType = domainType; + this.returnType = returnType; + this.collection = collection; + this.query = query; + } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithCollection#inCollection(java.lang.String) - */ @Override public FindWithProjection inCollection(String collection) { - Assert.hasText(collection, "Collection name must not be null nor empty!"); + Assert.hasText(collection, "Collection name must not be null nor empty"); return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithProjection#as(java.lang.Class) - */ @Override public FindWithQuery as(Class returnType) { - Assert.notNull(returnType, "ReturnType must not be null!"); + Assert.notNull(returnType, "ReturnType must not be null"); return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override public TerminatingFind matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#first() - */ @Override public Mono first() { FindPublisherPreparer preparer = getCursorPreparer(query); - Flux result = doFind(new FindPublisherPreparer() { - @Override - public FindPublisher prepare(FindPublisher publisher) { - return preparer.prepare(publisher).limit(1); - } - }); + Flux result = doFind(publisher -> preparer.prepare(publisher).limit(1)); return result.next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#one() - */ @Override public Mono one() { FindPublisherPreparer preparer = getCursorPreparer(query); - Flux result = doFind(new FindPublisherPreparer() 
{ - @Override - public FindPublisher prepare(FindPublisher publisher) { - return preparer.prepare(publisher).limit(2); - } - }); + Flux result = doFind(publisher -> preparer.prepare(publisher).limit(2)); return result.collectList().flatMap(it -> { @@ -153,57 +127,47 @@ public FindPublisher prepare(FindPublisher publisher) { if (it.size() > 1) { return Mono.error( - new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result.", 1)); + new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result", 1)); } return Mono.just(it.get(0)); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#all() - */ @Override public Flux all() { return doFind(null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithQuery#near(org.springframework.data.mongodb.core.query.NearQuery) - */ + @Override + public Mono> scroll(ScrollPosition scrollPosition) { + return template.doScroll(query.with(scrollPosition), domainType, returnType, getCollectionName()); + } + + @Override + public Flux tail() { + return doFind(template.new TailingQueryFindPublisherPreparer(query, domainType)); + } + @Override public TerminatingFindNear near(NearQuery nearQuery) { return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#count() - */ @Override public Mono count() { return template.count(query, domainType, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#exists() - */ @Override public Mono exists() { return template.exists(query, domainType, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindDistinct#distinct(java.lang.String) - */ @Override 
public TerminatingDistinct distinct(String field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return new DistinctOperationSupport<>(this, field); } @@ -213,8 +177,8 @@ private Flux doFind(@Nullable FindPublisherPreparer preparer) { Document queryObject = query.getQueryObject(); Document fieldsObject = query.getFieldsObject(); - return template.doFind(getCollectionName(), queryObject, fieldsObject, domainType, returnType, - preparer != null ? preparer : getCursorPreparer(query)); + return template.doFind(getCollectionName(), ReactiveCollectionPreparerDelegate.of(query), queryObject, + fieldsObject, domainType, returnType, preparer != null ? preparer : getCursorPreparer(query)); } @SuppressWarnings("unchecked") @@ -229,7 +193,7 @@ private FindPublisherPreparer getCursorPreparer(Query query) { } private String getCollectionName() { - return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType); + return StringUtils.hasText(collection) ? 
collection : template.getCollectionName(domainType); } private String asString() { @@ -251,35 +215,23 @@ public DistinctOperationSupport(ReactiveFindSupport delegate, String field) { this.field = field; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.DistinctWithProjection#as(java.lang.Class) - */ @Override public TerminatingDistinct as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new DistinctOperationSupport<>((ReactiveFindSupport) delegate.as(resultType), field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.DistinctWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override @SuppressWarnings("unchecked") public TerminatingDistinct matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new DistinctOperationSupport<>((ReactiveFindSupport) delegate.matching(query), field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core..ReactiveFindOperation.TerminatingDistinct#all() - */ @Override public Flux all() { return delegate.doFindDistinct(field); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java index 85ebb5973a..30d61771df 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,7 +19,8 @@ * Stripped down interface providing access to a fluent API that specifies a basic set of reactive MongoDB operations. * * @author Mark Paluch + * @author Christoph Strobl * @since 2.0 */ public interface ReactiveFluentMongoOperations extends ReactiveFindOperation, ReactiveInsertOperation, - ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation {} + ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation, ReactiveMapReduceOperation, ReactiveChangeStreamOperation {} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java index 946a82ae02..ff3b690639 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java index e6daaedbe0..06d3c6eae7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,6 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; @@ -34,69 +30,61 @@ * @author Christoph Strobl * @since 2.0 */ -@RequiredArgsConstructor class ReactiveInsertOperationSupport implements ReactiveInsertOperation { - private final @NonNull ReactiveMongoTemplate template; + private final ReactiveMongoTemplate template; + + ReactiveInsertOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation#insert(java.lang.Class) - */ @Override public ReactiveInsert insert(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ReactiveInsertSupport<>(template, domainType, null); } - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) static class ReactiveInsertSupport implements ReactiveInsert { - @NonNull ReactiveMongoTemplate template; - @NonNull Class domainType; - String collection; + private final ReactiveMongoTemplate template; + private final Class domainType; + private final String collection; + + ReactiveInsertSupport(ReactiveMongoTemplate template, Class domainType, String collection) { + + this.template = template; + this.domainType = domainType; + this.collection = collection; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation.TerminatingInsert#one(java.lang.Object) - */ 
@Override public Mono one(T object) { - Assert.notNull(object, "Object must not be null!"); + Assert.notNull(object, "Object must not be null"); return template.insert(object, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation.TerminatingInsert#all(java.util.Collection) - */ @Override public Flux all(Collection objects) { - Assert.notNull(objects, "Objects must not be null!"); + Assert.notNull(objects, "Objects must not be null"); return template.insert(objects, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation.InsertWithCollection#inCollection(java.lang.String) - */ @Override public ReactiveInsert inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty."); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ReactiveInsertSupport<>(template, domainType, collection); } private String getCollectionName() { - return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType); + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java new file mode 100644 index 0000000000..798b1ca7dd --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java @@ -0,0 +1,212 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; + +import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.data.mongodb.core.query.Query; + +/** + * {@link ReactiveMapReduceOperation} allows creation and execution of MongoDB mapReduce operations in a fluent API + * style. The starting {@literal domainType} is used for mapping an optional {@link Query} provided via {@code matching} + * into the MongoDB specific representation. By default, the originating {@literal domainType} is also used for mapping + * back the results from the {@link org.bson.Document}. However, it is possible to define a different + * {@literal returnType} via {@code as} for mapping the result.
          + * The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there + * via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows to override the + * collection name for the execution. + * + *
          + *     
          + *         mapReduce(Human.class)
          + *             .map("function() { emit(this.id, this.firstname) }")
          + *             .reduce("function(id, name) { return sum(id, name); }")
          + *             .inCollection("star-wars")
          + *             .as(Jedi.class)
          + *             .matching(query(where("lastname").is("skywalker")))
          + *             .all();
          + *     
          + * 
          + * + * @author Christoph Strobl + * @since 2.1 + */ +public interface ReactiveMapReduceOperation { + + /** + * Start creating a mapReduce operation for the given {@literal domainType}. + * + * @param domainType must not be {@literal null}. + * @return new instance of {@link MapReduceWithMapFunction}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + MapReduceWithMapFunction mapReduce(Class domainType); + + /** + * Trigger mapReduce execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface TerminatingMapReduce { + + /** + * Get the {@link Flux} emitting mapReduce results. + * + * @return a {@link Flux} emitting the already mapped operation results. + */ + Flux all(); + } + + /** + * Provide the Javascript {@code function()} used to map matching documents. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithMapFunction { + + /** + * Set the Javascript map {@code function()}. + * + * @param mapFunction must not be {@literal null} nor empty. + * @return new instance of {@link MapReduceWithReduceFunction}. + * @throws IllegalArgumentException if {@literal mapFunction} is {@literal null} or empty. + */ + MapReduceWithReduceFunction map(String mapFunction); + + } + + /** + * Provide the Javascript {@code function()} used to reduce matching documents. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithReduceFunction { + + /** + * Set the Javascript reduce {@code function()}. + * + * @param reduceFunction must not be {@literal null} nor empty. + * @return new instance of {@link ReactiveMapReduce}. + * @throws IllegalArgumentException if {@literal reduceFunction} is {@literal null} or empty. + */ + ReactiveMapReduce reduce(String reduceFunction); + + } + + /** + * Collection override (Optional). 
+ * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithCollection extends MapReduceWithQuery { + + /** + * Explicitly set the name of the collection to perform the mapReduce operation on.
          + * Skip this step to use the default collection derived from the domain type. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link MapReduceWithProjection}. + * @throws IllegalArgumentException if collection is {@literal null}. + */ + MapReduceWithProjection inCollection(String collection); + } + + /** + * Input document filter query (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithQuery extends TerminatingMapReduce { + + /** + * Set the filter query to be used. + * + * @param query must not be {@literal null}. + * @return new instance of {@link TerminatingMapReduce}. + * @throws IllegalArgumentException if query is {@literal null}. + */ + TerminatingMapReduce matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingMapReduce}. + * @throws IllegalArgumentException if query is {@literal null}. + * @since 3.0 + */ + default TerminatingMapReduce matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + } + + /** + * Result type override (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithProjection extends MapReduceWithQuery { + + /** + * Define the target type fields should be mapped to.
          + * Skip this step if you are anyway only interested in the original domain type. + * + * @param resultType must not be {@literal null}. + * @param result type. + * @return new instance of {@link TerminatingMapReduce}. + * @throws IllegalArgumentException if resultType is {@literal null}. + */ + MapReduceWithQuery as(Class resultType); + } + + /** + * Additional mapReduce options (Optional). + * + * @author Christoph Strobl + * @since 2.1 + */ + interface MapReduceWithOptions { + + /** + * Set additional options to apply to the mapReduce operation. + * + * @param options must not be {@literal null}. + * @return new instance of {@link ReactiveMapReduce}. + * @throws IllegalArgumentException if options is {@literal null}. + */ + ReactiveMapReduce with(MapReduceOptions options); + } + + /** + * {@link ReactiveMapReduce} provides methods for constructing reactive mapReduce operations in a fluent way. + * + * @author Christoph Strobl + * @since 2.1 + */ + interface ReactiveMapReduce extends MapReduceWithMapFunction, MapReduceWithReduceFunction, + MapReduceWithCollection, MapReduceWithProjection, MapReduceWithOptions { + + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java new file mode 100644 index 0000000000..4f0d395950 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java @@ -0,0 +1,178 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; + +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Implementation of {@link ReactiveMapReduceOperation}. + * + * @author Christoph Strobl + * @since 2.1 + */ +class ReactiveMapReduceOperationSupport implements ReactiveMapReduceOperation { + + private static final Query ALL_QUERY = new Query(); + + private final ReactiveMongoTemplate template; + + ReactiveMapReduceOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation#mapReduce(java.lang.Class) + */ + @Override + public ReactiveMapReduceSupport mapReduce(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + + return new ReactiveMapReduceSupport<>(template, domainType, domainType, null, ALL_QUERY, null, null, null); + } + + /** + * @author Christoph Strobl + * @since 2.1 + */ + static class ReactiveMapReduceSupport + implements ReactiveMapReduce, MapReduceWithOptions, MapReduceWithCollection, MapReduceWithProjection, + MapReduceWithQuery, MapReduceWithReduceFunction, MapReduceWithMapFunction { + + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Class returnType; + private final 
@Nullable String collection; + private final Query query; + private final @Nullable String mapFunction; + private final @Nullable String reduceFunction; + private final @Nullable MapReduceOptions options; + + ReactiveMapReduceSupport(ReactiveMongoTemplate template, Class domainType, Class returnType, + @Nullable String collection, Query query, @Nullable String mapFunction, @Nullable String reduceFunction, + @Nullable MapReduceOptions options) { + + this.template = template; + this.domainType = domainType; + this.returnType = returnType; + this.collection = collection; + this.query = query; + this.mapFunction = mapFunction; + this.reduceFunction = reduceFunction; + this.options = options; + } + + /* + * (non-Javascript) + * @see in org.springframework.data.mongodb.core.ExecutableMapReduceOperation.TerminatingMapReduce#all() + */ + @Override + public Flux all() { + + return template.mapReduce(query, domainType, getCollectionName(), returnType, mapFunction, reduceFunction, + options); + } + + /* + * (non-Javascript) + * @see in org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithCollection#inCollection(java.lang.String) + */ + @Override + public MapReduceWithProjection inCollection(String collection) { + + Assert.hasText(collection, "Collection name must not be null nor empty"); + + return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javascript) + * @see in org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithQuery#query(org.springframework.data.mongodb.core.query.Query) + */ + @Override + public TerminatingMapReduce matching(Query query) { + + Assert.notNull(query, "Query must not be null"); + + return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javascript) + * @see in 
org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithProjection#as(java.lang.Class) + */ + @Override + public MapReduceWithQuery as(Class resultType) { + + Assert.notNull(resultType, "ResultType must not be null"); + + return new ReactiveMapReduceSupport<>(template, domainType, resultType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javascript) + * @see in org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithOptions#with(org.springframework.data.mongodb.core.mapreduce.MapReduceOptions) + */ + @Override + public ReactiveMapReduce with(MapReduceOptions options) { + + Assert.notNull(options, "Options must not be null Please consider empty MapReduceOptions#options() instead"); + + return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javascript) + * @see in org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithMapFunction#map(java.lang.String) + */ + @Override + public MapReduceWithReduceFunction map(String mapFunction) { + + Assert.hasText(mapFunction, "MapFunction name must not be null nor empty"); + + return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + /* + * (non-Javascript) + * @see in org.springframework.data.mongodb.core.ReactiveMapReduceOperation.MapReduceWithReduceFunction#reduce(java.lang.String) + */ + @Override + public ReactiveMapReduce reduce(String reduceFunction) { + + Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty"); + + return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, + reduceFunction, options); + } + + private String getCollectionName() { + return StringUtils.hasText(collection) ? 
collection : template.getCollectionName(domainType); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java index 4d41a5fcb0..89d1cd78ac 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,7 +22,7 @@ import org.springframework.lang.Nullable; import org.springframework.util.StringUtils; -import com.mongodb.async.client.MongoClientSettings; +import com.mongodb.MongoClientSettings; import com.mongodb.reactivestreams.client.MongoClient; import com.mongodb.reactivestreams.client.MongoClients; @@ -36,13 +36,11 @@ public class ReactiveMongoClientFactoryBean extends AbstractFactoryBean implements PersistenceExceptionTranslator { - private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator(); - private @Nullable String connectionString; private @Nullable String host; private @Nullable Integer port; private @Nullable MongoClientSettings mongoClientSettings; - private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR; + private PersistenceExceptionTranslator exceptionTranslator = 
MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR; /** * Configures the host to connect to. @@ -86,7 +84,13 @@ public void setMongoClientSettings(@Nullable MongoClientSettings mongoClientSett * @param exceptionTranslator */ public void setExceptionTranslator(@Nullable PersistenceExceptionTranslator exceptionTranslator) { - this.exceptionTranslator = exceptionTranslator == null ? DEFAULT_EXCEPTION_TRANSLATOR : exceptionTranslator; + this.exceptionTranslator = exceptionTranslator == null ? MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR + : exceptionTranslator; + } + + @Override + public DataAccessException translateExceptionIfPossible(RuntimeException ex) { + return exceptionTranslator.translateExceptionIfPossible(ex); } @Override @@ -115,7 +119,7 @@ protected MongoClient createInstance() throws Exception { } throw new IllegalStateException( - "Cannot create MongoClients. One of the following is required: mongoClientSettings, connectionString or host/port"); + "Cannot create MongoClients; One of the following is required: mongoClientSettings, connectionString or host/port"); } @Override @@ -123,8 +127,4 @@ protected void destroyInstance(@Nullable MongoClient instance) throws Exception instance.close(); } - @Override - public DataAccessException translateExceptionIfPossible(RuntimeException ex) { - return exceptionTranslator.translateExceptionIfPossible(ex); - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java deleted file mode 100644 index d109fed02d..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright 2016-2018 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import java.util.ArrayList; -import java.util.List; - -import org.bson.codecs.configuration.CodecRegistry; -import org.springframework.beans.factory.config.AbstractFactoryBean; -import org.springframework.util.Assert; - -import com.mongodb.MongoCredential; -import com.mongodb.ReadConcern; -import com.mongodb.ReadPreference; -import com.mongodb.WriteConcern; -import com.mongodb.async.client.MongoClientSettings; -import com.mongodb.connection.ClusterSettings; -import com.mongodb.connection.ConnectionPoolSettings; -import com.mongodb.connection.ServerSettings; -import com.mongodb.connection.SocketSettings; -import com.mongodb.connection.SslSettings; -import com.mongodb.connection.StreamFactoryFactory; - -/** - * A factory bean for construction of a {@link MongoClientSettings} instance to be used with the async MongoDB driver. 
- * - * @author Mark Paluch - * @since 2.0 - */ -public class ReactiveMongoClientSettingsFactoryBean extends AbstractFactoryBean { - - private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build(); - - private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference(); - private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern(); - private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern(); - private List credentialList = new ArrayList<>(); - private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory(); - private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry(); - private ClusterSettings clusterSettings = DEFAULT_MONGO_SETTINGS.getClusterSettings(); - private SocketSettings socketSettings = DEFAULT_MONGO_SETTINGS.getSocketSettings(); - private SocketSettings heartbeatSocketSettings = DEFAULT_MONGO_SETTINGS.getHeartbeatSocketSettings(); - private ConnectionPoolSettings connectionPoolSettings = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings(); - private ServerSettings serverSettings = DEFAULT_MONGO_SETTINGS.getServerSettings(); - private SslSettings sslSettings = DEFAULT_MONGO_SETTINGS.getSslSettings(); - - /** - * Set the {@link ReadPreference}. - * - * @param readPreference - */ - public void setReadPreference(ReadPreference readPreference) { - this.readPreference = readPreference; - } - - /** - * Set the {@link WriteConcern}. - * - * @param writeConcern - */ - public void setWriteConcern(WriteConcern writeConcern) { - this.writeConcern = writeConcern; - } - - /** - * Set the {@link ReadConcern}. - * - * @param readConcern - */ - public void setReadConcern(ReadConcern readConcern) { - this.readConcern = readConcern; - } - - /** - * Set the List of {@link MongoCredential}s. - * - * @param credentialList must not be {@literal null}. 
- */ - public void setCredentialList(List credentialList) { - - Assert.notNull(credentialList, "CredendialList must not be null!"); - - this.credentialList.addAll(credentialList); - } - - /** - * Adds the {@link MongoCredential} to the list of credentials. - * - * @param mongoCredential must not be {@literal null}. - */ - public void addMongoCredential(MongoCredential mongoCredential) { - - Assert.notNull(mongoCredential, "MongoCredential must not be null!"); - - this.credentialList.add(mongoCredential); - } - - /** - * Set the {@link StreamFactoryFactory}. - * - * @param streamFactoryFactory - */ - public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) { - this.streamFactoryFactory = streamFactoryFactory; - } - - /** - * Set the {@link CodecRegistry}. - * - * @param codecRegistry - */ - public void setCodecRegistry(CodecRegistry codecRegistry) { - this.codecRegistry = codecRegistry; - } - - /** - * Set the {@link ClusterSettings}. - * - * @param clusterSettings - */ - public void setClusterSettings(ClusterSettings clusterSettings) { - this.clusterSettings = clusterSettings; - } - - /** - * Set the {@link SocketSettings}. - * - * @param socketSettings - */ - public void setSocketSettings(SocketSettings socketSettings) { - this.socketSettings = socketSettings; - } - - /** - * Set the heartbeat {@link SocketSettings}. - * - * @param heartbeatSocketSettings - */ - public void setHeartbeatSocketSettings(SocketSettings heartbeatSocketSettings) { - this.heartbeatSocketSettings = heartbeatSocketSettings; - } - - /** - * Set the {@link ConnectionPoolSettings}. - * - * @param connectionPoolSettings - */ - public void setConnectionPoolSettings(ConnectionPoolSettings connectionPoolSettings) { - this.connectionPoolSettings = connectionPoolSettings; - } - - /** - * Set the {@link ServerSettings}. 
- * - * @param serverSettings - */ - public void setServerSettings(ServerSettings serverSettings) { - this.serverSettings = serverSettings; - } - - /** - * Set the {@link SslSettings}. - * - * @param sslSettings - */ - public void setSslSettings(SslSettings sslSettings) { - this.sslSettings = sslSettings; - } - - @Override - public Class getObjectType() { - return MongoClientSettings.class; - } - - @Override - protected MongoClientSettings createInstance() throws Exception { - - return MongoClientSettings.builder() // - .readPreference(readPreference) // - .writeConcern(writeConcern) // - .readConcern(readConcern) // - .credentialList(credentialList) // - .streamFactoryFactory(streamFactoryFactory) // - .codecRegistry(codecRegistry) // - .clusterSettings(clusterSettings) // - .socketSettings(socketSettings) // - .heartbeatSocketSettings(heartbeatSocketSettings) // - .connectionPoolSettings(connectionPoolSettings) // - .serverSettings(serverSettings) // - .sslSettings(sslSettings) // - .build(); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java new file mode 100644 index 0000000000..8697ce4dcd --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java @@ -0,0 +1,78 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Mono; +import reactor.util.context.Context; + +import java.util.function.Function; + +import org.reactivestreams.Publisher; + +import org.springframework.util.Assert; + +import com.mongodb.reactivestreams.client.ClientSession; + +/** + * {@link ReactiveMongoContext} utilizes and enriches the Reactor {@link Context} with information potentially required + * for e.g. {@link ClientSession} handling and transactions. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + * @see Mono#deferContextual(Function) + * @see Context + */ +public class ReactiveMongoContext { + + private static final Class SESSION_KEY = ClientSession.class; + + /** + * Gets the {@code Mono} from Reactor {@link reactor.util.context.Context}. The resulting {@link Mono} + * emits the {@link ClientSession} if a session is associated with the current {@link reactor.util.context.Context + * subscriber context}. If the context does not contain a session, the resulting {@link Mono} terminates empty (i.e. + * without emitting a value). + * + * @return the {@link Mono} emitting the client session if present; otherwise the {@link Mono} terminates empty. + */ + public static Mono getSession() { + + return Mono.deferContextual(ctx -> { + + if (ctx.hasKey(SESSION_KEY)) { + return ctx.> get(SESSION_KEY); + } + + return Mono.empty(); + }); + } + + /** + * Sets the {@link ClientSession} into the Reactor {@link reactor.util.context.Context}. + * + * @param context must not be {@literal null}. + * @param session must not be {@literal null}. + * @return a new {@link Context}. 
+ * @see Context#put(Object, Object) + */ + public static Context setSession(Context context, Publisher session) { + + Assert.notNull(context, "Context must not be null"); + Assert.notNull(session, "Session publisher must not be null"); + + return context.put(SESSION_KEY, Mono.from(session)); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java index 9797979b0a..90f2d2345d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,28 +19,44 @@ import reactor.core.publisher.Mono; import java.util.Collection; -import java.util.List; +import java.util.function.Consumer; +import java.util.function.Supplier; import org.bson.Document; import org.reactivestreams.Publisher; import org.reactivestreams.Subscription; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; import org.springframework.data.geo.GeoResult; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperation; import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.index.ReactiveIndexOperations; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import 
org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import com.mongodb.ClientSessionOptions; import com.mongodb.ReadPreference; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.UpdateResult; +import com.mongodb.reactivestreams.client.ClientSession; import com.mongodb.reactivestreams.client.MongoCollection; /** @@ -48,14 +64,18 @@ *

          * Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability * (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using - * {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}. + * {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}.
          + * NOTE: Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB + * specific documentation to learn more about Multi + * Document Transactions. * * @author Mark Paluch * @author Christoph Strobl + * @author Mathieu Ouellet * @since 2.0 * @see Flux * @see Mono - * @see Project Reactor + * @see Project Reactor */ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { @@ -76,7 +96,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { ReactiveIndexOperations indexOps(Class entityClass); /** - * Execute the a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the + * Execute a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the * MongoDB driver to convert the JSON string to a Document. Any errors that result from executing this command will be * converted into Spring's DAO exception hierarchy. * @@ -105,8 +125,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { Mono executeCommand(Document command, @Nullable ReadPreference readPreference); /** - * Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary. - *

          + * Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary.
          * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not @@ -117,8 +136,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { Flux execute(ReactiveDatabaseCallback action); /** - * Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class. - *

          + * Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class.
          * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param entityClass class that determines the collection to use. Must not be {@literal null}. @@ -129,8 +147,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { Flux execute(Class entityClass, ReactiveCollectionCallback action); /** - * Executes the given {@link ReactiveCollectionCallback} on the collection of the given name. - *

          + * Executes the given {@link ReactiveCollectionCallback} on the collection of the given name.
          * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be @@ -141,6 +158,60 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { */ Flux execute(String collectionName, ReactiveCollectionCallback action); + /** + * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession} + * provided by the given {@link Supplier} to each and every command issued against MongoDB.
          + * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use + * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the + * {@link ClientSession} when done. + * + * @param sessionProvider must not be {@literal null}. + * @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}. + * @since 2.1 + */ + default ReactiveSessionScoped withSession(Supplier sessionProvider) { + + Assert.notNull(sessionProvider, "SessionProvider must not be null"); + + return withSession(Mono.fromSupplier(sessionProvider)); + } + + /** + * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding a new {@link ClientSession} + * with given {@literal sessionOptions} to each and every command issued against MongoDB.
          + * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use + * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the + * {@link ClientSession} when done. + * + * @param sessionOptions must not be {@literal null}. + * @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}. + * @since 2.1 + */ + ReactiveSessionScoped withSession(ClientSessionOptions sessionOptions); + + /** + * Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped} binding the + * {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against MongoDB. + *
          + * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use + * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the + * {@link ClientSession} when done. + * + * @param sessionProvider must not be {@literal null}. + * @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}. + * @since 2.1 + */ + ReactiveSessionScoped withSession(Publisher sessionProvider); + + /** + * Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoOperations}.
          + * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. + * + * @return {@link ClientSession} bound instance of {@link ReactiveMongoOperations}. + * @since 2.1 + */ + ReactiveMongoOperations withSession(ClientSession session); + /** * Create an uncapped collection with a name based on the provided entity class. * @@ -176,6 +247,58 @@ Mono> createCollection(Class entityClass, */ Mono> createCollection(String collectionName, CollectionOptions collectionOptions); + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationOperation pipeline + * stages} on another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param stages the {@link AggregationOperation aggregation pipeline stages} defining the view content. + * @since 4.0 + */ + default Mono> createView(String name, Class source, AggregationOperation... stages) { + return createView(name, source, AggregationPipeline.of(stages)); + } + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @since 4.0 + */ + default Mono> createView(String name, Class source, AggregationPipeline pipeline) { + return createView(name, source, pipeline, null); + } + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. 
+ * @param source the type defining the views source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. + * @since 4.0 + */ + Mono> createView(String name, Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options); + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given source. + * + * @param name the name of the view to create. + * @param source the name of the collection or view defining the to be created views source. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. + * @since 4.0 + */ + Mono> createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options); + /** * A set of collection names. * @@ -184,18 +307,19 @@ Mono> createCollection(Class entityClass, Flux getCollectionNames(); /** - * Get a collection by name, creating it if it doesn't exist. - *

          + * Get a {@link MongoCollection} by name. The returned collection may not exists yet (except in local memory) and is + * created on first interaction with the server. Collections can be explicitly created via + * {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class) + * exists} first.
          * Translate any exceptions as necessary. * * @param collectionName name of the collection. - * @return an existing collection or a newly created one. + * @return an existing collection or one created on first server interaction. */ - MongoCollection getCollection(String collectionName); + Mono> getCollection(String collectionName); /** - * Check to see if a collection with a name indicated by the entity class exists. - *

          + * Check to see if a collection with a name indicated by the entity class exists.
          * Translate any exceptions as necessary. * * @param entityClass class that determines the name of the collection. Must not be {@literal null}. @@ -204,8 +328,7 @@ Mono> createCollection(Class entityClass, Mono collectionExists(Class entityClass); /** - * Check to see if a collection with a given name exists. - *

          + * Check to see if a collection with a given name exists.
          * Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. @@ -214,8 +337,7 @@ Mono> createCollection(Class entityClass, Mono collectionExists(String collectionName); /** - * Drop the collection with the name indicated by the entity class. - *

          + * Drop the collection with the name indicated by the entity class.
          * Translate any exceptions as necessary. * * @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}. @@ -223,8 +345,7 @@ Mono> createCollection(Class entityClass, Mono dropCollection(Class entityClass); /** - * Drop the collection with the given name. - *

          + * Drop the collection with the given name.
          * Translate any exceptions as necessary. * * @param collectionName name of the collection to drop/delete. @@ -232,11 +353,43 @@ Mono> createCollection(Class entityClass, Mono dropCollection(String collectionName); /** - * Query for a {@link Flux} of objects of type T from the collection used by the entity class. - *

          + * Returns a new {@link ReactiveBulkOperations} for the given collection.
          + * NOTE: Any additional support for field mapping, etc. is not available for {@literal update} or + * {@literal remove} operations in bulk mode due to the lack of domain type information. Use + * {@link #bulkOps(BulkMode, Class, String)} to get full type specific support. + * + * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}. + * @param collectionName the name of the collection to work on, must not be {@literal null} or empty. + * @return {@link ReactiveBulkOperations} on the named collection + * @since 4.1 + */ + ReactiveBulkOperations bulkOps(BulkMode mode, String collectionName); + + /** + * Returns a new {@link ReactiveBulkOperations} for the given entity type. + * + * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}. + * @param entityClass the name of the entity class, must not be {@literal null}. + * @return {@link ReactiveBulkOperations} on the named collection associated of the given entity class. + * @since 4.1 + */ + ReactiveBulkOperations bulkOps(BulkMode mode, Class entityClass); + + /** + * Returns a new {@link ReactiveBulkOperations} for the given entity type and collection name. + * + * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}. + * @param entityType the name of the entity class. Can be {@literal null}. + * @param collectionName the name of the collection to work on, must not be {@literal null} or empty. + * @return {@link ReactiveBulkOperations} on the named collection associated with the given entity class. + * @since 4.1 + */ + ReactiveBulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName); + + /** + * Query for a {@link Flux} of objects of type T from the collection used by the entity class.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -246,11 +399,9 @@ Mono> createCollection(Class entityClass, Flux findAll(Class entityClass); /** - * Query for a {@link Flux} of objects of type T from the specified collection. - *

          + * Query for a {@link Flux} of objects of type T from the specified collection.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -262,15 +413,13 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the - * specified type. - *

          + * specified type.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Mono}. * @return the converted object. @@ -279,15 +428,13 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified - * type. - *

          + * type.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Mono}. * @param collectionName name of the collection to retrieve the objects from. @@ -300,7 +447,7 @@ Mono> createCollection(Class entityClass, * NOTE: Any additional support for query/field mapping, etc. is not available due to the lack of * domain type information. Use {@link #exists(Query, Class, String)} to get full type specific support. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param collectionName name of the collection to check for objects. * @return {@literal true} if the query yields a result. */ @@ -309,7 +456,7 @@ Mono> createCollection(Class entityClass, /** * Determine result of given {@link Query} contains at least one element. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param entityClass the parametrized type. * @return {@literal true} if the query yields a result. */ @@ -318,7 +465,7 @@ Mono> createCollection(Class entityClass, /** * Determine result of given {@link Query} contains at least one element. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param entityClass the parametrized type. Can be {@literal null}. 
* @param collectionName name of the collection to check for objects. * @return {@literal true} if the query yields a result. @@ -327,14 +474,13 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the collection for the entity class to a {@link Flux} of the specified type. - *

          + *
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityClass the parametrized type of the returned {@link Flux}. Must not be {@literal null}. * @return the {@link Flux} of converted objects. @@ -342,15 +488,13 @@ Mono> createCollection(Class entityClass, Flux find(Query query, Class entityClass); /** - * Map the results of an ad-hoc query on the specified collection to a {@link Flux} of the specified type. - *

          + * Map the results of an ad-hoc query on the specified collection to a {@link Flux} of the specified type.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityClass the parametrized type of the returned {@link Flux}. * @param collectionName name of the collection to retrieve the objects from. Must not be {@literal null}. @@ -358,6 +502,57 @@ Mono> createCollection(Class entityClass, */ Flux find(Query query, Class entityClass, String collectionName); + /** + * Query for a scroll of objects of type T from the specified collection.
          + * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
          + * Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

          + * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@code null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned list. + * @return {@link Mono} emitting the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + Mono> scroll(Query query, Class entityType); + + /** + * Query for a window of objects of type T from the specified collection.
          + * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
          + * Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

          + * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@code null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from. + * @return {@link Mono} emitting the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + Mono> scroll(Query query, Class entityType, String collectionName); + /** * Returns a document with the given id mapped onto the given class. The collection the query is ran against will be * derived from the given target class as well. @@ -457,11 +652,9 @@ default Flux findDistinct(Query query, String field, String collection, C Flux aggregate(TypedAggregation aggregation, String collectionName, Class outputType); /** - * Execute an aggregation operation. - *

          + * Execute an aggregation operation.
          * The raw results will be mapped to the given entity class and are returned as stream. The name of the - * inputCollection is derived from the {@link TypedAggregation#getInputType() aggregation input type}. - *

          + * inputCollection is derived from the {@link TypedAggregation#getInputType() aggregation input type}.
          * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -475,11 +668,9 @@ default Flux findDistinct(Query query, String field, String collection, C Flux aggregate(TypedAggregation aggregation, Class outputType); /** - * Execute an aggregation operation. - *

          + * Execute an aggregation operation.
          * The raw results will be mapped to the given {@code ouputType}. The name of the inputCollection is derived from the - * {@code inputType}. - *

          + * {@code inputType}.
          * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -495,10 +686,8 @@ default Flux findDistinct(Query query, String field, String collection, C Flux aggregate(Aggregation aggregation, Class inputType, Class outputType); /** - * Execute an aggregation operation. - *

          - * The raw results will be mapped to the given entity class. - *

          + * Execute an aggregation operation.
          + * The raw results will be mapped to the given entity class.
          * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -519,94 +708,304 @@ default Flux findDistinct(Query query, String field, String collection, C * entity mapping information to determine the collection the query is ran against. Note, that MongoDB limits the * number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a * particular number of results. + *

          + * MongoDB 4.2 has removed the {@code geoNear} command. This method uses since version 2.2 aggregations and the + * {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using + * aggregations directly: + *

          + * + *
          +	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
          +	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
          +	 * Flux<Document> results = aggregate(geoNear, Document.class);
          +	 * 
          * * @param near must not be {@literal null}. * @param entityClass must not be {@literal null}. * @return the converted {@link GeoResult}s. + * @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0. Use Aggregations with + * {@link Aggregation#geoNear(NearQuery, String)} instead. */ + @Deprecated Flux> geoNear(NearQuery near, Class entityClass); /** * Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Note, that MongoDB * limits the number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect * a particular number of results. + *

          + * MongoDB 4.2 has removed the {@code geoNear} command. This method uses since version 2.2 aggregations and the + * {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using + * aggregations directly: + *

          + * + *
          +	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
          +	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
          +	 * Flux<Document> results = aggregate(geoNear, Document.class);
          +	 * 
          * * @param near must not be {@literal null}. * @param entityClass must not be {@literal null}. * @param collectionName the collection to trigger the query against. If no collection name is given the entity class * will be inspected. * @return the converted {@link GeoResult}s. + * @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0. Use Aggregations with + * {@link Aggregation#geoNear(NearQuery, String)} instead. */ + @Deprecated Flux> geoNear(NearQuery near, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. - * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @return the converted object that was updated before it was updated. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono findAndModify(Query query, Update update, Class entityClass); + Mono findAndModify(Query query, UpdateDefinition update, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. - * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. * @return the converted object that was updated before it was updated. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono findAndModify(Query query, Update update, Class entityClass, String collectionName); + Mono findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. - * @param update the {@link Update} to apply on matching documents. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. + * @param update the {@link UpdateDefinition} to apply on matching documents. * @param options the {@link FindAndModifyOptions} holding additional information. * @param entityClass the parametrized type. * @return the converted object that was updated. Depending on the value of {@link FindAndModifyOptions#isReturnNew()} * this will either be the object as it was before the update or as it is after the update. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass); + Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. - * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. * @return the converted object that was updated. Depending on the value of {@link FindAndModifyOptions#isReturnNew()} * this will either be the object as it was before the update or as it is after the update. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, + Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass, String collectionName); + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
          + * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
          + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the converted object that was updated or {@link Mono#empty()}, if not found. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 2.1 + */ + default Mono findAndReplace(Query query, T replacement) { + return findAndReplace(query, replacement, FindAndReplaceOptions.empty()); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
          + * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
          + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the converted object that was updated or {@link Mono#empty()}, if not found. + * @since 2.1 + */ + default Mono findAndReplace(Query query, T replacement, String collectionName) { + return findAndReplace(query, replacement, FindAndReplaceOptions.empty(), collectionName); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
          + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. + * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 2.1 + */ + default Mono findAndReplace(Query query, T replacement, FindAndReplaceOptions options) { + return findAndReplace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement))); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
          + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. + * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @since 2.1 + */ + default Mono findAndReplace(Query query, T replacement, FindAndReplaceOptions options, String collectionName) { + + Assert.notNull(replacement, "Replacement must not be null"); + return findAndReplace(query, replacement, options, (Class) ClassUtils.getUserClass(replacement), collectionName); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
          + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. + * @param entityType the parametrized type. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @since 2.1 + */ + default Mono findAndReplace(Query query, T replacement, FindAndReplaceOptions options, Class entityType, + String collectionName) { + + return findAndReplace(query, replacement, options, entityType, collectionName, entityType); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
          + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. + * @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection + * from. Must not be {@literal null}. + * @param resultType the parametrized type projection return type. Must not be {@literal null}, use the domain type of + * {@code Object.class} instead. + * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 2.1 + */ + default Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, + Class resultType) { + + return findAndReplace(query, replacement, options, entityType, + getCollectionName(ClassUtils.getUserClass(entityType)), resultType); + } + + /** + * Triggers + * findOneAndReplace + * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document + * taking {@link FindAndReplaceOptions} into account.
          + * NOTE: The replacement entity must not hold an {@literal id}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. + * @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection + * from. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @param resultType resultType the parametrized type projection return type. Must not be {@literal null}, use the + * domain type of {@code Object.class} instead. + * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of + * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or + * as it is after the update. + * @since 2.1 + */ + Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, + String collectionName, Class resultType); + /** * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the * specified type. The first document that matches the query is returned and also removed from the collection in the - * database. - *

          - * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. - *

          + * database.
          + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Mono}. * @return the converted object @@ -616,14 +1015,13 @@ Mono findAndModify(Query query, Update update, FindAndModifyOptions optio /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. The first document that matches the query is returned and also removed from the collection in the database. - *

          + *
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Mono}. * @param collectionName name of the collection to retrieve the objects from. @@ -633,349 +1031,604 @@ Mono findAndModify(Query query, Update update, FindAndModifyOptions optio /** * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + *
          + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
          + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be * {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @see #exactCount(Query, Class) + * @see #estimatedCount(Class) */ Mono count(Query query, Class entityClass); /** * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} * must solely consist of document field references as we lack type information to map potential property references - * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. + * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
          + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
          + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. * @param collectionName must not be {@literal null} or empty. * @return the count of matching documents. * @see #count(Query, Class, String) + * @see #estimatedCount(String) + * @see #exactCount(Query, String) */ Mono count(Query query, String collectionName); /** * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity - * class to map the given {@link Query}. + * class to map the given {@link Query}.
          + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
          + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be * {@literal null}. * @param entityClass the parametrized type. Can be {@literal null}. * @param collectionName must not be {@literal null} or empty. * @return the count of matching documents. + * @see #estimatedCount(String) + * @see #exactCount(Query, Class, String) */ Mono count(Query query, @Nullable Class entityClass, String collectionName); /** - * Insert the object into the collection for the entity type of the object to save. - *

          - * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

          - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a - * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's Type - * Conversion" for more details. - *

          - *

          + * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type}, + * based on collection statistics.
          + * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside + * transactions. + * + * @param entityClass must not be {@literal null}. + * @return a {@link Mono} emitting the estimated number of documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.1 + */ + default Mono estimatedCount(Class entityClass) { + + Assert.notNull(entityClass, "Entity class must not be null"); + return estimatedCount(getCollectionName(entityClass)); + } + + /** + * Estimate the number of documents in the given collection based on collection statistics.
          + * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside + * transactions. + * + * @param collectionName must not be {@literal null}. + * @return a {@link Mono} emitting the estimated number of documents. + * @since 3.1 + */ + Mono estimatedCount(String collectionName); + + /** + * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + *
          + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
          + * This method uses an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(Class)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.4 + */ + default Mono exactCount(Query query, Class entityClass) { + return exactCount(query, entityClass, getCollectionName(entityClass)); + } + + /** + * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} + * must solely consist of document field references as we lack type information to map potential property references + * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
          + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
          + * This method uses an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(String)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @see #count(Query, Class, String) + * @since 3.4 + */ + default Mono exactCount(Query query, String collectionName) { + return exactCount(query, null, collectionName); + } + + /** + * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity + * class to map the given {@link Query}.
          + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
          + * This method uses an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(String)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass the parametrized type. Can be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @since 3.4 + */ + Mono exactCount(Query query, @Nullable Class entityClass, String collectionName); + + /** + * Insert the object into the collection for the entity type of the object to save.
          + * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}.
          + * If your object has an {@literal Id} property which holds a {@literal null} value, it will be set with the generated + * Id from MongoDB. If your Id property is a String then MongoDB ObjectId will be used to populate that string. + * Otherwise, the conversion from ObjectId to your property type will be handled by Spring's BeanWrapper class that + * leverages Type Conversion API. See + * Spring's + * Type Conversion" for more details.
          * Insert is used to initially store the object into the database. To update an existing object use the save method. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

          + * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. - * @return the saved object. + * @return the inserted object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ Mono insert(T objectToSave); /** - * Insert the object into the specified collection. - *

          + * Insert the object into the specified collection.
          * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * Insert is used to initially store the object into the database. To update an existing object use the save method. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

          + * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. - * @return the saved object. + * @return the inserted object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. */ Mono insert(T objectToSave, String collectionName); /** * Insert a Collection of objects into a collection in a single batch write to the database. + *

          + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the batch of objects to save. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. - * @return the saved objects. + * @return the inserted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Flux insert(Collection batchToSave, Class entityClass); /** * Insert a batch of objects into the specified collection in a single batch write to the database. + *

          + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the list of objects to save. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. - * @return the saved objects. + * @return the inserted objects. */ Flux insert(Collection batchToSave, String collectionName); /** * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the * class. + *

          + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param objectsToSave the list of objects to save. Must not be {@literal null}. * @return the saved objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} for the given objects. */ Flux insertAll(Collection objectsToSave); /** - * Insert the object into the collection for the entity type of the object to save. - *

          - * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

          - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * Insert the object into the collection for the entity type of the object to save.
          + * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}.
          + * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's Type - * Conversion" for more details. - *

          - *

          + * Spring's + * Type Conversion" for more details.
          * Insert is used to initially store the object into the database. To update an existing object use the save method. + *

          + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. - * @return the saved object. + * @return the inserted objects. */ Mono insert(Mono objectToSave); /** * Insert a Collection of objects into a collection in a single batch write to the database. + *

          + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the publisher which provides objects to save. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. - * @return the saved objects. + * @return the inserted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} for the type. */ Flux insertAll(Mono> batchToSave, Class entityClass); /** * Insert objects into the specified collection in a single batch write to the database. + *

          + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the publisher which provides objects to save. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. - * @return the saved objects. + * @return the inserted objects. */ Flux insertAll(Mono> batchToSave, String collectionName); /** * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the * class. + *

          + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param objectsToSave the publisher which provides objects to save. Must not be {@literal null}. - * @return the saved objects. + * @return the inserted objects. */ Flux insertAll(Mono> objectsToSave); /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the - * object is not already present, that is an 'upsert'. - *

          + * object is not already present, that is an 'upsert'.
          * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          + * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's Type - * Conversion" for more details. + * Spring's + * Type Conversion" for more details. + *

          + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. + *

          + * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the saved object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ Mono save(T objectToSave); /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that - * is an 'upsert'. - *

          + * is an 'upsert'.
          * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          + * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's Type + * Conversion for more details. + *

          + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. * @return the saved object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ Mono save(T objectToSave, String collectionName); /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the - * object is not already present, that is an 'upsert'. - *

          + * object is not already present, that is an 'upsert'.
          * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          + * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's Type - * Conversion" for more details. + * Spring's Type + * Conversion for more details. + *

          + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the saved object. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ Mono save(Mono objectToSave); /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that - * is an 'upsert'. - *

          + * is an 'upsert'.
          * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          + * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's Type + * Conversion for more details. + *

          + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. * - * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. * @return the saved object. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ Mono save(Mono objectToSave, String collectionName); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + *

          + * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. + * Use {@link #findAndModify(Query, UpdateDefinition, Class)} instead. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono upsert(Query query, Update update, Class entityClass); + Mono upsert(Query query, UpdateDefinition update, Class entityClass); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document.
          * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of - * domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support. + * domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific + * support. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono upsert(Query query, Update update, String collectionName); + Mono upsert(Query query, UpdateDefinition update, String collectionName); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono upsert(Query query, Update update, Class entityClass, String collectionName); + Mono upsert(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** * Updates the first object that is found in the collection of the entity class that matches the query document with * the provided update document. - * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class that determines the collection to use. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono updateFirst(Query query, Update update, Class entityClass); + Mono updateFirst(Query query, UpdateDefinition update, Class entityClass); /** * Updates the first object that is found in the specified collection that matches the query document criteria with * the provided updated document.
          * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of - * domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support. - * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific + * support. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono updateFirst(Query query, Update update, String collectionName); + Mono updateFirst(Query query, UpdateDefinition update, String collectionName); /** * Updates the first object that is found in the specified collection that matches the query document criteria with - * the provided updated document.
          - * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * the provided updated document. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono updateFirst(Query query, Update update, Class entityClass, String collectionName); + Mono updateFirst(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @see Update + * @see AggregationUpdate */ - Mono updateMulti(Query query, Update update, Class entityClass); + Mono updateMulti(Query query, UpdateDefinition update, Class entityClass); /** * Updates all objects that are found in the specified collection that matches the query document criteria with the * provided updated document.
          * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of - * domain type information. Use {@link #updateMulti(Query, Update, Class, String)} to get full type specific support. + * domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific + * support. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono updateMulti(Query query, Update update, String collectionName); + Mono updateMulti(Query query, UpdateDefinition update, String collectionName); /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono updateMulti(Query query, Update update, Class entityClass, String collectionName); + Mono updateMulti(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Remove the given object from the collection by id. + * Remove the given object from the collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}. * * @param object must not be {@literal null}. * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ Mono remove(Object object); @@ -983,45 +1636,54 @@ Mono findAndModify(Query query, Update update, FindAndModifyOptions optio * Removes the given object from the given collection. 
* * @param object must not be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ Mono remove(Object object, String collectionName); /** - * Remove the given object from the collection by id. + * Remove the given object from the collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}. * * @param objectToRemove must not be {@literal null}. * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ Mono remove(Mono objectToRemove); /** - * Removes the given object from the given collection. + * Removes the given object from the given collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}. * * @param objectToRemove must not be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ Mono remove(Mono objectToRemove, String collectionName); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. 
The Class parameter is also used to help convert the Id of the object if it is present in the query. * - * @param query the query document that specifies the criteria used to remove a record. + * @param query the query document that specifies the criteria used to remove a document. * @param entityClass class that determines the collection to use. * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Mono remove(Query query, Class entityClass); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. * - * @param query the query document that specifies the criteria used to remove a record. + * @param query the query document that specifies the criteria used to remove a document. * @param entityClass class of the pojo to be operated on. Can be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ Mono remove(Query query, @Nullable Class entityClass, String collectionName); @@ -1032,8 +1694,9 @@ Mono findAndModify(Query query, Update update, FindAndModifyOptions optio * NOTE: Any additional support for field mapping is not available due to the lack of domain type * information. Use {@link #remove(Query, Class, String)} to get full type specific support. 
* - * @param query the query document that specifies the criteria used to remove a record. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param query the query document that specifies the criteria used to remove a document. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ Mono remove(Query query, String collectionName); @@ -1044,7 +1707,8 @@ Mono findAndModify(Query query, Update update, FindAndModifyOptions optio * information. Use {@link #findAllAndRemove(Query, Class, String)} to get full type specific support. * * @param query the query document that specifies the criteria used to find and remove documents. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link Flux} converted objects deleted by this operation. */ Flux findAllAndRemove(Query query, String collectionName); @@ -1055,37 +1719,113 @@ Mono findAndModify(Query query, Update update, FindAndModifyOptions optio * @param query the query document that specifies the criteria used to find and remove documents. * @param entityClass class of the pojo to be operated on. * @return the {@link Flux} converted objects deleted by this operation. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Flux findAllAndRemove(Query query, Class entityClass); /** - * Returns and removes all documents that match the provided query document criteria from the the collection used to - * store the entityClass. 
The Class parameter is also used to help convert the Id of the object if it is present in - * the query. + * Returns and removes all documents that match the provided query document criteria from the collection used to store + * the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the + * query. * * @param query the query document that specifies the criteria used to find and remove documents. * @param entityClass class of the pojo to be operated on. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link Flux} converted objects deleted by this operation. */ Flux findAllAndRemove(Query query, Class entityClass, String collectionName); + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
          + * The collection name is derived from the {@literal replacement} type.
          + * Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default Mono replace(Query query, T replacement) { + return replace(query, replacement, ReplaceOptions.none()); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document. Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. 
+ * @since 4.2 + */ + default Mono replace(Query query, T replacement, String collectionName) { + return replace(query, replacement, ReplaceOptions.none(), collectionName); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document.The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default Mono replace(Query query, T replacement, ReplaceOptions options) { + return replace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement))); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may * + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. 
The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + Mono replace(Query query, T replacement, ReplaceOptions options, String collectionName); + /** * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is - * {@link Subscription#cancel() canceled}. - *

          + * {@link Subscription#cancel() canceled}.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Flux}. * @return the {@link Flux} of converted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Flux tail(Query query, Class entityClass); @@ -1093,15 +1833,13 @@ Mono findAndModify(Query query, Update update, FindAndModifyOptions optio * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is - * {@link Subscription#cancel() canceled}. - *

          + * {@link Subscription#cancel() canceled}.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Flux}. * @param collectionName name of the collection to retrieve the objects from. @@ -1110,54 +1848,127 @@ Mono findAndModify(Query query, Update update, FindAndModifyOptions optio Flux tail(Query query, Class entityClass, String collectionName); /** - * Subscribe to a MongoDB Change Streams via the reactive - * infrastructure. Use the optional provided {@link Aggregation} to filter events. The stream will not be completed - * unless the {@link org.reactivestreams.Subscription} is {@link Subscription#cancel() canceled}. - *

          + * Subscribe to a MongoDB Change Stream for all events in + * the configured default database via the reactive infrastructure. Use the optional provided {@link Aggregation} to + * filter events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link Subscription#cancel() canceled}.
          * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the - * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

          + * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
          * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken} * for resuming change streams. * - * @param filter can be {@literal null}. - * @param resultType must not be {@literal null}. - * @param options must not be {@literal null}. - * @param collectionName must not be {@literal null} nor empty. + * @param options must not be {@literal null}. Use {@link ChangeStreamOptions#empty()}. + * @param targetType the result type to use. * @param - * @return + * @return the {@link Flux} emitting {@link ChangeStreamEvent events} as they arrive. * @since 2.1 + * @see ReactiveMongoDatabaseFactory#getMongoDatabase() + * @see ChangeStreamOptions#getFilter() */ - Flux> changeStream(@Nullable Aggregation filter, Class resultType, - ChangeStreamOptions options, String collectionName); + default Flux> changeStream(ChangeStreamOptions options, Class targetType) { + return changeStream(null, options, targetType); + } /** - * Subscribe to a MongoDB Change Streams via the reactive - * infrastructure. Use the optional provided aggregation chain to filter events. The stream will not be completed - * unless the {@link org.reactivestreams.Subscription} is {@link Subscription#cancel() canceled}. - *

          + * Subscribe to a MongoDB Change Stream for all events in + * the given collection via the reactive infrastructure. Use the optional provided {@link Aggregation} to filter + * events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link Subscription#cancel() canceled}.
          * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the - * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

          - * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumeToken} + * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
          + * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken} * for resuming change streams. * - * @param filter can be empty, must not be {@literal null}. - * @param resultType must not be {@literal null}. - * @param options must not be {@literal null}. - * @param collectionName must not be {@literal null} nor empty. + * @param collectionName the collection to watch. Can be {@literal null} to watch all collections. + * @param options must not be {@literal null}. Use {@link ChangeStreamOptions#empty()}. + * @param targetType the result type to use. * @param - * @return + * @return the {@link Flux} emitting {@link ChangeStreamEvent events} as they arrive. * @since 2.1 + * @see ChangeStreamOptions#getFilter() */ - Flux> changeStream(List filter, Class resultType, ChangeStreamOptions options, - String collectionName); + default Flux> changeStream(@Nullable String collectionName, ChangeStreamOptions options, + Class targetType) { + + return changeStream(null, collectionName, options, targetType); + } + + /** + * Subscribe to a MongoDB Change Stream via the reactive + * infrastructure. Use the optional provided {@link Aggregation} to filter events. The stream will not be completed + * unless the {@link org.reactivestreams.Subscription} is {@link Subscription#cancel() canceled}.
          + * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the + * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
          + * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken} + * for resuming change streams. + * + * @param database the database to watch. Can be {@literal null}, uses configured default if so. + * @param collectionName the collection to watch. Can be {@literal null}, watches all collections if so. + * @param options must not be {@literal null}. Use {@link ChangeStreamOptions#empty()}. + * @param targetType the result type to use. + * @param + * @return the {@link Flux} emitting {@link ChangeStreamEvent events} as they arrive. + * @since 2.1 + * @see ChangeStreamOptions#getFilter() + */ + Flux> changeStream(@Nullable String database, @Nullable String collectionName, + ChangeStreamOptions options, Class targetType); + + /** + * Execute a map-reduce operation. Use {@link MapReduceOptions} to optionally specify an output collection and other + * args. + * + * @param filterQuery the selection criteria for the documents going input to the map function. Must not be + * {@literal null}. + * @param domainType source type used to determine the input collection name and map the filter {@link Query} against. + * Must not be {@literal null}. + * @param resultType the mapping target of the operations result documents. Must not be {@literal null}. + * @param mapFunction the JavaScript map function. Must not be {@literal null}. + * @param reduceFunction the JavaScript reduce function. Must not be {@literal null}. + * @param options additional options like output collection. Must not be {@literal null}. + * @return a {@link Flux} emitting the result document sequence. Never {@literal null}. + * @since 2.1 + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. + */ + @Deprecated + Flux mapReduce(Query filterQuery, Class domainType, Class resultType, String mapFunction, + String reduceFunction, MapReduceOptions options); + + /** + * Execute a map-reduce operation. 
Use {@link MapReduceOptions} to optionally specify an output collection and other + * args. + * + * @param filterQuery the selection criteria for the documents going input to the map function. Must not be + * {@literal null}. + * @param domainType source type used to map the filter {@link Query} against. Must not be {@literal null}. + * @param inputCollectionName the input collection. + * @param resultType the mapping target of the operations result documents. Must not be {@literal null}. + * @param mapFunction the JavaScript map function. Must not be {@literal null}. + * @param reduceFunction the JavaScript reduce function. Must not be {@literal null}. + * @param options additional options like output collection. Must not be {@literal null}. + * @return a {@link Flux} emitting the result document sequence. Never {@literal null}. + * @since 2.1 + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. + */ + @Deprecated + Flux mapReduce(Query filterQuery, Class domainType, String inputCollectionName, Class resultType, + String mapFunction, String reduceFunction, MapReduceOptions options); /** * Returns the underlying {@link MongoConverter}. * - * @return + * @return never {@literal null}. */ MongoConverter getConverter(); + /** + * The collection name used for the specified class by this template. + * + * @param entityClass must not be {@literal null}. + * @return never {@literal null}. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be derived from the type. 
+ * @since 2.1 + */ + String getCollectionName(Class entityClass); + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java index f7e31741ee..b74ec6aa1c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,38 @@ */ package org.springframework.data.mongodb.core; -import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.SerializationUtils.*; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.util.function.Tuple2; - -import java.util.*; -import java.util.Map.Entry; +import reactor.util.function.Tuples; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.function.BiFunction; +import java.util.function.BiPredicate; +import java.util.function.Consumer; import java.util.function.Function; 
import java.util.stream.Collectors; -import javax.annotation.Nonnull; - +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.bson.BsonValue; import org.bson.Document; -import org.bson.codecs.Codec; import org.bson.conversions.Bson; import org.bson.types.ObjectId; import org.reactivestreams.Publisher; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.reactivestreams.Subscriber; + import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; @@ -46,93 +54,110 @@ import org.springframework.context.ApplicationEventPublisherAware; import org.springframework.context.ApplicationListener; import org.springframework.context.ConfigurableApplicationContext; -import org.springframework.core.convert.ConversionService; import org.springframework.dao.DataAccessException; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.annotation.Id; import org.springframework.data.convert.EntityReader; +import org.springframework.data.domain.OffsetScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.Metric; import org.springframework.data.mapping.MappingException; -import org.springframework.data.mapping.PersistentPropertyAccessor; -import org.springframework.data.mapping.PropertyPath; -import org.springframework.data.mapping.PropertyReferenceException; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; import org.springframework.data.mapping.context.MappingContext; -import 
org.springframework.data.mapping.model.ConvertingPropertyAccessor; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mapping.context.MappingContextEvent; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.ReactiveMongoDatabaseUtils; +import org.springframework.data.mongodb.SessionSynchronization; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.CollectionPreparerSupport.ReactiveCollectionPreparerDelegate; +import org.springframework.data.mongodb.core.DefaultReactiveBulkOperations.ReactiveBulkOperationContext; +import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity; +import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition; +import org.springframework.data.mongodb.core.QueryOperations.CountContext; +import org.springframework.data.mongodb.core.QueryOperations.DeleteContext; +import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext; +import org.springframework.data.mongodb.core.QueryOperations.QueryContext; +import org.springframework.data.mongodb.core.QueryOperations.UpdateContext; +import org.springframework.data.mongodb.core.ScrollUtils.KeysetScrollQuery; import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions.Builder; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; import org.springframework.data.mongodb.core.aggregation.PrefixingDelegatingAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; import 
org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; -import org.springframework.data.mongodb.core.convert.*; -import org.springframework.data.mongodb.core.index.IndexOperationsAdapter; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher; -import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator; import org.springframework.data.mongodb.core.index.ReactiveIndexOperations; +import org.springframework.data.mongodb.core.index.ReactiveMongoPersistentEntityIndexCreator; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; -import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; -import 
org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; -import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; +import org.springframework.data.mongodb.core.mapping.event.*; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Collation; -import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Meta; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.core.query.Update; -import org.springframework.data.mongodb.core.validation.Validator; -import org.springframework.data.mongodb.util.MongoClientVersion; -import org.springframework.data.projection.ProjectionInformation; -import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; +import org.springframework.data.projection.EntityProjection; import org.springframework.data.util.Optionals; -import org.springframework.data.util.Pair; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; +import org.springframework.util.CollectionUtils; +import org.springframework.util.NumberUtils; import org.springframework.util.ObjectUtils; +import org.springframework.util.ResourceUtils; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBObject; +import com.mongodb.ClientSessionOptions; import com.mongodb.CursorType; -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; -import 
com.mongodb.DBRef; -import com.mongodb.Mongo; import com.mongodb.MongoException; import com.mongodb.ReadPreference; import com.mongodb.WriteConcern; +import com.mongodb.client.model.CountOptions; import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.CreateViewOptions; import com.mongodb.client.model.DeleteOptions; -import com.mongodb.client.model.Filters; +import com.mongodb.client.model.EstimatedDocumentCountOptions; import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndReplaceOptions; import com.mongodb.client.model.FindOneAndUpdateOptions; import com.mongodb.client.model.ReturnDocument; import com.mongodb.client.model.UpdateOptions; -import com.mongodb.client.model.ValidationOptions; import com.mongodb.client.model.changestream.FullDocument; import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.InsertOneResult; import com.mongodb.client.result.UpdateResult; import com.mongodb.reactivestreams.client.AggregatePublisher; import com.mongodb.reactivestreams.client.ChangeStreamPublisher; +import com.mongodb.reactivestreams.client.ClientSession; import com.mongodb.reactivestreams.client.DistinctPublisher; import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MapReducePublisher; import com.mongodb.reactivestreams.client.MongoClient; import com.mongodb.reactivestreams.client.MongoCollection; import com.mongodb.reactivestreams.client.MongoDatabase; -import com.mongodb.reactivestreams.client.Success; -import com.mongodb.util.JSONParseException; /** * Primary implementation of {@link ReactiveMongoOperations}. It simplifies the use of Reactive MongoDB usage and helps @@ -140,33 +165,33 @@ * extract results. 
This class executes BSON queries or updates, initiating iteration over {@link FindPublisher} and * catching MongoDB exceptions and translating them to the generic, more informative exception hierarchy defined in the * org.springframework.dao package. Can be used within a service implementation via direct instantiation with a - * {@link SimpleReactiveMongoDatabaseFactory} reference, or get prepared in an application context and given to services - * as bean reference. Note: The {@link SimpleReactiveMongoDatabaseFactory} should always be configured as a bean in the - * application context, in the first case given to the service directly, in the second case to the prepared template. + * {@link ReactiveMongoDatabaseFactory} reference, or get prepared in an application context and given to services as + * bean reference. + *

          + * Note: The {@link ReactiveMongoDatabaseFactory} should always be configured as a bean in the application context, in + * the first case given to the service directly, in the second case to the prepared template. + *

          {@link ReadPreference} and {@link com.mongodb.ReadConcern}

          + *

          + * {@code ReadPreference} and {@code ReadConcern} are generally considered from {@link Query} and + * {@link AggregationOptions} objects for the action to be executed on a particular {@link MongoCollection}. + *

          + * You can also set the default {@link #setReadPreference(ReadPreference) ReadPreference} on the template level to + * generally apply a {@link ReadPreference}. * * @author Mark Paluch * @author Christoph Strobl + * @author Roman Puchkovskiy + * @author Mathieu Ouellet + * @author Yadhukrishna S Pai + * @author Florian Lüdiger * @since 2.0 */ public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware { - public static final DbRefResolver NO_OP_REF_RESOLVER = new NoOpDbRefResolver(); + public static final DbRefResolver NO_OP_REF_RESOLVER = NoOpDbRefResolver.INSTANCE; - private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveMongoTemplate.class); - private static final String ID_FIELD = "_id"; + private static final Log LOGGER = LogFactory.getLog(ReactiveMongoTemplate.class); private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE; - private static final Collection> ITERABLE_CLASSES; - - static { - - Set> iterableClasses = new HashSet<>(); - iterableClasses.add(List.class); - iterableClasses.add(Collection.class); - iterableClasses.add(Iterator.class); - iterableClasses.add(Publisher.class); - - ITERABLE_CLASSES = Collections.unmodifiableCollection(iterableClasses); - } private final MongoConverter mongoConverter; private final MappingContext, MongoPersistentProperty> mappingContext; @@ -174,15 +199,23 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati private final PersistenceExceptionTranslator exceptionTranslator; private final QueryMapper queryMapper; private final UpdateMapper updateMapper; - private final JsonSchemaMapper schemaMapper; - private final SpelAwareProxyProjectionFactory projectionFactory; + private final ApplicationListener> indexCreatorListener; + private final EntityOperations operations; + private final PropertyOperations propertyOperations; + private final QueryOperations queryOperations; + private final 
EntityLifecycleEventDelegate eventDelegate; private @Nullable WriteConcern writeConcern; private WriteConcernResolver writeConcernResolver = DefaultWriteConcernResolver.INSTANCE; private WriteResultChecking writeResultChecking = WriteResultChecking.NONE; private @Nullable ReadPreference readPreference; private @Nullable ApplicationEventPublisher eventPublisher; - private @Nullable MongoPersistentEntityIndexCreator indexCreator; + private @Nullable ReactiveEntityCallbacks entityCallbacks; + private @Nullable ReactiveMongoPersistentEntityIndexCreator indexCreator; + + private SessionSynchronization sessionSynchronization = SessionSynchronization.ON_ACTUAL_TRANSACTION; + + private CountExecution countExecution = this::doExactCount; /** * Constructor used for a basic template configuration. @@ -191,7 +224,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati * @param databaseName must not be {@literal null} or empty. */ public ReactiveMongoTemplate(MongoClient mongoClient, String databaseName) { - this(new SimpleReactiveMongoDatabaseFactory(mongoClient, databaseName), null); + this(new SimpleReactiveMongoDatabaseFactory(mongoClient, databaseName), (MongoConverter) null); } /** @@ -200,7 +233,7 @@ public ReactiveMongoTemplate(MongoClient mongoClient, String databaseName) { * @param mongoDatabaseFactory must not be {@literal null}. */ public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory) { - this(mongoDatabaseFactory, null); + this(mongoDatabaseFactory, (MongoConverter) null); } /** @@ -211,31 +244,81 @@ public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory) */ public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, @Nullable MongoConverter mongoConverter) { + this(mongoDatabaseFactory, mongoConverter, ReactiveMongoTemplate::handleSubscriptionException); + } + + /** + * Constructor used for a basic template configuration. 
+ * + * @param mongoDatabaseFactory must not be {@literal null}. + * @param mongoConverter can be {@literal null}. + * @param subscriptionExceptionHandler exception handler called by {@link Flux#doOnError(Consumer)} on reactive type + * materialization via {@link Publisher#subscribe(Subscriber)}. This callback is used during non-blocking + * subscription of e.g. index creation {@link Publisher}s. Must not be {@literal null}. + * @since 2.1 + */ + public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, + @Nullable MongoConverter mongoConverter, Consumer subscriptionExceptionHandler) { - Assert.notNull(mongoDatabaseFactory, "ReactiveMongoDatabaseFactory must not be null!"); + Assert.notNull(mongoDatabaseFactory, "ReactiveMongoDatabaseFactory must not be null"); this.mongoDatabaseFactory = mongoDatabaseFactory; this.exceptionTranslator = mongoDatabaseFactory.getExceptionTranslator(); this.mongoConverter = mongoConverter == null ? getDefaultMongoConverter() : mongoConverter; this.queryMapper = new QueryMapper(this.mongoConverter); this.updateMapper = new UpdateMapper(this.mongoConverter); - this.schemaMapper = new MongoJsonSchemaMapper(this.mongoConverter); - this.projectionFactory = new SpelAwareProxyProjectionFactory(); + this.indexCreatorListener = new IndexCreatorEventListener(subscriptionExceptionHandler); // We always have a mapping context in the converter, whether it's a simple one or not - mappingContext = this.mongoConverter.getMappingContext(); + this.mappingContext = this.mongoConverter.getMappingContext(); + this.operations = new EntityOperations(this.mongoConverter, this.queryMapper); + this.propertyOperations = new PropertyOperations(this.mongoConverter.getMappingContext()); + this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, propertyOperations, + mongoDatabaseFactory); + this.eventDelegate = new EntityLifecycleEventDelegate(); + // We create indexes based on mapping events + if (this.mappingContext 
instanceof MongoMappingContext mongoMappingContext) { - if (mappingContext instanceof MongoMappingContext) { - indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, - (collectionName) -> IndexOperationsAdapter.blocking(indexOps(collectionName))); - eventPublisher = new MongoMappingEventPublisher(indexCreator); - if (mappingContext instanceof ApplicationEventPublisherAware) { - ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + if (mongoMappingContext.isAutoIndexCreation()) { + this.indexCreator = new ReactiveMongoPersistentEntityIndexCreator(mongoMappingContext, this::indexOps); + this.eventPublisher = new MongoMappingEventPublisher(this.indexCreatorListener); + + mongoMappingContext.setApplicationEventPublisher(this.eventPublisher); + this.mappingContext.getPersistentEntities() + .forEach(entity -> onCheckForIndexes(entity, subscriptionExceptionHandler)); } } } + private ReactiveMongoTemplate(ReactiveMongoDatabaseFactory dbFactory, ReactiveMongoTemplate that) { + + this.mongoDatabaseFactory = dbFactory; + this.exceptionTranslator = that.exceptionTranslator; + this.mongoConverter = that.mongoConverter; + this.queryMapper = that.queryMapper; + this.updateMapper = that.updateMapper; + this.indexCreator = that.indexCreator; + this.indexCreatorListener = that.indexCreatorListener; + this.mappingContext = that.mappingContext; + this.operations = that.operations; + this.propertyOperations = that.propertyOperations; + this.sessionSynchronization = that.sessionSynchronization; + this.queryOperations = that.queryOperations; + this.eventDelegate = that.eventDelegate; + } + + private void onCheckForIndexes(MongoPersistentEntity entity, Consumer subscriptionExceptionHandler) { + + if (indexCreator != null) { + indexCreator.checkForIndexes(entity).subscribe(v -> {}, subscriptionExceptionHandler); + } + } + + private static void handleSubscriptionException(Throwable t) { + 
LOGGER.error("Unexpected exception during asynchronous execution", t); + } + /** * Configures the {@link WriteResultChecking} to be used with the template. Setting {@literal null} will reset the * default of {@link ReactiveMongoTemplate#DEFAULT_WRITE_RESULT_CHECKING}. @@ -248,8 +331,7 @@ public void setWriteResultChecking(@Nullable WriteResultChecking resultChecking) /** * Configures the {@link WriteConcern} to be used with the template. If none is configured the {@link WriteConcern} - * configured on the {@link MongoDbFactory} will apply. If you configured a {@link Mongo} instance no - * {@link WriteConcern} will be used. + * configured on the {@link MongoDatabaseFactory} will apply. * * @param writeConcern can be {@literal null}. */ @@ -267,7 +349,7 @@ public void setWriteConcernResolver(@Nullable WriteConcernResolver writeConcernR } /** - * Used by @{link {@link #prepareCollection(MongoCollection)} to set the {@link ReadPreference} before any operations + * Used by {@link {@link #prepareCollection(MongoCollection)} to set the {@link ReadPreference} before any operations * are performed. * * @param readPreference @@ -276,44 +358,117 @@ public void setReadPreference(ReadPreference readPreference) { this.readPreference = readPreference; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) + /** + * Configure whether lifecycle events such as {@link AfterLoadEvent}, {@link BeforeSaveEvent}, etc. should be + * published or whether emission should be suppressed. Enabled by default. + * + * @param enabled {@code true} to enable entity lifecycle events; {@code false} to disable entity lifecycle events. 
+ * @since 4.0 + * @see MongoMappingEvent */ + public void setEntityLifecycleEventsEnabled(boolean enabled) { + this.eventDelegate.setEventsEnabled(enabled); + } + + @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { prepareIndexCreator(applicationContext); eventPublisher = applicationContext; - if (mappingContext instanceof ApplicationEventPublisherAware) { - ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + eventDelegate.setPublisher(eventPublisher); + + if (entityCallbacks == null) { + setEntityCallbacks(ReactiveEntityCallbacks.create(applicationContext)); + } + + if (mappingContext instanceof ApplicationEventPublisherAware applicationEventPublisherAware) { + applicationEventPublisherAware.setApplicationEventPublisher(eventPublisher); } + } + + /** + * Set the {@link ReactiveEntityCallbacks} instance to use when invoking + * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the + * {@link ReactiveBeforeSaveCallback}.
          + * Overrides potentially existing {@link ReactiveEntityCallbacks}. + * + * @param entityCallbacks must not be {@literal null}. + * @throws IllegalArgumentException if the given instance is {@literal null}. + * @since 2.2 + */ + public void setEntityCallbacks(ReactiveEntityCallbacks entityCallbacks) { + + Assert.notNull(entityCallbacks, "EntityCallbacks must not be null"); + this.entityCallbacks = entityCallbacks; + } + + /** + * Configure whether to use estimated count. Defaults to exact counting. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @since 3.4 + */ + public void useEstimatedCount(boolean enabled) { + useEstimatedCount(enabled, this::countCanBeEstimated); + } + + /** + * Configure whether to use estimated count based on the given {@link BiPredicate estimationFilter}. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @param estimationFilter the {@link BiPredicate filter}. 
+ * @since 3.4 + */ + private void useEstimatedCount(boolean enabled, BiFunction> estimationFilter) { - projectionFactory.setBeanFactory(applicationContext); - projectionFactory.setBeanClassLoader(applicationContext.getClassLoader()); + if (enabled) { + + this.countExecution = (collectionName, filter, options) -> { + + return estimationFilter.apply(filter, options).flatMap(canEstimate -> { + if (!canEstimate) { + return doExactCount(collectionName, filter, options); + } + + EstimatedDocumentCountOptions estimatedDocumentCountOptions = new EstimatedDocumentCountOptions(); + if (options.getMaxTime(TimeUnit.MILLISECONDS) > 0) { + estimatedDocumentCountOptions.maxTime(options.getMaxTime(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS); + } + + return doEstimatedCount(collectionName, estimatedDocumentCountOptions); + }); + }; + } else { + this.countExecution = this::doExactCount; + } } /** - * Inspects the given {@link ApplicationContext} for {@link MongoPersistentEntityIndexCreator} and those in turn if - * they were registered for the current {@link MappingContext}. If no creator for the current {@link MappingContext} - * can be found we manually add the internally created one as {@link ApplicationListener} to make sure indexes get - * created appropriately for entity types persisted through this {@link ReactiveMongoTemplate} instance. + * Inspects the given {@link ApplicationContext} for {@link ReactiveMongoPersistentEntityIndexCreator} and those in + * turn if they were registered for the current {@link MappingContext}. If no creator for the current + * {@link MappingContext} can be found we manually add the internally created one as {@link ApplicationListener} to + * make sure indexes get created appropriately for entity types persisted through this {@link ReactiveMongoTemplate} + * instance. * * @param context must not be {@literal null}. 
*/ private void prepareIndexCreator(ApplicationContext context) { - String[] indexCreators = context.getBeanNamesForType(MongoPersistentEntityIndexCreator.class); + String[] indexCreators = context.getBeanNamesForType(ReactiveMongoPersistentEntityIndexCreator.class); for (String creator : indexCreators) { - MongoPersistentEntityIndexCreator creatorBean = context.getBean(creator, MongoPersistentEntityIndexCreator.class); + ReactiveMongoPersistentEntityIndexCreator creatorBean = context.getBean(creator, + ReactiveMongoPersistentEntityIndexCreator.class); if (creatorBean.isIndexCreatorFor(mappingContext)) { return; } } - if (context instanceof ConfigurableApplicationContext) { - ((ConfigurableApplicationContext) context).addApplicationListener(indexCreator); + if (context instanceof ConfigurableApplicationContext configurableApplicationContext) { + configurableApplicationContext.addApplicationListener(indexCreatorListener); } } @@ -322,90 +477,117 @@ private void prepareIndexCreator(ApplicationContext context) { * * @return */ + @Override public MongoConverter getConverter() { return this.mongoConverter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.String) - */ + @Override public ReactiveIndexOperations indexOps(String collectionName) { return new DefaultReactiveIndexOperations(this, collectionName, this.queryMapper); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.Class) - */ + @Override public ReactiveIndexOperations indexOps(Class entityClass) { - return new DefaultReactiveIndexOperations(this, determineCollectionName(entityClass), this.queryMapper, - entityClass); + return new DefaultReactiveIndexOperations(this, getCollectionName(entityClass), this.queryMapper, entityClass); } + @Override public String getCollectionName(Class entityClass) { - return this.determineCollectionName(entityClass); + return 
operations.determineCollectionName(entityClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(java.lang.String) - */ + @Override public Mono executeCommand(String jsonCommand) { - Assert.notNull(jsonCommand, "Command must not be empty!"); + Assert.notNull(jsonCommand, "Command must not be empty"); return executeCommand(Document.parse(jsonCommand)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(org.bson.Document) - */ - public Mono executeCommand(final Document command) { + @Override + public Mono executeCommand(Document command) { return executeCommand(command, null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(org.bson.Document, com.mongodb.ReadPreference) - */ - public Mono executeCommand(final Document command, @Nullable ReadPreference readPreference) { + @Override + public Mono executeCommand(Document command, @Nullable ReadPreference readPreference) { - Assert.notNull(command, "Command must not be null!"); + Assert.notNull(command, "Command must not be null"); return createFlux(db -> readPreference != null ? 
db.runCommand(command, readPreference, Document.class) : db.runCommand(command, Document.class)).next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(java.lang.Class, org.springframework.data.mongodb.core.ReactiveCollectionCallback) - */ @Override public Flux execute(Class entityClass, ReactiveCollectionCallback action) { - return createFlux(determineCollectionName(entityClass), action); + return createFlux(getCollectionName(entityClass), action); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(org.springframework.data.mongodb.core.ReactiveDbCallback) - */ @Override public Flux execute(ReactiveDatabaseCallback action) { return createFlux(action); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(java.lang.String, org.springframework.data.mongodb.core.ReactiveCollectionCallback) - */ + @Override public Flux execute(String collectionName, ReactiveCollectionCallback callback) { - Assert.notNull(callback, "ReactiveCollectionCallback must not be null!"); + Assert.notNull(callback, "ReactiveCollectionCallback must not be null"); + return createFlux(collectionName, callback); } + @Override + public ReactiveSessionScoped withSession(Publisher sessionProvider) { + + Mono cachedSession = Mono.from(sessionProvider).cache(); + + return new ReactiveSessionScoped() { + + @Override + public Flux execute(ReactiveSessionCallback action, Consumer doFinally) { + + return cachedSession.flatMapMany(session -> { + + return ReactiveMongoTemplate.this.withSession(action, session) // + .doFinally(signalType -> { + doFinally.accept(session); + }); + }); + } + }; + } + + /** + * Define if {@link ReactiveMongoTemplate} should participate in transactions. Default is set to + * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION}.
          + * NOTE: MongoDB transactions require at least MongoDB 4.0. + * + * @since 2.2 + */ + public void setSessionSynchronization(SessionSynchronization sessionSynchronization) { + this.sessionSynchronization = sessionSynchronization; + } + + private Flux withSession(ReactiveSessionCallback action, ClientSession session) { + + ReactiveSessionBoundMongoTemplate operations = new ReactiveSessionBoundMongoTemplate(session, + ReactiveMongoTemplate.this); + + return Flux.from(action.doInSession(operations)) // + .contextWrite(ctx -> ReactiveMongoContext.setSession(ctx, Mono.just(session))); + } + + @Override + public ReactiveMongoOperations withSession(ClientSession session) { + return new ReactiveSessionBoundMongoTemplate(session, ReactiveMongoTemplate.this); + } + + @Override + public ReactiveSessionScoped withSession(ClientSessionOptions sessionOptions) { + return withSession(mongoDatabaseFactory.getSession(sessionOptions)); + } + /** * Create a reusable Flux for a {@link ReactiveDatabaseCallback}. It's up to the developer to choose to obtain a new * {@link Flux} or to reuse the {@link Flux}. @@ -415,9 +597,10 @@ public Flux execute(String collectionName, ReactiveCollectionCallback */ public Flux createFlux(ReactiveDatabaseCallback callback) { - Assert.notNull(callback, "ReactiveDatabaseCallback must not be null!"); + Assert.notNull(callback, "ReactiveDatabaseCallback must not be null"); - return Flux.defer(() -> callback.doInDB(getMongoDatabase())).onErrorMap(translateException()); + return Mono.defer(this::doGetDatabase).flatMapMany(database -> callback.doInDB(prepareDatabase(database))) + .onErrorMap(translateException()); } /** @@ -427,11 +610,12 @@ public Flux createFlux(ReactiveDatabaseCallback callback) { * @param callback must not be {@literal null} * @return a {@link Mono} wrapping the {@link ReactiveDatabaseCallback}. 
*/ - public Mono createMono(final ReactiveDatabaseCallback callback) { + public Mono createMono(ReactiveDatabaseCallback callback) { - Assert.notNull(callback, "ReactiveDatabaseCallback must not be null!"); + Assert.notNull(callback, "ReactiveDatabaseCallback must not be null"); - return Mono.defer(() -> Mono.from(callback.doInDB(getMongoDatabase()))).onErrorMap(translateException()); + return Mono.defer(this::doGetDatabase).flatMap(database -> Mono.from(callback.doInDB(prepareDatabase(database)))) + .onErrorMap(translateException()); } /** @@ -443,11 +627,11 @@ public Mono createMono(final ReactiveDatabaseCallback callback) { */ public Flux createFlux(String collectionName, ReactiveCollectionCallback callback) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(callback, "ReactiveDatabaseCallback must not be null!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(callback, "ReactiveDatabaseCallback must not be null"); - Mono> collectionPublisher = Mono - .fromCallable(() -> getAndPrepareCollection(getMongoDatabase(), collectionName)); + Mono> collectionPublisher = doGetDatabase() + .map(database -> getAndPrepareCollection(database, collectionName)); return collectionPublisher.flatMapMany(callback::doInCollection).onErrorMap(translateException()); } @@ -462,155 +646,187 @@ public Flux createFlux(String collectionName, ReactiveCollectionCallback< */ public Mono createMono(String collectionName, ReactiveCollectionCallback callback) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(callback, "ReactiveCollectionCallback must not be null!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(callback, "ReactiveCollectionCallback must not be null"); - Mono> collectionPublisher = Mono - .fromCallable(() -> getAndPrepareCollection(getMongoDatabase(), collectionName)); + Mono> 
collectionPublisher = doGetDatabase() + .map(database -> getAndPrepareCollection(database, collectionName)); return collectionPublisher.flatMap(collection -> Mono.from(callback.doInCollection(collection))) .onErrorMap(translateException()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class) - */ + @Override public Mono> createCollection(Class entityClass) { - return createCollection(determineCollectionName(entityClass)); + return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class, org.springframework.data.mongodb.core.CollectionOptions) - */ + @Override public Mono> createCollection(Class entityClass, @Nullable CollectionOptions collectionOptions) { - return doCreateCollection(determineCollectionName(entityClass), - convertToCreateCollectionOptions(collectionOptions, entityClass)); + + Assert.notNull(entityClass, "EntityClass must not be null"); + + CollectionOptions options = collectionOptions != null ? 
collectionOptions : CollectionOptions.empty(); + options = Optionals + .firstNonEmpty(() -> Optional.ofNullable(collectionOptions).flatMap(CollectionOptions::getCollation), + () -> operations.forType(entityClass).getCollation()) // + .map(options::collation).orElse(options); + + return doCreateCollection(getCollectionName(entityClass), convertToCreateCollectionOptions(options, entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.String) - */ + @Override public Mono> createCollection(String collectionName) { return doCreateCollection(collectionName, new CreateCollectionOptions()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.String, org.springframework.data.mongodb.core.CollectionOptions) - */ + @Override public Mono> createCollection(String collectionName, @Nullable CollectionOptions collectionOptions) { return doCreateCollection(collectionName, convertToCreateCollectionOptions(collectionOptions)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#getCollection(java.lang.String) - */ - public MongoCollection getCollection(final String collectionName) { - return execute((MongoDatabaseCallback>) db -> db.getCollection(collectionName)); + @Override + public Mono> createView(String name, Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, getCollectionName(source), + queryOperations.createAggregation(Aggregation.newAggregation(source, pipeline.getOperations()), source), + options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#collectionExists(java.lang.Class) - */ + @Override + public Mono> createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, source, + 
queryOperations.createAggregation(Aggregation.newAggregation(pipeline.getOperations()), (Class) null), + options); + } + + private Mono> createView(String name, String source, AggregationDefinition aggregation, + @Nullable ViewOptions options) { + return doCreateView(name, source, aggregation.getAggregationPipeline(), options); + } + + protected Mono> doCreateView(String name, String source, List pipeline, + @Nullable ViewOptions options) { + + CreateViewOptions viewOptions = new CreateViewOptions(); + if (options != null) { + options.getCollation().map(Collation::toMongoCollation).ifPresent(viewOptions::collation); + } + + return execute(db -> { + return Flux.from(db.createView(name, source, pipeline, viewOptions)) + .then(Mono.fromSupplier(() -> db.getCollection(name))); + }).next(); + } + + @Override + public Mono> getCollection(String collectionName) { + + Assert.notNull(collectionName, "Collection name must not be null"); + + return createMono(db -> Mono.just(db.getCollection(collectionName))); + } + + @Override public Mono collectionExists(Class entityClass) { - return collectionExists(determineCollectionName(entityClass)); + return collectionExists(getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#collectionExists(java.lang.String) - */ - public Mono collectionExists(final String collectionName) { - return createMono(db -> Flux.from(db.listCollectionNames()) // + @Override + public Mono collectionExists(String collectionName) { + return createMono(db -> Flux.from(MongoCompatibilityAdapter.reactiveMongoDatabaseAdapter().forDb(db).listCollectionNames()) // .filter(s -> s.equals(collectionName)) // .map(s -> true) // .single(false)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#dropCollection(java.lang.Class) - */ + @Override public Mono dropCollection(Class entityClass) { - return 
dropCollection(determineCollectionName(entityClass)); + return dropCollection(getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#dropCollection(java.lang.String) - */ - public Mono dropCollection(final String collectionName) { + @Override + public Mono dropCollection(String collectionName) { - return createMono(db -> db.getCollection(collectionName).drop()).doOnSuccess(success -> { + return createMono(collectionName, MongoCollection::drop).doOnSuccess(success -> { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Dropped collection [" + collectionName + "]"); } }).then(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#getCollectionNames() - */ + @Override + public ReactiveBulkOperations bulkOps(BulkMode mode, String collectionName) { + return bulkOps(mode, null, collectionName); + } + + @Override + public ReactiveBulkOperations bulkOps(BulkMode mode, Class entityClass) { + return bulkOps(mode, entityClass, getCollectionName(entityClass)); + } + + @Override + public ReactiveBulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName) { + + Assert.notNull(mode, "BulkMode must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + + DefaultReactiveBulkOperations operations = new DefaultReactiveBulkOperations(this, collectionName, + new ReactiveBulkOperationContext(mode, Optional.ofNullable(getPersistentEntity(entityType)), queryMapper, + updateMapper, eventPublisher, entityCallbacks)); + + operations.setDefaultWriteConcern(writeConcern); + + return operations; + } + + @Override public Flux getCollectionNames() { - return createFlux(MongoDatabase::listCollectionNames); + return createFlux(db -> MongoCompatibilityAdapter.reactiveMongoDatabaseAdapter().forDb(db).listCollectionNames()); } - public MongoDatabase getMongoDatabase() { + public Mono getMongoDatabase() { return 
mongoDatabaseFactory.getMongoDatabase(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + protected Mono doGetDatabase() { + return ReactiveMongoDatabaseUtils.getDatabase(mongoDatabaseFactory, sessionSynchronization); + } + + @Override public Mono findOne(Query query, Class entityClass) { - return findOne(query, entityClass, determineCollectionName(entityClass)); + return findOne(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Mono findOne(Query query, Class entityClass, String collectionName) { if (ObjectUtils.isEmpty(query.getSortObject())) { - return doFindOne(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, - query.getCollation().orElse(null)); + return doFindOne(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), entityClass, new QueryFindPublisherPreparer(query, entityClass)); } query.limit(1); return find(query, entityClass, collectionName).next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#exists(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override public Mono exists(Query query, Class entityClass) { - return exists(query, entityClass, determineCollectionName(entityClass)); + return exists(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#exists(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ + @Override public Mono exists(Query query, String collectionName) { return exists(query, null, collectionName); } - /* - * 
(non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#exists(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ - public Mono exists(final Query query, @Nullable Class entityClass, String collectionName) { + @Override + public Mono exists(Query query, @Nullable Class entityClass, String collectionName) { if (query == null) { throw new InvalidDataAccessApiUsageException("Query passed in to exist can't be null"); @@ -618,378 +834,478 @@ public Mono exists(final Query query, @Nullable Class entityClass, S return createFlux(collectionName, collection -> { - Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), getPersistentEntity(entityClass)); - FindPublisher findPublisher = collection.find(mappedQuery).projection(new Document("_id", 1)); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); + QueryContext queryContext = queryOperations.createQueryContext(query); + Document filter = queryContext.getMappedQuery(entityClass, this::getPersistentEntity); - findPublisher = query.getCollation().map(Collation::toMongoCollation).map(findPublisher::collation) - .orElse(findPublisher); + FindPublisher findPublisher = collectionPreparer.prepare(collection).find(filter, Document.class) + .projection(new Document(FieldName.ID.name(), 1)); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("exists: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); + } + + queryContext.applyCollation(entityClass, findPublisher::collation); return findPublisher.limit(1); }).hasElements(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#find(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override public Flux find(Query query, Class entityClass) { - return find(query, entityClass, determineCollectionName(entityClass)); + return find(query, entityClass, 
getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#find(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Flux find(@Nullable Query query, Class entityClass, String collectionName) { if (query == null) { return findAll(entityClass, collectionName); } - return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, - new QueryFindPublisherPreparer(query, entityClass)); + return doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), entityClass, new QueryFindPublisherPreparer(query, entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findById(java.lang.Object, java.lang.Class) - */ - public Mono findById(Object id, Class entityClass) { - return findById(id, entityClass, determineCollectionName(entityClass)); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findById(java.lang.Object, java.lang.Class, java.lang.String) - */ - public Mono findById(Object id, Class entityClass, String collectionName) { - - MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entityClass); - MongoPersistentProperty idProperty = persistentEntity != null ? persistentEntity.getIdProperty() : null; + @Override + public Mono> scroll(Query query, Class entityType) { - String idKey = idProperty == null ? 
ID_FIELD : idProperty.getName(); + Assert.notNull(entityType, "Entity type must not be null"); - return doFindOne(collectionName, new Document(idKey, id), null, entityClass, null); + return scroll(query, entityType, getCollectionName(entityType)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findDistinct(org.springframework.data.mongodb.core.query.Query, java.lang.String, java.lang.Class, java.lang.Class) - */ - public Flux findDistinct(Query query, String field, Class entityClass, Class resultClass) { - return findDistinct(query, field, determineCollectionName(entityClass), entityClass, resultClass); + @Override + public Mono> scroll(Query query, Class entityType, String collectionName) { + return doScroll(query, entityType, entityType, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findDistinct(org.springframework.data.mongodb.core.query.Query, java.lang.String, java.lang.String, java.lang.Class, java.lang.Class) - */ - @SuppressWarnings("unchecked") - public Flux findDistinct(Query query, String field, String collectionName, Class entityClass, - Class resultClass) { + Mono> doScroll(Query query, Class sourceClass, Class targetClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(field, "Field must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(resultClass, "ResultClass must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(sourceClass, "Entity type must not be null"); + Assert.notNull(targetClass, "Target type must not be null"); - MongoPersistentEntity entity = getPersistentEntity(entityClass); + EntityProjection projection = operations.introspectProjection(targetClass, 
sourceClass); + ProjectingReadCallback callback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName); + int limit = query.isLimited() ? query.getLimit() + 1 : Integer.MAX_VALUE; - Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), entity); - String mappedFieldName = queryMapper.getMappedFields(new Document(field, 1), entity).keySet().iterator().next(); + if (query.hasKeyset()) { - Class mongoDriverCompatibleType = mongoDatabaseFactory.getCodecFor(resultClass).map(Codec::getEncoderClass) - .orElse((Class) BsonValue.class); + KeysetScrollQuery keysetPaginationQuery = ScrollUtils.createKeysetPaginationQuery(query, + operations.getIdPropertyName(sourceClass)); - Flux result = execute(collectionName, collection -> { + Mono> result = doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), + keysetPaginationQuery.query(), keysetPaginationQuery.fields(), sourceClass, + new QueryFindPublisherPreparer(query, keysetPaginationQuery.sort(), limit, 0, sourceClass), callback) + .collectList(); - DistinctPublisher publisher = collection.distinct(mappedFieldName, mappedQuery, mongoDriverCompatibleType); + return result.map(it -> ScrollUtils.createWindow(query, it, sourceClass, operations)); + } - return query.getCollation().map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher); - }); + Mono> result = doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), sourceClass, + new QueryFindPublisherPreparer(query, query.getSortObject(), limit, query.getSkip(), sourceClass), callback) + .collectList(); - if (resultClass == Object.class || mongoDriverCompatibleType != resultClass) { + return result.map( + it -> ScrollUtils.createWindow(it, query.getLimit(), OffsetScrollPosition.positionFunction(query.getSkip()))); + } - Class targetType = getMostSpecificConversionTargetType(resultClass, entityClass, field); - MongoConverter converter = 
getConverter(); + @Override + public Mono findById(Object id, Class entityClass) { + return findById(id, entityClass, getCollectionName(entityClass)); + } - result = result.map(it -> converter.mapValueToTargetType(it, targetType, NO_OP_REF_RESOLVER)); - } + @Override + public Mono findById(Object id, Class entityClass, String collectionName) { - return (Flux) result; + String idKey = operations.getIdPropertyName(entityClass); + + return doFindOne(collectionName, CollectionPreparer.identity(), new Document(idKey, id), null, entityClass, + (Collation) null); } - /** - * @param userType must not be {@literal null}. - * @param domainType must not be {@literal null}. - * @param field must not be {@literal null}. - * @return the most specific conversion target type depending on user preference and domain type property. - * @since 2.1 - */ - private static Class getMostSpecificConversionTargetType(Class userType, Class domainType, String field) { + @Override + public Flux findDistinct(Query query, String field, Class entityClass, Class resultClass) { + return findDistinct(query, field, getCollectionName(entityClass), entityClass, resultClass); + } - Class conversionTargetType = userType; - try { + @Override + @SuppressWarnings("unchecked") + public Flux findDistinct(Query query, String field, String collectionName, Class entityClass, + Class resultClass) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(field, "Field must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(resultClass, "ResultClass must not be null"); + + MongoPersistentEntity entity = getPersistentEntity(entityClass); + DistinctQueryContext distinctQueryContext = queryOperations.distinctQueryContext(query, field); + + Document mappedQuery = distinctQueryContext.getMappedQuery(entity); + String mappedFieldName = distinctQueryContext.getMappedFieldName(entity); + Class 
mongoDriverCompatibleType = distinctQueryContext.getDriverCompatibleClass(resultClass); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); - Class propertyType = PropertyPath.from(field, domainType).getLeafProperty().getLeafType(); + Flux result = execute(collectionName, collection -> { - // use the more specific type but favor UserType over property one - if (ClassUtils.isAssignable(userType, propertyType)) { - conversionTargetType = propertyType; + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Executing findDistinct using query %s for field: %s in collection: %s", + serializeToJsonSafely(mappedQuery), field, collectionName)); } - } catch (PropertyReferenceException e) { - // just don't care about it as we default to Object.class anyway. + FindPublisherPreparer preparer = new QueryFindPublisherPreparer(query, entityClass); + + DistinctPublisher publisher = collectionPreparer.prepare(collection).distinct(mappedFieldName, mappedQuery, + mongoDriverCompatibleType); + distinctQueryContext.applyCollation(entityClass, publisher::collation); + return publisher; + }); + + if (resultClass == Object.class || mongoDriverCompatibleType != resultClass) { + + Class targetType = distinctQueryContext.getMostSpecificConversionTargetType(resultClass, entityClass); + MongoConverter converter = getConverter(); + + result = result.map(it -> converter.mapValueToTargetType(it, targetType, NO_OP_REF_RESOLVER)); } - return conversionTargetType; + return (Flux) result; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.String, java.lang.Class) - */ @Override public Flux aggregate(TypedAggregation aggregation, String inputCollectionName, Class outputType) { - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - - AggregationOperationContext context = new 
TypeBasedAggregationOperationContext(aggregation.getInputType(), - mappingContext, queryMapper); - return aggregate(aggregation, inputCollectionName, outputType, context); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return doAggregate(aggregation, inputCollectionName, aggregation.getInputType(), outputType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.Class) - */ @Override public Flux aggregate(TypedAggregation aggregation, Class outputType) { - return aggregate(aggregation, determineCollectionName(aggregation.getInputType()), outputType); + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregate(aggregation, getCollectionName(aggregation.getInputType()), outputType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.Class, java.lang.Class) - */ @Override public Flux aggregate(Aggregation aggregation, Class inputType, Class outputType) { - - return aggregate(aggregation, determineCollectionName(inputType), outputType, - new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper)); + return doAggregate(aggregation, getCollectionName(inputType), inputType, outputType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.String, java.lang.Class) - */ @Override public Flux aggregate(Aggregation aggregation, String collectionName, Class outputType) { - return aggregate(aggregation, collectionName, outputType, null); + return doAggregate(aggregation, collectionName, null, outputType); } - /** - * @param aggregation must not be {@literal null}. - * @param collectionName must not be {@literal null}. 
- * @param outputType must not be {@literal null}. - * @param context can be {@literal null} and will be defaulted to {@link Aggregation#DEFAULT_CONTEXT}. - * @return never {@literal null}. - */ - protected Flux aggregate(Aggregation aggregation, String collectionName, Class outputType, - @Nullable AggregationOperationContext context) { + protected Flux doAggregate(Aggregation aggregation, String collectionName, @Nullable Class inputType, + Class outputType) { - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(outputType, "Output type must not be null!"); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(outputType, "Output type must not be null"); - AggregationOperationContext rootContext = context == null ? Aggregation.DEFAULT_CONTEXT : context; AggregationOptions options = aggregation.getOptions(); - List pipeline = aggregation.toPipeline(rootContext); + Assert.isTrue(!options.isExplain(), "Cannot use explain option with streaming"); - Assert.isTrue(!options.isExplain(), "Cannot use explain option with streaming!"); - Assert.isNull(options.getCursorBatchSize(), "Cannot use batchSize cursor option with streaming!"); + AggregationDefinition ctx = queryOperations.createAggregation(aggregation, inputType); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName); + LOGGER.debug(String.format("Streaming aggregation: %s in collection %s", + serializeToJsonSafely(ctx.getAggregationPipeline()), collectionName)); } ReadDocumentCallback readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); - return execute(collectionName, collection -> aggregateAndMap(collection, pipeline, options, readCallback)); + return 
execute(collectionName, collection -> aggregateAndMap(collection, ctx.getAggregationPipeline(), + ctx.isOutOrMerge(), options, readCallback, ctx.getInputType())); } private Flux aggregateAndMap(MongoCollection collection, List pipeline, - AggregationOptions options, ReadDocumentCallback readCallback) { + boolean isOutOrMerge, AggregationOptions options, ReadDocumentCallback readCallback, + @Nullable Class inputType) { + + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(options); + AggregatePublisher cursor = collectionPreparer.prepare(collection).aggregate(pipeline, Document.class); + + if (options.isAllowDiskUseSet()) { + cursor = cursor.allowDiskUse(options.isAllowDiskUse()); + } + + if (options.getCursorBatchSize() != null) { + cursor = cursor.batchSize(options.getCursorBatchSize()); + } + + options.getComment().ifPresent(cursor::comment); - AggregatePublisher cursor = collection.aggregate(pipeline).allowDiskUse(options.isAllowDiskUse()) - .useCursor(true); + HintFunction hintFunction = options.getHintObject().map(HintFunction::from).orElseGet(HintFunction::empty); + if (hintFunction.isPresent()) { + cursor = hintFunction.apply(mongoDatabaseFactory, cursor::hintString, cursor::hint); + } + + Optionals.firstNonEmpty(options::getCollation, () -> operations.forType(inputType).getCollation()) // + .map(Collation::toMongoCollation) // + .ifPresent(cursor::collation); + + if (options.hasExecutionTimeLimit()) { + cursor = cursor.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS); + } - if (options.getCollation().isPresent()) { - cursor = cursor.collation(options.getCollation().map(Collation::toMongoCollation).get()); + if (options.isSkipResults()) { + return (isOutOrMerge ? 
Flux.from(cursor.toCollection()) : Flux.from(cursor.first())).thenMany(Mono.empty()); } - return Flux.from(cursor).map(readCallback::doWith); + return Flux.from(cursor).flatMapSequential(readCallback::doWith); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#geoNear(org.springframework.data.mongodb.core.query.NearQuery, java.lang.Class) - */ @Override public Flux> geoNear(NearQuery near, Class entityClass) { - return geoNear(near, entityClass, determineCollectionName(entityClass)); + return geoNear(near, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#geoNear(org.springframework.data.mongodb.core.query.NearQuery, java.lang.Class, java.lang.String) - */ @Override - @SuppressWarnings("unchecked") public Flux> geoNear(NearQuery near, Class entityClass, String collectionName) { return geoNear(near, entityClass, collectionName, entityClass); } + @SuppressWarnings("unchecked") protected Flux> geoNear(NearQuery near, Class entityClass, String collectionName, Class returnType) { if (near == null) { - throw new InvalidDataAccessApiUsageException("NearQuery must not be null!"); + throw new InvalidDataAccessApiUsageException("NearQuery must not be null"); } if (entityClass == null) { - throw new InvalidDataAccessApiUsageException("Entity class must not be null!"); + throw new InvalidDataAccessApiUsageException("Entity class must not be null"); } - String collection = StringUtils.hasText(collectionName) ? collectionName : determineCollectionName(entityClass); - Document nearDbObject = near.toDocument(); + String collection = StringUtils.hasText(collectionName) ? 
collectionName : getCollectionName(entityClass); + String distanceField = operations.nearQueryDistanceFieldName(entityClass); + EntityProjection projection = operations.introspectProjection(returnType, entityClass); - Document command = new Document("geoNear", collection); - command.putAll(nearDbObject); + GeoNearResultDocumentCallback callback = new GeoNearResultDocumentCallback<>(distanceField, + new ProjectingReadCallback<>(mongoConverter, projection, collection), near.getMetric()); - return Flux.defer(() -> { - - if (nearDbObject.containsKey("query")) { - Document query = (Document) nearDbObject.get("query"); - command.put("query", queryMapper.getMappedObject(query, getPersistentEntity(entityClass))); - } + Builder optionsBuilder = AggregationOptions.builder(); + if (near.hasReadPreference()) { + optionsBuilder.readPreference(near.getReadPreference()); + } - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing geoNear using: {} for class: {} in collection: {}", serializeToJsonSafely(command), - entityClass, collectionName); - } + if (near.hasReadConcern()) { + optionsBuilder.readConcern(near.getReadConcern()); + } - GeoNearResultDbObjectCallback callback = new GeoNearResultDbObjectCallback( - new ProjectingReadCallback<>(mongoConverter, entityClass, returnType, collectionName), near.getMetric()); + optionsBuilder.collation(near.getCollation()); - return executeCommand(command, this.readPreference).flatMapMany(document -> { + Aggregation $geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, distanceField)) + .withOptions(optionsBuilder.build()); - List l = document.get("results", List.class); - if (l == null) { - return Flux.empty(); - } - return Flux.fromIterable(l); - }).skip(near.getSkip() != null ? 
near.getSkip() : 0).map(callback::doWith); - }); + return aggregate($geoNear, collection, Document.class) // + .flatMapSequential(callback::doWith); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class) - */ - public Mono findAndModify(Query query, Update update, Class entityClass) { - return findAndModify(query, update, new FindAndModifyOptions(), entityClass, determineCollectionName(entityClass)); + @Override + public Mono findAndModify(Query query, UpdateDefinition update, Class entityClass) { + return findAndModify(query, update, new FindAndModifyOptions(), entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class, java.lang.String) - */ - public Mono findAndModify(Query query, Update update, Class entityClass, String collectionName) { + @Override + public Mono findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName) { return findAndModify(query, update, new FindAndModifyOptions(), entityClass, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, org.springframework.data.mongodb.core.FindAndModifyOptions, java.lang.Class) - */ - public Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass) { - return findAndModify(query, update, options, entityClass, determineCollectionName(entityClass)); + @Override + public Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, + Class entityClass) { + return 
findAndModify(query, update, options, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, org.springframework.data.mongodb.core.FindAndModifyOptions, java.lang.Class, java.lang.String) - */ - public Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, - String collectionName) { + @Override + public Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, + Class entityClass, String collectionName) { + + Assert.notNull(options, "Options must not be null "); + Assert.notNull(entityClass, "Entity class must not be null"); FindAndModifyOptions optionsToUse = FindAndModifyOptions.of(options); Optionals.ifAllPresent(query.getCollation(), optionsToUse.getCollation(), (l, r) -> { throw new IllegalArgumentException( - "Both Query and FindAndModifyOptions define a collation. 
Please provide the collation only via one of the two."); + "Both Query and FindAndModifyOptions define a collation; Please provide the collation only via one of the two"); }); - query.getCollation().ifPresent(optionsToUse::collation); + if (!optionsToUse.getCollation().isPresent()) { + operations.forType(entityClass).getCollation(query).ifPresent(optionsToUse::collation); + } - return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(), - getMappedSortObject(query, entityClass), entityClass, update, optionsToUse); + return doFindAndModify(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), getMappedSortObject(query, entityClass), entityClass, update, optionsToUse); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override + public Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, + String collectionName, Class resultType) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null Use FindAndReplaceOptions#empty() instead"); + Assert.notNull(entityType, "Entity class must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(resultType, "ResultType must not be null Use Object.class instead"); + + Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 ore none"); + Assert.isTrue(query.getSkip() <= 0, "Query must not define skip"); + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityType); + QueryContext queryContext = queryOperations.createQueryContext(query); + EntityProjection projection = operations.introspectProjection(resultType, entityType); + + Document mappedQuery = 
queryContext.getMappedQuery(entity); + Document mappedFields = queryContext.getMappedFields(entity, projection); + Document mappedSort = queryContext.getMappedSort(entity); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); + + return Mono.defer(() -> { + + PersistableEntityModel pem = PersistableEntityModel.of(replacement, collectionName); + + maybeEmitEvent(new BeforeConvertEvent<>(pem.getSource(), pem.getCollection())); + + return maybeCallBeforeConvert(pem.getSource(), pem.getCollection()).map(pem::mutate).flatMap(it -> { + PersistableEntityModel mapped = it + .addTargetDocument(operations.forEntity(it.getSource()).toMappedDocument(mongoConverter).getDocument()); + maybeEmitEvent(new BeforeSaveEvent(mapped.getSource(), mapped.getTarget(), mapped.getCollection())); + + return maybeCallBeforeSave(it.getSource(), mapped.getTarget(), mapped.getCollection()) + .map(potentiallyModified -> PersistableEntityModel.of(potentiallyModified, mapped.getTarget(), + mapped.getCollection())); + }).flatMap(it -> { + + Mono afterFindAndReplace = doFindAndReplace(it.getCollection(), collectionPreparer, mappedQuery, + mappedFields, mappedSort, queryContext.getCollation(entityType).orElse(null), entityType, it.getTarget(), + options, projection); + return afterFindAndReplace.flatMap(saved -> { + maybeEmitEvent(new AfterSaveEvent<>(saved, it.getTarget(), it.getCollection())); + return maybeCallAfterSave(saved, it.getTarget(), it.getCollection()); + }); + }); + }); + } + + @Override public Mono findAndRemove(Query query, Class entityClass) { - return findAndRemove(query, entityClass, determineCollectionName(entityClass)); + return findAndRemove(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Mono 
findAndRemove(Query query, Class entityClass, String collectionName) { - return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(), - getMappedSortObject(query, entityClass), query.getCollation().orElse(null), entityClass); + operations.forType(entityClass).getCollation(query); + return doFindAndRemove(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), getMappedSortObject(query, entityClass), + operations.forType(entityClass).getCollation(query).orElse(null), entityClass); } /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class) */ + @Override public Mono count(Query query, Class entityClass) { - Assert.notNull(entityClass, "Entity class must not be null!"); + Assert.notNull(entityClass, "Entity class must not be null"); - return count(query, entityClass, determineCollectionName(entityClass)); + return count(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ - public Mono count(final Query query, String collectionName) { + @Override + public Mono count(Query query, String collectionName) { return count(query, null, collectionName); } + @Override + public Mono count(Query query, @Nullable Class entityClass, String collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + + return createMono(collectionName, collection -> { + + CountContext countContext = queryOperations.countQueryContext(query); + + CountOptions options = countContext.getCountOptions(entityClass); + Document filter = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); + + if (LOGGER.isDebugEnabled()) { + 
LOGGER.debug( + String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); + } + + return doCount(collectionName, filter, options); + }); + } + + /** + * Run the actual count operation against the collection with given name. + * + * @param collectionName the name of the collection to count matching documents in. + * @param filter the filter to apply. Must not be {@literal null}. + * @param options options to apply. Like collation and the such. + * @return + */ + protected Mono doCount(String collectionName, Document filter, CountOptions options) { + + if (LOGGER.isDebugEnabled()) { + LOGGER + .debug(String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); + } + + return countExecution.countDocuments(collectionName, filter, options); + } + /* * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#estimatedCount(java.lang.String) */ - public Mono count(@Nullable Query query, @Nullable Class entityClass, String collectionName) { + @Override + public Mono estimatedCount(String collectionName) { + return doEstimatedCount(collectionName, new EstimatedDocumentCountOptions()); + } - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + protected Mono doEstimatedCount(String collectionName, EstimatedDocumentCountOptions options) { + return createMono(collectionName, collection -> collection.estimatedDocumentCount(options)); + } - return createMono(collectionName, collection -> { + @Override + public Mono exactCount(Query query, @Nullable Class entityClass, String collectionName) { - final Document Document = query == null ? null - : queryMapper.getMappedObject(query.getQueryObject(), - entityClass == null ? 
null : mappingContext.getPersistentEntity(entityClass)); + CountContext countContext = queryOperations.countQueryContext(query); - return collection.count(Document); - }); + CountOptions options = countContext.getCountOptions(entityClass); + Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); + + return doExactCount(collectionName, mappedQuery, options); + } + + protected Mono doExactCount(String collectionName, Document filter, CountOptions options) { + + return createMono(collectionName, + collection -> collection.countDocuments(CountQuery.of(filter).toQueryDocument(), options)); + } + + protected Mono countCanBeEstimated(Document filter, CountOptions options) { + + if (!filter.isEmpty() || !isEmptyOptions(options)) { + return Mono.just(false); + } + return ReactiveMongoDatabaseUtils.isTransactionActive(getMongoDatabaseFactory()).map(it -> !it); + } + + private boolean isEmptyOptions(CountOptions options) { + return options.getLimit() <= 0 && options.getSkip() <= 0; } /* @@ -999,263 +1315,224 @@ public Mono count(@Nullable Query query, @Nullable Class entityClass, S @Override public Mono insert(Mono objectToSave) { - Assert.notNull(objectToSave, "Mono to insert must not be null!"); + Assert.notNull(objectToSave, "Mono to insert must not be null"); return objectToSave.flatMap(this::insert); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(reactor.core.publisher.Mono, java.lang.Class) - */ @Override public Flux insertAll(Mono> batchToSave, Class entityClass) { - return insertAll(batchToSave, determineCollectionName(entityClass)); + return insertAll(batchToSave, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(reactor.core.publisher.Mono, java.lang.String) - */ @Override public Flux insertAll(Mono> batchToSave, String collectionName) { - Assert.notNull(batchToSave, "Batch to 
insert must not be null!"); + Assert.notNull(batchToSave, "Batch to insert must not be null"); - return Flux.from(batchToSave).flatMap(collection -> insert(collection, collectionName)); + return Flux.from(batchToSave).flatMapSequential(collection -> insert(collection, collectionName)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.lang.Object) - */ + @Override public Mono insert(T objectToSave) { - Assert.notNull(objectToSave, "Object to insert must not be null!"); + Assert.notNull(objectToSave, "Object to insert must not be null"); - ensureNotIterable(objectToSave); - return insert(objectToSave, determineEntityCollectionName(objectToSave)); + ensureNotCollectionLike(objectToSave); + return insert(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.lang.Object, java.lang.String) - */ + @Override public Mono insert(T objectToSave, String collectionName) { - Assert.notNull(objectToSave, "Object to insert must not be null!"); + Assert.notNull(objectToSave, "Object to insert must not be null"); - ensureNotIterable(objectToSave); + ensureNotCollectionLike(objectToSave); return doInsert(collectionName, objectToSave, this.mongoConverter); } protected Mono doInsert(String collectionName, T objectToSave, MongoWriter writer) { - assertUpdateableIdIfNotSet(objectToSave); - - return Mono.defer(() -> { + return Mono.just(PersistableEntityModel.of(objectToSave, collectionName)) // + .doOnNext(it -> maybeEmitEvent(new BeforeConvertEvent<>(it.getSource(), it.getCollection()))) // + .flatMap(it -> maybeCallBeforeConvert(it.getSource(), it.getCollection()).map(it::mutate)) // + .map(it -> { - initializeVersionProperty(objectToSave); - maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)); + AdaptibleEntity entity = operations.forEntity(it.getSource(), 
mongoConverter.getConversionService()); + entity.assertUpdateableIdIfNotSet(); - Document dbDoc = toDbObject(objectToSave, writer); + PersistableEntityModel model = PersistableEntityModel.of(entity.initializeVersionProperty(), + entity.toMappedDocument(writer).getDocument(), it.getCollection()); - maybeEmitEvent(new BeforeSaveEvent(objectToSave, dbDoc, collectionName)); + maybeEmitEvent(new BeforeSaveEvent<>(model.getSource(), model.getTarget(), model.getCollection())); + return model; + })// + .flatMap(it -> { + return maybeCallBeforeSave(it.getSource(), it.getTarget(), it.getCollection()).map(it::mutate); + }).flatMap(it -> { - Mono afterInsert = insertDBObject(collectionName, dbDoc, objectToSave.getClass()).flatMap(id -> { - populateIdIfNecessary(objectToSave, id); - maybeEmitEvent(new AfterSaveEvent(objectToSave, dbDoc, collectionName)); - return Mono.just(objectToSave); - }); + return insertDocument(it.getCollection(), it.getTarget(), it.getSource().getClass()).flatMap(id -> { - return afterInsert; - }); + T saved = operations.forEntity(it.getSource(), mongoConverter.getConversionService()) + .populateIdIfNecessary(id); + maybeEmitEvent(new AfterSaveEvent<>(saved, it.getTarget(), collectionName)); + return maybeCallAfterSave(saved, it.getTarget(), collectionName); + }); + }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.util.Collection, java.lang.Class) - */ + @Override public Flux insert(Collection batchToSave, Class entityClass) { - return doInsertBatch(determineCollectionName(entityClass), batchToSave, this.mongoConverter); + return doInsertBatch(getCollectionName(entityClass), batchToSave, this.mongoConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.util.Collection, java.lang.String) - */ + @Override public Flux insert(Collection batchToSave, String collectionName) { return doInsertBatch(collectionName, batchToSave, 
this.mongoConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insertAll(java.util.Collection) - */ + @Override public Flux insertAll(Collection objectsToSave) { return doInsertAll(objectsToSave, this.mongoConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insertAll(reactor.core.publisher.Mono) - */ @Override public Flux insertAll(Mono> objectsToSave) { - return Flux.from(objectsToSave).flatMap(this::insertAll); + return Flux.from(objectsToSave).flatMapSequential(this::insertAll); } protected Flux doInsertAll(Collection listToSave, MongoWriter writer) { - final Map> elementsByCollection = new HashMap>(); + Map> elementsByCollection = new HashMap<>(); listToSave.forEach(element -> { - MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(element.getClass()); - String collection = entity.getCollection(); - List collectionElements = elementsByCollection.get(collection); - - if (null == collectionElements) { - collectionElements = new ArrayList(); - elementsByCollection.put(collection, collectionElements); - } + String collection = getCollectionName(element.getClass()); + List collectionElements = elementsByCollection.computeIfAbsent(collection, k -> new ArrayList<>()); collectionElements.add(element); }); return Flux.fromIterable(elementsByCollection.keySet()) - .flatMap(collectionName -> doInsertBatch(collectionName, elementsByCollection.get(collectionName), writer)); + .concatMap(collectionName -> doInsertBatch(collectionName, elementsByCollection.get(collectionName), writer)); } - protected Flux doInsertBatch(final String collectionName, final Collection batchToSave, - final MongoWriter writer) { + protected Flux doInsertBatch(String collectionName, Collection batchToSave, + MongoWriter writer) { - Assert.notNull(writer, "MongoWriter must not be null!"); + Assert.notNull(writer, "MongoWriter must not be null"); - Mono>> 
prepareDocuments = Flux.fromIterable(batchToSave) - .flatMap(new Function>>() { - @Override - public Flux> apply(T o) { + Mono, Document>>> prepareDocuments = Flux.fromIterable(batchToSave) + .flatMap(uninitialized -> { - initializeVersionProperty(o); - maybeEmitEvent(new BeforeConvertEvent(o, collectionName)); + BeforeConvertEvent event = new BeforeConvertEvent<>(uninitialized, collectionName); + T toConvert = maybeEmitEvent(event).getSource(); - Document dbDoc = toDbObject(o, writer); + return maybeCallBeforeConvert(toConvert, collectionName).flatMap(it -> { - maybeEmitEvent(new BeforeSaveEvent(o, dbDoc, collectionName)); - return Flux.zip(Mono.just(o), Mono.just(dbDoc)); - } + AdaptibleEntity entity = operations.forEntity(it, mongoConverter.getConversionService()); + entity.assertUpdateableIdIfNotSet(); + + T initialized = entity.initializeVersionProperty(); + MappedDocument mapped = entity.toMappedDocument(writer); + + maybeEmitEvent(new BeforeSaveEvent<>(initialized, mapped.getDocument(), collectionName)); + return maybeCallBeforeSave(initialized, mapped.getDocument(), collectionName).map(toSave -> { + + MappedDocument mappedDocument = queryOperations.createInsertContext(mapped) + .prepareId(uninitialized.getClass()); + + return Tuples.of(entity, mappedDocument.getDocument()); + }); + }); }).collectList(); - Flux> insertDocuments = prepareDocuments.flatMapMany(tuples -> { + Flux, Document>> insertDocuments = prepareDocuments.flatMapMany(tuples -> { - List dbObjects = tuples.stream().map(Tuple2::getT2).collect(Collectors.toList()); + List documents = tuples.stream().map(Tuple2::getT2).collect(Collectors.toList()); - return insertDocumentList(collectionName, dbObjects).thenMany(Flux.fromIterable(tuples)); + return insertDocumentList(collectionName, documents).thenMany(Flux.fromIterable(tuples)); }); - return insertDocuments.map(tuple -> { + return insertDocuments.flatMapSequential(tuple -> { - populateIdIfNecessary(tuple.getT1(), tuple.getT2().get(ID_FIELD)); - 
maybeEmitEvent(new AfterSaveEvent(tuple.getT1(), tuple.getT2(), collectionName)); - return tuple.getT1(); + Document document = tuple.getT2(); + Object id = MappedDocument.of(document).getId(); + + T saved = tuple.getT1().populateIdIfNecessary(id); + maybeEmitEvent(new AfterSaveEvent<>(saved, document, collectionName)); + return maybeCallAfterSave(saved, document, collectionName); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#save(reactor.core.publisher.Mono) - */ @Override public Mono save(Mono objectToSave) { - Assert.notNull(objectToSave, "Mono to save must not be null!"); + Assert.notNull(objectToSave, "Mono to save must not be null"); return objectToSave.flatMap(this::save); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#save(reactor.core.publisher.Mono, java.lang.String) - */ @Override public Mono save(Mono objectToSave, String collectionName) { - Assert.notNull(objectToSave, "Mono to save must not be null!"); + Assert.notNull(objectToSave, "Mono to save must not be null"); return objectToSave.flatMap(o -> save(o, collectionName)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#save(java.lang.Object) - */ + @Override public Mono save(T objectToSave) { - Assert.notNull(objectToSave, "Object to save must not be null!"); - return save(objectToSave, determineEntityCollectionName(objectToSave)); + Assert.notNull(objectToSave, "Object to save must not be null"); + return save(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#save(java.lang.Object, java.lang.String) - */ + @Override public Mono save(T objectToSave, String collectionName) { - Assert.notNull(objectToSave, "Object to save must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - - 
MongoPersistentEntity mongoPersistentEntity = getPersistentEntity(objectToSave.getClass()); + Assert.notNull(objectToSave, "Object to save must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - // No optimistic locking -> simple save - if (mongoPersistentEntity == null || !mongoPersistentEntity.hasVersionProperty()) { - return doSave(collectionName, objectToSave, this.mongoConverter); - } + AdaptibleEntity source = operations.forEntity(objectToSave, mongoConverter.getConversionService()); - return doSaveVersioned(objectToSave, mongoPersistentEntity, collectionName); + return source.isVersionedEntity() ? doSaveVersioned(source, collectionName) + : doSave(collectionName, objectToSave, this.mongoConverter); } - private Mono doSaveVersioned(T objectToSave, MongoPersistentEntity entity, String collectionName) { - - return createMono(collectionName, collection -> { - - ConvertingPropertyAccessor convertingAccessor = new ConvertingPropertyAccessor( - entity.getPropertyAccessor(objectToSave), mongoConverter.getConversionService()); - - MongoPersistentProperty idProperty = entity.getRequiredIdProperty(); - MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty(); - - Object version = convertingAccessor.getProperty(versionProperty); - Number versionNumber = convertingAccessor.getProperty(versionProperty, Number.class); + private Mono doSaveVersioned(AdaptibleEntity source, String collectionName) { - // Fresh instance -> initialize version property - if (version == null) { - return doInsert(collectionName, objectToSave, mongoConverter); - } + if (source.isNew()) { + return doInsert(collectionName, source.getBean(), this.mongoConverter); + } - assertUpdateableIdIfNotSet(objectToSave); + return createMono(collectionName, collection -> { // Create query for entity with the id and old version - Object id = convertingAccessor.getProperty(idProperty); - Query query = new 
Query(Criteria.where(idProperty.getName()).is(id).and(versionProperty.getName()).is(version)); + Query query = source.getQueryForVersion(); - if (versionNumber == null) { - versionNumber = 0; - } // Bump version number - convertingAccessor.setProperty(versionProperty, versionNumber.longValue() + 1); + T toSave = source.incrementVersion(); - ReactiveMongoTemplate.this.maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)); + source.assertUpdateableIdIfNotSet(); - Document document = ReactiveMongoTemplate.this.toDbObject(objectToSave, mongoConverter); + BeforeConvertEvent event = new BeforeConvertEvent<>(toSave, collectionName); + T afterEvent = maybeEmitEvent(event).getSource(); - ReactiveMongoTemplate.this.maybeEmitEvent(new BeforeSaveEvent(objectToSave, document, collectionName)); - Update update = Update.fromDocument(document, ID_FIELD); + return maybeCallBeforeConvert(afterEvent, collectionName).flatMap(toConvert -> { - return doUpdate(collectionName, query, update, objectToSave.getClass(), false, false).map(updateResult -> { + MappedDocument mapped = operations.forEntity(toConvert).toMappedDocument(mongoConverter); + Document document = mapped.getDocument(); - maybeEmitEvent(new AfterSaveEvent(objectToSave, document, collectionName)); - return objectToSave; + maybeEmitEvent(new BeforeSaveEvent<>(toConvert, document, collectionName)); + return maybeCallBeforeSave(toConvert, document, collectionName).flatMap(it -> { + + return doUpdate(collectionName, query, mapped.updateWithoutId(), it.getClass(), false, false) + .flatMap(result -> { + maybeEmitEvent(new AfterSaveEvent(it, document, collectionName)); + return maybeCallAfterSave(it, document, collectionName); + }); + }); }); }); } @@ -1266,27 +1543,38 @@ protected Mono doSave(String collectionName, T objectToSave, MongoWriter< return createMono(collectionName, collection -> { - maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)); - Document dbDoc = toDbObject(objectToSave, writer); 
- maybeEmitEvent(new BeforeSaveEvent(objectToSave, dbDoc, collectionName)); + T toSave = maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)).getSource(); + + return maybeCallBeforeConvert(toSave, collectionName).flatMap(toConvert -> { + + AdaptibleEntity entity = operations.forEntity(toConvert, mongoConverter.getConversionService()); + Document dbDoc = entity.toMappedDocument(writer).getDocument(); + maybeEmitEvent(new BeforeSaveEvent(toConvert, dbDoc, collectionName)); + + return maybeCallBeforeSave(toConvert, dbDoc, collectionName).flatMap(it -> { - return saveDocument(collectionName, dbDoc, objectToSave.getClass()).map(id -> { + return saveDocument(collectionName, dbDoc, it.getClass()).flatMap(id -> { - populateIdIfNecessary(objectToSave, id); - maybeEmitEvent(new AfterSaveEvent(objectToSave, dbDoc, collectionName)); - return objectToSave; + T saved = entity.populateIdIfNecessary(id); + maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName)); + return maybeCallAfterSave(saved, dbDoc, collectionName); + }); + }); }); }); } - protected Mono insertDBObject(final String collectionName, final Document dbDoc, final Class entityClass) { + protected Mono insertDocument(String collectionName, Document dbDoc, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Inserting Document containing fields: " + dbDoc.keySet() + " in collection: " + collectionName); + LOGGER.debug(String + .format("Inserting Document containing fields: " + dbDoc.keySet() + " in collection: " + collectionName)); } - final Document document = new Document(dbDoc); - Flux execute = execute(collectionName, collection -> { + MappedDocument document = MappedDocument.of(dbDoc); + queryOperations.createInsertContext(document).prepareId(entityClass); + + Flux execute = execute(collectionName, collection -> { MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName, entityClass, dbDoc, null); @@ -1294,23 +1582,23 @@ 
protected Mono insertDBObject(final String collectionName, final Documen MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); - return collectionToUse.insertOne(document); + return collectionToUse.insertOne(document.getDocument()); }); - return Flux.from(execute).last().map(success -> document.get(ID_FIELD)); + return Flux.from(execute).last().map(success -> document.getId()); } - protected Flux insertDocumentList(final String collectionName, final List dbDocList) { + protected Flux insertDocumentList(String collectionName, List dbDocList) { if (dbDocList.isEmpty()) { return Flux.empty(); } if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Inserting list of DBObjects containing " + dbDocList.size() + " items"); + LOGGER.debug(String.format("Inserting list of Documents containing %d items", dbDocList.size())); } - final List documents = new ArrayList<>(); + List documents = new ArrayList<>(dbDocList.size()); return execute(collectionName, collection -> { @@ -1322,12 +1610,14 @@ protected Flux insertDocumentList(final String collectionName, final L documents.addAll(toDocuments(dbDocList)); return collectionToUse.insertMany(documents); - }).flatMap(s -> { - List documentsWithIds = documents.stream() - .filter(document -> document.get(ID_FIELD) instanceof ObjectId).collect(Collectors.toList()); - return Flux.fromIterable(documentsWithIds); - }).map(document -> document.get(ID_FIELD, ObjectId.class)); + }).flatMapSequential(s -> { + + return Flux.fromStream(documents.stream() // + .map(MappedDocument::of) // + .filter(it -> it.isIdPresent(ObjectId.class)) // + .map(it -> it.getId(ObjectId.class))); + }); } private MongoCollection prepareCollection(MongoCollection collection, @@ -1340,11 +1630,10 @@ private MongoCollection prepareCollection(MongoCollection co return collectionToUse; } - protected Mono saveDocument(final String collectionName, final Document document, - final Class entityClass) { + protected Mono saveDocument(String 
collectionName, Document document, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Saving Document containing fields: " + document.keySet()); + LOGGER.debug(String.format("Saving Document containing fields: %s", document.keySet())); } return createMono(collectionName, collection -> { @@ -1352,144 +1641,181 @@ protected Mono saveDocument(final String collectionName, final Document MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass, document, null); WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + MappedDocument mapped = MappedDocument.of(document); + + MongoCollection collectionToUse = writeConcernToUse == null // + ? collection // + : collection.withWriteConcern(writeConcernToUse); Publisher publisher; - if (!document.containsKey(ID_FIELD)) { - if (writeConcernToUse == null) { - publisher = collection.insertOne(document); + if (!mapped.hasId()) { + publisher = collectionToUse + .insertOne(queryOperations.createInsertContext(mapped).prepareId(entityClass).getDocument()); + } else { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + UpdateContext updateContext = queryOperations.replaceSingleContext(mapped, true); + Document filter = updateContext.getReplacementQuery(); + Document replacement = updateContext.getMappedUpdate(entity); + + Mono deferredFilter; + + if (updateContext.requiresShardKey(filter, entity)) { + if (entity.getShardKey().isImmutable()) { + deferredFilter = Mono.just(updateContext.applyShardKey(entity, filter, null)); + } else { + deferredFilter = Mono + .from( + collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first()) + .defaultIfEmpty(replacement).map(it -> updateContext.applyShardKey(entity, filter, it)); + } } else { - publisher = collection.withWriteConcern(writeConcernToUse).insertOne(document); + deferredFilter = Mono.just(filter); } - } else if (writeConcernToUse == 
null) { - publisher = collection.replaceOne(Filters.eq(ID_FIELD, document.get(ID_FIELD)), document, - new UpdateOptions().upsert(true)); - } else { - publisher = collection.withWriteConcern(writeConcernToUse) - .replaceOne(Filters.eq(ID_FIELD, document.get(ID_FIELD)), document, new UpdateOptions().upsert(true)); + + publisher = deferredFilter.flatMapMany( + it -> collectionToUse.replaceOne(it, replacement, updateContext.getReplaceOptions(entityClass))); } - return Mono.from(publisher).map(o -> document.get(ID_FIELD)); + return Mono.from(publisher).map(o -> mapped.getId()); }); + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class) - */ - public Mono upsert(Query query, Update update, Class entityClass) { - return doUpdate(determineCollectionName(entityClass), query, update, entityClass, true, false); + @Override + public Mono upsert(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, true, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.String) - */ - public Mono upsert(Query query, Update update, String collectionName) { + @Override + public Mono upsert(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, true, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class, java.lang.String) - */ - public Mono upsert(Query query, Update update, Class entityClass, String collectionName) { + @Override + public Mono 
upsert(Query query, UpdateDefinition update, Class entityClass, String collectionName) { return doUpdate(collectionName, query, update, entityClass, true, false); } /* * (non-Javadoc)) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.Class) */ - public Mono updateFirst(Query query, Update update, Class entityClass) { - return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, false); + @Override + public Mono updateFirst(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.String) - */ - public Mono updateFirst(final Query query, final Update update, final String collectionName) { + @Override + public Mono updateFirst(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class, java.lang.String) - */ - public Mono updateFirst(Query query, Update update, Class entityClass, String collectionName) { + @Override + public Mono updateFirst(Query query, UpdateDefinition update, Class entityClass, + String collectionName) { return doUpdate(collectionName, query, update, entityClass, 
false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class) - */ - public Mono updateMulti(Query query, Update update, Class entityClass) { - return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, true); + @Override + public Mono updateMulti(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.String) - */ - public Mono updateMulti(final Query query, final Update update, String collectionName) { + @Override + public Mono updateMulti(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, false, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class, java.lang.String) - */ - public Mono updateMulti(final Query query, final Update update, Class entityClass, + @Override + public Mono updateMulti(Query query, UpdateDefinition update, Class entityClass, String collectionName) { return doUpdate(collectionName, query, update, entityClass, false, true); } - protected Mono doUpdate(final String collectionName, @Nullable Query query, @Nullable Update update, - @Nullable Class entityClass, final boolean upsert, final boolean multi) { + protected Mono doUpdate(String collectionName, Query query, @Nullable UpdateDefinition update, + @Nullable Class entityClass, boolean upsert, boolean multi) { MongoPersistentEntity 
entity = entityClass == null ? null : getPersistentEntity(entityClass); - Flux result = execute(collectionName, collection -> { + UpdateContext updateContext = multi ? queryOperations.updateContext(update, query, upsert) + : queryOperations.updateSingleContext(update, query, upsert); + updateContext.increaseVersionForUpdateIfNecessary(entity); - increaseVersionForUpdateIfNecessary(entity, update); + Document queryObj = updateContext.getMappedQuery(entity); + UpdateOptions updateOptions = updateContext.getUpdateOptions(entityClass, query); - Document queryObj = query == null ? new Document() : queryMapper.getMappedObject(query.getQueryObject(), entity); - Document updateObj = update == null ? new Document() - : updateMapper.getMappedObject(update.getUpdateObject(), entity); + Flux result; - if (LOGGER.isDebugEnabled()) { - LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", - serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName)); - } + if (updateContext.isAggregationUpdate()) { + + List pipeline = updateContext.getUpdatePipeline(entityClass); + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass, + update.getUpdateObject(), queryObj); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + result = execute(collectionName, collection -> { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(pipeline), collectionName)); + } + collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection; + + return multi ? 
collection.updateMany(queryObj, pipeline, updateOptions) + : collection.updateOne(queryObj, pipeline, updateOptions); + }); + } else { + + Document updateObj = updateContext.getMappedUpdate(entity); MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass, updateObj, queryObj); WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); - UpdateOptions updateOptions = new UpdateOptions().upsert(upsert); - query.getCollation().map(Collation::toMongoCollation).ifPresent(updateOptions::collation); + result = execute(collectionName, collection -> { - if (!UpdateMapper.isUpdateObject(updateObj)) { - return collectionToUse.replaceOne(queryObj, updateObj, updateOptions); - } - if (multi) { - return collectionToUse.updateMany(queryObj, updateObj, updateOptions); - } - return collectionToUse.updateOne(queryObj, updateObj, updateOptions); - }).doOnNext(updateResult -> { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName)); + } + + MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); + + if (!UpdateMapper.isUpdateObject(updateObj)) { + + Document filter = new Document(queryObj); + Mono deferredFilter; + + if (updateContext.requiresShardKey(filter, entity)) { + if (entity.getShardKey().isImmutable()) { + deferredFilter = Mono.just(updateContext.applyShardKey(entity, filter, null)); + } else { + deferredFilter = Mono.from( + collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first()) + .defaultIfEmpty(updateObj).map(it -> updateContext.applyShardKey(entity, filter, it)); + } + } else { + deferredFilter = Mono.just(filter); + } + + com.mongodb.client.model.ReplaceOptions replaceOptions = 
updateContext.getReplaceOptions(entityClass); + return deferredFilter.flatMap(it -> Mono.from(collectionToUse.replaceOne(it, updateObj, replaceOptions))); + } + + return multi ? collectionToUse.updateMany(queryObj, updateObj, updateOptions) + : collectionToUse.updateOne(queryObj, updateObj, updateOptions); + }); + } + + result = result.doOnNext(updateResult -> { if (entity != null && entity.hasVersionProperty() && !multi) { if (updateResult.wasAcknowledged() && updateResult.getMatchedCount() == 0) { - Document queryObj = query == null ? new Document() - : queryMapper.getMappedObject(query.getQueryObject(), entity); - Document updateObj = update == null ? new Document() - : updateMapper.getMappedObject(update.getUpdateObject(), entity); - if (dbObjectContainsVersionProperty(queryObj, entity)) - throw new OptimisticLockingFailureException("Optimistic lock exception on saving entity: " - + updateObj.toString() + " to collection " + collectionName); + Document updateObj = updateContext.getMappedUpdate(entity); + if (containsVersionProperty(queryObj, entity)) + throw new OptimisticLockingFailureException("Optimistic lock exception on saving entity %s to collection %s".formatted(entity.getName(), collectionName)); } } }); @@ -1497,130 +1823,40 @@ protected Mono doUpdate(final String collectionName, @Nullable Que return result.next(); } - private void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity persistentEntity, Update update) { - - if (persistentEntity != null && persistentEntity.hasVersionProperty()) { - String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName(); - if (!update.modifies(versionFieldName)) { - update.inc(versionFieldName, 1L); - } - } - } - - private boolean dbObjectContainsVersionProperty(Document document, - @Nullable MongoPersistentEntity persistentEntity) { + private boolean containsVersionProperty(Document document, @Nullable MongoPersistentEntity persistentEntity) { if (persistentEntity == null || 
!persistentEntity.hasVersionProperty()) { return false; } - return document.containsKey(persistentEntity.getRequiredIdProperty().getFieldName()); + return document.containsKey(persistentEntity.getRequiredVersionProperty().getFieldName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(reactor.core.publisher.Mono) - */ @Override public Mono remove(Mono objectToRemove) { return objectToRemove.flatMap(this::remove); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(reactor.core.publisher.Mono, java.lang.String) - */ @Override public Mono remove(Mono objectToRemove, String collectionName) { return objectToRemove.flatMap(it -> remove(it, collectionName)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(java.lang.Object) - */ + @Override public Mono remove(Object object) { - Assert.notNull(object, "Object must not be null!"); + Assert.notNull(object, "Object must not be null"); - return remove(getIdQueryFor(object), object.getClass()); + return remove(operations.forEntity(object).getRemoveByQuery(), object.getClass()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(java.lang.Object, java.lang.String) - */ + @Override public Mono remove(Object object, String collectionName) { - Assert.notNull(object, "Object must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(object, "Object must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - return doRemove(collectionName, getIdQueryFor(object), object.getClass()); - } - - /** - * Returns {@link Entry} containing the field name of the id property as {@link Entry#getKey()} and the {@link Id}s - * property value as its {@link Entry#getValue()}. 
- * - * @param object - * @return - */ - private Pair extractIdPropertyAndValue(Object object) { - - Assert.notNull(object, "Id cannot be extracted from 'null'."); - - Assert.notNull(object, "Id cannot be extracted from 'null'."); - - Class objectType = object.getClass(); - - if (object instanceof Document) { - return Pair.of(ID_FIELD, ((Document) object).get(ID_FIELD)); - } - - MongoPersistentEntity entity = mappingContext.getPersistentEntity(objectType); - - if (entity != null && entity.hasIdProperty()) { - - MongoPersistentProperty idProperty = entity.getIdProperty(); - return Pair.of(idProperty.getFieldName(), entity.getPropertyAccessor(object).getProperty(idProperty)); - } - - throw new MappingException("No id property found for object of type " + objectType); - } - - /** - * Returns a {@link Query} for the given entity by its id. - * - * @param object must not be {@literal null}. - * @return - */ - private Query getIdQueryFor(Object object) { - - Pair id = extractIdPropertyAndValue(object); - return new Query(where(id.getFirst()).is(id.getSecond())); - } - - /** - * Returns a {@link Query} for the given entities by their ids. - * - * @param objects must not be {@literal null} or {@literal empty}. 
- * @return - */ - private Query getIdInQueryFor(Collection objects) { - - Assert.notEmpty(objects, "Cannot create Query for empty collection."); - - Iterator it = objects.iterator(); - Pair firstEntry = extractIdPropertyAndValue(it.next()); - - ArrayList ids = new ArrayList(objects.size()); - ids.add(firstEntry.getSecond()); - - while (it.hasNext()) { - ids.add(extractIdPropertyAndValue(it.next()).getSecond()); - } - - return new Query(where(firstEntry.getFirst()).in(ids)); + return doRemove(collectionName, operations.forEntity(object).getRemoveByQuery(), object.getClass()); } private void assertUpdateableIdIfNotSet(Object value) { @@ -1638,32 +1874,23 @@ private void assertUpdateableIdIfNotSet(Object value) { if (!MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(property.getType())) { throw new InvalidDataAccessApiUsageException( - String.format("Cannot autogenerate id of type %s for entity of type %s!", property.getType().getName(), + String.format("Cannot autogenerate id of type %s for entity of type %s", property.getType().getName(), value.getClass().getName())); } } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ + @Override public Mono remove(Query query, String collectionName) { return remove(query, null, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override public Mono remove(Query query, Class entityClass) { - return remove(query, entityClass, determineCollectionName(entityClass)); + return remove(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Mono remove(Query 
query, @Nullable Class entityClass, String collectionName) { return doRemove(collectionName, query, entityClass); } @@ -1671,225 +1898,351 @@ public Mono remove(Query query, @Nullable Class entityClass, St protected Mono doRemove(String collectionName, Query query, @Nullable Class entityClass) { if (query == null) { - throw new InvalidDataAccessApiUsageException("Query passed in to remove can't be null!"); + throw new InvalidDataAccessApiUsageException("Query passed in to remove can't be null"); } - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - - final Document queryObject = query.getQueryObject(); - final MongoPersistentEntity entity = getPersistentEntity(entityClass); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - return execute(collectionName, collection -> { - - Document removeQuey = queryMapper.getMappedObject(queryObject, entity); + MongoPersistentEntity entity = getPersistentEntity(entityClass); - maybeEmitEvent(new BeforeDeleteEvent(removeQuey, entityClass, collectionName)); + DeleteContext deleteContext = queryOperations.deleteQueryContext(query); + Document queryObject = deleteContext.getMappedQuery(entity); + DeleteOptions deleteOptions = deleteContext.getDeleteOptions(entityClass); + Document removeQuery = deleteContext.getMappedQuery(entity); + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass, + null, removeQuery); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass, - null, removeQuey); + return execute(collectionName, collection -> { - final DeleteOptions deleteOptions = new DeleteOptions(); - query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation); + 
maybeEmitEvent(new BeforeDeleteEvent<>(removeQuery, entityClass, collectionName)); - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); + MongoCollection collectionToUse = collectionPreparer + .prepare(prepareCollection(collection, writeConcernToUse)); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Remove using query: {} in collection: {}.", - new Object[] { serializeToJsonSafely(removeQuey), collectionName }); + LOGGER.debug(String.format("Remove using query: %s in collection: %s.", serializeToJsonSafely(removeQuery), + collectionName)); } if (query.getLimit() > 0 || query.getSkip() > 0) { FindPublisher cursor = new QueryFindPublisherPreparer(query, entityClass) - .prepare(collection.find(removeQuey)) // - .projection(new Document(ID_FIELD, 1)); + .prepare(collection.find(removeQuery)) // + .projection(MappedDocument.getIdOnlyProjection()); return Flux.from(cursor) // - .map(doc -> doc.get(ID_FIELD)) // + .map(MappedDocument::of) // + .map(MappedDocument::getId) // .collectList() // .flatMapMany(val -> { - return collectionToUse.deleteMany(new Document(ID_FIELD, new Document("$in", val)), deleteOptions); + return collectionToUse.deleteMany(MappedDocument.getIdIn(val), deleteOptions); }); } else { - return collectionToUse.deleteMany(removeQuey, deleteOptions); + return collectionToUse.deleteMany(removeQuery, deleteOptions); } - }).doOnNext(deleteResult -> maybeEmitEvent(new AfterDeleteEvent(queryObject, entityClass, collectionName))) + }).doOnNext(it -> maybeEmitEvent(new AfterDeleteEvent<>(queryObject, entityClass, collectionName))) // .next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAll(java.lang.Class) - */ + @Override public Flux findAll(Class entityClass) { - return findAll(entityClass, determineCollectionName(entityClass)); + return findAll(entityClass, getCollectionName(entityClass)); } - /* - * 
(non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAll(java.lang.Class, java.lang.String) - */ + @Override public Flux findAll(Class entityClass, String collectionName) { - return executeFindMultiInternal(new FindCallback(null), null, - new ReadDocumentCallback(mongoConverter, entityClass, collectionName), collectionName); + return executeFindMultiInternal(new FindCallback(CollectionPreparer.identity(), null), + FindPublisherPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), + collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ @Override + @SuppressWarnings("unchecked") public Flux findAllAndRemove(Query query, String collectionName) { return (Flux) findAllAndRemove(query, Object.class, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override public Flux findAllAndRemove(Query query, Class entityClass) { - return findAllAndRemove(query, entityClass, determineCollectionName(entityClass)); + return findAllAndRemove(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override public Flux findAllAndRemove(Query query, Class entityClass, String collectionName) { return doFindAndDelete(collectionName, query, entityClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#tail(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override + public Mono replace(Query query, T replacement, ReplaceOptions options, String 
collectionName) { + + Assert.notNull(replacement, "Replacement must not be null"); + return replace(query, (Class) ClassUtils.getUserClass(replacement), replacement, options, collectionName); + } + + protected Mono replace(Query query, Class entityType, T replacement, ReplaceOptions options, + String collectionName) { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityType); + UpdateContext updateContext = queryOperations.replaceSingleContext(query, + operations.forEntity(replacement).toMappedDocument(this.mongoConverter), options.isUpsert()); + + return createMono(collectionName, collection -> { + + Document mappedUpdate = updateContext.getMappedUpdate(entity); + + MongoAction action = new MongoAction(writeConcern, MongoActionOperation.REPLACE, collectionName, entityType, + mappedUpdate, updateContext.getQueryObject()); + + MongoCollection collectionToUse = createCollectionPreparer(query, action).prepare(collection); + + return collectionToUse.replaceOne(updateContext.getMappedQuery(entity), mappedUpdate, + updateContext.getReplaceOptions(entityType, it -> { + it.upsert(options.isUpsert()); + })); + }); + } + @Override public Flux tail(Query query, Class entityClass) { - return tail(query, entityClass, determineCollectionName(entityClass)); + return tail(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#tail(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override public Flux tail(@Nullable Query query, Class entityClass, String collectionName) { + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); if (query == null) { - // TODO: clean up - LOGGER.debug(String.format("find for class: %s in collection: %s", entityClass, collectionName)); + LOGGER.debug(String.format("Tail for class: %s in collection: %s", entityClass, collectionName)); return 
executeFindMultiInternal( - collection -> new FindCallback(null).doInCollection(collection).cursorType(CursorType.TailableAwait), null, - new ReadDocumentCallback(mongoConverter, entityClass, collectionName), collectionName); + collection -> new FindCallback(collectionPreparer, null).doInCollection(collection) + .cursorType(CursorType.TailableAwait), + FindPublisherPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), + collectionName); } - return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, + return doFind(collectionName, collectionPreparer, query.getQueryObject(), query.getFieldsObject(), entityClass, new TailingQueryFindPublisherPreparer(query, entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#tail(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.Class, org.springframework.data.mongodb.core.ChangeStreamOptions, java.lang.String) - */ @Override - public Flux> changeStream(@Nullable Aggregation filter, Class resultType, - ChangeStreamOptions options, String collectionName) { + public Flux> changeStream(@Nullable String database, @Nullable String collectionName, + ChangeStreamOptions options, Class targetType) { + + List filter = prepareFilter(options); + FullDocument fullDocument = ClassUtils.isAssignable(Document.class, targetType) ? FullDocument.DEFAULT + : FullDocument.UPDATE_LOOKUP; + + return ReactiveMongoDatabaseUtils.getDatabase(database, mongoDatabaseFactory) // + .map(db -> { + ChangeStreamPublisher publisher; + if (StringUtils.hasText(collectionName)) { + publisher = filter.isEmpty() ? db.getCollection(collectionName).watch(Document.class) + : db.getCollection(collectionName).watch(filter, Document.class); + + } else { + publisher = filter.isEmpty() ? 
db.watch(Document.class) : db.watch(filter, Document.class); + } + + if (options.isResumeAfter()) { + publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::resumeAfter) + .orElse(publisher); + } else if (options.isStartAfter()) { + publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::startAfter) + .orElse(publisher); + } + publisher = options.getCollation().map(Collation::toMongoCollation).map(publisher::collation) + .orElse(publisher); + publisher = options.getResumeBsonTimestamp().map(publisher::startAtOperationTime).orElse(publisher); + + if (options.getFullDocumentBeforeChangeLookup().isPresent()) { + publisher = publisher.fullDocumentBeforeChange(options.getFullDocumentBeforeChangeLookup().get()); + } + return publisher.fullDocument(options.getFullDocumentLookup().orElse(fullDocument)); + }) // + .flatMapMany(publisher -> Flux.from(publisher) + .map(document -> new ChangeStreamEvent<>(document, targetType, getConverter()))); + } + + List prepareFilter(ChangeStreamOptions options) { - Assert.notNull(resultType, "Result type must not be null!"); - Assert.notNull(options, "ChangeStreamOptions must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Object filter = options.getFilter().orElse(Collections.emptyList()); - if (filter == null) { - return changeStream(Collections.emptyList(), resultType, options, collectionName); + if (filter instanceof Aggregation agg) { + AggregationOperationContext context = agg instanceof TypedAggregation typedAggregation + ? 
new TypeBasedAggregationOperationContext(typedAggregation.getInputType(), + getConverter().getMappingContext(), queryMapper) + : new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper); + + return agg.toPipeline(new PrefixingDelegatingAggregationOperationContext(context, "fullDocument", + Arrays.asList("operationType", "fullDocument", "documentKey", "updateDescription", "ns"))); } - AggregationOperationContext context = filter instanceof TypedAggregation ? new TypeBasedAggregationOperationContext( - ((TypedAggregation) filter).getInputType(), mappingContext, queryMapper) : Aggregation.DEFAULT_CONTEXT; + if (filter instanceof List) { + return (List) filter; + } - return changeStream( - filter.toPipeline(new PrefixingDelegatingAggregationOperationContext(context, "fullDocument", - Arrays.asList("operationType", "fullDocument", "documentKey", "updateDescription", "ns"))), - resultType, options, collectionName); + throw new IllegalArgumentException( + "ChangeStreamRequestOptions.filter mut be either an Aggregation or a plain list of Documents"); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#tail(java.util.List, java.lang.Class, org.springframework.data.mongodb.core.ChangeStreamOptions, java.lang.String) - */ @Override - public Flux> changeStream(List filter, Class resultType, - ChangeStreamOptions options, String collectionName) { + public Flux mapReduce(Query filterQuery, Class domainType, Class resultType, String mapFunction, + String reduceFunction, MapReduceOptions options) { - Assert.notNull(filter, "Filter must not be null!"); - Assert.notNull(resultType, "Result type must not be null!"); - Assert.notNull(options, "ChangeStreamOptions must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + return mapReduce(filterQuery, domainType, getCollectionName(domainType), resultType, mapFunction, reduceFunction, + options); + } - 
ChangeStreamPublisher publisher = filter.isEmpty() ? getCollection(collectionName).watch() - : getCollection(collectionName).watch(filter); + @Override + public Flux mapReduce(Query filterQuery, Class domainType, String inputCollectionName, Class resultType, + String mapFunction, String reduceFunction, MapReduceOptions options) { - publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::resumeAfter).orElse(publisher); - publisher = options.getCollation().map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher); + Assert.notNull(filterQuery, "Filter query must not be null"); + Assert.notNull(domainType, "Domain type must not be null"); + Assert.hasText(inputCollectionName, "Input collection name must not be null or empty"); + Assert.notNull(resultType, "Result type must not be null"); + Assert.notNull(mapFunction, "Map function must not be null"); + Assert.notNull(reduceFunction, "Reduce function must not be null"); + Assert.notNull(options, "MapReduceOptions must not be null"); - if (options.getFullDocumentLookup().isPresent() || resultType != Document.class) { - publisher = publisher.fullDocument(options.getFullDocumentLookup().isPresent() - ? 
options.getFullDocumentLookup().get() : FullDocument.UPDATE_LOOKUP); - } + assertLocalFunctionNames(mapFunction, reduceFunction); + + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(filterQuery); + return createFlux(inputCollectionName, collection -> { + + Document mappedQuery = queryMapper.getMappedObject(filterQuery.getQueryObject(), + mappingContext.getPersistentEntity(domainType)); - return Flux.from(publisher).map(document -> new ChangeStreamEvent<>(document, resultType, getConverter())); + MapReducePublisher publisher = collectionPreparer.prepare(collection).mapReduce(mapFunction, + reduceFunction, Document.class); + + publisher.filter(mappedQuery); + + Document mappedSort = getMappedSortObject(filterQuery, domainType); + if (mappedSort != null && !mappedSort.isEmpty()) { + publisher.sort(mappedSort); + } + + Meta meta = filterQuery.getMeta(); + if (meta.hasMaxTime()) { + publisher.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); + } + + if (filterQuery.getLimit() > 0 || (options.getLimit() != null)) { + + if (filterQuery.getLimit() > 0 && (options.getLimit() != null)) { + throw new IllegalArgumentException( + "Both Query and MapReduceOptions define a limit; Please provide the limit only via one of the two."); + } + + if (filterQuery.getLimit() > 0) { + publisher.limit(filterQuery.getLimit()); + } + + if (options.getLimit() != null) { + publisher.limit(options.getLimit()); + } + } + + Optional collation = filterQuery.getCollation(); + + Optionals.ifAllPresent(filterQuery.getCollation(), options.getCollation(), (l, r) -> { + throw new IllegalArgumentException( + "Both Query and MapReduceOptions define a collation; Please provide the collation only via one of the two."); + }); + + if (options.getCollation().isPresent()) { + collation = options.getCollation(); + } + + if (!CollectionUtils.isEmpty(options.getScopeVariables())) { + publisher = publisher.scope(new Document(options.getScopeVariables())); + 
} + + if (options.getLimit() != null && options.getLimit() > 0) { + publisher = publisher.limit(options.getLimit()); + } + + if (options.getFinalizeFunction().filter(StringUtils::hasText).isPresent()) { + publisher = publisher.finalizeFunction(options.getFinalizeFunction().get()); + } + + if (options.getJavaScriptMode() != null) { + publisher = publisher.jsMode(options.getJavaScriptMode()); + } + + if (options.getOutputSharded().isPresent()) { + MongoCompatibilityAdapter.mapReducePublisherAdapter(publisher).sharded(options.getOutputSharded().get()); + } + + if (StringUtils.hasText(options.getOutputCollection()) && !options.usesInlineOutput()) { + publisher = publisher.collectionName(options.getOutputCollection()).action(options.getMapReduceAction()); + + if (options.getOutputDatabase().isPresent()) { + publisher = publisher.databaseName(options.getOutputDatabase().get()); + } + } + + publisher = collation.map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher); + + return Flux.from(publisher) + .flatMapSequential(new ReadDocumentCallback<>(mongoConverter, resultType, inputCollectionName)::doWith); + }); + } + + private static void assertLocalFunctionNames(String... 
functions) { + + for (String function : functions) { + + if (ResourceUtils.isUrl(function)) { + + throw new IllegalArgumentException(String.format( + "Blocking accessing to resource %s is not allowed using reactive infrastructure; You may load the resource at startup and cache its value.", + function)); + } + } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation#query(java.lang.Class) - */ @Override public ReactiveFind query(Class domainType) { return new ReactiveFindOperationSupport(this).query(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation#update(java.lang.Class) - */ @Override public ReactiveUpdate update(Class domainType) { return new ReactiveUpdateOperationSupport(this).update(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation#remove(java.lang.Class) - */ @Override public ReactiveRemove remove(Class domainType) { return new ReactiveRemoveOperationSupport(this).remove(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation#insert(java.lang.Class) - */ @Override public ReactiveInsert insert(Class domainType) { return new ReactiveInsertOperationSupport(this).insert(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ReactiveAggregation aggregateAndReturn(Class domainType) { return new ReactiveAggregationOperationSupport(this).aggregateAndReturn(domainType); } + @Override + public ReactiveMapReduce mapReduce(Class domainType) { + return new ReactiveMapReduceOperationSupport(this).mapReduce(domainType); + } + + @Override + public ReactiveChangeStream changeStream(Class domainType) { + return new ReactiveChangeStreamOperationSupport(this).changeStream(domainType); + } + /** * Retrieve and remove all documents matching the given {@code 
query} by calling {@link #find(Query, Class, String)} * and {@link #remove(Query, Class, String)}, whereas the {@link Query} for {@link #remove(Query, Class, String)} is @@ -1904,9 +2257,9 @@ protected Flux doFindAndDelete(String collectionName, Query query, Class< Flux flux = find(query, entityClass, collectionName); - return Flux.from(flux).collectList() - .flatMapMany(list -> Flux.from(remove(getIdInQueryFor(list), entityClass, collectionName)) - .flatMap(deleteResult -> Flux.fromIterable(list))); + return Flux.from(flux).collectList().filter(it -> !it.isEmpty()) + .flatMapMany(list -> Flux.from(remove(operations.getByIdInQuery(list), entityClass, collectionName)) + .flatMapSequential(deleteResult -> Flux.fromIterable(list))); } /** @@ -1916,17 +2269,17 @@ protected Flux doFindAndDelete(String collectionName, Query query, Class< * @param collectionOptions * @return the collection that was created */ - protected Mono> doCreateCollection(final String collectionName, - final CreateCollectionOptions collectionOptions) { + protected Mono> doCreateCollection(String collectionName, + CreateCollectionOptions collectionOptions) { - return createMono(db -> db.createCollection(collectionName, collectionOptions)).map(success -> { + return createMono(db -> db.createCollection(collectionName, collectionOptions)).doOnSuccess(it -> { // TODO: Emit a collection created event if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Created collection [{}]", collectionName); + LOGGER.debug(String.format("Created collection [%s]", collectionName)); } - return getCollection(collectionName); - }); + + }).then(getCollection(collectionName)); } /** @@ -1934,26 +2287,52 @@ protected Mono> doCreateCollection(final String collec * The query document is specified as a standard {@link Document} and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. 
* @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. * @param collation can be {@literal null}. * @return the {@link List} of converted objects. */ - protected Mono doFindOne(String collectionName, Document query, @Nullable Document fields, + protected Mono doFindOne(String collectionName, + CollectionPreparer> collectionPreparer, Document query, @Nullable Document fields, Class entityClass, @Nullable Collation collation) { + return doFindOne(collectionName, collectionPreparer, query, fields, entityClass, + findPublisher -> collation != null ? findPublisher.collation(collation.toMongoCollation()) : findPublisher); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. + * The query document is specified as a standard {@link Document} and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param query the query document that specifies the criteria used to find a record. + * @param fields the document that specifies the fields to be returned. + * @param entityClass the parameterized type of the returned list. + * @param preparer the preparer modifying collection and publisher to fit the needs. + * @return the {@link List} of converted objects. + * @since 2.2 + */ + protected Mono doFindOne(String collectionName, + CollectionPreparer> collectionPreparer, Document query, @Nullable Document fields, + Class entityClass, FindPublisherPreparer preparer) { + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - Document mappedQuery = queryMapper.getMappedObject(query, entity); - Document mappedFields = fields == null ? 
null : queryMapper.getMappedObject(fields, entity); + + QueryContext queryContext = queryOperations + .createQueryContext(new BasicQuery(query, fields != null ? fields : new Document())); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); + Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { LOGGER.debug(String.format("findOne using query: %s fields: %s for class: %s in collection: %s", serializeToJsonSafely(query), mappedFields, entityClass, collectionName)); } - return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields, collation), - new ReadDocumentCallback(this.mongoConverter, entityClass, collectionName), collectionName); + return executeFindOneInternal(new FindOneCallback(collectionPreparer, mappedQuery, mappedFields, preparer), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } /** @@ -1961,14 +2340,16 @@ protected Mono doFindOne(String collectionName, Document query, @Nullable * query document is specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record * @param fields the document that specifies the fields to be returned * @param entityClass the parameterized type of the returned list. * @return the List of converted objects. 
*/ - protected Flux doFind(String collectionName, Document query, Document fields, Class entityClass) { - return doFind(collectionName, query, fields, entityClass, null, - new ReadDocumentCallback(this.mongoConverter, entityClass, collectionName)); + protected Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, null, + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName)); } /** @@ -1977,85 +2358,77 @@ protected Flux doFind(String collectionName, Document query, Document fie * specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. - * @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply - * limits, skips and so on). + * @param preparer allows for customization of the {@link com.mongodb.client.FindIterable} used when iterating over + * the result set, (apply limits, skips and so on). * @return the {@link List} of converted objects. 
*/ - protected Flux doFind(String collectionName, Document query, Document fields, Class entityClass, - FindPublisherPreparer preparer) { - return doFind(collectionName, query, fields, entityClass, preparer, - new ReadDocumentCallback(mongoConverter, entityClass, collectionName)); + protected Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass, FindPublisherPreparer preparer) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, preparer, + new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName)); } - protected Flux doFind(String collectionName, Document query, Document fields, Class entityClass, - @Nullable FindPublisherPreparer preparer, DocumentCallback objectCallback) { + protected Flux doFind(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, + Class entityClass, @Nullable FindPublisherPreparer preparer, DocumentCallback objectCallback) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - Document mappedFields = queryMapper.getMappedFields(fields, entity); - Document mappedQuery = queryMapper.getMappedObject(query, entity); + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); + Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s", serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName)); } - return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback, - collectionName); + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields), preparer, + objectCallback, collectionName); } - /** - * 
Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified targetClass while - * using sourceClass for mapping the query. - * - * @since 2.0 - */ - Flux doFind(String collectionName, Document query, Document fields, Class sourceClass, - Class targetClass, FindPublisherPreparer preparer) { - - MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(sourceClass); - - Document mappedFields = getMappedFieldsObject(fields, entity, targetClass); - Document mappedQuery = queryMapper.getMappedObject(query, entity); - - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}", - serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName); - } - - return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, - new ProjectingReadCallback<>(mongoConverter, sourceClass, targetClass, collectionName), collectionName); + CollectionPreparer> createCollectionPreparer(Query query) { + return ReactiveCollectionPreparerDelegate.of(query); } - private Document getMappedFieldsObject(Document fields, MongoPersistentEntity entity, Class targetType) { - return queryMapper.getMappedFields(addFieldsForProjection(fields, entity.getType(), targetType), entity); + CollectionPreparer> createCollectionPreparer(Query query, @Nullable MongoAction action) { + CollectionPreparer> collectionPreparer = createCollectionPreparer(query); + if (action == null) { + return collectionPreparer; + } + return collectionPreparer.andThen(collection -> { + WriteConcern writeConcern = prepareWriteConcern(action); + return writeConcern != null ? collection.withWriteConcern(writeConcern) : collection; + }); } /** - * For cases where {@code fields} is {@literal null} or {@literal empty} add fields required for creating the - * projection (target) type if the {@code targetType} is a {@literal closed interface projection}. 
+ * Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified targetClass while + * using sourceClass for mapping the query. * - * @param fields must not be {@literal null}. - * @param domainType must not be {@literal null}. - * @param targetType must not be {@literal null}. - * @return {@link Document} with fields to be included. + * @since 2.0 */ - private Document addFieldsForProjection(Document fields, Class domainType, Class targetType) { + Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class sourceClass, Class targetClass, FindPublisherPreparer preparer) { - if (!fields.isEmpty() || !targetType.isInterface() || ClassUtils.isAssignable(domainType, targetType)) { - return fields; - } + MongoPersistentEntity entity = mappingContext.getPersistentEntity(sourceClass); + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); - ProjectionInformation projectionInformation = projectionFactory.getProjectionInformation(targetType); + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, projection); + Document mappedQuery = queryContext.getMappedQuery(entity); - if (projectionInformation.isClosed()) { - projectionInformation.getInputProperties().forEach(it -> fields.append(it.getName(), 1)); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName)); } - return fields; + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields), preparer, + new ProjectingReadCallback<>(mongoConverter, projection, collectionName), collectionName); } protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions) { @@ 
-2064,177 +2437,222 @@ protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable Col protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions, Class entityType) { - - CreateCollectionOptions result = new CreateCollectionOptions(); - - if (collectionOptions == null) { - return result; - } - - collectionOptions.getCapped().ifPresent(result::capped); - collectionOptions.getSize().ifPresent(result::sizeInBytes); - collectionOptions.getMaxDocuments().ifPresent(result::maxDocuments); - collectionOptions.getCollation().map(Collation::toMongoCollation).ifPresent(result::collation); - - collectionOptions.getValidationOptions().ifPresent(it -> { - - ValidationOptions validationOptions = new ValidationOptions(); - - it.getValidationAction().ifPresent(validationOptions::validationAction); - it.getValidationLevel().ifPresent(validationOptions::validationLevel); - - it.getValidator().ifPresent(val -> validationOptions.validator(getMappedValidator(val, entityType))); - - result.validationOptions(validationOptions); - }); - - return result; - } - - private Document getMappedValidator(Validator validator, Class domainType) { - - Document validationRules = validator.toDocument(); - - if (validationRules.containsKey("$jsonSchema")) { - return schemaMapper.mapSchema(validationRules, domainType); - } - - return queryMapper.getMappedObject(validationRules, mappingContext.getPersistentEntity(domainType)); + return operations.convertToCreateCollectionOptions(collectionOptions, entityType); } /** * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. - * The first document that matches the query is returned and also removed from the collection in the database. - *

          + * The first document that matches the query is returned and also removed from the collection in the database.
          * The query document is specified as a standard Document and so is the fields specification. * - * @param collectionName name of the collection to retrieve the objects from - * @param query the query document that specifies the criteria used to find a record - * @param collation collation + * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param query the query document that specifies the criteria used to find a record. + * @param collation collation. * @param entityClass the parameterized type of the returned list. * @return the List of converted objects. */ - protected Mono doFindAndRemove(String collectionName, Document query, Document fields, Document sort, + protected Mono doFindAndRemove(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, Document sort, @Nullable Collation collation, Class entityClass) { if (LOGGER.isDebugEnabled()) { LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s", - serializeToJsonSafely(query), fields, sort, entityClass, collectionName)); + serializeToJsonSafely(query), fields, serializeToJsonSafely(sort), entityClass, collectionName)); } MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - return executeFindOneInternal( - new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort, collation), - new ReadDocumentCallback(this.mongoConverter, entityClass, collectionName), collectionName); + return executeFindOneInternal(new FindAndRemoveCallback(collectionPreparer, + queryMapper.getMappedObject(query, entity), fields, sort, collation), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } - protected Mono doFindAndModify(String collectionName, Document query, Document fields, Document sort, - Class 
entityClass, Update update, FindAndModifyOptions options) { + protected Mono doFindAndModify(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, Document sort, + Class entityClass, UpdateDefinition update, FindAndModifyOptions options) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + UpdateContext updateContext = queryOperations.updateSingleContext(update, query, false); + updateContext.increaseVersionForUpdateIfNecessary(entity); return Mono.defer(() -> { - increaseVersionForUpdateIfNecessary(entity, update); - - Document mappedQuery = queryMapper.getMappedObject(query, entity); - Document mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity); + Document mappedQuery = updateContext.getMappedQuery(entity); + Object mappedUpdate = updateContext.isAggregationUpdate() ? updateContext.getUpdatePipeline(entityClass) + : updateContext.getMappedUpdate(entity); if (LOGGER.isDebugEnabled()) { LOGGER.debug(String.format( "findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s " + "in collection: %s", - serializeToJsonSafely(mappedQuery), fields, sort, entityClass, serializeToJsonSafely(mappedUpdate), + serializeToJsonSafely(mappedQuery), fields, serializeToJsonSafely(sort), entityClass, + serializeToJsonSafely(mappedUpdate), collectionName)); } - return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options), - new ReadDocumentCallback(this.mongoConverter, entityClass, collectionName), collectionName); + return executeFindOneInternal( + new FindAndModifyCallback(collectionPreparer, mappedQuery, fields, sort, mappedUpdate, + update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()), options), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); }); } - protected void maybeEmitEvent(MongoMappingEvent event) { - if (null != 
eventPublisher) { - eventPublisher.publishEvent(event); - } + /** + * Customize this part for findAndReplace. + * + * @param collectionName The name of the collection to perform the operation in. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param mappedQuery the query to look up documents. + * @param mappedFields the fields to project the result to. + * @param mappedSort the sort to be applied when executing the query. + * @param collation collation settings for the query. Can be {@literal null}. + * @param entityType the source domain type. + * @param replacement the replacement {@link Document}. + * @param options applicable options. + * @param resultType the target domain type. + * @return {@link Mono#empty()} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is + * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. + * @since 2.1 + */ + protected Mono doFindAndReplace(String collectionName, + CollectionPreparer> collectionPreparer, Document mappedQuery, Document mappedFields, + Document mappedSort, com.mongodb.client.model.Collation collation, Class entityType, Document replacement, + FindAndReplaceOptions options, Class resultType) { + + EntityProjection projection = operations.introspectProjection(resultType, entityType); + + return doFindAndReplace(collectionName, collectionPreparer, mappedQuery, mappedFields, mappedSort, collation, + entityType, replacement, options, projection); } /** - * Populates the id property of the saved object, if it's not set already. + * Customize this part for findAndReplace. * - * @param savedObject - * @param id - */ - private void populateIdIfNecessary(Object savedObject, @Nullable Object id) { + * @param collectionName The name of the collection to perform the operation in. + * @param collectionPreparer the preparer to prepare the collection for the actual use. 
+ * @param mappedQuery the query to look up documents. + * @param mappedFields the fields to project the result to. + * @param mappedSort the sort to be applied when executing the query. + * @param collation collation settings for the query. Can be {@literal null}. + * @param entityType the source domain type. + * @param replacement the replacement {@link Document}. + * @param options applicable options. + * @param projection the projection descriptor. + * @return {@link Mono#empty()} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is + * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. + * @since 3.4 + */ + private Mono doFindAndReplace(String collectionName, + CollectionPreparer> collectionPreparer, Document mappedQuery, Document mappedFields, + Document mappedSort, com.mongodb.client.model.Collation collation, Class entityType, Document replacement, + FindAndReplaceOptions options, EntityProjection projection) { + + return Mono.defer(() -> { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format( + "findAndReplace using query: %s fields: %s sort: %s for class: %s and replacement: %s " + + "in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, serializeToJsonSafely(mappedSort), entityType, + serializeToJsonSafely(replacement), collectionName)); + } - if (id == null) { - return; + return executeFindOneInternal(new FindAndReplaceCallback(collectionPreparer, mappedQuery, mappedFields, + mappedSort, replacement, collation, options), + new ProjectingReadCallback<>(this.mongoConverter, projection, collectionName), collectionName); + + }); + } + + protected , T> E maybeEmitEvent(E event) { + eventDelegate.publishEvent(event); + return event; + } + + protected Mono maybeCallBeforeConvert(T object, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(ReactiveBeforeConvertCallback.class, object, collection); } - if (savedObject 
instanceof Document) { - Document Document = (Document) savedObject; - Document.put(ID_FIELD, id); - return; + return Mono.just(object); + } + + protected Mono maybeCallBeforeSave(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(ReactiveBeforeSaveCallback.class, object, document, collection); } - MongoPersistentProperty idProp = getIdPropertyFor(savedObject.getClass()); + return Mono.just(object); + } + + protected Mono maybeCallAfterSave(T object, Document document, String collection) { - if (idProp == null) { - return; + if (entityCallbacks != null) { + return entityCallbacks.callback(ReactiveAfterSaveCallback.class, object, document, collection); } - ConversionService conversionService = mongoConverter.getConversionService(); - MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(savedObject.getClass()); - PersistentPropertyAccessor accessor = entity.getPropertyAccessor(savedObject); + return Mono.just(object); + } + + protected Mono maybeCallAfterConvert(T object, Document document, String collection) { - if (accessor.getProperty(idProp) != null) { - return; + if (entityCallbacks != null) { + return entityCallbacks.callback(ReactiveAfterConvertCallback.class, object, document, collection); } - new ConvertingPropertyAccessor(accessor, conversionService).setProperty(idProp, id); + return Mono.just(object); } private MongoCollection getAndPrepareCollection(MongoDatabase db, String collectionName) { try { - MongoCollection collection = db.getCollection(collectionName); + MongoCollection collection = db.getCollection(collectionName, Document.class); return prepareCollection(collection); } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } } - protected void ensureNotIterable(Object o) { - - boolean isIterable = o.getClass().isArray(); - - if (!isIterable) { - for (Class iterableClass : ITERABLE_CLASSES) { - if 
(iterableClass.isAssignableFrom(o.getClass()) || o.getClass().getName().equals(iterableClass.getName())) { - isIterable = true; - break; - } - } - } + /** + * Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or + * {@link Iterator}. + * + * @param source can be {@literal null}. + * @since 3.2. + */ + protected void ensureNotCollectionLike(@Nullable Object source) { - if (isIterable) { + if (EntityOperations.isCollectionLike(source) || source instanceof Publisher) { throw new IllegalArgumentException("Cannot use a collection here."); } } /** * Prepare the collection before any processing is done using it. This allows a convenient way to apply settings like - * slaveOk() etc. Can be overridden in sub-classes. + * withCodecRegistry() etc. Can be overridden in sub-classes. * * @param collection */ protected MongoCollection prepareCollection(MongoCollection collection) { - if (this.readPreference != null) { + + if (this.readPreference != null && this.readPreference != collection.getReadPreference()) { return collection.withReadPreference(readPreference); } + return collection; } + /** + * @param database + * @return + * @since 2.1 + */ + protected MongoDatabase prepareDatabase(MongoDatabase database) { + return database; + } + /** * Prepare the WriteConcern before any processing is done using it. This allows a convenient way to apply custom * settings in sub-classes.
          @@ -2253,13 +2671,20 @@ protected WriteConcern prepareWriteConcern(MongoAction mongoAction) { return potentiallyForceAcknowledgedWrite(wc); } + /** + * @return the {@link MongoDatabaseFactory} in use. + * @since 3.1.4 + */ + public ReactiveMongoDatabaseFactory getMongoDatabaseFactory() { + return mongoDatabaseFactory; + } + @Nullable private WriteConcern potentiallyForceAcknowledgedWrite(@Nullable WriteConcern wc) { - if (ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking) - && MongoClientVersion.isMongo3Driver()) { + if (ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking)) { if (wc == null || wc.getWObject() == null - || (wc.getWObject() instanceof Number && ((Number) wc.getWObject()).intValue() < 1)) { + || (wc.getWObject()instanceof Number concern && concern.intValue() < 1)) { return WriteConcern.ACKNOWLEDGED; } } @@ -2283,7 +2708,7 @@ private Mono executeFindOneInternal(ReactiveCollectionCallback DocumentCallback objectCallback, String collectionName) { return createMono(collectionName, - collection -> Mono.from(collectionCallback.doInCollection(collection)).map(objectCallback::doWith)); + collection -> Mono.from(collectionCallback.doInCollection(collection)).flatMap(objectCallback::doWith)); } /** @@ -2308,30 +2733,13 @@ private Flux executeFindMultiInternal(ReactiveCollectionQueryCallback objectCallback, String collectionName) { return createFlux(collectionName, collection -> { - - FindPublisher findPublisher = collectionCallback.doInCollection(collection); - - if (preparer != null) { - findPublisher = preparer.prepare(findPublisher); - } - return Flux.from(findPublisher).map(objectCallback::doWith); + return Flux.from(preparer.initiateFind(collection, collectionCallback::doInCollection)) + .flatMapSequential(objectCallback::doWith); }); } - private T execute(MongoDatabaseCallback action) { - - Assert.notNull(action, "MongoDatabaseCallback must not be null!"); - - try { - MongoDatabase db = 
this.getMongoDatabase(); - return action.doInDatabase(db); - } catch (RuntimeException e) { - throw potentiallyConvertRuntimeException(e, exceptionTranslator); - } - } - /** - * Exception translation {@link Function} intended for {@link Flux#mapError(Function)}} usage. + * Exception translation {@link Function} intended for {@link Flux#onErrorMap(Function)} usage. * * @return the exception translation {@link Function} */ @@ -2339,8 +2747,8 @@ private Function translateException() { return throwable -> { - if (throwable instanceof RuntimeException) { - return potentiallyConvertRuntimeException((RuntimeException) throwable, exceptionTranslator); + if (throwable instanceof RuntimeException runtimeException) { + return potentiallyConvertRuntimeException(runtimeException, exceptionTranslator); } return throwable; @@ -2366,37 +2774,7 @@ private MongoPersistentEntity getPersistentEntity(@Nullable Class type) { return type == null ? null : mappingContext.getPersistentEntity(type); } - @Nullable - private MongoPersistentProperty getIdPropertyFor(@Nullable Class type) { - - if (type == null) { - return null; - } - - MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(type); - return persistentEntity != null ? 
persistentEntity.getIdProperty() : null; - } - - private String determineEntityCollectionName(@Nullable T obj) { - - if (null != obj) { - return determineCollectionName(obj.getClass()); - } - - return null; - } - - String determineCollectionName(@Nullable Class entityClass) { - - if (entityClass == null) { - throw new InvalidDataAccessApiUsageException( - "No class parameter provided, entity collection can't be determined!"); - } - - return mappingContext.getRequiredPersistentEntity(entityClass).getCollection(); - } - - private static MappingMongoConverter getDefaultMongoConverter() { + private MappingMongoConverter getDefaultMongoConverter() { MongoCustomConversions conversions = new MongoCustomConversions(Collections.emptyList()); @@ -2406,98 +2784,68 @@ private static MappingMongoConverter getDefaultMongoConverter() { MappingMongoConverter converter = new MappingMongoConverter(NO_OP_REF_RESOLVER, context); converter.setCustomConversions(conversions); + converter.setCodecRegistryProvider(this.mongoDatabaseFactory); converter.afterPropertiesSet(); return converter; } + @Nullable private Document getMappedSortObject(Query query, Class type) { if (query == null) { return null; } - return queryMapper.getMappedSort(query.getSortObject(), mappingContext.getPersistentEntity(type)); + return getMappedSortObject(query.getSortObject(), type); } - /** - * @param objectToSave - * @param writer - * @return - */ - private Document toDbObject(T objectToSave, MongoWriter writer) { - - if (objectToSave instanceof Document) { - return (Document) objectToSave; - } - - if (!(objectToSave instanceof String)) { - Document dbDoc = new Document(); - writer.write(objectToSave, dbDoc); + @Nullable + private Document getMappedSortObject(Document sortObject, Class type) { - if (dbDoc.containsKey(ID_FIELD) && dbDoc.get(ID_FIELD) == null) { - dbDoc.remove(ID_FIELD); - } - return dbDoc; - } else { - try { - return Document.parse((String) objectToSave); - } catch (JSONParseException | 
org.bson.json.JsonParseException e) { - throw new MappingException("Could not parse given String to save into a JSON document!", e); - } + if (ObjectUtils.isEmpty(sortObject)) { + return null; } - } - - private void initializeVersionProperty(Object entity) { - MongoPersistentEntity mongoPersistentEntity = getPersistentEntity(entity.getClass()); - - if (mongoPersistentEntity != null && mongoPersistentEntity.hasVersionProperty()) { - ConvertingPropertyAccessor accessor = new ConvertingPropertyAccessor( - mongoPersistentEntity.getPropertyAccessor(entity), mongoConverter.getConversionService()); - accessor.setProperty(mongoPersistentEntity.getRequiredVersionProperty(), 0); - } + return queryMapper.getMappedSort(sortObject, mappingContext.getPersistentEntity(type)); } // Callback implementations /** * Simple {@link ReactiveCollectionCallback} that takes a query {@link Document} plus an optional fields specification - * {@link Document} and executes that against the {@link DBCollection}. + * {@link Document} and executes that against the {@link MongoCollection}. 
* * @author Oliver Gierke * @author Thomas Risberg + * @author Christoph Strobl */ private static class FindOneCallback implements ReactiveCollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Optional fields; - private final Optional collation; + private final FindPublisherPreparer preparer; - FindOneCallback(Document query, @Nullable Document fields, @Nullable Collation collation) { + FindOneCallback(CollectionPreparer> collectionPreparer, Document query, + @Nullable Document fields, FindPublisherPreparer preparer) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = Optional.ofNullable(fields); - this.collation = Optional.ofNullable(collation); + this.preparer = preparer; } @Override public Publisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - FindPublisher publisher = collection.find(query); - - if (LOGGER.isDebugEnabled()) { - - LOGGER.debug("findOne using query: {} fields: {} in db.collection: {}", serializeToJsonSafely(query), - serializeToJsonSafely(fields.orElseGet(Document::new)), collection.getNamespace().getFullName()); - } + FindPublisher publisher = preparer.initiateFind(collectionPreparer.prepare(collection), + col -> col.find(query, Document.class)); if (fields.isPresent()) { publisher = publisher.projection(fields.get()); } - publisher = collation.map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher); - return publisher.limit(1).first(); } } @@ -2510,14 +2858,17 @@ public Publisher doInCollection(MongoCollection collection) */ private static class FindCallback implements ReactiveCollectionQueryCallback { + private final CollectionPreparer> collectionPreparer; + private final @Nullable Document query; private final @Nullable Document fields; - FindCallback(@Nullable Document query) { - this(query, null); + FindCallback(CollectionPreparer> collectionPreparer, @Nullable Document query) { 
+ this(collectionPreparer, query, null); } - FindCallback(Document query, Document fields) { + FindCallback(CollectionPreparer> collectionPreparer, Document query, Document fields) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; } @@ -2525,14 +2876,15 @@ private static class FindCallback implements ReactiveCollectionQueryCallback doInCollection(MongoCollection collection) { + MongoCollection collectionToUse = collectionPreparer.prepare(collection); FindPublisher findPublisher; - if (query == null || query.isEmpty()) { - findPublisher = collection.find(); + if (ObjectUtils.isEmpty(query)) { + findPublisher = collectionToUse.find(Document.class); } else { - findPublisher = collection.find(query); + findPublisher = collectionToUse.find(query, Document.class); } - if (fields == null || fields.isEmpty()) { + if (ObjectUtils.isEmpty(fields)) { return findPublisher; } else { return findPublisher.projection(fields); @@ -2548,13 +2900,15 @@ public FindPublisher doInCollection(MongoCollection collecti */ private static class FindAndRemoveCallback implements ReactiveCollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; private final Document sort; private final Optional collation; - FindAndRemoveCallback(Document query, Document fields, Document sort, @Nullable Collation collation) { - + FindAndRemoveCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, @Nullable Collation collation) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.sort = sort; @@ -2568,7 +2922,7 @@ public Publisher doInCollection(MongoCollection collection) FindOneAndDeleteOptions findOneAndDeleteOptions = convertToFindOneAndDeleteOptions(fields, sort); collation.map(Collation::toMongoCollation).ifPresent(findOneAndDeleteOptions::collation); - return collection.findOneAndDelete(query, 
findOneAndDeleteOptions); + return collectionPreparer.prepare(collection).findOneAndDelete(query, findOneAndDeleteOptions); } } @@ -2577,19 +2931,23 @@ public Publisher doInCollection(MongoCollection collection) */ private static class FindAndModifyCallback implements ReactiveCollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; private final Document sort; - private final Document update; + private final Object update; + private final List arrayFilters; private final FindAndModifyOptions options; - FindAndModifyCallback(Document query, Document fields, Document sort, Document update, - FindAndModifyOptions options) { + FindAndModifyCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, Object update, List arrayFilters, FindAndModifyOptions options) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.sort = sort; this.update = update; + this.arrayFilters = arrayFilters; this.options = options; } @@ -2597,21 +2955,30 @@ private static class FindAndModifyCallback implements ReactiveCollectionCallback public Publisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + MongoCollection collectionToUse = collectionPreparer.prepare(collection); if (options.isRemove()) { FindOneAndDeleteOptions findOneAndDeleteOptions = convertToFindOneAndDeleteOptions(fields, sort); findOneAndDeleteOptions = options.getCollation().map(Collation::toMongoCollation) .map(findOneAndDeleteOptions::collation).orElse(findOneAndDeleteOptions); - return collection.findOneAndDelete(query, findOneAndDeleteOptions); + return collectionToUse.findOneAndDelete(query, findOneAndDeleteOptions); + } + + FindOneAndUpdateOptions findOneAndUpdateOptions = convertToFindOneAndUpdateOptions(options, fields, sort, + arrayFilters); + if (update instanceof Document document) { + return 
collection.findOneAndUpdate(query, document, findOneAndUpdateOptions); + } else if (update instanceof List) { + return collectionToUse.findOneAndUpdate(query, (List) update, findOneAndUpdateOptions); } - FindOneAndUpdateOptions findOneAndUpdateOptions = convertToFindOneAndUpdateOptions(options, fields, sort); - return collection.findOneAndUpdate(query, update, findOneAndUpdateOptions); + return Flux + .error(new IllegalArgumentException(String.format("Using %s is not supported in findOneAndUpdate", update))); } - private FindOneAndUpdateOptions convertToFindOneAndUpdateOptions(FindAndModifyOptions options, Document fields, - Document sort) { + private static FindOneAndUpdateOptions convertToFindOneAndUpdateOptions(FindAndModifyOptions options, + Document fields, Document sort, List arrayFilters) { FindOneAndUpdateOptions result = new FindOneAndUpdateOptions(); @@ -2625,6 +2992,64 @@ private FindOneAndUpdateOptions convertToFindOneAndUpdateOptions(FindAndModifyOp result = options.getCollation().map(Collation::toMongoCollation).map(result::collation).orElse(result); + if (!CollectionUtils.isEmpty(arrayFilters)) { + result.arrayFilters(arrayFilters); + } + + return result; + } + } + + /** + * {@link ReactiveCollectionCallback} specific for find and remove operation. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ + private static class FindAndReplaceCallback implements ReactiveCollectionCallback { + + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Document fields; + private final Document sort; + private final Document update; + private final @Nullable com.mongodb.client.model.Collation collation; + private final FindAndReplaceOptions options; + + FindAndReplaceCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, Document update, com.mongodb.client.model.Collation collation, + FindAndReplaceOptions options) { + this.collectionPreparer = collectionPreparer; + this.query = query; + this.fields = fields; + this.sort = sort; + this.update = update; + this.collation = collation; + this.options = options; + } + + @Override + public Publisher doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + + FindOneAndReplaceOptions findOneAndReplaceOptions = convertToFindOneAndReplaceOptions(options, fields, sort); + return collectionPreparer.prepare(collection).findOneAndReplace(query, update, findOneAndReplaceOptions); + } + + private FindOneAndReplaceOptions convertToFindOneAndReplaceOptions(FindAndReplaceOptions options, Document fields, + Document sort) { + + FindOneAndReplaceOptions result = new FindOneAndReplaceOptions().collation(collation); + + result = result.projection(fields).sort(sort).upsert(options.isUpsert()); + + if (options.isReturnNew()) { + result = result.returnDocument(ReturnDocument.AFTER); + } else { + result = result.returnDocument(ReturnDocument.BEFORE); + } + return result; } } @@ -2645,7 +3070,7 @@ private static FindOneAndDeleteOptions convertToFindOneAndDeleteOptions(Document interface DocumentCallback { - T doWith(Document object); + Mono doWith(Document object); } /** @@ -2666,6 +3091,7 @@ interface MongoDatabaseCallback { */ interface 
ReactiveCollectionQueryCallback extends ReactiveCollectionCallback { + @Override FindPublisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException; } @@ -2674,6 +3100,7 @@ interface ReactiveCollectionQueryCallback extends ReactiveCollectionCallback< * {@link EntityReader}. * * @author Mark Paluch + * @author Roman Puchkovskiy */ class ReadDocumentCallback implements DocumentCallback { @@ -2683,66 +3110,68 @@ class ReadDocumentCallback implements DocumentCallback { ReadDocumentCallback(EntityReader reader, Class type, String collectionName) { - Assert.notNull(reader, "EntityReader must not be null!"); - Assert.notNull(type, "Entity type must not be null!"); + Assert.notNull(reader, "EntityReader must not be null"); + Assert.notNull(type, "Entity type must not be null"); this.reader = reader; this.type = type; this.collectionName = collectionName; } - public T doWith(@Nullable Document object) { + @Override + public Mono doWith(Document document) { + + maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); - if (null != object) { - maybeEmitEvent(new AfterLoadEvent(object, type, collectionName)); - } - T source = reader.read(type, object); - if (null != source) { - maybeEmitEvent(new AfterConvertEvent(object, source, collectionName)); + T entity = reader.read(type, document); + + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); } - return source; + + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + return maybeCallAfterConvert(entity, document, collectionName); } } /** - * {@link MongoTemplate.DocumentCallback} transforming {@link Document} into the given {@code targetType} or - * decorating the {@code sourceType} with a {@literal projection} in case the {@code targetType} is an - * {@litera interface}. 
+ * {@link DocumentCallback} transforming {@link Document} into the given {@code targetType} or decorating the + * {@code sourceType} with a {@literal projection} in case the {@code targetType} is an {@literal interface}. * * @param * @param * @author Christoph Strobl + * @author Roman Puchkovskiy * @since 2.0 */ - @RequiredArgsConstructor private class ProjectingReadCallback implements DocumentCallback { - private final @NonNull EntityReader reader; - private final @NonNull Class entityType; - private final @NonNull Class targetType; - private final @NonNull String collectionName; - - @Nullable - public T doWith(@Nullable Document object) { + private final MongoConverter reader; + private final EntityProjection projection; + private final String collectionName; - if (object == null) { - return null; - } + ProjectingReadCallback(MongoConverter reader, EntityProjection projection, String collectionName) { + this.reader = reader; + this.projection = projection; + this.collectionName = collectionName; + } - Class typeToRead = targetType.isInterface() || targetType.isAssignableFrom(entityType) ? entityType - : targetType; + @Override + @SuppressWarnings("unchecked") + public Mono doWith(Document document) { - if (null != object) { - maybeEmitEvent(new AfterLoadEvent<>(object, typeToRead, collectionName)); - } + Class returnType = projection.getMappedType().getType(); + maybeEmitEvent(new AfterLoadEvent<>(document, returnType, collectionName)); - Object source = reader.read(typeToRead, object); - Object result = targetType.isInterface() ? 
projectionFactory.createProjection(targetType, source) : source; + Object entity = reader.project(projection, document); - if (null != source) { - maybeEmitEvent(new AfterConvertEvent<>(object, result, collectionName)); + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); } - return (T) result; + + T castEntity = (T) entity; + maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName)); + return maybeCallAfterConvert(castEntity, document, collectionName); } } @@ -2751,34 +3180,47 @@ public T doWith(@Nullable Document object) { * a delegate and creates a {@link GeoResult} from the result. * * @author Mark Paluch + * @author Chrstoph Strobl + * @author Roman Puchkovskiy */ - static class GeoNearResultDbObjectCallback implements DocumentCallback> { + static class GeoNearResultDocumentCallback implements DocumentCallback> { + private final String distanceField; private final DocumentCallback delegate; private final Metric metric; /** - * Creates a new {@link GeoNearResultDbObjectCallback} using the given {@link DbObjectCallback} delegate for + * Creates a new {@link GeoNearResultDocumentCallback} using the given {@link DocumentCallback} delegate for * {@link GeoResult} content unmarshalling. * + * @param distanceField the field to read the distance from. * @param delegate must not be {@literal null}. + * @param metric the {@link Metric} to apply to the result distance. 
*/ - GeoNearResultDbObjectCallback(DocumentCallback delegate, Metric metric) { + GeoNearResultDocumentCallback(String distanceField, DocumentCallback delegate, Metric metric) { - Assert.notNull(delegate, "DocumentCallback must not be null!"); + Assert.notNull(delegate, "DocumentCallback must not be null"); + this.distanceField = distanceField; this.delegate = delegate; this.metric = metric; } - public GeoResult doWith(Document object) { + @Override + public Mono> doWith(Document object) { + + double distance = getDistance(object); + + return delegate.doWith(object).map(doWith -> new GeoResult<>(doWith, new Distance(distance, metric))); + } - double distance = (Double) object.get("dis"); - Document content = (Document) object.get("obj"); + double getDistance(Document object) { - T doWith = delegate.doWith(content); + if (object.containsKey(distanceField)) { + return NumberUtils.convertNumberToTargetClass(object.get(distanceField, Number.class), Double.class); + } - return new GeoResult(doWith, new Distance(distance, metric)); + return Double.NaN; } } @@ -2787,63 +3229,89 @@ public GeoResult doWith(Document object) { */ class QueryFindPublisherPreparer implements FindPublisherPreparer { - private final @Nullable Query query; + private final Query query; + + private final Document sortObject; + + private final int limit; + + private final long skip; private final @Nullable Class type; - QueryFindPublisherPreparer(@Nullable Query query, @Nullable Class type) { + QueryFindPublisherPreparer(Query query, @Nullable Class type) { + this(query, query.getSortObject(), query.getLimit(), query.getSkip(), type); + } + + QueryFindPublisherPreparer(Query query, Document sortObject, int limit, long skip, @Nullable Class type) { this.query = query; + this.sortObject = sortObject; + this.limit = limit; + this.skip = skip; this.type = type; } - public FindPublisher prepare(FindPublisher findPublisher) { - - if (query == null) { - return findPublisher; - } - - FindPublisher 
findPublisherToUse; + @Override + public FindPublisher prepare(FindPublisher findPublisher) { - findPublisherToUse = query.getCollation().map(Collation::toMongoCollation).map(findPublisher::collation) + FindPublisher findPublisherToUse = operations.forType(type) // + .getCollation(query) // + .map(Collation::toMongoCollation) // + .map(findPublisher::collation) // .orElse(findPublisher); - if (query.getSkip() <= 0 && query.getLimit() <= 0 && ObjectUtils.isEmpty(query.getSortObject()) - && !StringUtils.hasText(query.getHint()) && !query.getMeta().hasValues()) { + HintFunction hintFunction = HintFunction.from(query.getHint()); + Meta meta = query.getMeta(); + if (skip <= 0 && limit <= 0 && ObjectUtils.isEmpty(sortObject) && hintFunction.isEmpty() && !meta.hasValues()) { return findPublisherToUse; } try { - if (query.getSkip() > 0) { - findPublisherToUse = findPublisherToUse.skip((int) query.getSkip()); + + if (skip > 0) { + findPublisherToUse = findPublisherToUse.skip((int) skip); } - if (query.getLimit() > 0) { - findPublisherToUse = findPublisherToUse.limit(query.getLimit()); + + if (limit > 0) { + findPublisherToUse = findPublisherToUse.limit(limit); } - if (!ObjectUtils.isEmpty(query.getSortObject())) { - Document sort = type != null ? getMappedSortObject(query, type) : query.getSortObject(); + + if (!ObjectUtils.isEmpty(sortObject)) { + Document sort = type != null ? 
getMappedSortObject(sortObject, type) : sortObject; findPublisherToUse = findPublisherToUse.sort(sort); } - BasicDBObject modifiers = new BasicDBObject(); - if (StringUtils.hasText(query.getHint())) { - modifiers.append("$hint", query.getHint()); + if (hintFunction.isPresent()) { + findPublisherToUse = hintFunction.apply(mongoDatabaseFactory, findPublisherToUse::hintString, + findPublisherToUse::hint); } - if (query.getMeta().hasValues()) { - for (Entry entry : query.getMeta().values()) { - modifiers.append(entry.getKey(), entry.getValue()); + if (meta.hasValues()) { + + if (meta.hasComment()) { + findPublisherToUse = findPublisherToUse.comment(meta.getRequiredComment()); + } + + if (meta.hasMaxTime()) { + findPublisherToUse = findPublisherToUse.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); + } + + if (meta.getCursorBatchSize() != null) { + findPublisherToUse = findPublisherToUse.batchSize(meta.getCursorBatchSize()); } - } - if (!modifiers.isEmpty()) { - findPublisherToUse = findPublisherToUse.modifiers(modifiers); + if (meta.getAllowDiskUse() != null) { + findPublisherToUse = findPublisherToUse.allowDiskUse(meta.getAllowDiskUse()); + } } + } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } return findPublisherToUse; } + } class TailingQueryFindPublisherPreparer extends QueryFindPublisherPreparer { @@ -2853,44 +3321,139 @@ class TailingQueryFindPublisherPreparer extends QueryFindPublisherPreparer { } @Override - public FindPublisher prepare(FindPublisher findPublisher) { + public FindPublisher prepare(FindPublisher findPublisher) { return super.prepare(findPublisher.cursorType(CursorType.TailableAwait)); } } - private static List toDocuments(final Collection documents) { + private static List toDocuments(Collection documents) { return new ArrayList<>(documents); } /** - * No-Operation {@link org.springframework.data.mongodb.core.mapping.DBRef} resolver. 
+ * {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the + * server through the driver API.
          + * The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired + * target method matching the actual arguments plus a {@link ClientSession}. * - * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 */ - static class NoOpDbRefResolver implements DbRefResolver { + static class ReactiveSessionBoundMongoTemplate extends ReactiveMongoTemplate { + + private final ReactiveMongoTemplate delegate; + private final ClientSession session; + + /** + * @param session must not be {@literal null}. + * @param that must not be {@literal null}. + */ + ReactiveSessionBoundMongoTemplate(ClientSession session, ReactiveMongoTemplate that) { + + super(that.mongoDatabaseFactory.withSession(session), that); + + this.delegate = that; + this.session = session; + } @Override - @Nullable - public Object resolveDbRef(@Nonnull MongoPersistentProperty property, @Nonnull DBRef dbref, - @Nonnull DbRefResolverCallback callback, @Nonnull DbRefProxyHandler proxyHandler) { - return null; + public Mono> getCollection(String collectionName) { + + // native MongoDB objects that offer methods with ClientSession must not be proxied. + return delegate.getCollection(collectionName); } @Override - @Nullable - public DBRef createDbRef(org.springframework.data.mongodb.core.mapping.DBRef annotation, - MongoPersistentEntity entity, Object id) { - return null; + public Mono getMongoDatabase() { + + // native MongoDB objects that offer methods with ClientSession must not be proxied. 
+ return delegate.getMongoDatabase(); } @Override - public Document fetch(DBRef dbRef) { - return null; + protected Mono countCanBeEstimated(Document filter, CountOptions options) { + return Mono.just(false); + } + } + + class IndexCreatorEventListener implements ApplicationListener> { + + final Consumer subscriptionExceptionHandler; + + public IndexCreatorEventListener(Consumer subscriptionExceptionHandler) { + this.subscriptionExceptionHandler = subscriptionExceptionHandler; } @Override - public List bulkFetch(List dbRefs) { - return Collections.emptyList(); + public void onApplicationEvent(MappingContextEvent event) { + + if (!event.wasEmittedBy(mappingContext)) { + return; + } + + PersistentEntity entity = event.getPersistentEntity(); + + // Double check type as Spring infrastructure does not consider nested generics + if (entity instanceof MongoPersistentEntity mongoPersistentProperties) { + + onCheckForIndexes(mongoPersistentProperties, subscriptionExceptionHandler); + } + } + } + + /** + * Value object chaining together a given source document with its mapped representation and the collection to persist + * it to. 
+ * + * @param + * @author Christoph Strobl + * @since 2.2 + */ + private static class PersistableEntityModel { + + private final T source; + private final @Nullable Document target; + private final String collection; + + private PersistableEntityModel(T source, @Nullable Document target, String collection) { + + this.source = source; + this.target = target; + this.collection = collection; + } + + static PersistableEntityModel of(T source, String collection) { + return new PersistableEntityModel<>(source, null, collection); } + + static PersistableEntityModel of(T source, Document target, String collection) { + return new PersistableEntityModel<>(source, target, collection); + } + + PersistableEntityModel mutate(T source) { + return new PersistableEntityModel(source, target, collection); + } + + PersistableEntityModel addTargetDocument(Document target) { + return new PersistableEntityModel(source, target, collection); + } + + T getSource() { + return source; + } + + @Nullable + Document getTarget() { + return target; + } + + String getCollection() { + return collection; + } + } + + @FunctionalInterface + interface CountExecution { + Mono countDocuments(String collection, Document filter, CountOptions options); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java index 6f46be917c..378f13d917 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,6 +18,7 @@ import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.Query; import com.mongodb.client.result.DeleteResult; @@ -68,7 +69,7 @@ interface TerminatingRemove { /** * Remove and return all matching documents.
          - * NOTE The entire list of documents will be fetched before sending the actual delete commands. + * NOTE: The entire list of documents will be fetched before sending the actual delete commands. * Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete * operation. * @@ -106,6 +107,18 @@ interface RemoveWithQuery extends TerminatingRemove { * @throws IllegalArgumentException if query is {@literal null}. */ TerminatingRemove matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingRemove}. + * @throws IllegalArgumentException if query is {@literal null}. + * @since 3.0 + */ + default TerminatingRemove matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } } interface ReactiveRemove extends RemoveWithCollection {} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java index b94440555e..97c9cb0d0e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,6 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; @@ -35,62 +31,55 @@ * @author Christoph Strobl * @since 2.0 */ -@RequiredArgsConstructor class ReactiveRemoveOperationSupport implements ReactiveRemoveOperation { private static final Query ALL_QUERY = new Query(); - private final @NonNull ReactiveMongoTemplate tempate; + private final ReactiveMongoTemplate template; + + ReactiveRemoveOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation#remove(java.lang.Class) - */ @Override public ReactiveRemove remove(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); - return new ReactiveRemoveSupport<>(tempate, domainType, ALL_QUERY, null); + return new ReactiveRemoveSupport<>(template, domainType, ALL_QUERY, null); } - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) static class ReactiveRemoveSupport implements ReactiveRemove, RemoveWithCollection { - @NonNull ReactiveMongoTemplate template; - @NonNull Class domainType; - Query query; - String collection; + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Query query; + private final String collection; + + ReactiveRemoveSupport(ReactiveMongoTemplate template, Class domainType, Query query, String collection) { + + this.template = 
template; + this.domainType = domainType; + this.query = query; + this.collection = collection; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation.RemoveWithCollection#inCollection(String) - */ @Override public RemoveWithQuery inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ReactiveRemoveSupport<>(template, domainType, query, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation.RemoveWithQuery#matching(org.springframework.data.mongodb.core.Query) - */ @Override public TerminatingRemove matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ReactiveRemoveSupport<>(template, domainType, query, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation.TerminatingRemove#all() - */ @Override public Mono all() { @@ -99,10 +88,6 @@ public Mono all() { return template.doRemove(collectionName, query, domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation.TerminatingRemove#findAndRemove() - */ @Override public Flux findAndRemove() { @@ -112,7 +97,7 @@ public Flux findAndRemove() { } private String getCollectionName() { - return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType); + return StringUtils.hasText(collection) ? 
collection : template.getCollectionName(domainType); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java new file mode 100644 index 0000000000..aeb0e88e24 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.reactivestreams.Publisher; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Callback interface for executing operations within a {@link com.mongodb.reactivestreams.client.ClientSession} using + * reactive infrastructure. + * + * @author Christoph Strobl + * @since 2.1 + * @see com.mongodb.reactivestreams.client.ClientSession + */ +@FunctionalInterface +public interface ReactiveSessionCallback { + + /** + * Execute operations against a MongoDB instance via session bound {@link ReactiveMongoOperations}. The session is + * inferred directly into the operation so that no further interaction is necessary. + *
          + * Please note that only Spring Data-specific abstractions like {@link ReactiveMongoOperations#find(Query, Class)} and + * others are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway + * objects like {@link com.mongodb.reactivestreams.client.MongoCollection} or + * {@link com.mongodb.reactivestreams.client.MongoDatabase} via eg. + * {@link ReactiveMongoOperations#getCollection(String)} we leave responsibility for + * {@link com.mongodb.session.ClientSession} again up to the caller. + * + * @param operations will never be {@literal null}. + * @return never {@literal null}. + */ + Publisher doInSession(ReactiveMongoOperations operations); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java new file mode 100644 index 0000000000..f0ffc1ba60 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java @@ -0,0 +1,62 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; + +import java.util.function.Consumer; + +import com.mongodb.reactivestreams.client.ClientSession; + +/** + * Gateway interface to execute {@link ClientSession} bound operations against MongoDB via a + * {@link ReactiveSessionCallback}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public interface ReactiveSessionScoped { + + /** + * Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}. + *
          + * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() + * closed} when done. + * + * @param action callback object that specifies the MongoDB action to execute. Must not be {@literal null}. + * @param return type. + * @return a result object returned by the action, can be {@link Flux#empty()}. + */ + default Flux execute(ReactiveSessionCallback action) { + return execute(action, (session) -> {}); + } + + /** + * Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}. + *
          + * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() + * closed} when done. + * + * @param action callback object that specifies the MongoDB action to execute. Must not be {@literal null}. + * @param doFinally callback object that accepts {@link ClientSession} after invoking {@link ReactiveSessionCallback}. + * This {@link Consumer} is guaranteed to be notified in any case (successful and exceptional outcome of + * {@link ReactiveSessionCallback}). + * @param return type. + * @return a result object returned by the action, can be {@link Flux#empty()}. + */ + Flux execute(ReactiveSessionCallback action, Consumer doFinally); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java index dce516b412..51f75f3265 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,13 +17,17 @@ import reactor.core.publisher.Mono; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import com.mongodb.client.result.UpdateResult; /** - * {@link ReactiveUpdateOperation} allows creation and execution of reactive MongoDB update / findAndModify operations - * in a fluent API style.
          + * {@link ReactiveUpdateOperation} allows creation and execution of reactive MongoDB update / findAndModify / + * findAndReplace operations in a fluent API style.
          * The starting {@literal domainType} is used for mapping the {@link Query} provided via {@code matching}, as well as * the {@link org.springframework.data.mongodb.core.query.Update} via {@code apply} into the MongoDB specific * representations. The collection to operate on is by default derived from the initial {@literal domainType} and can be @@ -68,6 +72,39 @@ interface TerminatingFindAndModify { Mono findAndModify(); } + /** + * Trigger replaceOne + * execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 4.2 + */ + interface TerminatingReplace { + + /** + * Find first and replace/upsert. + * + * @return never {@literal null}. + */ + Mono replaceFirst(); + } + + /** + * Compose findAndReplace execution by calling one of the terminating methods. + * + * @author Mark Paluch + * @since 2.1 + */ + interface TerminatingFindAndReplace extends TerminatingReplace { + + /** + * Find, replace and return the first matching document. + * + * @return {@link Mono#empty()} if nothing found. Never {@literal null}. + */ + Mono findAndReplace(); + } + /** * Compose update execution by calling one of the terminating methods. */ @@ -101,13 +138,26 @@ interface TerminatingUpdate extends TerminatingFindAndModify, FindAndModif interface UpdateWithUpdate { /** - * Set the {@link org.springframework.data.mongodb.core.query.Update} to be applied. + * Set the {@link UpdateDefinition} to be applied. * * @param update must not be {@literal null}. * @return new instance of {@link TerminatingUpdate}. Never {@literal null}. * @throws IllegalArgumentException if update is {@literal null}. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - TerminatingUpdate apply(org.springframework.data.mongodb.core.query.Update update); + TerminatingUpdate apply(UpdateDefinition update); + + /** + * Specify {@code replacement} object. + * + * @param replacement must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. 
+ * @throws IllegalArgumentException if options is {@literal null}. + * @since 2.1 + */ + FindAndReplaceWithProjection replaceWith(T replacement); } /** @@ -139,6 +189,18 @@ interface UpdateWithQuery extends UpdateWithUpdate { * @throws IllegalArgumentException if query is {@literal null}. */ UpdateWithUpdate matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link UpdateWithUpdate}. + * @throws IllegalArgumentException if query is {@literal null}. + * @since 3.0 + */ + default UpdateWithUpdate matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } } /** @@ -157,5 +219,61 @@ interface FindAndModifyWithOptions { TerminatingFindAndModify withOptions(FindAndModifyOptions options); } + /** + * @author Christoph Strobl + * @since 4.2 + */ + interface ReplaceWithOptions extends TerminatingReplace { + + /** + * Explicitly define {@link ReplaceOptions}. + * + * @param options must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. + * @throws IllegalArgumentException if options is {@literal null}. + */ + TerminatingReplace withOptions(ReplaceOptions options); + } + + /** + * Define {@link FindAndReplaceOptions}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ + interface FindAndReplaceWithOptions extends TerminatingFindAndReplace, ReplaceWithOptions { + + /** + * Explicitly define {@link FindAndReplaceOptions} for the {@link Update}. + * + * @param options must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. + * @throws IllegalArgumentException if options is {@literal null}. + */ + FindAndReplaceWithProjection withOptions(FindAndReplaceOptions options); + } + + /** + * Result type override (Optional). 
+ * + * @author Christoph Strobl + * @since 2.1 + */ + interface FindAndReplaceWithProjection extends FindAndReplaceWithOptions { + + /** + * Define the target type fields should be mapped to.
          + * Skip this step if you are anyway only interested in the original domain type. + * + * @param resultType must not be {@literal null}. + * @param result type. + * @return new instance of {@link FindAndReplaceWithProjection}. + * @throws IllegalArgumentException if resultType is {@literal null}. + */ + FindAndReplaceWithOptions as(Class resultType); + + } + interface ReactiveUpdate extends UpdateWithCollection, UpdateWithQuery, UpdateWithUpdate {} } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java index 5b7b024214..51cd99dc93 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,11 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; import reactor.core.publisher.Mono; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -34,122 +32,169 @@ * @author Christoph Strobl * @since 2.0 */ -@RequiredArgsConstructor class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation { private static final Query ALL_QUERY = new Query(); - private final @NonNull ReactiveMongoTemplate template; + private final ReactiveMongoTemplate template; + + ReactiveUpdateOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation#update(java.lang.Class) - */ @Override public ReactiveUpdate update(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); - return new ReactiveUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null); + return new ReactiveUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null, null, null, domainType); } - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) static class ReactiveUpdateSupport - implements ReactiveUpdate, UpdateWithCollection, UpdateWithQuery, TerminatingUpdate { - - @NonNull ReactiveMongoTemplate template; - @NonNull Class domainType; - 
Query query; - org.springframework.data.mongodb.core.query.Update update; - String collection; - FindAndModifyOptions options; - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithUpdate#apply(org.springframework.data.mongodb.core.query.Update) - */ + implements ReactiveUpdate, UpdateWithCollection, UpdateWithQuery, TerminatingUpdate, + FindAndReplaceWithOptions, FindAndReplaceWithProjection, TerminatingFindAndReplace { + + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Query query; + private final org.springframework.data.mongodb.core.query.UpdateDefinition update; + @Nullable private final String collection; + @Nullable private final FindAndModifyOptions findAndModifyOptions; + @Nullable private final FindAndReplaceOptions findAndReplaceOptions; + @Nullable private final Object replacement; + private final Class targetType; + + ReactiveUpdateSupport(ReactiveMongoTemplate template, Class domainType, Query query, UpdateDefinition update, + String collection, FindAndModifyOptions findAndModifyOptions, FindAndReplaceOptions findAndReplaceOptions, + Object replacement, Class targetType) { + + this.template = template; + this.domainType = domainType; + this.query = query; + this.update = update; + this.collection = collection; + this.findAndModifyOptions = findAndModifyOptions; + this.findAndReplaceOptions = findAndReplaceOptions; + this.replacement = replacement; + this.targetType = targetType; + } + @Override - public TerminatingUpdate apply(org.springframework.data.mongodb.core.query.Update update) { + public TerminatingUpdate apply(org.springframework.data.mongodb.core.query.UpdateDefinition update) { - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(update, "Update must not be null"); - return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, options); + return new ReactiveUpdateSupport<>(template, domainType, query, 
update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithCollection#inCollection(java.lang.String) - */ @Override public UpdateWithQuery inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); - return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, options); + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingUpdate#first() - */ @Override public Mono first() { return doUpdate(false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingUpdate#upsert() - */ @Override public Mono upsert() { return doUpdate(true, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingFindAndModify#findAndModify() - */ @Override public Mono findAndModify() { String collectionName = getCollectionName(); - return template.findAndModify(query, update, options, domainType, collectionName); + return template.findAndModify(query, update, + findAndModifyOptions != null ? findAndModifyOptions : FindAndModifyOptions.none(), targetType, + collectionName); + } + + @Override + public Mono findAndReplace() { + return template.findAndReplace(query, replacement, + findAndReplaceOptions != null ? 
findAndReplaceOptions : FindAndReplaceOptions.none(), (Class) domainType, + getCollectionName(), targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithQuery#matching(org.springframework.data.mongodb.core.Query) - */ @Override public UpdateWithUpdate matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); - return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, options); + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingUpdate#all() - */ @Override public Mono all() { return doUpdate(true, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.FindAndModifyWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndModifyOptions) - */ @Override public TerminatingFindAndModify withOptions(FindAndModifyOptions options) { - Assert.notNull(options, "Options must not be null!"); + Assert.notNull(options, "Options must not be null"); + + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, options, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public FindAndReplaceWithProjection replaceWith(T replacement) { + + Assert.notNull(replacement, "Replacement must not be null"); + + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType); + } + + @Override + public FindAndReplaceWithProjection withOptions(FindAndReplaceOptions options) { + + Assert.notNull(options, "Options must not be null"); + + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, 
findAndModifyOptions, options, + replacement, targetType); + } + + @Override + public TerminatingReplace withOptions(ReplaceOptions options) { + + FindAndReplaceOptions target = new FindAndReplaceOptions(); + if (options.isUpsert()) { + target.upsert(); + } + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + target, replacement, targetType); + } + + @Override + public FindAndReplaceWithOptions as(Class resultType) { + + Assert.notNull(resultType, "ResultType must not be null"); + + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, resultType); + } + + @Override + public Mono replaceFirst() { + + if (replacement != null) { + return template.replace(query, domainType, replacement, + findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); + } - return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, options); + return template.replace(query, domainType, update, + findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); } private Mono doUpdate(boolean multi, boolean upsert) { @@ -157,7 +202,7 @@ private Mono doUpdate(boolean multi, boolean upsert) { } private String getCollectionName() { - return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType); + return StringUtils.hasText(collection) ? 
collection : template.getCollectionName(domainType); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadConcernAware.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadConcernAware.java new file mode 100644 index 0000000000..00c5815fc9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadConcernAware.java @@ -0,0 +1,46 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.lang.Nullable; + +import com.mongodb.ReadConcern; + +/** + * Interface to be implemented by any object that wishes to expose the {@link ReadConcern}. + *

          + * Typically implemented by cursor or query preparer objects. + * + * @author Mark Paluch + * @since 4.1 + * @see org.springframework.data.mongodb.core.query.Query + * @see org.springframework.data.mongodb.core.aggregation.AggregationOptions + */ +public interface ReadConcernAware { + + /** + * @return {@literal true} if a {@link ReadConcern} is set. + */ + default boolean hasReadConcern() { + return getReadConcern() != null; + } + + /** + * @return the {@link ReadConcern} to apply or {@literal null} if none set. + */ + @Nullable + ReadConcern getReadConcern(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadPreferenceAware.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadPreferenceAware.java new file mode 100644 index 0000000000..74bca9abea --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadPreferenceAware.java @@ -0,0 +1,47 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.lang.Nullable; + +import com.mongodb.ReadPreference; + +/** + * Interface to be implemented by any object that wishes to expose the {@link ReadPreference}. + *

          + * Typically implemented by cursor or query preparer objects. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + * @see org.springframework.data.mongodb.core.query.Query + * @see org.springframework.data.mongodb.core.aggregation.AggregationOptions + */ +public interface ReadPreferenceAware { + + /** + * @return {@literal true} if a {@link ReadPreference} is set. + */ + default boolean hasReadPreference() { + return getReadPreference() != null; + } + + /** + * @return the {@link ReadPreference} to apply or {@literal null} if none set. + */ + @Nullable + ReadPreference getReadPreference(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReplaceOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReplaceOptions.java new file mode 100644 index 0000000000..a2e2ba24c0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReplaceOptions.java @@ -0,0 +1,87 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.data.mongodb.core.query.Query; + +/** + * Options for {@link org.springframework.data.mongodb.core.MongoOperations#replace(Query, Object) replace operations}. Defaults to + *

          + *
          upsert
          + *
          false
          + *
          + * + * @author Jakub Zurawa + * @author Christoph Strob + * @since 4.2 + */ +public class ReplaceOptions { + + private boolean upsert; + + private static final ReplaceOptions NONE = new ReplaceOptions() { + + private static final String ERROR_MSG = "ReplaceOptions.none() cannot be changed; Please use ReplaceOptions.options() instead"; + + @Override + public ReplaceOptions upsert() { + throw new UnsupportedOperationException(ERROR_MSG); + } + }; + + /** + * Static factory method to create a {@link ReplaceOptions} instance. + *
          + *
          upsert
          + *
          false
          + *
          + * + * @return new instance of {@link ReplaceOptions}. + */ + public static ReplaceOptions replaceOptions() { + return new ReplaceOptions(); + } + + /** + * Static factory method returning an unmodifiable {@link ReplaceOptions} instance. + * + * @return unmodifiable {@link ReplaceOptions} instance. + */ + public static ReplaceOptions none() { + return NONE; + } + + /** + * Insert a new document if not exists. + * + * @return this. + */ + public ReplaceOptions upsert() { + + this.upsert = true; + return this; + } + + /** + * Get the bit indicating if to create a new document if not exists. + * + * @return {@literal true} if set. + */ + public boolean isUpsert() { + return upsert; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java index d847eda669..a01760368a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,19 +21,20 @@ import org.springframework.data.mongodb.core.script.NamedMongoScript; import org.springframework.lang.Nullable; -import com.mongodb.DB; /** - * Script operations on {@link com.mongodb.DB} level. Allows interaction with server side JavaScript functions. 
+ * Script operations on {@link com.mongodb.client.MongoDatabase} level. Allows interaction with server side JavaScript functions. * * @author Christoph Strobl * @author Oliver Gierke * @since 1.7 + * @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0. */ +@Deprecated public interface ScriptOperations { /** - * Store given {@link ExecutableMongoScript} generating a syntheitcal name so that it can be called by it + * Store given {@link ExecutableMongoScript} generating a synthetic name so that it can be called by it * subsequently. * * @param script must not be {@literal null}. @@ -71,10 +72,10 @@ public interface ScriptOperations { Object call(String scriptName, Object... args); /** - * Checks {@link DB} for existence of {@link ServerSideJavaScript} with given name. + * Checks {@link com.mongodb.client.MongoDatabase} for existence of {@literal ServerSideJavaScript} with given name. * * @param scriptName must not be {@literal null} or empty. - * @return false if no {@link ServerSideJavaScript} with given name exists. + * @return false if no {@literal ServerSideJavaScript} with given name exists. */ boolean exists(String scriptName); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScrollUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScrollUtils.java new file mode 100644 index 0000000000..85ddce7656 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScrollUtils.java @@ -0,0 +1,268 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.IntFunction; + +import org.bson.BsonNull; +import org.bson.Document; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.ScrollPosition.Direction; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.EntityOperations.Entity; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Utilities to run scroll queries and create {@link Window} results. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.1 + */ +class ScrollUtils { + + /** + * Create the actual query to run keyset-based pagination. Affects projection, sorting, and the criteria. 
+ * + * @param query + * @param idPropertyName + * @return + */ + static KeysetScrollQuery createKeysetPaginationQuery(Query query, String idPropertyName) { + + KeysetScrollPosition keyset = query.getKeyset(); + KeysetScrollDirector director = KeysetScrollDirector.of(keyset.getDirection()); + Document sortObject = director.getSortObject(idPropertyName, query); + Document fieldsObject = director.getFieldsObject(query.getFieldsObject(), sortObject); + Document queryObject = director.createQuery(keyset, query.getQueryObject(), sortObject); + + return new KeysetScrollQuery(queryObject, fieldsObject, sortObject); + } + + static Window createWindow(Query query, List result, Class sourceType, EntityOperations operations) { + + Document sortObject = query.getSortObject(); + KeysetScrollPosition keyset = query.getKeyset(); + Direction direction = keyset.getDirection(); + KeysetScrollDirector director = KeysetScrollDirector.of(direction); + + List resultsToUse = director.postPostProcessResults(result, query.getLimit()); + + IntFunction positionFunction = value -> { + + T last = resultsToUse.get(value); + Entity entity = operations.forEntity(last); + + Map keys = entity.extractKeys(sortObject, sourceType); + return ScrollPosition.of(keys, direction); + }; + + return Window.from(resultsToUse, positionFunction, hasMoreElements(result, query.getLimit())); + } + + static Window createWindow(List result, int limit, IntFunction positionFunction) { + return Window.from(getSubList(result, limit), positionFunction, hasMoreElements(result, limit)); + } + + static boolean hasMoreElements(List result, int limit) { + return !result.isEmpty() && result.size() > limit; + } + + static List getSubList(List result, int limit) { + + if (limit > 0 && result.size() > limit) { + return result.subList(0, limit); + } + + return result; + } + + record KeysetScrollQuery(Document query, Document fields, Document sort) { + + } + + /** + * Director for keyset scrolling. 
+ */ + static class KeysetScrollDirector { + + private static final KeysetScrollDirector FORWARD = new KeysetScrollDirector(); + private static final KeysetScrollDirector REVERSE = new ReverseKeysetScrollDirector(); + + /** + * Factory method to obtain the right {@link KeysetScrollDirector}. + * + * @param direction + * @return + */ + public static KeysetScrollDirector of(ScrollPosition.Direction direction) { + return direction == Direction.FORWARD ? FORWARD : REVERSE; + } + + public Document getSortObject(String idPropertyName, Query query) { + + Document sortObject = query.isSorted() ? query.getSortObject() : new Document(); + sortObject.put(idPropertyName, 1); + + return sortObject; + } + + public Document getFieldsObject(Document fieldsObject, Document sortObject) { + + // make sure we can extract the keyset + if (!fieldsObject.isEmpty()) { + for (String field : sortObject.keySet()) { + fieldsObject.put(field, 1); + } + } + + return fieldsObject; + } + + public Document createQuery(KeysetScrollPosition keyset, Document queryObject, Document sortObject) { + + Map keysetValues = keyset.getKeys(); + List or = (List) queryObject.getOrDefault("$or", new ArrayList<>()); + List sortKeys = new ArrayList<>(sortObject.keySet()); + + // first query doesn't come with a keyset + if (keysetValues.isEmpty()) { + return queryObject; + } + + if (!keysetValues.keySet().containsAll(sortKeys)) { + throw new IllegalStateException("KeysetScrollPosition does not contain all keyset values"); + } + + // build matrix query for keyset paging that contains sort^2 queries + // reflecting a query that follows sort order semantics starting from the last returned keyset + for (int i = 0; i < sortKeys.size(); i++) { + + Document sortConstraint = new Document(); + + for (int j = 0; j < sortKeys.size(); j++) { + + String sortSegment = sortKeys.get(j); + int sortOrder = sortObject.getInteger(sortSegment); + Object o = keysetValues.get(sortSegment); + + if (j >= i) { // tail segment + if (o 
instanceof BsonNull) { + throw new IllegalStateException( + "Cannot resume from KeysetScrollPosition. Offending key: '%s' is 'null'".formatted(sortSegment)); + } + sortConstraint.put(sortSegment, new Document(getComparator(sortOrder), o)); + break; + } + + sortConstraint.put(sortSegment, o); + } + + if (!sortConstraint.isEmpty()) { + or.add(sortConstraint); + } + } + + if (!or.isEmpty()) { + queryObject.put("$or", or); + } + + return queryObject; + } + + protected String getComparator(int sortOrder) { + return sortOrder == 1 ? "$gt" : "$lt"; + } + + protected List postPostProcessResults(List list, int limit) { + return getFirst(limit, list); + } + + } + + /** + * Reverse scrolling director variant applying {@link KeysetScrollPosition.Direction#BACKWARD}. In reverse scrolling, + * we need to flip directions for the actual query so that we do not get everything from the top position and apply + * the limit but rather flip the sort direction, apply the limit and then reverse the result to restore the actual + * sort order. + */ + private static class ReverseKeysetScrollDirector extends KeysetScrollDirector { + + @Override + public Document getSortObject(String idPropertyName, Query query) { + + Document sortObject = super.getSortObject(idPropertyName, query); + + // flip sort direction for backward scrolling + + for (String field : sortObject.keySet()) { + sortObject.put(field, sortObject.getInteger(field) == 1 ? -1 : 1); + } + + return sortObject; + } + + @Override + public List postPostProcessResults(List list, int limit) { + + // flip direction of the result list as we need to accomodate for the flipped sort order for proper offset + // querying. + Collections.reverse(list); + + return getLast(limit, list); + } + + } + + /** + * Return the first {@code count} items from the list. 
+ * + * @param count + * @param list + * @return + * @param + */ + static List getFirst(int count, List list) { + + if (count > 0 && list.size() > count) { + return list.subList(0, count); + } + + return list; + } + + /** + * Return the last {@code count} items from the list. + * + * @param count + * @param list + * @return + * @param + */ + static List getLast(int count, List list) { + + if (count > 0 && list.size() > count) { + return list.subList(list.size() - count, list.size()); + } + + return list; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java new file mode 100644 index 0000000000..55a87ecadf --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java @@ -0,0 +1,46 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; + +/** + * Callback interface for executing operations within a {@link com.mongodb.session.ClientSession}. 
+ * + * @author Christoph Strobl + * @since 2.1 + * @see com.mongodb.session.ClientSession + */ +@FunctionalInterface +public interface SessionCallback { + + /** + * Execute operations against a MongoDB instance via session bound {@link MongoOperations}. The session is inferred + * directly into the operation so that no further interaction is necessary. + *
          + * Please note that only Spring Data-specific abstractions like {@link MongoOperations#find(Query, Class)} and others + * are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway objects like + * {@link com.mongodb.client.MongoCollection} or {@link com.mongodb.client.MongoDatabase} via eg. + * {@link MongoOperations#getCollection(String)} we leave responsibility for {@link com.mongodb.session.ClientSession} + * again up to the caller. + * + * @param operations will never be {@literal null}. + * @return can be {@literal null}. + */ + @Nullable + T doInSession(MongoOperations operations); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java new file mode 100644 index 0000000000..33ad9d7318 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java @@ -0,0 +1,65 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.function.Consumer; + +import org.springframework.lang.Nullable; + +import com.mongodb.client.ClientSession; + +/** + * Gateway interface to execute {@link ClientSession} bound operations against MongoDB via a {@link SessionCallback}. + *
          + * The very same bound {@link ClientSession} is used for all invocations of {@code execute} on the instance. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +public interface SessionScoped { + + /** + * Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}. + *
          + * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() + * closed} when done. + * + * @param action callback object that specifies the MongoDB action the callback action. Must not be {@literal null}. + * @param return type. + * @return a result object returned by the action. Can be {@literal null}. + */ + @Nullable + default T execute(SessionCallback action) { + return execute(action, session -> {}); + } + + /** + * Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}. + *
          + * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() + * closed} when done. + * + * @param action callback object that specifies the MongoDB action the callback action. Must not be {@literal null}. + * @param doFinally callback object that accepts {@link ClientSession} after invoking {@link SessionCallback}. This + * {@link Consumer} is guaranteed to be notified in any case (successful and exceptional outcome of + * {@link SessionCallback}). + * @param return type. + * @return a result object returned by the action. Can be {@literal null}. + */ + @Nullable + T execute(SessionCallback action, Consumer doFinally); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactory.java new file mode 100644 index 0000000000..2b51b5e077 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactory.java @@ -0,0 +1,92 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.springframework.beans.factory.DisposableBean; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.ConnectionString; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import com.mongodb.client.MongoDatabase; + +/** + * Factory to create {@link MongoDatabase} instances from a {@link MongoClient} instance. + * + * @author Christoph Strobl + * @since 3.0 + */ +public class SimpleMongoClientDatabaseFactory extends MongoDatabaseFactorySupport + implements DisposableBean { + + /** + * Creates a new {@link SimpleMongoClientDatabaseFactory} instance for the given {@code connectionString}. + * + * @param connectionString connection coordinates for a database connection. Must contain a database name and must not + * be {@literal null} or empty. + * @see MongoDB Connection String reference + */ + public SimpleMongoClientDatabaseFactory(String connectionString) { + this(new ConnectionString(connectionString)); + } + + /** + * Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}. + * + * @param connectionString connection coordinates for a database connection. Must contain also a database name and not + * be {@literal null}. + */ + public SimpleMongoClientDatabaseFactory(ConnectionString connectionString) { + this(MongoClients.create(connectionString), connectionString.getDatabase(), true); + } + + /** + * Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}. + * + * @param mongoClient must not be {@literal null}. + * @param databaseName must not be {@literal null} or empty. + */ + public SimpleMongoClientDatabaseFactory(MongoClient mongoClient, String databaseName) { + this(mongoClient, databaseName, false); + } + + /** + * Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}. 
+ * + * @param mongoClient must not be {@literal null}. + * @param databaseName must not be {@literal null} or empty. + * @param mongoInstanceCreated + */ + SimpleMongoClientDatabaseFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) { + super(mongoClient, databaseName, mongoInstanceCreated, MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR); + } + + @Override + public ClientSession getSession(ClientSessionOptions options) { + return getMongoClient().startSession(options); + } + + @Override + protected void closeClient() { + getMongoClient().close(); + } + + @Override + protected MongoDatabase doGetMongoDatabase(String dbName) { + return getMongoClient().getDatabase(dbName); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoDbFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoDbFactory.java deleted file mode 100644 index a2d0d1ef79..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoDbFactory.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core; - -import java.net.UnknownHostException; - -import org.springframework.beans.factory.DisposableBean; -import org.springframework.dao.DataAccessException; -import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.lang.Nullable; -import org.springframework.util.Assert; - -import com.mongodb.DB; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientURI; -import com.mongodb.WriteConcern; -import com.mongodb.client.MongoDatabase; - -/** - * Factory to create {@link DB} instances from a {@link MongoClient} instance. - * - * @author Mark Pollack - * @author Oliver Gierke - * @author Thomas Darimont - * @author Christoph Strobl - */ -public class SimpleMongoDbFactory implements DisposableBean, MongoDbFactory { - - private final MongoClient mongoClient; - private final String databaseName; - private final boolean mongoInstanceCreated; - private final PersistenceExceptionTranslator exceptionTranslator; - - private @Nullable WriteConcern writeConcern; - - /** - * Creates a new {@link SimpleMongoDbFactory} instance from the given {@link MongoClientURI}. - * - * @param uri must not be {@literal null}. - * @throws UnknownHostException - * @since 1.7 - */ - public SimpleMongoDbFactory(MongoClientURI uri) { - this(new MongoClient(uri), uri.getDatabase(), true); - } - - /** - * Creates a new {@link SimpleMongoDbFactory} instance from the given {@link MongoClient}. - * - * @param mongoClient must not be {@literal null}. - * @param databaseName must not be {@literal null}. 
- * @since 1.7 - */ - public SimpleMongoDbFactory(MongoClient mongoClient, String databaseName) { - this(mongoClient, databaseName, false); - } - - /** - * @param client - * @param databaseName - * @param mongoInstanceCreated - * @since 1.7 - */ - private SimpleMongoDbFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) { - - Assert.notNull(mongoClient, "MongoClient must not be null!"); - Assert.hasText(databaseName, "Database name must not be empty!"); - Assert.isTrue(databaseName.matches("[\\w-]+"), - "Database name must only contain letters, numbers, underscores and dashes!"); - - this.mongoClient = mongoClient; - this.databaseName = databaseName; - this.mongoInstanceCreated = mongoInstanceCreated; - this.exceptionTranslator = new MongoExceptionTranslator(); - } - - /** - * Configures the {@link WriteConcern} to be used on the {@link DB} instance being created. - * - * @param writeConcern the writeConcern to set - */ - public void setWriteConcern(WriteConcern writeConcern) { - this.writeConcern = writeConcern; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getDb() - */ - public MongoDatabase getDb() throws DataAccessException { - return getDb(databaseName); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String) - */ - public MongoDatabase getDb(String dbName) throws DataAccessException { - - Assert.hasText(dbName, "Database name must not be empty."); - - MongoDatabase db = mongoClient.getDatabase(dbName); - - if (writeConcern == null) { - return db; - } - - return db.withWriteConcern(writeConcern); - } - - /** - * Clean up the Mongo instance if it was created by the factory itself. 
- * - * @see DisposableBean#destroy() - */ - public void destroy() throws Exception { - if (mongoInstanceCreated) { - mongoClient.close(); - } - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator() - */ - @Override - public PersistenceExceptionTranslator getExceptionTranslator() { - return this.exceptionTranslator; - } - - @SuppressWarnings("deprecation") - @Override - public DB getLegacyDb() { - return mongoClient.getDB(databaseName); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java index c84ffcc469..84edf13d57 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,26 @@ */ package org.springframework.data.mongodb.core; -import java.net.UnknownHostException; +import reactor.core.publisher.Mono; +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.aop.framework.ProxyFactory; import org.springframework.beans.factory.DisposableBean; import org.springframework.dao.DataAccessException; import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.SessionAwareMethodInterceptor; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import com.mongodb.ClientSessionOptions; import com.mongodb.ConnectionString; import com.mongodb.WriteConcern; +import com.mongodb.reactivestreams.client.ClientSession; import com.mongodb.reactivestreams.client.MongoClient; import com.mongodb.reactivestreams.client.MongoClients; +import com.mongodb.reactivestreams.client.MongoCollection; import com.mongodb.reactivestreams.client.MongoDatabase; /** @@ -35,6 +42,7 @@ * * @author Mark Paluch * @author Christoph Strobl + * @author Mathieu Ouellet * @since 2.0 */ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, ReactiveMongoDatabaseFactory { @@ -43,17 +51,15 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React private final String databaseName; private final boolean mongoInstanceCreated; - private final PersistenceExceptionTranslator exceptionTranslator; - + private PersistenceExceptionTranslator exceptionTranslator = MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR; private @Nullable 
WriteConcern writeConcern; /** * Creates a new {@link SimpleReactiveMongoDatabaseFactory} instance from the given {@link ConnectionString}. * * @param connectionString must not be {@literal null}. - * @throws UnknownHostException */ - public SimpleReactiveMongoDatabaseFactory(ConnectionString connectionString) throws UnknownHostException { + public SimpleReactiveMongoDatabaseFactory(ConnectionString connectionString) { this(MongoClients.create(connectionString), connectionString.getDatabase(), true); } @@ -70,15 +76,29 @@ public SimpleReactiveMongoDatabaseFactory(MongoClient mongoClient, String databa private SimpleReactiveMongoDatabaseFactory(MongoClient client, String databaseName, boolean mongoInstanceCreated) { - Assert.notNull(client, "MongoClient must not be null!"); - Assert.hasText(databaseName, "Database name must not be empty!"); - Assert.isTrue(databaseName.matches("[\\w-]+"), - "Database name must only contain letters, numbers, underscores and dashes!"); + Assert.notNull(client, "MongoClient must not be null"); + Assert.hasText(databaseName, "Database name must not be empty"); + Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"), + "Database name must not contain slashes, dots, spaces, quotes, or dollar signs"); this.mongo = client; this.databaseName = databaseName; this.mongoInstanceCreated = mongoInstanceCreated; - this.exceptionTranslator = new MongoExceptionTranslator(); + } + + /** + * Configures the {@link PersistenceExceptionTranslator} to be used. + * + * @param exceptionTranslator the exception translator to set. 
+ * @since 4.4 + */ + public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator; + } + + @Override + public PersistenceExceptionTranslator getExceptionTranslator() { + return this.exceptionTranslator; } /** @@ -90,24 +110,22 @@ public void setWriteConcern(WriteConcern writeConcern) { this.writeConcern = writeConcern; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase() - */ - public MongoDatabase getMongoDatabase() throws DataAccessException { + @Override + public Mono getMongoDatabase() throws DataAccessException { return getMongoDatabase(databaseName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase(java.lang.String) - */ - public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException { + @Override + public Mono getMongoDatabase(String dbName) throws DataAccessException { + + Assert.hasText(dbName, "Database name must not be empty"); - Assert.hasText(dbName, "Database name must not be empty."); + return Mono.fromSupplier(() -> { - MongoDatabase db = mongo.getDatabase(dbName); - return writeConcern != null ? db.withWriteConcern(writeConcern) : db; + MongoDatabase db = mongo.getDatabase(dbName); + + return writeConcern != null ? 
db.withWriteConcern(writeConcern) : db; + }); } /** @@ -115,6 +133,7 @@ public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException * * @see DisposableBean#destroy() */ + @Override public void destroy() throws Exception { if (mongoInstanceCreated) { @@ -122,11 +141,132 @@ public void destroy() throws Exception { } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getExceptionTranslator() + @Override + public CodecRegistry getCodecRegistry() { + return this.mongo.getDatabase(databaseName).getCodecRegistry(); + } + + @Override + public Mono getSession(ClientSessionOptions options) { + return Mono.from(mongo.startSession(options)); + } + + @Override + public ReactiveMongoDatabaseFactory withSession(ClientSession session) { + return new ClientSessionBoundMongoDbFactory(session, this); + } + + /** + * {@link ClientSession} bound {@link ReactiveMongoDatabaseFactory} decorating the database with a + * {@link SessionAwareMethodInterceptor}. 
+ * + * @author Christoph Strobl + * @since 2.1 */ - public PersistenceExceptionTranslator getExceptionTranslator() { - return this.exceptionTranslator; + static final class ClientSessionBoundMongoDbFactory implements ReactiveMongoDatabaseFactory { + + private final ClientSession session; + private final ReactiveMongoDatabaseFactory delegate; + + ClientSessionBoundMongoDbFactory(ClientSession session, ReactiveMongoDatabaseFactory delegate) { + + this.session = session; + this.delegate = delegate; + } + + @Override + public Mono getMongoDatabase() throws DataAccessException { + return delegate.getMongoDatabase().map(this::decorateDatabase); + } + + @Override + public Mono getMongoDatabase(String dbName) throws DataAccessException { + return delegate.getMongoDatabase(dbName).map(this::decorateDatabase); + } + + @Override + public PersistenceExceptionTranslator getExceptionTranslator() { + return delegate.getExceptionTranslator(); + } + + @Override + public CodecRegistry getCodecRegistry() { + return delegate.getCodecRegistry(); + } + + @Override + public Mono getSession(ClientSessionOptions options) { + return delegate.getSession(options); + } + + @Override + public ReactiveMongoDatabaseFactory withSession(ClientSession session) { + return delegate.withSession(session); + } + + @Override + public boolean isTransactionActive() { + return session != null && session.hasActiveTransaction(); + } + + private MongoDatabase decorateDatabase(MongoDatabase database) { + return createProxyInstance(session, database, MongoDatabase.class); + } + + private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) { + return createProxyInstance(session, database, MongoDatabase.class); + } + + private MongoCollection proxyCollection(com.mongodb.session.ClientSession session, MongoCollection collection) { + return createProxyInstance(session, collection, MongoCollection.class); + } + + private T 
createProxyInstance(com.mongodb.session.ClientSession session, T target, Class targetType) { + + ProxyFactory factory = new ProxyFactory(); + factory.setTarget(target); + factory.setInterfaces(targetType); + factory.setOpaque(true); + + factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class, + this::proxyDatabase, MongoCollection.class, this::proxyCollection)); + + return targetType.cast(factory.getProxy(target.getClass().getClassLoader())); + } + + public ClientSession getSession() { + return this.session; + } + + public ReactiveMongoDatabaseFactory getDelegate() { + return this.delegate; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ClientSessionBoundMongoDbFactory that = (ClientSessionBoundMongoDbFactory) o; + + if (!ObjectUtils.nullSafeEquals(this.session, that.session)) { + return false; + } + return ObjectUtils.nullSafeEquals(this.delegate, that.delegate); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(this.session); + result = 31 * result + ObjectUtils.nullSafeHashCode(this.delegate); + return result; + } + + public String toString() { + return "SimpleReactiveMongoDatabaseFactory.ClientSessionBoundMongoDbFactory(session=" + this.getSession() + + ", delegate=" + this.getDelegate() + ")"; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SortingQueryCursorPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SortingQueryCursorPreparer.java new file mode 100644 index 0000000000..c69fb4ad15 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SortingQueryCursorPreparer.java @@ -0,0 +1,32 @@ +/* + * Copyright 2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; +import org.springframework.lang.Nullable; + +/** + * {@link CursorPreparer} that exposes its {@link Document sort document}. + * + * @author Christoph Strobl + * @since 4.4.3 + */ +interface SortingQueryCursorPreparer extends CursorPreparer { + + @Nullable + Document getSortObject(); + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ViewOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ViewOptions.java new file mode 100644 index 0000000000..e50e1088cb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ViewOptions.java @@ -0,0 +1,65 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.Optional; + +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.lang.Nullable; + +/** + * Immutable object holding additional options to be applied when creating a MongoDB + * views. + * + * @author Christoph Strobl + * @since 4.0 + */ +public class ViewOptions { + + private final @Nullable Collation collation; + + static ViewOptions none() { + return new ViewOptions(); + } + + /** + * Creates new instance of {@link ViewOptions}. + */ + public ViewOptions() { + this(null); + } + + private ViewOptions(@Nullable Collation collation) { + this.collation = collation; + } + + /** + * Get the {@link Collation} to be set. + * + * @return {@link Optional#empty()} if not set. + */ + public Optional getCollation() { + return Optional.ofNullable(collation); + } + + /** + * @param collation the {@link Collation} to use for language-specific string comparison. + * @return new instance of {@link ViewOptions}. + */ + public ViewOptions collation(Collation collation) { + return new ViewOptions(collation); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernAware.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernAware.java new file mode 100644 index 0000000000..d6e4119b20 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernAware.java @@ -0,0 +1,42 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.lang.Nullable; + +import com.mongodb.WriteConcern; + +/** + * Interface indicating a component that contains and exposes an {@link WriteConcern}. + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface WriteConcernAware { + + /** + * @return the {@link WriteConcern} to apply or {@literal null} if none set. + */ + @Nullable + WriteConcern getWriteConcern(); + + /** + * @return {@literal true} if a {@link com.mongodb.WriteConcern} is set. + */ + default boolean hasWriteConcern() { + return getWriteConcern() != null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java index 9bae719e58..8df4171844 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java index 814cd8b3f9..fbefe4a075 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,8 +16,8 @@ package org.springframework.data.mongodb.core; /** - * Enum to represent how strict the check of {@link com.mongodb.WriteResult} shall be. It can either be skipped entirely - * (use {@link #NONE}) or cause an exception to be thrown {@link #EXCEPTION}. + * Enum to represent how strict the check of {@link com.mongodb.WriteConcernResult} shall be. It can either be skipped + * entirely (use {@link #NONE}) or cause an exception to be thrown {@link #EXCEPTION}. 
* * @author Thomas Risberg * @author Oliver Gierke diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java index a087f612a2..d4cdece411 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,15 +17,27 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import org.bson.Document; + +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; /** + * Support class for {@link AggregationExpression} implementations. 
+ * * @author Christoph Strobl + * @author Matt Morrissette + * @author Mark Paluch * @since 1.10 */ abstract class AbstractAggregationExpression implements AggregationExpression { @@ -36,39 +48,13 @@ protected AbstractAggregationExpression(Object value) { this.value = value; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return toDocument(this.value, context); } - @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - - Object valueToUse; - if (value instanceof List) { - - List arguments = (List) value; - List args = new ArrayList(arguments.size()); - - for (Object val : arguments) { - args.add(unpack(val, context)); - } - valueToUse = args; - } else if (value instanceof java.util.Map) { - - Document dbo = new Document(); - for (java.util.Map.Entry entry : ((java.util.Map) value).entrySet()) { - dbo.put(entry.getKey(), unpack(entry.getValue(), context)); - } - valueToUse = dbo; - } else { - valueToUse = unpack(value, context); - } - - return new Document(getMongoMethod(), valueToUse); + return new Document(getMongoMethod(), unpack(value, context)); } protected static List asFields(String... fieldRefs) { @@ -83,68 +69,259 @@ protected static List asFields(String... 
fieldRefs) { @SuppressWarnings("unchecked") private Object unpack(Object value, AggregationOperationContext context) { - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); } - if (value instanceof Field) { - return context.getReference((Field) value).toString(); + if (value instanceof Field field) { + return context.getReference(field).toString(); + } + + if (value instanceof Fields fields) { + + List mapped = new ArrayList<>(fields.size()); + + for (Field field : fields) { + mapped.add(unpack(field, context)); + } + + return mapped; + } + + if (value instanceof Sort sort) { + + Document sortDoc = new Document(); + for (Order order : sort) { + + // Check reference + FieldReference reference = context.getReference(order.getProperty()); + sortDoc.put(reference.getRaw(), order.isAscending() ? 1 : -1); + } + return sortDoc; } if (value instanceof List) { List sourceList = (List) value; - List mappedList = new ArrayList(sourceList.size()); + List mappedList = new ArrayList<>(sourceList.size()); - for (Object item : sourceList) { - mappedList.add(unpack(item, context)); + for (Object o : sourceList) { + mappedList.add(unpack(o, context)); } + return mappedList; } + if (value instanceof Map) { + + Document targetDocument = new Document(); + + Map sourceMap = (Map) value; + sourceMap.forEach((k, v) -> targetDocument.append(k, unpack(v, context))); + + return targetDocument; + } + + if (value instanceof SystemVariable) { + return value.toString(); + } + return value; } - protected List append(Object value) { + @SuppressWarnings("unchecked") + protected List append(Object value, Expand expandList) { if (this.value instanceof List) { - List clone = new ArrayList((List) this.value); + List clone = new ArrayList<>((List) this.value); - if (value instanceof List) { - for (Object val : (List) value) { - 
clone.add(val); - } + if (value instanceof Collection collection && Expand.EXPAND_VALUES.equals(expandList)) { + clone.addAll(collection); } else { clone.add(value); } + return clone; } return Arrays.asList(this.value, value); } - @SuppressWarnings("unchecked") - protected java.util.Map append(String key, Object value) { + /** + * Expand a nested list of values to single entries or keep the list. + */ + protected enum Expand { + EXPAND_VALUES, KEEP_SOURCE + } - if (!(this.value instanceof java.util.Map)) { - throw new IllegalArgumentException("o_O"); - } - java.util.Map clone = new LinkedHashMap((java.util.Map) this.value); + protected List append(Object value) { + return append(value, Expand.EXPAND_VALUES); + } + + @SuppressWarnings({ "unchecked" }) + protected Map append(String key, Object value) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + return append((Map) this.value, key, value); + } + + private Map append(Map existing, String key, Object value) { + + Map clone = new LinkedHashMap<>(existing); clone.put(key, value); return clone; + } + + @SuppressWarnings("rawtypes") + protected Map appendTo(String key, Object value) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + if (this.value instanceof Map map) { + + Map target = new HashMap<>(map); + if (!target.containsKey(key)) { + target.put(key, value); + return target; + } + target.computeIfPresent(key, (k, v) -> { + + if (v instanceof List list) { + List targetList = new ArrayList<>(list); + targetList.add(value); + return targetList; + } + return Arrays.asList(v, value); + }); + return target; + } + throw new IllegalStateException( + String.format("Cannot append value to %s type", ObjectUtils.nullSafeClassName(this.value))); + + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + protected Map remove(String key) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + Map clone = new 
LinkedHashMap<>((java.util.Map) this.value); + clone.remove(key); + return clone; + } + + /** + * Append the given key at the position in the underlying {@link LinkedHashMap}. + * + * @param index + * @param key + * @param value + * @return + * @since 3.1 + */ + @SuppressWarnings({ "unchecked" }) + protected Map appendAt(int index, String key, Object value) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + Map clone = new LinkedHashMap<>(); + + int i = 0; + for (Map.Entry entry : ((Map) this.value).entrySet()) { + + if (i == index) { + clone.put(key, value); + } + if (!entry.getKey().equals(key)) { + clone.put(entry.getKey(), entry.getValue()); + } + i++; + } + if (i <= index) { + clone.put(key, value); + } + return clone; } + @SuppressWarnings({ "rawtypes" }) protected List values() { if (value instanceof List) { return new ArrayList((List) value); } + if (value instanceof java.util.Map) { return new ArrayList(((java.util.Map) value).values()); } - return new ArrayList(Collections.singletonList(value)); + + return new ArrayList<>(Collections.singletonList(value)); + } + + /** + * Get the value at a given index. + * + * @param index + * @param + * @return + * @since 2.1 + */ + @SuppressWarnings("unchecked") + protected T get(int index) { + return (T) values().get(index); + } + + /** + * Get the value for a given key. + * + * @param key + * @param + * @return + * @since 2.1 + */ + @SuppressWarnings("unchecked") + protected T get(Object key) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + return (T) ((Map) this.value).get(key); + } + + protected boolean isArgumentMap() { + return this.value instanceof Map; + } + + /** + * Get the argument map. 
+ * + * @since 2.1 + * @return + */ + @SuppressWarnings("unchecked") + protected Map argumentMap() { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + return Collections.unmodifiableMap((java.util.Map) value); + } + + /** + * Check if the given key is available. + * + * @param key + * @return + * @since 2.1 + */ + @SuppressWarnings("unchecked") + protected boolean contains(Object key) { + + if (!(this.value instanceof java.util.Map)) { + return false; + } + + return ((Map) this.value).containsKey(key); } protected abstract String getMongoMethod(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java index 9511991d25..cf6485c230 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,11 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; import org.bson.Document; import org.springframework.util.Assert; @@ -25,6 +28,7 @@ * Gateway to {@literal accumulator} aggregation operations. 
* * @author Christoph Strobl + * @author Julia Lee * @since 1.10 * @soundtrack Rage Against The Machine - Killing In The Name */ @@ -34,7 +38,7 @@ public class AccumulatorOperators { * Take the numeric value referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link AccumulatorOperatorFactory}. */ public static AccumulatorOperatorFactory valueOf(String fieldReference) { return new AccumulatorOperatorFactory(fieldReference); @@ -44,7 +48,7 @@ public static AccumulatorOperatorFactory valueOf(String fieldReference) { * Take the numeric value referenced resulting from given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link AccumulatorOperatorFactory}. */ public static AccumulatorOperatorFactory valueOf(AggregationExpression expression) { return new AccumulatorOperatorFactory(expression); @@ -52,6 +56,7 @@ public static AccumulatorOperatorFactory valueOf(AggregationExpression expressio /** * @author Christoph Strobl + * @author Julia Lee */ public static class AccumulatorOperatorFactory { @@ -65,7 +70,7 @@ public static class AccumulatorOperatorFactory { */ public AccumulatorOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; } @@ -77,7 +82,7 @@ public AccumulatorOperatorFactory(String fieldReference) { */ public AccumulatorOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; } @@ -86,7 +91,7 @@ public AccumulatorOperatorFactory(AggregationExpression expression) { * Creates new {@link AggregationExpression} that takes the associated numeric 
value expression and calculates and * returns the sum. * - * @return + * @return new instance of {@link Sum}. */ public Sum sum() { return usesFieldRef() ? Sum.sumOf(fieldReference) : Sum.sumOf(expression); @@ -96,7 +101,7 @@ public Sum sum() { * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the * average value. * - * @return + * @return new instance of {@link Avg}. */ public Avg avg() { return usesFieldRef() ? Avg.avgOf(fieldReference) : Avg.avgOf(expression); @@ -106,27 +111,49 @@ public Avg avg() { * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the * maximum value. * - * @return + * @return new instance of {@link Max}. */ public Max max() { return usesFieldRef() ? Max.maxOf(fieldReference) : Max.maxOf(expression); } + /** + * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the + * requested number of maximum values. + * + * @return new instance of {@link Max}. + * @since 4.0 + */ + public Max max(int numberOfResults) { + return max().limit(numberOfResults); + } + /** * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the * minimum value. * - * @return + * @return new instance of {@link Min}. */ public Min min() { return usesFieldRef() ? Min.minOf(fieldReference) : Min.minOf(expression); } + /** + * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the + * requested number of maximum values. + * + * @return new instance of {@link Max}. + * @since 4.0 + */ + public Min min(int numberOfResults) { + return min().limit(numberOfResults); + } + /** * Creates new {@link AggregationExpression} that takes the associated numeric value expression and calculates the * population standard deviation of the input values. * - * @return + * @return new instance of {@link StdDevPop}. 
*/ public StdDevPop stdDevPop() { return usesFieldRef() ? StdDevPop.stdDevPopOf(fieldReference) : StdDevPop.stdDevPopOf(expression); @@ -136,17 +163,148 @@ public StdDevPop stdDevPop() { * Creates new {@link AggregationExpression} that takes the associated numeric value expression and calculates the * sample standard deviation of the input values. * - * @return + * @return new instance of {@link StdDevSamp}. */ public StdDevSamp stdDevSamp() { return usesFieldRef() ? StdDevSamp.stdDevSampOf(fieldReference) : StdDevSamp.stdDevSampOf(expression); } + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the population covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(String fieldReference) { + return covariancePop().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the population covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(AggregationExpression expression) { + return covariancePop().and(expression); + } + + private CovariancePop covariancePop() { + return usesFieldRef() ? CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the sample covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. 
+ * @since 3.3 + */ + public CovarianceSamp covarianceSamp(String fieldReference) { + return covarianceSamp().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the sample covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(AggregationExpression expression) { + return covarianceSamp().and(expression); + } + + private CovarianceSamp covarianceSamp() { + return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference) + : CovarianceSamp.covarianceSampOf(expression); + } + + /** + * Creates new {@link ExpMovingAvgBuilder} that to build {@link AggregationExpression expMovingAvg} that calculates + * the exponential moving average of numeric values + * + * @return new instance of {@link ExpMovingAvg}. + * @since 3.3 + */ + public ExpMovingAvgBuilder expMovingAvg() { + + ExpMovingAvg expMovingAvg = usesFieldRef() ? ExpMovingAvg.expMovingAvgOf(fieldReference) + : ExpMovingAvg.expMovingAvgOf(expression); + return new ExpMovingAvgBuilder() { + + @Override + public ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments) { + return expMovingAvg.n(numberOfHistoricalDocuments); + } + + @Override + public ExpMovingAvg alpha(double exponentialDecayValue) { + return expMovingAvg.alpha(exponentialDecayValue); + } + }; + } + + /** + * Creates new {@link AggregationExpression} that calculates the requested percentile(s) of the + * associated numeric value expression. + * + * @return new instance of {@link Percentile}. + * @param percentages must not be {@literal null}. + * @since 4.2 + */ + public Percentile percentile(Double... percentages) { + Percentile percentile = usesFieldRef() ? 
Percentile.percentileOf(fieldReference) + : Percentile.percentileOf(expression); + return percentile.percentages(percentages); + } + + /** + * Creates new {@link AggregationExpression} that calculates the median of the associated numeric value expression. + * + * @return new instance of {@link Median}. + * @since 4.2 + */ + public Median median() { + return usesFieldRef() ? Median.medianOf(fieldReference) : Median.medianOf(expression); + } + private boolean usesFieldRef() { return fieldReference != null; } } + /** + * Builder for {@link ExpMovingAvg}. + * + * @since 3.3 + */ + public interface ExpMovingAvgBuilder { + + /** + * Define the number of historical documents with significant mathematical weight. + * + * @param numberOfHistoricalDocuments + * @return new instance of {@link ExpMovingAvg}. + */ + ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments); + + /** + * Define the exponential decay value. + * + * @param exponentialDecayValue + * @return new instance of {@link ExpMovingAvg}. + */ + ExpMovingAvg alpha(double exponentialDecayValue); + + } + /** * {@link AggregationExpression} for {@code $sum}. * @@ -167,11 +325,11 @@ protected String getMongoMethod() { * Creates new {@link Sum}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Sum}. */ public static Sum sumOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Sum(asFields(fieldReference)); } @@ -179,11 +337,11 @@ public static Sum sumOf(String fieldReference) { * Creates new {@link Sum}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Sum}. 
*/ public static Sum sumOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Sum(Collections.singletonList(expression)); } @@ -192,11 +350,11 @@ public static Sum sumOf(AggregationExpression expression) { * NOTE: Only possible in {@code $project} stage. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Sum}. */ public Sum and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Sum(append(Fields.field(fieldReference))); } @@ -205,25 +363,34 @@ public Sum and(String fieldReference) { * NOTE: Only possible in {@code $project} stage. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Sum}. */ public Sum and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Sum(append(expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Creates new {@link Sum} with all previously added arguments appending the given one.
          + * NOTE: Only possible in {@code $project} stage. + * + * @param value the value to add. + * @return new instance of {@link Sum}. + * @since 2.2 */ + public Sum and(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Sum(append(value)); + } + @Override @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -250,11 +417,11 @@ protected String getMongoMethod() { * Creates new {@link Avg}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Avg}. */ public static Avg avgOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Avg(asFields(fieldReference)); } @@ -262,11 +429,11 @@ public static Avg avgOf(String fieldReference) { * Creates new {@link Avg}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Avg}. */ public static Avg avgOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Avg(Collections.singletonList(expression)); } @@ -275,11 +442,11 @@ public static Avg avgOf(AggregationExpression expression) { * NOTE: Only possible in {@code $project} stage. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Avg}. 
*/ public Avg and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Avg(append(Fields.field(fieldReference))); } @@ -288,25 +455,20 @@ public Avg and(String fieldReference) { * NOTE: Only possible in {@code $project} stage. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Avg}. */ public Avg and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Avg(append(expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -326,31 +488,31 @@ private Max(Object value) { @Override protected String getMongoMethod() { - return "$max"; + return contains("n") ? "$maxN" : "$max"; } /** * Creates new {@link Max}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Max}. */ public static Max maxOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Max(asFields(fieldReference)); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Max(Collections.singletonMap("input", Fields.field(fieldReference))); } /** * Creates new {@link Max}. 
* * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Max}. */ public static Max maxOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new Max(Collections.singletonList(expression)); + Assert.notNull(expression, "Expression must not be null"); + return new Max(Collections.singletonMap("input", expression)); } /** @@ -358,12 +520,12 @@ public static Max maxOf(AggregationExpression expression) { * NOTE: Only possible in {@code $project} stage. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Max}. */ public Max and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Max(append(Fields.field(fieldReference))); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Max(appendTo("input", Fields.field(fieldReference))); } /** @@ -371,25 +533,39 @@ public Max and(String fieldReference) { * NOTE: Only possible in {@code $project} stage. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Max}. */ public Max and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new Max(append(expression)); + Assert.notNull(expression, "Expression must not be null"); + return new Max(appendTo("input", expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Creates new {@link Max} that returns the given number of maximum values ({@literal $maxN}). + * NOTE: Cannot be used with more than one {@literal input} value. + * + * @param numberOfResults + * @return new instance of {@link Max}. 
*/ + public Max limit(int numberOfResults) { + return new Max(append("n", numberOfResults)); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + if (get("n") == null) { + return toDocument(get("input"), context); + } + return super.toDocument(context); + } + @Override @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -409,31 +585,31 @@ private Min(Object value) { @Override protected String getMongoMethod() { - return "$min"; + return contains("n") ? "$minN" : "$min"; } /** * Creates new {@link Min}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Min}. */ public static Min minOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Min(asFields(fieldReference)); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Min(Collections.singletonMap("input", Fields.field(fieldReference))); } /** * Creates new {@link Min}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Min}. */ public static Min minOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new Min(Collections.singletonList(expression)); + Assert.notNull(expression, "Expression must not be null"); + return new Min(Collections.singletonMap("input", expression)); } /** @@ -441,12 +617,12 @@ public static Min minOf(AggregationExpression expression) { * NOTE: Only possible in {@code $project} stage. * * @param fieldReference must not be {@literal null}. 
- * @return + * @return new instance of {@link Min}. */ public Min and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Min(append(Fields.field(fieldReference))); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Min(appendTo("input", Fields.field(fieldReference))); } /** @@ -454,25 +630,40 @@ public Min and(String fieldReference) { * NOTE: Only possible in {@code $project} stage. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Min}. */ public Min and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new Min(append(expression)); + Assert.notNull(expression, "Expression must not be null"); + return new Min(appendTo("input", expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Creates new {@link Min} that returns the given number of minimum values ({@literal $minN}). + * NOTE: Cannot be used with more than one {@literal input} value. + * + * @param numberOfResults + * @return new instance of {@link Min}. 
*/ + public Min limit(int numberOfResults) { + return new Min(append("n", numberOfResults)); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + if (get("n") == null) { + return toDocument(get("input"), context); + } + return super.toDocument(context); + } + @Override @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -499,11 +690,11 @@ protected String getMongoMethod() { * Creates new {@link StdDevPop}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link StdDevPop}. */ public static StdDevPop stdDevPopOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new StdDevPop(asFields(fieldReference)); } @@ -511,11 +702,11 @@ public static StdDevPop stdDevPopOf(String fieldReference) { * Creates new {@link StdDevPop} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link StdDevPop}. */ public static StdDevPop stdDevPopOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StdDevPop(Collections.singletonList(expression)); } @@ -524,11 +715,11 @@ public static StdDevPop stdDevPopOf(AggregationExpression expression) { * NOTE: Only possible in {@code $project} stage. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link StdDevPop}. 
*/ public StdDevPop and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new StdDevPop(append(Fields.field(fieldReference))); } @@ -537,25 +728,20 @@ public StdDevPop and(String fieldReference) { * NOTE: Only possible in {@code $project} stage. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link StdDevPop}. */ public StdDevPop and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StdDevPop(append(expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -582,11 +768,11 @@ protected String getMongoMethod() { * Creates new {@link StdDevSamp}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link StdDevSamp}. */ public static StdDevSamp stdDevSampOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new StdDevSamp(asFields(fieldReference)); } @@ -594,11 +780,11 @@ public static StdDevSamp stdDevSampOf(String fieldReference) { * Creates new {@link StdDevSamp}. * * @param expression must not be {@literal null}. 
- * @return + * @return new instance of {@link StdDevSamp}. */ public static StdDevSamp stdDevSampOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StdDevSamp(Collections.singletonList(expression)); } @@ -607,11 +793,11 @@ public static StdDevSamp stdDevSampOf(AggregationExpression expression) { * NOTE: Only possible in {@code $project} stage. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link StdDevSamp}. */ public StdDevSamp and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new StdDevSamp(append(Fields.field(fieldReference))); } @@ -620,28 +806,364 @@ public StdDevSamp and(String fieldReference) { * NOTE: Only possible in {@code $project} stage. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link StdDevSamp}. 
*/ public StdDevSamp and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StdDevSamp(append(expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); } } + + /** + * {@link AggregationExpression} for {@code $covariancePop}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class CovariancePop extends AbstractAggregationExpression { + + private CovariancePop(Object value) { + super(value); + } + + /** + * Creates new {@link CovariancePop}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public static CovariancePop covariancePopOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new CovariancePop(asFields(fieldReference)); + } + + /** + * Creates new {@link CovariancePop}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public static CovariancePop covariancePopOf(AggregationExpression expression) { + return new CovariancePop(Collections.singletonList(expression)); + } + + /** + * Creates new {@link CovariancePop} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. 
+ * @return new instance of {@link CovariancePop}. + */ + public CovariancePop and(String fieldReference) { + return new CovariancePop(append(asFields(fieldReference))); + } + + /** + * Creates new {@link CovariancePop} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public CovariancePop and(AggregationExpression expression) { + return new CovariancePop(append(expression)); + } + + @Override + protected String getMongoMethod() { + return "$covariancePop"; + } + } + + /** + * {@link AggregationExpression} for {@code $covarianceSamp}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class CovarianceSamp extends AbstractAggregationExpression { + + private CovarianceSamp(Object value) { + super(value); + } + + /** + * Creates new {@link CovarianceSamp}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public static CovarianceSamp covarianceSampOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new CovarianceSamp(asFields(fieldReference)); + } + + /** + * Creates new {@link CovarianceSamp}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public static CovarianceSamp covarianceSampOf(AggregationExpression expression) { + return new CovarianceSamp(Collections.singletonList(expression)); + } + + /** + * Creates new {@link CovarianceSamp} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public CovarianceSamp and(String fieldReference) { + return new CovarianceSamp(append(asFields(fieldReference))); + } + + /** + * Creates new {@link CovarianceSamp} with all previously added arguments appending the given one. 
+ * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public CovarianceSamp and(AggregationExpression expression) { + return new CovarianceSamp(append(expression)); + } + + @Override + protected String getMongoMethod() { + return "$covarianceSamp"; + } + } + + /** + * {@link ExpMovingAvg} calculates the exponential moving average of numeric values. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class ExpMovingAvg extends AbstractAggregationExpression { + + private ExpMovingAvg(Object value) { + super(value); + } + + /** + * Create a new {@link ExpMovingAvg} by defining the field holding the value to be used as input. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ExpMovingAvg}. + */ + public static ExpMovingAvg expMovingAvgOf(String fieldReference) { + return new ExpMovingAvg(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new {@link ExpMovingAvg} by defining the {@link AggregationExpression expression} to compute the value + * to be used as input. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ExpMovingAvg}. + */ + public static ExpMovingAvg expMovingAvgOf(AggregationExpression expression) { + return new ExpMovingAvg(Collections.singletonMap("input", expression)); + } + + /** + * Define the number of historical documents with significant mathematical weight.
          + * Specify either {@link #n(int) N} or {@link #alpha(double) alpha}. Not both! + * + * @param numberOfHistoricalDocuments + * @return new instance of {@link ExpMovingAvg}. + */ + public ExpMovingAvg n/*umber of historical documents*/(int numberOfHistoricalDocuments) { + return new ExpMovingAvg(append("N", numberOfHistoricalDocuments)); + } + + /** + * Define the exponential decay value.
          + * Specify either {@link #alpha(double) alpha} or {@link #n(int) N}. Not both! + * + * @param exponentialDecayValue + * @return new instance of {@link ExpMovingAvg}. + */ + public ExpMovingAvg alpha(double exponentialDecayValue) { + return new ExpMovingAvg(append("alpha", exponentialDecayValue)); + } + + @Override + protected String getMongoMethod() { + return "$expMovingAvg"; + } + } + + /** + * {@link AggregationExpression} for {@code $percentile}. + * + * @author Julia Lee + * @since 4.2 + */ + public static class Percentile extends AbstractAggregationExpression { + + private Percentile(Object value) { + super(value); + } + + /** + * Creates new {@link Percentile}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + public static Percentile percentileOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + Map fields = new HashMap<>(); + fields.put("input", Fields.field(fieldReference)); + fields.put("method", "approximate"); + return new Percentile(fields); + } + + /** + * Creates new {@link Percentile}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + public static Percentile percentileOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + Map fields = new HashMap<>(); + fields.put("input", expression); + fields.put("method", "approximate"); + return new Percentile(fields); + } + + /** + * Define the percentile value(s) that must resolve to percentages in the range {@code 0.0 - 1.0} inclusive. + * + * @param percentages must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + public Percentile percentages(Double... 
percentages) { + + Assert.notEmpty(percentages, "Percentages must not be null or empty"); + return new Percentile(append("p", Arrays.asList(percentages))); + } + + /** + * Creates new {@link Percentile} with all previously added inputs appending the given one.
          + * NOTE: Only possible in {@code $project} stage. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + public Percentile and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Percentile(appendTo("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Percentile} with all previously added inputs appending the given one.
          + * NOTE: Only possible in {@code $project} stage. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + public Percentile and(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Percentile(appendTo("input", expression)); + } + + @Override + protected String getMongoMethod() { + return "$percentile"; + } + } + + /** + * {@link AggregationExpression} for {@code $median}. + * + * @author Julia Lee + * @since 4.2 + */ + public static class Median extends AbstractAggregationExpression { + + private Median(Object value) { + super(value); + } + + /** + * Creates new {@link Median}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Median}. + */ + public static Median medianOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + Map fields = new HashMap<>(); + fields.put("input", Fields.field(fieldReference)); + fields.put("method", "approximate"); + return new Median(fields); + } + + /** + * Creates new {@link Median}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Median}. + */ + public static Median medianOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + Map fields = new HashMap<>(); + fields.put("input", expression); + fields.put("method", "approximate"); + return new Median(fields); + } + + /** + * Creates new {@link Median} with all previously added inputs appending the given one.
          + * NOTE: Only possible in {@code $project} stage. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Median}. + */ + public Median and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Median(appendTo("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Median} with all previously added inputs appending the given one.
          + * NOTE: Only possible in {@code $project} stage. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Median}. + */ + public Median and(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Median(appendTo("input", expression)); + } + + @Override + protected String getMongoMethod() { + return "$median"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java new file mode 100644 index 0000000000..b79d978b8b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java @@ -0,0 +1,202 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddFieldsOperationBuilder.ValueAppender; +import org.springframework.lang.Nullable; + +/** + * Adds new fields to documents. {@code $addFields} outputs documents that contain all existing fields from the input + * documents and newly added fields. + * + *
          + * AddFieldsOperation.addField("totalHomework").withValue("A+").and().addField("totalQuiz").withValue("B-")
          + * 
          + * + * @author Christoph Strobl + * @author Kim Sumin + * @since 3.0 + * @see MongoDB Aggregation + * Framework: $addFields + */ +public class AddFieldsOperation extends DocumentEnhancingOperation { + + /** + * Create new instance of {@link AddFieldsOperation} adding map keys as exposed fields. + * + * @param source must not be {@literal null}. + */ + private AddFieldsOperation(Map source) { + super(source); + } + + /** + * Create new instance of {@link AddFieldsOperation} + * + * @param field must not be {@literal null}. + * @param value can be {@literal null}. + */ + public AddFieldsOperation(Object field, @Nullable Object value) { + this(Collections.singletonMap(field, value)); + } + + /** + * Define the {@link AddFieldsOperation} via {@link AddFieldsOperationBuilder}. + * + * @return new instance of {@link AddFieldsOperationBuilder}. + */ + public static AddFieldsOperationBuilder builder() { + return new AddFieldsOperationBuilder(); + } + + /** + * Concatenate another field to add. + * + * @param field must not be {@literal null}. + * @return new instance of {@link AddFieldsOperationBuilder}. + */ + public static ValueAppender addField(String field) { + return new AddFieldsOperationBuilder().addField(field); + } + + /** + * Append the value for a specific field to the operation. + * + * @param field the target field to add. + * @param value the value to assign. + * @return new instance of {@link AddFieldsOperation}. + */ + public AddFieldsOperation addField(Object field, Object value) { + + LinkedHashMap target = new LinkedHashMap<>(getValueMap()); + target.put(field, value); + + return new AddFieldsOperation(target); + } + + /** + * Concatenate additional fields to add. + * + * @return new instance of {@link AddFieldsOperationBuilder}. 
+ */ + public AddFieldsOperationBuilder and() { + return new AddFieldsOperationBuilder(getValueMap()); + } + + @Override + protected String mongoOperator() { + return "$addFields"; + } + + /** + * @author Christoph Strobl + * @since 3.0 + */ + public static class AddFieldsOperationBuilder { + + private final Map valueMap; + + private AddFieldsOperationBuilder() { + this.valueMap = new LinkedHashMap<>(); + } + + private AddFieldsOperationBuilder(Map source) { + this.valueMap = new LinkedHashMap<>(source); + } + + public AddFieldsOperationBuilder addFieldWithValue(String field, @Nullable Object value) { + return addField(field).withValue(value); + } + + public AddFieldsOperationBuilder addFieldWithValueOf(String field, Object value) { + return addField(field).withValueOf(value); + } + + /** + * Define the field to add. + * + * @param field must not be {@literal null}. + * @return new instance of {@link ValueAppender}. + */ + public ValueAppender addField(String field) { + + return new ValueAppender() { + + @Override + public AddFieldsOperationBuilder withValue(Object value) { + + valueMap.put(field, value); + return AddFieldsOperationBuilder.this; + } + + @Override + public AddFieldsOperationBuilder withValueOf(Object value) { + + valueMap.put(field, value instanceof String stringValue ? Fields.field(stringValue) : value); + return AddFieldsOperationBuilder.this; + } + + @Override + public AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values) { + + valueMap.put(field, new ExpressionProjection(operation, values)); + return AddFieldsOperationBuilder.this; + } + }; + } + + public AddFieldsOperation build() { + return new AddFieldsOperation(valueMap); + } + + /** + * @author Christoph Strobl + * @since 3.0 + */ + public interface ValueAppender { + + /** + * Define the value to assign as is. + * + * @param value can be {@literal null}. + * @return new instance of {@link AddFieldsOperation}. 
+ */ + AddFieldsOperationBuilder withValue(@Nullable Object value); + + /** + * Define the value to assign. Plain {@link String} values are treated as {@link Field field references}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link AddFieldsOperation}. + */ + AddFieldsOperationBuilder withValueOf(Object value); + + /** + * Adds a generic projection for the current field. + * + * @param operation the operation key, e.g. {@code $add}. + * @param values the values to be set for the projection operation. + * @return new instance of {@link AddFieldsOperation}. + */ + AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java index 1b3b00f282..45de38ed21 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,18 +21,22 @@ import java.util.List; import org.bson.Document; +import org.bson.conversions.Bson; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddFieldsOperationBuilder; import org.springframework.data.mongodb.core.aggregation.CountOperation.CountOperationBuilder; import org.springframework.data.mongodb.core.aggregation.FacetOperation.FacetOperationBuilder; import org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.StartWithBuilder; +import org.springframework.data.mongodb.core.aggregation.LookupOperation.LookupOperationBuilder; +import org.springframework.data.mongodb.core.aggregation.MergeOperation.MergeOperationBuilder; import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootDocumentOperationBuilder; import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootOperationBuilder; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.SerializationUtils; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** @@ -48,6 +52,7 @@ * @author Nikolay Bogdanov * @author Gustavo de Geus * @author Jérôme Guyon + * @author Sangyong Choi * @since 1.3 */ public class Aggregation { @@ -64,10 +69,37 @@ public class Aggregation { */ public static final String CURRENT = SystemVariable.CURRENT.toString(); + /** + 
* A variable to conditionally exclude a field. In a {@code $projection}, a field set to the variable + * {@literal REMOVE} is excluded from the output. + * + *
          +	 * 
          +	 *
          +	 * db.books.aggregate( [
          +	 * {
          +	 *     $project: {
          +	 *         title: 1,
          +	 *         "author.first": 1,
          +	 *         "author.last" : 1,
          +	 *         "author.middle": {
          +	 *             $cond: {
          +	 *                 if: { $eq: [ "", "$author.middle" ] },
          +	 *                 then: "$$REMOVE",
          +	 *                 else: "$author.middle"
          +	 *             }
          +	 *         }
          +	 *     }
          +	 * } ] )
          +	 * 
          +	 * 
          + */ + public static final String REMOVE = SystemVariable.REMOVE.toString(); + public static final AggregationOperationContext DEFAULT_CONTEXT = AggregationOperationRenderer.DEFAULT_CONTEXT; public static final AggregationOptions DEFAULT_OPTIONS = newAggregationOptions().build(); - protected final List operations; + protected final AggregationPipeline pipeline; private final AggregationOptions options; /** @@ -88,18 +120,29 @@ public static Aggregation newAggregation(AggregationOperation... operations) { return new Aggregation(operations); } + /** + * Creates a new {@link AggregationUpdate} from the given {@link AggregationOperation}s. + * + * @param operations can be {@literal empty} but must not be {@literal null}. + * @return new instance of {@link AggregationUpdate}. + * @since 3.0 + */ + public static AggregationUpdate newUpdate(AggregationOperation... operations) { + return AggregationUpdate.from(Arrays.asList(operations)); + } + /** * Returns a copy of this {@link Aggregation} with the given {@link AggregationOptions} set. Note that options are * supported in MongoDB version 2.6+. * * @param options must not be {@literal null}. - * @return + * @return new instance of {@link Aggregation}. * @since 1.6 */ public Aggregation withOptions(AggregationOptions options) { - Assert.notNull(options, "AggregationOptions must not be null."); - return new Aggregation(this.operations, options); + Assert.notNull(options, "AggregationOptions must not be null"); + return new Aggregation(this.pipeline.getOperations(), options); } /** @@ -137,7 +180,7 @@ protected Aggregation(AggregationOperation... aggregationOperations) { */ protected static List asAggregationList(AggregationOperation... 
aggregationOperations) { - Assert.notEmpty(aggregationOperations, "AggregationOperations must not be null or empty!"); + Assert.notEmpty(aggregationOperations, "AggregationOperations must not be null or empty"); return Arrays.asList(aggregationOperations); } @@ -154,30 +197,18 @@ protected Aggregation(List aggregationOperations) { /** * Creates a new {@link Aggregation} from the given {@link AggregationOperation}s. * - * @param aggregationOperations must not be {@literal null} or empty. + * @param aggregationOperations must not be {@literal null}. * @param options must not be {@literal null} or empty. */ protected Aggregation(List aggregationOperations, AggregationOptions options) { - Assert.notNull(aggregationOperations, "AggregationOperations must not be null!"); - Assert.isTrue(!aggregationOperations.isEmpty(), "At least one AggregationOperation has to be provided"); - Assert.notNull(options, "AggregationOptions must not be null!"); + Assert.notNull(aggregationOperations, "AggregationOperations must not be null"); + Assert.notNull(options, "AggregationOptions must not be null"); - // check $out is the last operation if it exists - for (AggregationOperation aggregationOperation : aggregationOperations) { - if (aggregationOperation instanceof OutOperation && !isLast(aggregationOperation, aggregationOperations)) { - throw new IllegalArgumentException("The $out operator must be the last stage in the pipeline."); - } - } - - this.operations = aggregationOperations; + this.pipeline = new AggregationPipeline(aggregationOperations); this.options = options; } - private boolean isLast(AggregationOperation aggregationOperation, List aggregationOperations) { - return aggregationOperations.indexOf(aggregationOperation) == aggregationOperations.size() - 1; - } - /** * Get the {@link AggregationOptions}. 
* @@ -194,14 +225,61 @@ public AggregationOptions getOptions() { * @return */ public static String previousOperation() { - return "_id"; + return FieldName.ID.name(); + } + + /** + * Obtain an {@link AddFieldsOperationBuilder builder} instance to create a new {@link AddFieldsOperation}.
          + * Starting in version 4.2, MongoDB adds a new aggregation pipeline stage {@link AggregationUpdate#set $set} that is + * an alias for {@code $addFields}. + * + * @return new instance of {@link AddFieldsOperationBuilder}. + * @see AddFieldsOperation + * @since 3.0 + */ + public static AddFieldsOperationBuilder addFields() { + return AddFieldsOperation.builder(); + } + + /** + * Creates a new {@link AggregationOperation} taking the given {@link Bson bson value} as is.
          + * + *
          +	 * Aggregation.stage(Aggregates.search(exists(fieldPath("..."))));
          +	 * 
          + * + * Field mapping against a potential domain type or previous aggregation stages will not happen. + * + * @param aggregationOperation the must not be {@literal null}. + * @return new instance of {@link AggregationOperation}. + * @since 4.0 + */ + public static AggregationOperation stage(Bson aggregationOperation) { + return new BasicAggregationOperation(aggregationOperation); + } + + /** + * Creates a new {@link AggregationOperation} taking the given {@link String json value} as is.
          + * + *
          +	 * Aggregation.stage("{ $search : { near : { path : 'released' , origin : ... } } }");
          +	 * 
          + * + * Field mapping against a potential domain type or previous aggregation stages will not happen. + * + * @param json the JSON representation of the pipeline stage. Must not be {@literal null}. + * @return new instance of {@link AggregationOperation}. + * @since 4.0 + */ + public static AggregationOperation stage(String json) { + return new BasicAggregationOperation(json); } /** * Creates a new {@link ProjectionOperation} including the given fields. * * @param fields must not be {@literal null}. - * @return + * @return new instance of {@link ProjectionOperation}. */ public static ProjectionOperation project(String... fields) { return project(fields(fields)); @@ -211,17 +289,30 @@ public static ProjectionOperation project(String... fields) { * Creates a new {@link ProjectionOperation} including the given {@link Fields}. * * @param fields must not be {@literal null}. - * @return + * @return new instance of {@link ProjectionOperation}. */ public static ProjectionOperation project(Fields fields) { return new ProjectionOperation(fields); } + /** + * Creates a new {@link ProjectionOperation} including all top level fields of the given given {@link Class}. + * + * @param type must not be {@literal null}. + * @return new instance of {@link ProjectionOperation}. + * @since 2.2 + */ + public static ProjectionOperation project(Class type) { + + Assert.notNull(type, "Type must not be null"); + return new ProjectionOperation(type); + } + /** * Factory method to create a new {@link UnwindOperation} for the field with the given name. * * @param field must not be {@literal null} or empty. - * @return + * @return new instance of {@link UnwindOperation}. */ public static UnwindOperation unwind(String field) { return new UnwindOperation(field(field)); @@ -231,7 +322,7 @@ public static UnwindOperation unwind(String field) { * Factory method to create a new {@link ReplaceRootOperation} for the field with the given name. 
* * @param fieldName must not be {@literal null} or empty. - * @return + * @return new instance of {@link ReplaceRootOperation}. * @since 1.10 */ public static ReplaceRootOperation replaceRoot(String fieldName) { @@ -243,7 +334,7 @@ public static ReplaceRootOperation replaceRoot(String fieldName) { * {@link AggregationExpression}. * * @param aggregationExpression must not be {@literal null}. - * @return + * @return new instance of {@link ReplaceRootOperation}. * @since 1.10 */ public static ReplaceRootOperation replaceRoot(AggregationExpression aggregationExpression) { @@ -290,9 +381,9 @@ public static UnwindOperation unwind(String field, String arrayIndex) { } /** - * Factory method to create a new {@link UnwindOperation} for the field with the given nameincluding the name of a new - * field to hold the array index of the element as {@code arrayIndex} using {@code preserveNullAndEmptyArrays}. Note - * that extended unwind is supported in MongoDB version 3.2+. + * Factory method to create a new {@link UnwindOperation} for the field with the given name, including the name of a + * new field to hold the array index of the element as {@code arrayIndex} using {@code preserveNullAndEmptyArrays}. + * Note that extended unwind is supported in MongoDB version 3.2+. * * @param field must not be {@literal null} or empty. * @param arrayIndex must not be {@literal null} or empty. @@ -309,7 +400,7 @@ public static UnwindOperation unwind(String field, String arrayIndex, boolean pr * Creates a new {@link GroupOperation} for the given fields. * * @param fields must not be {@literal null}. - * @return + * @return new instance of {@link GroupOperation}. */ public static GroupOperation group(String... fields) { return group(fields(fields)); @@ -330,18 +421,32 @@ public static GroupOperation group(Fields fields) { * {@link GraphLookupOperation} given {@literal fromCollection}. * * @param fromCollection must not be {@literal null} or empty. 
- * @return + * @return new instance of {@link StartWithBuilder} for creating a {@link GraphLookupOperation}. * @since 1.10 */ public static StartWithBuilder graphLookup(String fromCollection) { return GraphLookupOperation.builder().from(fromCollection); } + /** + * Creates a new {@link VectorSearchOperation} by starting from the {@code indexName} to use. + * + * @param indexName must not be {@literal null} or empty. + * @return new instance of {@link VectorSearchOperation.PathContributor}. + * @since 4.5 + */ + public static VectorSearchOperation.PathContributor vectorSearch(String indexName) { + + Assert.hasText(indexName, "Index name must not be null or empty"); + + return VectorSearchOperation.search(indexName); + } + /** * Factory method to create a new {@link SortOperation} for the given {@link Sort}. * * @param sort must not be {@literal null}. - * @return + * @return new instance of {@link SortOperation}. */ public static SortOperation sort(Sort sort) { return new SortOperation(sort); @@ -352,7 +457,7 @@ public static SortOperation sort(Sort sort) { * * @param direction must not be {@literal null}. * @param fields must not be {@literal null}. - * @return + * @return new instance of {@link SortOperation}. */ public static SortOperation sort(Direction direction, String... fields) { return new SortOperation(Sort.by(direction, fields)); @@ -362,7 +467,7 @@ public static SortOperation sort(Direction direction, String... fields) { * Creates a new {@link SortByCountOperation} given {@literal groupByField}. * * @param field must not be {@literal null} or empty. - * @return + * @return new instance of {@link SortByCountOperation}. * @since 2.1 */ public static SortByCountOperation sortByCount(String field) { @@ -373,7 +478,7 @@ public static SortByCountOperation sortByCount(String field) { * Creates a new {@link SortByCountOperation} given {@link AggregationExpression group and sort expression}. * * @param groupAndSortExpression must not be {@literal null}. 
- * @return + * @return new instance of {@link SortByCountOperation}. * @since 2.1 */ public static SortByCountOperation sortByCount(AggregationExpression groupAndSortExpression) { @@ -384,18 +489,7 @@ public static SortByCountOperation sortByCount(AggregationExpression groupAndSor * Creates a new {@link SkipOperation} skipping the given number of elements. * * @param elementsToSkip must not be less than zero. - * @return - * @deprecated prepare to get this one removed in favor of {@link #skip(long)}. - */ - public static SkipOperation skip(int elementsToSkip) { - return new SkipOperation(elementsToSkip); - } - - /** - * Creates a new {@link SkipOperation} skipping the given number of elements. - * - * @param elementsToSkip must not be less than zero. - * @return + * @return new instance of {@link SkipOperation}. */ public static SkipOperation skip(long elementsToSkip) { return new SkipOperation(elementsToSkip); @@ -405,7 +499,7 @@ public static SkipOperation skip(long elementsToSkip) { * Creates a new {@link LimitOperation} limiting the result to the given number of elements. * * @param maxElements must not be less than zero. - * @return + * @return new instance of {@link LimitOperation}. */ public static LimitOperation limit(long maxElements) { return new LimitOperation(maxElements); @@ -415,7 +509,7 @@ public static LimitOperation limit(long maxElements) { * Creates a new {@link SampleOperation} to select the specified number of documents from its input randomly. * * @param sampleSize must not be less than zero. - * @return + * @return new instance of {@link SampleOperation}. * @since 2.0 */ public static SampleOperation sample(long sampleSize) { @@ -426,7 +520,7 @@ public static SampleOperation sample(long sampleSize) { * Creates a new {@link MatchOperation} using the given {@link Criteria}. * * @param criteria must not be {@literal null}. - * @return + * @return new instance of {@link MatchOperation}. 
*/ public static MatchOperation match(Criteria criteria) { return new MatchOperation(criteria); @@ -436,13 +530,48 @@ public static MatchOperation match(Criteria criteria) { * Creates a new {@link MatchOperation} using the given {@link CriteriaDefinition}. * * @param criteria must not be {@literal null}. - * @return + * @return new instance of {@link MatchOperation}. * @since 1.10 */ public static MatchOperation match(CriteriaDefinition criteria) { return new MatchOperation(criteria); } + /** + * Creates a new {@link MatchOperation} using the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link MatchOperation}. + * @since 3.3 + */ + public static MatchOperation match(AggregationExpression expression) { + return new MatchOperation(expression); + } + + /** + * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The + * {@code distanceField} defines output field that contains the calculated distance. + * + * @param query must not be {@literal null}. + * @param distanceField must not be {@literal null} or empty. + * @return new instance of {@link GeoNearOperation}. + * @since 1.7 + */ + public static GeoNearOperation geoNear(NearQuery query, String distanceField) { + return new GeoNearOperation(query, distanceField); + } + + /** + * Obtain a {@link MergeOperationBuilder builder} instance to create a new {@link MergeOperation}. + * + * @return new instance of {@link MergeOperationBuilder}. + * @see MergeOperation + * @since 3.0 + */ + public static MergeOperationBuilder merge() { + return MergeOperation.builder(); + } + /** * Creates a new {@link OutOperation} using the given collection name. This operation must be the last operation in * the pipeline. @@ -451,7 +580,7 @@ public static MatchOperation match(CriteriaDefinition criteria) { * collection in the current database if one does not already exist. 
The collection is not visible until the * aggregation completes. If the aggregation fails, MongoDB does not create the collection. Must not be * {@literal null}. - * @return + * @return new instance of {@link OutOperation}. */ public static OutOperation out(String outCollectionName) { return new OutOperation(outCollectionName); @@ -461,7 +590,7 @@ public static OutOperation out(String outCollectionName) { * Creates a new {@link BucketOperation} given {@literal groupByField}. * * @param groupByField must not be {@literal null} or empty. - * @return + * @return new instance of {@link BucketOperation}. * @since 1.10 */ public static BucketOperation bucket(String groupByField) { @@ -472,7 +601,7 @@ public static BucketOperation bucket(String groupByField) { * Creates a new {@link BucketOperation} given {@link AggregationExpression group-by expression}. * * @param groupByExpression must not be {@literal null}. - * @return + * @return new instance of {@link BucketOperation}. * @since 1.10 */ public static BucketOperation bucket(AggregationExpression groupByExpression) { @@ -484,7 +613,7 @@ public static BucketOperation bucket(AggregationExpression groupByExpression) { * * @param groupByField must not be {@literal null} or empty. * @param buckets number of buckets, must be a positive integer. - * @return + * @return new instance of {@link BucketAutoOperation}. * @since 1.10 */ public static BucketAutoOperation bucketAuto(String groupByField, int buckets) { @@ -496,7 +625,7 @@ public static BucketAutoOperation bucketAuto(String groupByField, int buckets) { * * @param groupByExpression must not be {@literal null}. * @param buckets number of buckets, must be a positive integer. - * @return + * @return new instance of {@link BucketAutoOperation}. 
* @since 1.10 */ public static BucketAutoOperation bucketAuto(AggregationExpression groupByExpression, int buckets) { @@ -506,7 +635,7 @@ public static BucketAutoOperation bucketAuto(AggregationExpression groupByExpres /** * Creates a new {@link FacetOperation}. * - * @return + * @return new instance of {@link FacetOperation}. * @since 1.10 */ public static FacetOperation facet() { @@ -517,7 +646,7 @@ public static FacetOperation facet() { * Creates a new {@link FacetOperationBuilder} given {@link Aggregation}. * * @param aggregationOperations the sub-pipeline, must not be {@literal null}. - * @return + * @return new instance of {@link FacetOperation}. * @since 1.10 */ public static FacetOperationBuilder facet(AggregationOperation... aggregationOperations) { @@ -552,6 +681,23 @@ public static LookupOperation lookup(Field from, Field localField, Field foreign return new LookupOperation(from, localField, foreignField, as); } + /** + * Entrypoint for creating {@link LookupOperation $lookup} using a fluent builder API. + * + *
          +	 * Aggregation.lookup().from("restaurants").localField("restaurant_name").foreignField("name")
          +	 * 		.let(newVariable("orders_drink").forField("drink"))
          +	 * 		.pipeline(match(ctx -> new Document("$expr", new Document("$in", List.of("$$orders_drink", "$beverages")))))
          +	 * 		.as("matches")
          +	 * 
          + * + * @return new instance of {@link LookupOperationBuilder}. + * @since 4.1 + */ + public static LookupOperationBuilder lookup() { + return new LookupOperationBuilder(); + } + /** * Creates a new {@link CountOperationBuilder}. * @@ -562,11 +708,31 @@ public static CountOperationBuilder count() { return new CountOperationBuilder(); } + /** + * Creates a new {@link RedactOperation} that can restrict the content of a document based on information stored + * within the document itself. + * + *
          +	 *
          +	 * Aggregation.redact(ConditionalOperators.when(Criteria.where("level").is(5)) //
          +	 * 		.then(RedactOperation.PRUNE) //
          +	 * 		.otherwise(RedactOperation.DESCEND));
          +	 * 
          + * + * @param condition Any {@link AggregationExpression} that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or + * {@literal $$KEEP}. Must not be {@literal null}. + * @return new instance of {@link RedactOperation}. Never {@literal null}. + * @since 3.0 + */ + public static RedactOperation redact(AggregationExpression condition) { + return new RedactOperation(condition); + } + /** * Creates a new {@link Fields} instance for the given field names. * * @param fields must not be {@literal null}. - * @return + * @return new instance of {@link Fields}. * @see Fields#fields(String...) */ public static Fields fields(String... fields) { @@ -578,29 +744,16 @@ public static Fields fields(String... fields) { * * @param name must not be {@literal null} or empty. * @param target must not be {@literal null} or empty. - * @return + * @return new instance of {@link Fields}. */ public static Fields bind(String name, String target) { return Fields.from(field(name, target)); } - /** - * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the{@code distanceField}. The - * {@code distanceField} defines output field that contains the calculated distance. - * - * @param query must not be {@literal null}. - * @param distanceField must not be {@literal null} or empty. - * @return - * @since 1.7 - */ - public static GeoNearOperation geoNear(NearQuery query, String distanceField) { - return new GeoNearOperation(query, distanceField); - } - /** * Returns a new {@link AggregationOptions.Builder}. * - * @return + * @return new instance of {@link AggregationOptions.Builder}. 
* @since 1.6 */ public static AggregationOptions.Builder newAggregationOptions() { @@ -615,12 +768,19 @@ public static AggregationOptions.Builder newAggregationOptions() { * @since 2.1 */ public List toPipeline(AggregationOperationContext rootContext) { - return AggregationOperationRenderer.toDocument(operations, rootContext); + return pipeline.toDocuments(rootContext); } /** - * Converts this {@link Aggregation} specification to a {@link Document}. - *

          + * @return the {@link AggregationPipeline}. + * @since 3.0.2 + */ + public AggregationPipeline getPipeline() { + return pipeline; + } + + /** + * Converts this {@link Aggregation} specification to a {@link Document}.
          * MongoDB requires as of 3.6 cursor-based aggregation. Use {@link #toPipeline(AggregationOperationContext)} to render * an aggregation pipeline. * @@ -635,59 +795,8 @@ public Document toDocument(String inputCollectionName, AggregationOperationConte return options.applyAndReturnPotentiallyChangedCommand(command); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return SerializationUtils.serializeToJsonSafely(toDocument("__collection__", DEFAULT_CONTEXT)); } - - /** - * Describes the system variables available in MongoDB aggregation framework pipeline expressions. - * - * @author Thomas Darimont - * @see Aggregation Variables - */ - enum SystemVariable { - - ROOT, CURRENT; - - private static final String PREFIX = "$$"; - - /** - * Return {@literal true} if the given {@code fieldRef} denotes a well-known system variable, {@literal false} - * otherwise. - * - * @param fieldRef may be {@literal null}. - * @return - */ - public static boolean isReferingToSystemVariable(@Nullable String fieldRef) { - - if (fieldRef == null || !fieldRef.startsWith(PREFIX) || fieldRef.length() <= 2) { - return false; - } - - int indexOfFirstDot = fieldRef.indexOf('.'); - String candidate = fieldRef.substring(2, indexOfFirstDot == -1 ? 
fieldRef.length() : indexOfFirstDot); - - for (SystemVariable value : values()) { - if (value.name().equals(candidate)) { - return true; - } - } - - return false; - } - - /* - * (non-Javadoc) - * @see java.lang.Enum#toString() - */ - @Override - public String toString() { - return PREFIX.concat(name()); - } - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java index 17f8422c19..1cb38ef362 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,23 +16,58 @@ package org.springframework.data.mongodb.core.aggregation; import org.bson.Document; +import org.springframework.data.mongodb.MongoExpression; /** * An {@link AggregationExpression} can be used with field expressions in aggregation pipeline stages like * {@code project} and {@code group}. + *

          + * The {@link AggregationExpression expressions} {@link #toDocument(AggregationOperationContext)} method is called during + * the mapping process to obtain the mapped, ready to use representation that can be handed over to the driver as part + * of an {@link AggregationOperation pipleine stage}. * * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl */ -public interface AggregationExpression { +public interface AggregationExpression extends MongoExpression { + + /** + * Create an {@link AggregationExpression} out of a given {@link MongoExpression} to ensure the resulting + * {@link MongoExpression#toDocument() Document} is mapped against the {@link AggregationOperationContext}.
          + * If the given expression is already an {@link AggregationExpression} the very same instance is returned. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. + * @since 3.2 + */ + static AggregationExpression from(MongoExpression expression) { + + if (expression instanceof AggregationExpression aggregationExpression) { + return aggregationExpression; + } + + return context -> context.getMappedObject(expression.toDocument()); + } + + /** + * Obtain the as is (unmapped) representation of the {@link AggregationExpression}. Use + * {@link #toDocument(AggregationOperationContext)} with a matching {@link AggregationOperationContext context} to + * engage domain type mapping including field name resolution. + * + * @see org.springframework.data.mongodb.MongoExpression#toDocument() + */ + @Override + default Document toDocument() { + return toDocument(Aggregation.DEFAULT_CONTEXT); + } /** * Turns the {@link AggregationExpression} into a {@link Document} within the given * {@link AggregationOperationContext}. * - * @param context - * @return + * @param context must not be {@literal null}. + * @return the MongoDB native ({@link Document}) form of the expression. */ Document toDocument(AggregationOperationContext context); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionCriteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionCriteria.java new file mode 100644 index 0000000000..1ae935a92b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionCriteria.java @@ -0,0 +1,58 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.EvaluationOperators.Expr; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; + +/** + * A {@link CriteriaDefinition criteria} to use {@code $expr} within a + * {@link org.springframework.data.mongodb.core.query.Query}. + * + * @author Christoph Strobl + * @since 4.1 + */ +public class AggregationExpressionCriteria implements CriteriaDefinition { + + private final AggregationExpression expression; + + AggregationExpressionCriteria(AggregationExpression expression) { + this.expression = expression; + } + + /** + * @param expression must not be {@literal null}. + * @return new instance of {@link AggregationExpressionCriteria}. 
+ */ + public static AggregationExpressionCriteria whereExpr(AggregationExpression expression) { + return new AggregationExpressionCriteria(expression); + } + + @Override + public Document getCriteriaObject() { + + if (expression instanceof Expr expr) { + return new Document(getKey(), expr.get(0)); + } + return new Document(getKey(), expression); + } + + @Override + public String getKey() { + return "$expr"; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java index 9775e1c0b9..00db38329f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -53,21 +53,21 @@ class AggregationExpressionTransformationContext * @param currentNode must not be {@literal null}. * @param parentNode may be {@literal null}. * @param previousOperationObject may be {@literal null}. - * @param aggregationContext must not be {@literal null}. + * @param context must not be {@literal null}. 
*/ public AggregationExpressionTransformationContext(T currentNode, @Nullable ExpressionNode parentNode, @Nullable Document previousOperationObject, AggregationOperationContext context) { super(currentNode, parentNode, previousOperationObject); - Assert.notNull(context, "AggregationOperationContext must not be null!"); + Assert.notNull(context, "AggregationOperationContext must not be null"); this.aggregationContext = context; } /** * Returns the underlying {@link AggregationOperationContext}. * - * @return + * @return never {@literal null}. */ public AggregationOperationContext getAggregationContext() { return aggregationContext; @@ -76,7 +76,7 @@ public AggregationOperationContext getAggregationContext() { /** * Returns the {@link FieldReference} for the current {@link ExpressionNode}. * - * @return + * @return never {@literal null}. */ public FieldReference getFieldReference() { return aggregationContext.getReference(getCurrentNode().getName()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationFunctionExpressions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationFunctionExpressions.java deleted file mode 100644 index f688b14725..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationFunctionExpressions.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2015-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.aggregation; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.bson.Document; -import org.springframework.util.Assert; - -/** - * An enum of supported {@link AggregationExpression}s in aggregation pipeline stages. - * - * @author Thomas Darimont - * @author Oliver Gierke - * @author Christoph Strobl - * @author Mark Paluch - * @since 1.7 - * @deprecated since 1.10. Please use {@link ArithmeticOperators} and {@link ComparisonOperators} instead. - */ -@Deprecated -public enum AggregationFunctionExpressions { - - SIZE, CMP, EQ, GT, GTE, LT, LTE, NE, SUBTRACT, ADD, MULTIPLY; - - /** - * Returns an {@link AggregationExpression} build from the current {@link Enum} name and the given parameters. - * - * @param parameters must not be {@literal null} - * @return - */ - public AggregationExpression of(Object... parameters) { - - Assert.notNull(parameters, "Parameters must not be null!"); - return new FunctionExpression(name().toLowerCase(), parameters); - } - - /** - * An {@link AggregationExpression} representing a function call. - * - * @author Thomas Darimont - * @author Oliver Gierke - * @since 1.7 - */ - static class FunctionExpression implements AggregationExpression { - - private final String name; - private final List values; - - /** - * Creates a new {@link FunctionExpression} for the given name and values. - * - * @param name must not be {@literal null} or empty. - * @param values must not be {@literal null}. 
- */ - public FunctionExpression(String name, Object[] values) { - - Assert.hasText(name, "Name must not be null!"); - Assert.notNull(values, "Values must not be null!"); - - this.name = name; - this.values = Arrays.asList(values); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Expression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ - @Override - public Document toDocument(AggregationOperationContext context) { - - List args = new ArrayList(values.size()); - - for (Object value : values) { - args.add(unpack(value, context)); - } - - return new Document("$" + name, args); - } - - private static Object unpack(Object value, AggregationOperationContext context) { - - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); - } - - if (value instanceof Field) { - return context.getReference((Field) value).toString(); - } - - return value; - } - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java index c1938533f0..923a1e73cf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,9 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.util.Collections; +import java.util.List; + import org.bson.Document; /** @@ -32,7 +35,34 @@ public interface AggregationOperation { * Turns the {@link AggregationOperation} into a {@link Document} by using the given * {@link AggregationOperationContext}. * + * @param context the {@link AggregationOperationContext} to operate within. Must not be {@literal null}. * @return the Document + * @deprecated since 2.2 in favor of {@link #toPipelineStages(AggregationOperationContext)}. */ + @Deprecated Document toDocument(AggregationOperationContext context); + + /** + * Turns the {@link AggregationOperation} into list of {@link Document stages} by using the given + * {@link AggregationOperationContext}. This allows a single {@link AggregationOptions} to add additional stages for + * eg. {@code $sort} or {@code $limit}. + * + * @param context the {@link AggregationOperationContext} to operate within. Must not be {@literal null}. + * @return the pipeline stages to run through. Never {@literal null}. + * @since 2.2 + */ + default List toPipelineStages(AggregationOperationContext context) { + return Collections.singletonList(toDocument(context)); + } + + /** + * Return the MongoDB operator that is used for this {@link AggregationOperation}. Aggregation operations should + * implement this method to avoid document rendering. + * + * @return the operator used for this {@link AggregationOperation}. 
+ * @since 3.0.2 + */ + default String getOperator() { + return toDocument(Aggregation.DEFAULT_CONTEXT).keySet().iterator().next(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java index 8f2285b3de..a49c7e46d5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,30 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.beans.PropertyDescriptor; +import java.lang.reflect.Method; +import java.util.Arrays; + import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.beans.BeanUtils; +import org.springframework.data.mongodb.CodecRegistryProvider; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.MongoClientSettings; /** * The context for an {@link AggregationOperation}. 
* * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch * @since 1.3 */ -public interface AggregationOperationContext { +public interface AggregationOperationContext extends CodecRegistryProvider { /** * Returns the mapped {@link Document}, potentially converting the source considering mapping metadata etc. @@ -33,23 +46,115 @@ public interface AggregationOperationContext { * @param document will never be {@literal null}. * @return must not be {@literal null}. */ - Document getMappedObject(Document document); + default Document getMappedObject(Document document) { + return getMappedObject(document, null); + } + + /** + * Returns the mapped {@link Document}, potentially converting the source considering mapping metadata for the given + * type. + * + * @param document will never be {@literal null}. + * @param type can be {@literal null}. + * @return must not be {@literal null}. + * @since 2.2 + */ + Document getMappedObject(Document document, @Nullable Class type); /** - * Returns a {@link FieldReference} for the given field or {@literal null} if the context does not expose the given - * field. + * Returns a {@link FieldReference} for the given field. * * @param field must not be {@literal null}. - * @return + * @return the {@link FieldReference} for the given {@link Field}. + * @throws IllegalArgumentException if the context does not expose a field with the given name */ FieldReference getReference(Field field); /** - * Returns the {@link FieldReference} for the field with the given name or {@literal null} if the context does not - * expose a field with the given name. + * Returns the {@link FieldReference} for the field with the given name. * * @param name must not be {@literal null} or empty. - * @return + * @return the {@link FieldReference} for the field with given {@literal name}. 
+ * @throws IllegalArgumentException if the context does not expose a field with the given name */ FieldReference getReference(String name); + + /** + * Returns the {@link Fields} exposed by the type. May be a {@literal class} or an {@literal interface}. The default + * implementation uses {@link BeanUtils#getPropertyDescriptors(Class) property descriptors} discover fields from a + * {@link Class}. + * + * @param type must not be {@literal null}. + * @return never {@literal null}. + * @since 2.2 + * @see BeanUtils#getPropertyDescriptor(Class, String) + */ + default Fields getFields(Class type) { + + Assert.notNull(type, "Type must not be null"); + + return Fields.fields(Arrays.stream(BeanUtils.getPropertyDescriptors(type)) // + .filter(it -> { // object and default methods + Method method = it.getReadMethod(); + if (method == null) { + return false; + } + if (ReflectionUtils.isObjectMethod(method)) { + return false; + } + return !method.isDefault(); + }) // + .map(PropertyDescriptor::getName) // + .toArray(String[]::new)); + } + + /** + * Create a nested {@link AggregationOperationContext} from this context that exposes {@link ExposedFields fields}. + *

          + * Implementations of {@link AggregationOperationContext} retain their {@link FieldLookupPolicy}. If no policy is + * specified, then lookup defaults to {@link FieldLookupPolicy#strict()}. + * + * @param fields the fields to expose, must not be {@literal null}. + * @return the new {@link AggregationOperationContext} exposing {@code fields}. + * @since 4.3.1 + */ + default AggregationOperationContext expose(ExposedFields fields) { + return new ExposedFieldsAggregationOperationContext(fields, this, FieldLookupPolicy.strict()); + } + + /** + * Create a nested {@link AggregationOperationContext} from this context that inherits exposed fields from this + * context and exposes {@link ExposedFields fields}. + *

          + * Implementations of {@link AggregationOperationContext} retain their {@link FieldLookupPolicy}. If no policy is + * specified, then lookup defaults to {@link FieldLookupPolicy#strict()}. + * + * @param fields the fields to expose, must not be {@literal null}. + * @return the new {@link AggregationOperationContext} exposing {@code fields}. + * @since 4.3.1 + */ + default AggregationOperationContext inheritAndExpose(ExposedFields fields) { + return new InheritingExposedFieldsAggregationOperationContext(fields, this, FieldLookupPolicy.strict()); + } + + /** + * This toggle allows the {@link AggregationOperationContext context} to use any given field name without checking for + * its existence. Typically, the {@link AggregationOperationContext} fails when referencing unknown fields, those that + * are not present in one of the previous stages or the input source, throughout the pipeline. + * + * @return a more relaxed {@link AggregationOperationContext}. + * @since 3.0 + * @deprecated since 4.3.1, {@link FieldLookupPolicy} should be specified explicitly when creating the + * AggregationOperationContext. 
+ */ + @Deprecated(since = "4.3.1", forRemoval = true) + default AggregationOperationContext continueOnMissingFieldReference() { + return this; + } + + @Override + default CodecRegistry getCodecRegistry() { + return MongoClientSettings.getDefaultCodecRegistry(); + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java index 3d2850610c..fd5f7ed979 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,6 +24,7 @@ import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField; import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; +import org.springframework.lang.Nullable; /** * Rendering support for {@link AggregationOperation} into a {@link List} of {@link org.bson.Document}. @@ -41,7 +42,7 @@ class AggregationOperationRenderer { * {@link Document} representation. * * @param operations must not be {@literal null}. 
- * @param context must not be {@literal null}. + * @param rootContext must not be {@literal null}. * @return the {@link List} of {@link Document}. */ static List toDocument(List operations, AggregationOperationContext rootContext) { @@ -52,54 +53,76 @@ static List toDocument(List operations, Aggregat for (AggregationOperation operation : operations) { - operationDocuments.add(operation.toDocument(contextToUse)); + operationDocuments.addAll(operation.toPipelineStages(contextToUse)); - if (operation instanceof FieldsExposingAggregationOperation) { + if (operation instanceof FieldsExposingAggregationOperation exposedFieldsOperation) { - FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation; ExposedFields fields = exposedFieldsOperation.getFields(); - if (operation instanceof InheritsFieldsAggregationOperation) { - contextToUse = new InheritingExposedFieldsAggregationOperationContext(fields, contextToUse); + if (operation instanceof InheritsFieldsAggregationOperation || exposedFieldsOperation.inheritsFields()) { + contextToUse = contextToUse.inheritAndExpose(fields); } else { - contextToUse = fields.exposesNoFields() ? DEFAULT_CONTEXT - : new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), contextToUse); + contextToUse = fields.exposesNoFields() ? 
ConverterAwareNoOpContext.instance(rootContext) + : contextToUse.expose(fields); } } + } return operationDocuments; } + private static class ConverterAwareNoOpContext implements AggregationOperationContext { + + AggregationOperationContext ctx; + + static ConverterAwareNoOpContext instance(AggregationOperationContext ctx) { + + if(ctx instanceof ConverterAwareNoOpContext noOpContext) { + return noOpContext; + } + + return new ConverterAwareNoOpContext(ctx); + } + + ConverterAwareNoOpContext(AggregationOperationContext ctx) { + this.ctx = ctx; + } + + @Override + public Document getMappedObject(Document document, @Nullable Class type) { + return ctx.getMappedObject(document, null); + } + + @Override + public FieldReference getReference(Field field) { + return new DirectFieldReference(new ExposedField(field, true)); + } + + @Override + public FieldReference getReference(String name) { + return new DirectFieldReference(new ExposedField(new AggregationField(name), true)); + } + } + /** * Simple {@link AggregationOperationContext} that just returns {@link FieldReference}s as is. 
* * @author Oliver Gierke + * @author Christoph Strobl */ private static class NoOpAggregationOperationContext implements AggregationOperationContext { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document) - */ @Override - public Document getMappedObject(Document document) { + public Document getMappedObject(Document document, @Nullable Class type) { return document; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField) - */ @Override public FieldReference getReference(Field field) { return new DirectFieldReference(new ExposedField(field, true)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String) - */ @Override public FieldReference getReference(String name) { return new DirectFieldReference(new ExposedField(new AggregationField(name), true)); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java index 29b30a25b1..327d40b8c7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,40 +15,63 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.time.Duration; import java.util.Optional; import org.bson.Document; +import org.springframework.data.mongodb.core.ReadConcernAware; +import org.springframework.data.mongodb.core.ReadPreferenceAware; import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.DBObject; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; /** * Holds a set of configurable aggregation options that can be used within an aggregation pipeline. A list of support - * aggregation options can be found in the MongoDB reference documentation - * https://docs.mongodb.org/manual/reference/command/aggregate/#aggregate + * aggregation options can be found in the + * MongoDB reference documentation. + *

          + * As off 4.3 {@link #allowDiskUse} can be {@literal null}, indicating use of server default, and may only be applied if + * {@link #isAllowDiskUseSet() explicitly set}. For compatibility reasons {@link #isAllowDiskUse()} will remain + * returning {@literal false} if the no value has been set. * * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Yadhukrishna S Pai + * @author Soumya Prakash Behera * @see Aggregation#withOptions(AggregationOptions) * @see TypedAggregation#withOptions(AggregationOptions) * @since 1.6 */ -public class AggregationOptions { +public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware { private static final String BATCH_SIZE = "batchSize"; private static final String CURSOR = "cursor"; private static final String EXPLAIN = "explain"; private static final String ALLOW_DISK_USE = "allowDiskUse"; private static final String COLLATION = "collation"; + private static final String COMMENT = "comment"; + private static final String MAX_TIME = "maxTimeMS"; + private static final String HINT = "hint"; - private final boolean allowDiskUse; + private final Optional allowDiskUse; private final boolean explain; private final Optional cursor; private final Optional collation; + private final Optional comment; + private final Optional hint; + + private Optional readConcern; + + private Optional readPreference; + private Duration maxTime = Duration.ZERO; + private ResultOptions resultOptions = ResultOptions.READ; + private DomainTypeMapping domainTypeMapping = DomainTypeMapping.RELAXED; /** * Creates a new {@link AggregationOptions}. @@ -57,7 +80,7 @@ public class AggregationOptions { * @param explain whether to get the execution plan for the aggregation instead of the actual results. * @param cursor can be {@literal null}, used to pass additional options to the aggregation. 
*/ - public AggregationOptions(boolean allowDiskUse, boolean explain, Document cursor) { + public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor) { this(allowDiskUse, explain, cursor, null); } @@ -73,11 +96,48 @@ public AggregationOptions(boolean allowDiskUse, boolean explain, Document cursor */ public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor, @Nullable Collation collation) { + this(allowDiskUse, explain, cursor, collation, null, null); + } + + /** + * Creates a new {@link AggregationOptions}. + * + * @param allowDiskUse whether to off-load intensive sort-operations to disk. + * @param explain whether to get the execution plan for the aggregation instead of the actual results. + * @param cursor can be {@literal null}, used to pass additional options (such as {@code batchSize}) to the + * aggregation. + * @param collation collation for string comparison. Can be {@literal null}. + * @param comment execution comment. Can be {@literal null}. + * @since 2.2 + */ + public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor, + @Nullable Collation collation, @Nullable String comment) { + this(allowDiskUse, explain, cursor, collation, comment, null); + } - this.allowDiskUse = allowDiskUse; + /** + * Creates a new {@link AggregationOptions}. + * + * @param allowDiskUse whether to off-load intensive sort-operations to disk. + * @param explain whether to get the execution plan for the aggregation instead of the actual results. + * @param cursor can be {@literal null}, used to pass additional options (such as {@code batchSize}) to the + * aggregation. + * @param collation collation for string comparison. Can be {@literal null}. + * @param comment execution comment. Can be {@literal null}. + * @param hint can be {@literal null}, used to provide an index that would be forcibly used by query optimizer. 
+ * @since 3.1 + */ + private AggregationOptions(@Nullable Boolean allowDiskUse, boolean explain, @Nullable Document cursor, + @Nullable Collation collation, @Nullable String comment, @Nullable Object hint) { + + this.allowDiskUse = Optional.ofNullable(allowDiskUse); this.explain = explain; this.cursor = Optional.ofNullable(cursor); this.collation = Optional.ofNullable(collation); + this.comment = Optional.ofNullable(comment); + this.hint = Optional.ofNullable(hint); + this.readConcern = Optional.empty(); + this.readPreference = Optional.empty(); } /** @@ -93,7 +153,7 @@ public AggregationOptions(boolean allowDiskUse, boolean explain, int cursorBatch } /** - * Creates new {@link AggregationOptions} given {@link DBObject} containing aggregation options. + * Creates new {@link AggregationOptions} given {@link Document} containing aggregation options. * * @param document must not be {@literal null}. * @return the {@link AggregationOptions}. @@ -101,15 +161,21 @@ public AggregationOptions(boolean allowDiskUse, boolean explain, int cursorBatch */ public static AggregationOptions fromDocument(Document document) { - Assert.notNull(document, "Document must not be null!"); + Assert.notNull(document, "Document must not be null"); - boolean allowDiskUse = document.getBoolean(ALLOW_DISK_USE, false); + Boolean allowDiskUse = document.get(ALLOW_DISK_USE, Boolean.class); boolean explain = document.getBoolean(EXPLAIN, false); Document cursor = document.get(CURSOR, Document.class); Collation collation = document.containsKey(COLLATION) ? 
Collation.from(document.get(COLLATION, Document.class)) : null; + String comment = document.getString(COMMENT); + Document hint = document.get(HINT, Document.class); - return new AggregationOptions(allowDiskUse, explain, cursor, collation); + AggregationOptions options = new AggregationOptions(allowDiskUse, explain, cursor, collation, comment, hint); + if (document.containsKey(MAX_TIME)) { + options.maxTime = Duration.ofMillis(document.getLong(MAX_TIME)); + } + return options; } /** @@ -123,19 +189,29 @@ public static Builder builder() { } /** - * Enables writing to temporary files. When set to true, aggregation stages can write data to the _tmp subdirectory in - * the dbPath directory. + * Enables writing to temporary files. When set to {@literal true}, aggregation stages can write data to the + * {@code _tmp} subdirectory in the {@code dbPath} directory. * - * @return + * @return {@literal true} if enabled; {@literal false} otherwise (or if not set). */ public boolean isAllowDiskUse() { - return allowDiskUse; + return allowDiskUse.orElse(false); + } + + /** + * Return whether {@link #isAllowDiskUse} is configured. + * + * @return {@literal true} if is {@code allowDiskUse} is configured, {@literal false} otherwise. + * @since 4.2.5 + */ + public boolean isAllowDiskUseSet() { + return allowDiskUse.isPresent(); } /** * Specifies to return the information on the processing of the pipeline. * - * @return + * @return {@literal true} if enabled. */ public boolean isExplain() { return explain; @@ -160,7 +236,7 @@ public Integer getCursorBatchSize() { /** * Specify a document that contains options that control the creation of the cursor object. * - * @return + * @return never {@literal null}. */ public Optional getCursor() { return cursor; @@ -169,13 +245,99 @@ public Optional getCursor() { /** * Get collation settings for string comparison. * - * @return + * @return never {@literal null}. 
* @since 2.0 */ public Optional getCollation() { return collation; } + /** + * Get the comment for the aggregation. + * + * @return never {@literal null}. + * @since 2.2 + */ + public Optional getComment() { + return comment; + } + + /** + * Get the hint used to fulfill the aggregation. + * + * @return never {@literal null}. + * @since 3.1 + * @deprecated since 4.1, use {@link #getHintObject()} instead. + */ + public Optional getHint() { + return hint.map(it -> { + if (it instanceof Document doc) { + return doc; + } + if (it instanceof String hintString) { + if (BsonUtils.isJsonDocument(hintString)) { + return BsonUtils.parse(hintString, null); + } + } + throw new IllegalStateException("Unable to read hint of type %s".formatted(it.getClass())); + }); + } + + /** + * Get the hint used to fulfill the aggregation. + * + * @return never {@literal null}. + * @since 4.1 + */ + public Optional getHintObject() { + return hint; + } + + @Override + public boolean hasReadConcern() { + return readConcern.isPresent(); + } + + @Override + public ReadConcern getReadConcern() { + return readConcern.orElse(null); + } + + @Override + public boolean hasReadPreference() { + return readPreference.isPresent(); + } + + @Override + public ReadPreference getReadPreference() { + return readPreference.orElse(null); + } + + /** + * @return the time limit for processing. {@link Duration#ZERO} is used for the default unbounded behavior. + * @since 3.0 + */ + public Duration getMaxTime() { + return maxTime; + } + + /** + * @return {@literal true} to skip results when running an aggregation. Useful in combination with {@code $merge} or + * {@code $out}. + * @since 3.0.2 + */ + public boolean isSkipResults() { + return ResultOptions.SKIP.equals(resultOptions); + } + + /** + * @return the domain type mapping strategy do apply. Never {@literal null}. 
+ * @since 3.2 + */ + public DomainTypeMapping getDomainTypeMapping() { + return domainTypeMapping; + } + /** * Returns a new potentially adjusted copy for the given {@code aggregationCommandObject} with the configuration * applied. @@ -187,14 +349,18 @@ Document applyAndReturnPotentiallyChangedCommand(Document command) { Document result = new Document(command); - if (allowDiskUse && !result.containsKey(ALLOW_DISK_USE)) { - result.put(ALLOW_DISK_USE, allowDiskUse); + if (isAllowDiskUseSet() && !result.containsKey(ALLOW_DISK_USE)) { + result.put(ALLOW_DISK_USE, isAllowDiskUse()); } if (explain && !result.containsKey(EXPLAIN)) { result.put(EXPLAIN, explain); } + if (result.containsKey(HINT)) { + hint.ifPresent(val -> result.append(HINT, val)); + } + if (!result.containsKey(CURSOR)) { cursor.ifPresent(val -> result.put(CURSOR, val)); } @@ -203,29 +369,46 @@ Document applyAndReturnPotentiallyChangedCommand(Document command) { collation.map(Collation::toDocument).ifPresent(val -> result.append(COLLATION, val)); } + if (hasExecutionTimeLimit() && !result.containsKey(MAX_TIME)) { + result.append(MAX_TIME, maxTime.toMillis()); + } + return result; } /** * Returns a {@link Document} representation of this {@link AggregationOptions}. * - * @return + * @return never {@literal null}. 
*/ public Document toDocument() { Document document = new Document(); - document.put(ALLOW_DISK_USE, allowDiskUse); + if (isAllowDiskUseSet()) { + document.put(ALLOW_DISK_USE, isAllowDiskUse()); + } document.put(EXPLAIN, explain); cursor.ifPresent(val -> document.put(CURSOR, val)); collation.ifPresent(val -> document.append(COLLATION, val.toDocument())); + comment.ifPresent(val -> document.append(COMMENT, val)); + hint.ifPresent(val -> document.append(HINT, val)); + + if (hasExecutionTimeLimit()) { + document.append(MAX_TIME, maxTime.toMillis()); + } return document; } - /* (non-Javadoc) - * @see java.lang.Object#toString() + /** + * @return {@literal true} if {@link #maxTime} is set to a positive value. + * @since 3.0 */ + public boolean hasExecutionTimeLimit() { + return !maxTime.isZero() && !maxTime.isNegative(); + } + @Override public String toString() { return toDocument().toJson(); @@ -243,16 +426,23 @@ static Document createCursor(int cursorBatchSize) { */ public static class Builder { - private boolean allowDiskUse; + private Boolean allowDiskUse; private boolean explain; private @Nullable Document cursor; private @Nullable Collation collation; + private @Nullable String comment; + private @Nullable Object hint; + private @Nullable ReadConcern readConcern; + private @Nullable ReadPreference readPreference; + private @Nullable Duration maxTime; + private @Nullable ResultOptions resultOptions; + private @Nullable DomainTypeMapping domainTypeMapping; /** * Defines whether to off-load intensive sort-operations to disk. * - * @param allowDiskUse - * @return + * @param allowDiskUse use {@literal true} to allow disk use during the aggregation. + * @return this. */ public Builder allowDiskUse(boolean allowDiskUse) { @@ -263,8 +453,8 @@ public Builder allowDiskUse(boolean allowDiskUse) { /** * Defines whether to get the execution plan for the aggregation instead of the actual results. 
* - * @param explain - * @return + * @param explain use {@literal true} to enable explain feature. + * @return this. */ public Builder explain(boolean explain) { @@ -275,8 +465,8 @@ public Builder explain(boolean explain) { /** * Additional options to the aggregation. * - * @param cursor - * @return + * @param cursor must not be {@literal null}. + * @return this. */ public Builder cursor(Document cursor) { @@ -287,8 +477,8 @@ public Builder cursor(Document cursor) { /** * Define the initial cursor batch size. * - * @param batchSize - * @return + * @param batchSize use a positive int. + * @return this. * @since 2.0 */ public Builder cursorBatchSize(int batchSize) { @@ -301,7 +491,8 @@ public Builder cursorBatchSize(int batchSize) { * Define collation settings for string comparison. * * @param collation can be {@literal null}. - * @return + * @return this. + * @since 2.0 */ public Builder collation(@Nullable Collation collation) { @@ -309,13 +500,200 @@ public Builder collation(@Nullable Collation collation) { return this; } + /** + * Define a comment to describe the execution. + * + * @param comment can be {@literal null}. + * @return this. + * @since 2.2 + */ + public Builder comment(@Nullable String comment) { + + this.comment = comment; + return this; + } + + /** + * Define a hint that is used by query optimizer to to fulfill the aggregation. + * + * @param hint can be {@literal null}. + * @return this. + * @since 3.1 + */ + public Builder hint(@Nullable Document hint) { + + this.hint = hint; + return this; + } + + /** + * Define a hint that is used by query optimizer to to fulfill the aggregation. + * + * @param indexName can be {@literal null}. + * @return this. + * @since 4.1 + */ + public Builder hint(@Nullable String indexName) { + + this.hint = indexName; + return this; + } + + /** + * Define a {@link ReadConcern} to apply to the aggregation. + * + * @param readConcern can be {@literal null}. + * @return this. 
+ * @since 4.1 + */ + public Builder readConcern(@Nullable ReadConcern readConcern) { + + this.readConcern = readConcern; + return this; + } + + /** + * Define a {@link ReadPreference} to apply to the aggregation. + * + * @param readPreference can be {@literal null}. + * @return this. + * @since 4.1 + */ + public Builder readPreference(@Nullable ReadPreference readPreference) { + + this.readPreference = readPreference; + return this; + } + + /** + * Set the time limit for processing. + * + * @param maxTime {@link Duration#ZERO} is used for the default unbounded behavior. {@link Duration#isNegative() + * Negative} values will be ignored. + * @return this. + * @since 3.0 + */ + public Builder maxTime(@Nullable Duration maxTime) { + + this.maxTime = maxTime; + return this; + } + + /** + * Run the aggregation, but do NOT read the aggregation result from the store.
          + * If the expected result of the aggregation is rather large, eg. when using an {@literal $out} operation, this + * option allows to execute the aggregation without having the cursor return the operation result. + * + * @return this. + * @since 3.0.2 + */ + public Builder skipOutput() { + + this.resultOptions = ResultOptions.SKIP; + return this; + } + + /** + * Apply a strict domain type mapping considering {@link org.springframework.data.mongodb.core.mapping.Field} + * annotations throwing errors for non-existent, but referenced fields. + * + * @return this. + * @since 3.2 + */ + public Builder strictMapping() { + + this.domainTypeMapping = DomainTypeMapping.STRICT; + return this; + } + + /** + * Apply a relaxed domain type mapping considering {@link org.springframework.data.mongodb.core.mapping.Field} + * annotations using the user provided name if a referenced field does not exist. + * + * @return this. + * @since 3.2 + */ + public Builder relaxedMapping() { + + this.domainTypeMapping = DomainTypeMapping.RELAXED; + return this; + } + + /** + * Apply no domain type mapping at all taking the pipeline as-is. + * + * @return this. + * @since 3.2 + */ + public Builder noMapping() { + + this.domainTypeMapping = DomainTypeMapping.NONE; + return this; + } + /** * Returns a new {@link AggregationOptions} instance with the given configuration. * - * @return + * @return new instance of {@link AggregationOptions}. 
*/ public AggregationOptions build() { - return new AggregationOptions(allowDiskUse, explain, cursor, collation); + + AggregationOptions options = new AggregationOptions(allowDiskUse, explain, cursor, collation, comment, hint); + if (maxTime != null) { + options.maxTime = maxTime; + } + if (resultOptions != null) { + options.resultOptions = resultOptions; + } + if (domainTypeMapping != null) { + options.domainTypeMapping = domainTypeMapping; + } + if (readConcern != null) { + options.readConcern = Optional.of(readConcern); + } + if (readPreference != null) { + options.readPreference = Optional.of(readPreference); + } + + return options; } } + + /** + * @since 3.0 + */ + private enum ResultOptions { + + /** + * Just do it!, and do not read the operation result. + */ + SKIP, + /** + * Read the aggregation result from the cursor. + */ + READ + } + + /** + * Aggregation pipeline Domain type mappings supported by the mapping layer. + * + * @since 3.2 + */ + public enum DomainTypeMapping { + + /** + * Mapping throws errors for non-existent, but referenced fields. + */ + STRICT, + + /** + * Fields that do not exist in the model are treated as-is. + */ + RELAXED, + + /** + * Do not attempt to map fields against the model and treat the entire pipeline as-is. + */ + NONE + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationPipeline.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationPipeline.java new file mode 100644 index 0000000000..68662ec0df --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationPipeline.java @@ -0,0 +1,167 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Predicate; + +import org.bson.Document; +import org.springframework.util.Assert; + +/** + * The {@link AggregationPipeline} holds the collection of {@link AggregationOperation aggregation stages}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0.2 + */ +public class AggregationPipeline { + + private final List pipeline; + + public static AggregationPipeline of(AggregationOperation... stages) { + return new AggregationPipeline(Arrays.asList(stages)); + } + + /** + * Create an empty pipeline + */ + public AggregationPipeline() { + this(new ArrayList<>()); + } + + /** + * Create a new pipeline with given {@link AggregationOperation stages}. + * + * @param aggregationOperations must not be {@literal null}. + */ + public AggregationPipeline(List aggregationOperations) { + + Assert.notNull(aggregationOperations, "AggregationOperations must not be null"); + pipeline = new ArrayList<>(aggregationOperations); + } + + /** + * Append the given {@link AggregationOperation stage} to the pipeline. + * + * @param aggregationOperation must not be {@literal null}. + * @return this. 
+ */ + public AggregationPipeline add(AggregationOperation aggregationOperation) { + + Assert.notNull(aggregationOperation, "AggregationOperation must not be null"); + + pipeline.add(aggregationOperation); + return this; + } + + /** + * Get the list of {@link AggregationOperation aggregation stages}. + * + * @return never {@literal null}. + */ + public List getOperations() { + return Collections.unmodifiableList(pipeline); + } + + List toDocuments(AggregationOperationContext context) { + + verify(); + return AggregationOperationRenderer.toDocument(pipeline, context); + } + + /** + * @return {@literal true} if the last aggregation stage is either {@literal $out} or {@literal $merge}. + */ + public boolean isOutOrMerge() { + + if (isEmpty()) { + return false; + } + + AggregationOperation operation = pipeline.get(pipeline.size() - 1); + return isOut(operation) || isMerge(operation); + } + + void verify() { + + // check $out/$merge is the last operation if it exists + for (AggregationOperation operation : pipeline) { + + if (isOut(operation) && !isLast(operation)) { + throw new IllegalArgumentException("The $out operator must be the last stage in the pipeline"); + } + + if (isMerge(operation) && !isLast(operation)) { + throw new IllegalArgumentException("The $merge operator must be the last stage in the pipeline"); + } + } + } + + /** + * Return whether this aggregation pipeline defines a {@code $unionWith} stage that may contribute documents from + * other collections. Checking for presence of union stages is useful when attempting to determine the aggregation + * element type for mapping metadata computation. + * + * @return {@literal true} the aggregation pipeline makes use of {@code $unionWith}. + * @since 3.1 + */ + public boolean containsUnionWith() { + return containsOperation(AggregationPipeline::isUnionWith); + } + + /** + * @return {@literal true} if the pipeline does not contain any stages. 
+ * @since 3.1 + */ + public boolean isEmpty() { + return pipeline.isEmpty(); + } + + private boolean containsOperation(Predicate predicate) { + + if (isEmpty()) { + return false; + } + + for (AggregationOperation element : pipeline) { + if (predicate.test(element)) { + return true; + } + } + + return false; + } + + private boolean isLast(AggregationOperation aggregationOperation) { + return pipeline.indexOf(aggregationOperation) == pipeline.size() - 1; + } + + private static boolean isUnionWith(AggregationOperation operator) { + return operator instanceof UnionWithOperation || operator.getOperator().equals("$unionWith"); + } + + private static boolean isMerge(AggregationOperation operator) { + return operator instanceof MergeOperation || operator.getOperator().equals("$merge"); + } + + private static boolean isOut(AggregationOperation operator) { + return operator instanceof OutOperation || operator.getOperator().equals("$out"); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java index dcd1a62a29..438eb9e49f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -48,8 +48,8 @@ public class AggregationResults implements Iterable { */ public AggregationResults(List mappedResults, Document rawResults) { - Assert.notNull(mappedResults, "List of mapped results must not be null!"); - Assert.notNull(rawResults, "Raw results must not be null!"); + Assert.notNull(mappedResults, "List of mapped results must not be null"); + Assert.notNull(rawResults, "Raw results must not be null"); this.mappedResults = Collections.unmodifiableList(mappedResults); this.rawResults = rawResults; @@ -59,7 +59,7 @@ public AggregationResults(List mappedResults, Document rawResults) { /** * Returns the aggregation results. * - * @return + * @return the list of already mapped results or an empty one if none found. */ public List getMappedResults() { return mappedResults; @@ -68,19 +68,15 @@ public List getMappedResults() { /** * Returns the unique mapped result. Assumes no result or exactly one. * - * @return + * @return the single already mapped result object or raise an error if more than one found. * @throws IllegalArgumentException in case more than one result is available. */ @Nullable public T getUniqueMappedResult() { - Assert.isTrue(mappedResults.size() < 2, "Expected unique result or null, but got more than one!"); + Assert.isTrue(mappedResults.size() < 2, "Expected unique result or null, but got more than one"); return mappedResults.size() == 1 ? mappedResults.get(0) : null; } - /* - * (non-Javadoc) - * @see java.lang.Iterable#iterator() - */ public Iterator iterator() { return mappedResults.iterator(); } @@ -88,7 +84,7 @@ public Iterator iterator() { /** * Returns the server that has been used to perform the aggregation. 
* - * @return + * @return can be {@literal null}. */ @Nullable public String getServerUsed() { @@ -98,7 +94,7 @@ public String getServerUsed() { /** * Returns the raw result that was returned by the server. * - * @return + * @return the raw response. * @since 1.6 */ public Document getRawResults() { @@ -109,6 +105,6 @@ public Document getRawResults() { private String parseServerUsed() { Object object = rawResults.get("serverUsed"); - return object instanceof String ? (String) object : null; + return object instanceof String stringValue ? stringValue : null; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java index c1803cc484..1626d672bc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,19 +20,19 @@ /** * An {@link AggregationExpression} that renders a MongoDB Aggregation Framework expression from the AST of a - * SpEL + * SpEL * expression.
          *
          * Samples:
          - * *
          + * 
            * // { $and: [ { $gt: [ "$qty", 100 ] }, { $lt: [ "$qty", 250 ] } ] }
            * expressionOf("qty > 100 && qty < 250);
            *
            * // { $cond : { if : { $gte : [ "$a", 42 ]}, then : "answer", else : "no-answer" } }
            * expressionOf("cond(a >= 42, 'answer', 'no-answer')");
          - * 
          * + * * * @author Christoph Strobl * @author Mark Paluch @@ -56,17 +56,14 @@ private AggregationSpELExpression(String rawExpression, Object[] parameters) { * * @param expressionString must not be {@literal null}. * @param parameters can be empty. - * @return + * @return new instance of {@link AggregationSpELExpression}. */ public static AggregationSpELExpression expressionOf(String expressionString, Object... parameters) { - Assert.notNull(expressionString, "ExpressionString must not be null!"); + Assert.notNull(expressionString, "ExpressionString must not be null"); return new AggregationSpELExpression(expressionString, parameters); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return (Document) TRANSFORMER.transform(rawExpression, context, parameters); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java new file mode 100644 index 0000000000..15d700309e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java @@ -0,0 +1,300 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.StringJoiner; +import java.util.stream.Collectors; + +import org.bson.Document; + +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.SerializationUtils; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Abstraction for {@code db.collection.update()} using an aggregation pipeline. Aggregation pipeline updates use a more + * expressive update statement expressing conditional updates based on current field values or updating one field using + * the value of another field(s). + * + *
          + * AggregationUpdate update = AggregationUpdate.update().set("average")
          + * 		.toValue(ArithmeticOperators.valueOf("tests").avg()).set("grade")
          + * 		.toValue(ConditionalOperators
          + * 				.switchCases(CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(90)).then("A"),
          + * 						CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(80)).then("B"),
          + * 						CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(70)).then("C"),
          + * 						CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(60)).then("D"))
          + * 				.defaultTo("F"));
          + * 
          + * + * The above sample is equivalent to the JSON update statement: + * + *
          + * db.collection.update(
          + *    { },
          + *    [
          + *      { $set: { average : { $avg: "$tests" } } },
          + *      { $set: { grade: { $switch: {
          + *                            branches: [
          + *                                { case: { $gte: [ "$average", 90 ] }, then: "A" },
          + *                                { case: { $gte: [ "$average", 80 ] }, then: "B" },
          + *                                { case: { $gte: [ "$average", 70 ] }, then: "C" },
          + *                                { case: { $gte: [ "$average", 60 ] }, then: "D" }
          + *                            ],
          + *                            default: "F"
          + *      } } } }
          + *    ],
          + *    { multi: true }
          + * )
          + * 
          + * + * @author Christoph Strobl + * @author Mark Paluch + * @see MongoDB + * Reference Documentation + * @since 3.0 + */ +public class AggregationUpdate extends Aggregation implements UpdateDefinition { + + private boolean isolated = false; + private final Set keysTouched = new HashSet<>(); + + /** + * Create new {@link AggregationUpdate}. + */ + protected AggregationUpdate() { + this(new ArrayList<>()); + } + + /** + * Create new {@link AggregationUpdate} with the given aggregation pipeline to apply. + * + * @param pipeline must not be {@literal null}. + */ + protected AggregationUpdate(List pipeline) { + + super(pipeline); + + for (AggregationOperation operation : pipeline) { + if (operation instanceof FieldsExposingAggregationOperation exposingAggregationOperation) { + exposingAggregationOperation.getFields().forEach(it -> keysTouched.add(it.getName())); + } + } + } + + /** + * Start defining the update pipeline to execute. + * + * @return new instance of {@link AggregationUpdate}. + */ + public static AggregationUpdate update() { + return new AggregationUpdate(); + } + + /** + * Create a new AggregationUpdate from the given {@link AggregationOperation}s. + * + * @return new instance of {@link AggregationUpdate}. + */ + public static AggregationUpdate from(List pipeline) { + return new AggregationUpdate(pipeline); + } + + /** + * Adds new fields to documents. {@code $set} outputs documents that contain all existing fields from the input + * documents and newly added fields. + * + * @param setOperation must not be {@literal null}. + * @return this. + * @see $set Aggregation Reference + */ + public AggregationUpdate set(SetOperation setOperation) { + + Assert.notNull(setOperation, "SetOperation must not be null"); + + setOperation.getFields().forEach(it -> { + keysTouched.add(it.getName()); + }); + pipeline.add(setOperation); + return this; + } + + /** + * {@code $unset} removes/excludes fields from documents. 
+ * + * @param unsetOperation must not be {@literal null}. + * @return this. + * @see $unset Aggregation + * Reference + */ + public AggregationUpdate unset(UnsetOperation unsetOperation) { + + Assert.notNull(unsetOperation, "UnsetOperation must not be null"); + + pipeline.add(unsetOperation); + keysTouched.addAll(unsetOperation.removedFieldNames()); + return this; + } + + /** + * {@code $replaceWith} replaces the input document with the specified document. The operation replaces all existing + * fields in the input document, including the _id field. + * + * @param replaceWithOperation must not be {@literal null}. + * @return this. + * @see $replaceWith Aggregation + * Reference + */ + public AggregationUpdate replaceWith(ReplaceWithOperation replaceWithOperation) { + + Assert.notNull(replaceWithOperation, "ReplaceWithOperation must not be null"); + pipeline.add(replaceWithOperation); + return this; + } + + /** + * {@code $replaceWith} replaces the input document with the value. + * + * @param value must not be {@literal null}. + * @return this. + */ + public AggregationUpdate replaceWith(Object value) { + + Assert.notNull(value, "Value must not be null"); + return replaceWith(ReplaceWithOperation.replaceWithValue(value)); + } + + /** + * Fluent API variant for {@code $set} adding a single {@link SetOperation pipeline operation} every time. To update + * multiple fields within one {@link SetOperation} use {@link #set(SetOperation)}. + * + * @param key must not be {@literal null}. + * @return new instance of {@link SetValueAppender}. 
+ * @see #set(SetOperation) + */ + public SetValueAppender set(String key) { + + Assert.notNull(key, "Key must not be null"); + + return new SetValueAppender() { + + @Override + public AggregationUpdate toValue(@Nullable Object value) { + return set(SetOperation.builder().set(key).toValue(value)); + } + + @Override + public AggregationUpdate toValueOf(Object value) { + + Assert.notNull(value, "Value must not be null"); + return set(SetOperation.builder().set(key).toValueOf(value)); + } + }; + } + + /** + * Short for {@link #unset(UnsetOperation)}. + * + * @param keys the fields to remove. + * @return this. + */ + public AggregationUpdate unset(String... keys) { + + Assert.notNull(keys, "Keys must not be null"); + Assert.noNullElements(keys, "Keys must not contain null elements"); + + return unset(new UnsetOperation(Arrays.stream(keys).map(Fields::field).collect(Collectors.toList()))); + } + + /** + * Prevents a write operation that affects multiple documents from yielding to other reads or writes + * once the first document is written.
          + * Use with {@link org.springframework.data.mongodb.core.MongoOperations#updateMulti(Query, UpdateDefinition, Class)}. + * + * @return never {@literal null}. + */ + public AggregationUpdate isolated() { + + isolated = true; + return this; + } + + @Override + public Boolean isIsolated() { + return isolated; + } + + @Override + public Document getUpdateObject() { + return new Document("", toPipeline(Aggregation.DEFAULT_CONTEXT)); + } + + @Override + public boolean modifies(String key) { + return keysTouched.contains(key); + } + + @Override + public void inc(String key) { + set(new SetOperation(key, ArithmeticOperators.valueOf(key).add(1))); + } + + @Override + public List getArrayFilters() { + return Collections.emptyList(); + } + + @Override + public String toString() { + + StringJoiner joiner = new StringJoiner(",\n", "[\n", "\n]"); + toPipeline(Aggregation.DEFAULT_CONTEXT).stream().map(SerializationUtils::serializeToJsonSafely) + .forEach(joiner::add); + return joiner.toString(); + } + + /** + * Fluent API AggregationUpdate builder. + * + * @author Christoph Strobl + */ + public interface SetValueAppender { + + /** + * Define the target value as is. + * + * @param value can be {@literal null}. + * @return never {@literal null}. + */ + AggregationUpdate toValue(@Nullable Object value); + + /** + * Define the target value as value, an {@link AggregationExpression} or a {@link Field} reference. + * + * @param value can be {@literal null}. + * @return never {@literal null}. 
+ */ + AggregationUpdate toValueOf(Object value); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java index 59927eb50b..e84f7ed1b0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -32,16 +32,16 @@ interface AggregationUtils { * Converts the given {@link Range} into an array of values. * * @param range must not be {@literal null}. - * @return + * @return never {@literal null}. 
*/ - public static List toRangeValues(Range range) { + static List toRangeValues(Range range) { - Assert.notNull(range, "Range must not be null!"); + Assert.notNull(range, "Range must not be null"); List result = new ArrayList(2); result.add(range.getLowerBound().getValue() - .orElseThrow(() -> new IllegalArgumentException("Lower bound of range must be bounded!"))); - range.getUpperBound().getValue().ifPresent(it -> result.add(it)); + .orElseThrow(() -> new IllegalArgumentException("Lower bound of range must be bounded"))); + range.getUpperBound().getValue().ifPresent(result::add); return result; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationVariable.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationVariable.java new file mode 100644 index 0000000000..ed79202345 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationVariable.java @@ -0,0 +1,133 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * A special field that points to a variable {@code $$} expression. 
+ * + * @author Christoph Strobl + * @since 4.1.3 + */ +public interface AggregationVariable extends Field { + + String PREFIX = "$$"; + + /** + * @return {@literal true} if the fields {@link #getName() name} does not match the defined {@link #getTarget() + * target}. + */ + @Override + default boolean isAliased() { + return !ObjectUtils.nullSafeEquals(getName(), getTarget()); + } + + @Override + default String getName() { + return getTarget(); + } + + @Override + default boolean isInternal() { + return false; + } + + /** + * Create a new {@link AggregationVariable} for the given name. + *

          + * Variables start with {@code $$}. If not, the given value gets prefixed with {@code $$}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link AggregationVariable}. + * @throws IllegalArgumentException if given value is {@literal null}. + */ + static AggregationVariable variable(String value) { + + Assert.notNull(value, "Value must not be null"); + return new AggregationVariable() { + + private final String val = AggregationVariable.prefixVariable(value); + + @Override + public String getTarget() { + return val; + } + }; + } + + /** + * Create a new {@link #isInternal() local} {@link AggregationVariable} for the given name. + *

          + * Variables start with {@code $$}. If not, the given value gets prefixed with {@code $$}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link AggregationVariable}. + * @throws IllegalArgumentException if given value is {@literal null}. + */ + static AggregationVariable localVariable(String value) { + + Assert.notNull(value, "Value must not be null"); + return new AggregationVariable() { + + private final String val = AggregationVariable.prefixVariable(value); + + @Override + public String getTarget() { + return val; + } + + @Override + public boolean isInternal() { + return true; + } + }; + } + + /** + * Check if the given field name reference may be variable. + * + * @param fieldRef can be {@literal null}. + * @return true if given value matches the variable identification pattern. + */ + static boolean isVariable(@Nullable String fieldRef) { + return fieldRef != null && fieldRef.stripLeading().matches("^\\$\\$\\w.*"); + } + + /** + * Check if the given field may be variable. + * + * @param field can be {@literal null}. + * @return true if given {@link Field field} is an {@link AggregationVariable} or if its value is a + * {@link #isVariable(String) variable}. + */ + static boolean isVariable(Field field) { + + if (field instanceof AggregationVariable) { + return true; + } + return isVariable(field.getTarget()); + } + + private static String prefixVariable(String variable) { + + var trimmed = variable.stripLeading(); + return trimmed.startsWith(PREFIX) ? 
trimmed : (PREFIX + trimmed); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 54f3c430b0..e2c31c6346 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,19 +17,33 @@ import java.util.Collections; import java.util.List; +import java.util.Locale; +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Avg; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovariancePop; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovarianceSamp; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Max; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Median; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Min; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Percentile; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevPop; import 
org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevSamp; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Sum; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnit; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnits; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; /** * Gateway to {@literal Arithmetic} aggregation operations that perform math operations on numbers. * * @author Christoph Strobl + * @author Mark Paluch + * @author Mushtaq Ahmed + * @author Julia Lee * @since 1.10 */ public class ArithmeticOperators { @@ -38,7 +52,7 @@ public class ArithmeticOperators { * Take the field referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ArithmeticOperatorFactory}. */ public static ArithmeticOperatorFactory valueOf(String fieldReference) { return new ArithmeticOperatorFactory(fieldReference); @@ -48,12 +62,23 @@ public static ArithmeticOperatorFactory valueOf(String fieldReference) { * Take the value resulting from the given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ArithmeticOperatorFactory}. */ public static ArithmeticOperatorFactory valueOf(AggregationExpression expression) { return new ArithmeticOperatorFactory(expression); } + /** + * Creates new {@link AggregationExpression} that returns a random float between {@code 0} and {@code 1} each time it + * is called. + * + * @return new instance of {@link Rand}. 
+ * @since 3.3 + */ + public static Rand rand() { + return new Rand(); + } + /** * @author Christoph Strobl */ @@ -69,7 +94,7 @@ public static class ArithmeticOperatorFactory { */ public ArithmeticOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; } @@ -81,7 +106,7 @@ public ArithmeticOperatorFactory(String fieldReference) { */ public ArithmeticOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; } @@ -89,7 +114,7 @@ public ArithmeticOperatorFactory(AggregationExpression expression) { /** * Creates new {@link AggregationExpression} that returns the absolute value of the associated number. * - * @return + * @return new instance of {@link Abs}. */ public Abs abs() { return usesFieldRef() ? Abs.absoluteValueOf(fieldReference) : Abs.absoluteValueOf(expression); @@ -100,11 +125,11 @@ public Abs abs() { * number. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Add}. */ public Add add(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createAdd().add(fieldReference); } @@ -113,11 +138,11 @@ public Add add(String fieldReference) { * {@link AggregationExpression} to the associated number. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Add}. 
*/ public Add add(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createAdd().add(expression); } @@ -125,11 +150,11 @@ public Add add(AggregationExpression expression) { * Creates new {@link AggregationExpression} that adds the given {@literal value} to the associated number. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Add}. */ public Add add(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createAdd().add(value); } @@ -139,24 +164,64 @@ private Add createAdd() { /** * Creates new {@link AggregationExpression} that returns the smallest integer greater than or equal to the - * assoicated number. + * associated number. * - * @return + * @return new instance of {@link Ceil}. */ public Ceil ceil() { return usesFieldRef() ? Ceil.ceilValueOf(fieldReference) : Ceil.ceilValueOf(expression); } /** - * Creates new {@link AggregationExpression} that ivides the associated number by number referenced via + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative() { + return derivative((String) null); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. 
+ * @since 3.3 + */ + public Derivative derivative(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return derivative(unit.name().toLowerCase(Locale.ROOT)); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@literal week, day, hour, minute, second, millisecond}) to apply can be + * {@literal null}. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative(@Nullable String unit) { + + Derivative derivative = usesFieldRef() ? Derivative.derivativeOf(fieldReference) + : Derivative.derivativeOf(expression); + return StringUtils.hasText(unit) ? derivative.unit(unit) : derivative; + } + + /** + * Creates new {@link AggregationExpression} that divides the associated number by number referenced via * {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Divide}. */ public Divide divideBy(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createDivide().divideBy(fieldReference); } @@ -165,23 +230,23 @@ public Divide divideBy(String fieldReference) { * {@literal expression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Divide}. */ public Divide divideBy(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createDivide().divideBy(expression); } /** * Creates new {@link AggregationExpression} that divides the associated number by given {@literal value}. * - * @param value - * @return + * @param value must not be {@literal null}. + * @return new instance of {@link Divide}. 
*/ public Divide divideBy(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createDivide().divideBy(value); } @@ -192,7 +257,7 @@ private Divide createDivide() { /** * Creates new {@link AggregationExpression} that raises Euler’s number (i.e. e ) on the associated number. * - * @return + * @return new instance of {@link Exp}. */ public Exp exp() { return usesFieldRef() ? Exp.expValueOf(fieldReference) : Exp.expValueOf(expression); @@ -202,17 +267,56 @@ public Exp exp() { * Creates new {@link AggregationExpression} that returns the largest integer less than or equal to the associated * number. * - * @return + * @return new instance of {@link Floor}. */ public Floor floor() { return usesFieldRef() ? Floor.floorValueOf(fieldReference) : Floor.floorValueOf(expression); } /** - * Creates new {@link AggregationExpression} that calculates the natural logarithm ln (i.e loge) of the assoicated + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @return new instance of {@link Integral}. + * @since 3.3 + */ + public Integral integral() { + return usesFieldRef() ? Integral.integralOf(fieldReference) : Integral.integralOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Integral integral(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return integral(unit.name().toLowerCase(Locale.ROOT)); + } + + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. 
+ * + * @param unit the unit of measure. + * @return new instance of {@link Integral}. + * @since 3.3 + */ + public Integral integral(String unit) { + + Assert.hasText(unit, "Unit must not be empty"); + + return integral().unit(unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the natural logarithm ln (i.e loge) of the associated * number. * - * @return + * @return new instance of {@link Ln}. */ public Ln ln() { return usesFieldRef() ? Ln.lnValueOf(fieldReference) : Ln.lnValueOf(expression); @@ -223,11 +327,11 @@ public Ln ln() { * referenced via {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Log}. */ public Log log(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createLog().log(fieldReference); } @@ -236,11 +340,11 @@ public Log log(String fieldReference) { * extracted by given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Log}. */ public Log log(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createLog().log(fieldReference); } @@ -249,11 +353,11 @@ public Log log(AggregationExpression expression) { * {@literal base}. * * @param base must not be {@literal null}. - * @return + * @return new instance of {@link Log}. */ public Log log(Number base) { - Assert.notNull(base, "Base must not be null!"); + Assert.notNull(base, "Base must not be null"); return createLog().log(base); } @@ -264,7 +368,7 @@ private Log createLog() { /** * Creates new {@link AggregationExpression} that calculates the log base 10 for the associated number. * - * @return + * @return new instance of {@link Log10}. */ public Log10 log10() { return usesFieldRef() ? 
Log10.log10ValueOf(fieldReference) : Log10.log10ValueOf(expression); @@ -275,11 +379,11 @@ public Log10 log10() { * remainder. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Mod}. */ public Mod mod(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createMod().mod(fieldReference); } @@ -288,11 +392,11 @@ public Mod mod(String fieldReference) { * remainder. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Mod}. */ public Mod mod(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createMod().mod(expression); } @@ -301,11 +405,11 @@ public Mod mod(AggregationExpression expression) { * remainder. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Mod}. */ public Mod mod(Number value) { - Assert.notNull(value, "Base must not be null!"); + Assert.notNull(value, "Base must not be null"); return createMod().mod(value); } @@ -317,11 +421,11 @@ private Mod createMod() { * Creates new {@link AggregationExpression} that multiplies the associated number with another. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Multiply}. */ public Multiply multiplyBy(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createMultiply().multiplyBy(fieldReference); } @@ -329,11 +433,11 @@ public Multiply multiplyBy(String fieldReference) { * Creates new {@link AggregationExpression} that multiplies the associated number with another. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Multiply}. 
*/ public Multiply multiplyBy(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createMultiply().multiplyBy(expression); } @@ -341,11 +445,11 @@ public Multiply multiplyBy(AggregationExpression expression) { * Creates new {@link AggregationExpression} that multiplies the associated number with another. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Multiply}. */ public Multiply multiplyBy(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createMultiply().multiplyBy(value); } @@ -357,11 +461,11 @@ private Multiply createMultiply() { * Creates new {@link AggregationExpression} that raises the associated number to the specified exponent. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Pow}. */ public Pow pow(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createPow().pow(fieldReference); } @@ -369,11 +473,11 @@ public Pow pow(String fieldReference) { * Creates new {@link AggregationExpression} that raises the associated number to the specified exponent. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Pow}. */ public Pow pow(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createPow().pow(expression); } @@ -381,11 +485,11 @@ public Pow pow(AggregationExpression expression) { * Creates new {@link AggregationExpression} that raises the associated number to the specified exponent. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Pow}. 
*/ public Pow pow(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createPow().pow(value); } @@ -396,7 +500,7 @@ private Pow createPow() { /** * Creates new {@link AggregationExpression} that calculates the square root of the associated number. * - * @return + * @return new instance of {@link Sqrt}. */ public Sqrt sqrt() { return usesFieldRef() ? Sqrt.sqrtOf(fieldReference) : Sqrt.sqrtOf(expression); @@ -406,11 +510,11 @@ public Sqrt sqrt() { * Creates new {@link AggregationExpression} that subtracts value of given from the associated number. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Subtract}. */ public Subtract subtract(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createSubtract().subtract(fieldReference); } @@ -418,23 +522,23 @@ public Subtract subtract(String fieldReference) { * Creates new {@link AggregationExpression} that subtracts value of given from the associated number. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Subtract}. */ public Subtract subtract(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createSubtract().subtract(expression); } /** * Creates new {@link AggregationExpression} that subtracts value from the associated number. * - * @param value - * @return + * @param value must not be {@literal null}. + * @return new instance of {@link Subtract}. 
*/ public Subtract subtract(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createSubtract().subtract(value); } @@ -445,7 +549,7 @@ private Subtract createSubtract() { /** * Creates new {@link AggregationExpression} that truncates a number to its integer. * - * @return + * @return new instance of {@link Trunc}. */ public Trunc trunc() { return usesFieldRef() ? Trunc.truncValueOf(fieldReference) : Trunc.truncValueOf(expression); @@ -454,7 +558,7 @@ public Trunc trunc() { /** * Creates new {@link AggregationExpression} that calculates and returns the sum of numeric values. * - * @return + * @return new instance of {@link Sum}. */ public Sum sum() { return usesFieldRef() ? AccumulatorOperators.Sum.sumOf(fieldReference) @@ -464,7 +568,7 @@ public Sum sum() { /** * Creates new {@link AggregationExpression} that returns the average value of the numeric values. * - * @return + * @return new instance of {@link Avg}. */ public Avg avg() { return usesFieldRef() ? AccumulatorOperators.Avg.avgOf(fieldReference) @@ -474,7 +578,7 @@ public Avg avg() { /** * Creates new {@link AggregationExpression} that returns the maximum value. * - * @return + * @return new instance of {@link Max}. */ public Max max() { return usesFieldRef() ? AccumulatorOperators.Max.maxOf(fieldReference) @@ -484,7 +588,7 @@ public Max max() { /** * Creates new {@link AggregationExpression} that returns the minimum value. * - * @return + * @return new instance of {@link Min}. */ public Min min() { return usesFieldRef() ? AccumulatorOperators.Min.minOf(fieldReference) @@ -494,7 +598,7 @@ public Min min() { /** * Creates new {@link AggregationExpression} that calculates the population standard deviation of the input values. * - * @return + * @return new instance of {@link StdDevPop}. */ public StdDevPop stdDevPop() { return usesFieldRef() ? 
AccumulatorOperators.StdDevPop.stdDevPopOf(fieldReference) @@ -504,13 +608,359 @@ public StdDevPop stdDevPop() { /** * Creates new {@link AggregationExpression} that calculates the sample standard deviation of the input values. * - * @return + * @return new instance of {@link StdDevSamp}. */ public StdDevSamp stdDevSamp() { return usesFieldRef() ? AccumulatorOperators.StdDevSamp.stdDevSampOf(fieldReference) : AccumulatorOperators.StdDevSamp.stdDevSampOf(expression); } + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the population covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(String fieldReference) { + return covariancePop().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the population covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(AggregationExpression expression) { + return covariancePop().and(expression); + } + + private CovariancePop covariancePop() { + return usesFieldRef() ? CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the sample covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. 
+ * @since 3.3 + */ + public CovarianceSamp covarianceSamp(String fieldReference) { + return covarianceSamp().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the sample covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(AggregationExpression expression) { + return covarianceSamp().and(expression); + } + + private CovarianceSamp covarianceSamp() { + return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference) + : CovarianceSamp.covarianceSampOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that rounds a number to a whole integer or to a specified decimal + * place. + * + * @return new instance of {@link Round}. + * @since 3.0 + */ + public Round round() { + return usesFieldRef() ? Round.roundValueOf(fieldReference) : Round.roundValueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that rounds a number to a specified decimal place. + * + * @return new instance of {@link Round}. + * @since 3.0 + */ + public Round roundToPlace(int place) { + return round().place(place); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sin sin() { + return sin(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value in the given + * {@link AngularUnit unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sin sin(AngularUnit unit) { + return usesFieldRef() ? 
Sin.sinOf(fieldReference, unit) : Sin.sinOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Sinh}. + * @since 3.3 + */ + public Sinh sinh() { + return sinh(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Sinh}. + * @since 3.3 + */ + public Sinh sinh(AngularUnit unit) { + return usesFieldRef() ? Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse sine of a numeric value. + * + * @return new instance of {@link ASin}. + * @since 3.3 + */ + public ASin asin() { + return usesFieldRef() ? ASin.asinOf(fieldReference) : ASin.asinOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a numeric value. + * + * @return new instance of {@link ASinh}. + * @since 3.3 + */ + public ASinh asinh() { + return usesFieldRef() ? ASinh.asinhOf(fieldReference) : ASinh.asinhOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Cos}. + * @since 3.3 + */ + public Cos cos() { + return cos(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value in the given + * {@link AngularUnit unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Cos}. + * @since 3.3 + */ + public Cos cos(AngularUnit unit) { + return usesFieldRef() ? 
Cos.cosOf(fieldReference, unit) : Cos.cosOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Cosh}. + * @since 3.3 + */ + public Cosh cosh() { + return cosh(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Cosh}. + * @since 3.3 + */ + public Cosh cosh(AngularUnit unit) { + return usesFieldRef() ? Cosh.coshOf(fieldReference, unit) : Cosh.coshOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse cosine of a numeric value. + * + * @return new instance of {@link ACos}. + * @since 3.4 + */ + public ACos acos() { + return usesFieldRef() ? ACos.acosOf(fieldReference) : ACos.acosOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a numeric value. + * + * @return new instance of {@link ACosh}. + * @since 3.4 + */ + public ACosh acosh() { + return usesFieldRef() ? ACosh.acoshOf(fieldReference) : ACosh.acoshOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Tan}. + * @since 3.3 + */ + public Tan tan() { + return tan(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of a numeric value. + * + * @return new instance of {@link ATan}. + * @since 3.3 + */ + public ATan atan() { + return usesFieldRef() ? 
ATan.atanOf(fieldReference) : ATan.atanOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by + * the given numeric value in the argument. + * + * @param value the numeric value + * @return new instance of {@link ATan2}. + * @since 3.3 + */ + public ATan2 atan2(Number value) { + + Assert.notNull(value, "Value must not be null"); + return createATan2().atan2of(value); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by + * the given field reference in the argument. + * + * @param fieldReference the numeric value + * @return new instance of {@link ATan2}. + * @since 3.3 + */ + public ATan2 atan2(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createATan2().atan2of(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by + * the given {@link AggregationExpression} in the argument. + * + * @param expression the expression evaluating to a numeric value + * @return new instance of {@link ATan2}. + * @since 3.3 + */ + public ATan2 atan2(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createATan2().atan2of(expression); + } + + private ATan2 createATan2() { + + return usesFieldRef() ? ATan2.valueOf(fieldReference) : ATan2.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a numeric value. + * + * @return new instance of {@link ATanh}. + * @since 3.3 + */ + public ATanh atanh() { + return usesFieldRef() ? ATanh.atanhOf(fieldReference) : ATanh.atanhOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value in the given + * {@link AngularUnit unit}. 
+ * + * @param unit the unit of measure. + * @return new instance of {@link Tan}. + * @since 3.3 + */ + public Tan tan(AngularUnit unit) { + return usesFieldRef() ? Tan.tanOf(fieldReference, unit) : Tan.tanOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Tan}. + * @since 3.3 + */ + public Tanh tanh() { + return tanh(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Tanh}. + * @since 3.3 + */ + public Tanh tanh(AngularUnit unit) { + return usesFieldRef() ? Tanh.tanhOf(fieldReference, unit) : Tanh.tanhOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the requested percentile(s) of the + * numeric value. + * + * @return new instance of {@link Percentile}. + * @param percentages must not be {@literal null}. + * @since 4.2 + */ + public Percentile percentile(Double... percentages) { + Percentile percentile = usesFieldRef() ? AccumulatorOperators.Percentile.percentileOf(fieldReference) + : AccumulatorOperators.Percentile.percentileOf(expression); + return percentile.percentages(percentages); + } + + /** + * Creates new {@link AggregationExpression} that calculates the requested percentile(s) of the + * numeric value. + * + * @return new instance of {@link Median}. + * @since 4.2 + */ + public Median median() { + return usesFieldRef() ? AccumulatorOperators.Median.medianOf(fieldReference) + : AccumulatorOperators.Median.medianOf(expression); + } + private boolean usesFieldRef() { return fieldReference != null; } @@ -536,11 +986,11 @@ protected String getMongoMethod() { * Creates new {@link Abs}. * * @param fieldReference must not be {@literal null}. 
- * @return + * @return new instance of {@link Abs}. */ public static Abs absoluteValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Abs(Fields.field(fieldReference)); } @@ -548,11 +998,11 @@ public static Abs absoluteValueOf(String fieldReference) { * Creates new {@link Abs}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Abs}. */ public static Abs absoluteValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Abs(expression); } @@ -560,11 +1010,11 @@ public static Abs absoluteValueOf(AggregationExpression expression) { * Creates new {@link Abs}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Abs}. */ public static Abs absoluteValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Abs(value); } } @@ -589,11 +1039,11 @@ protected String getMongoMethod() { * Creates new {@link Add}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Add}. */ public static Add valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Add(asFields(fieldReference)); } @@ -601,11 +1051,11 @@ public static Add valueOf(String fieldReference) { * Creates new {@link Add}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Add}. 
*/ public static Add valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Add(Collections.singletonList(expression)); } @@ -613,26 +1063,44 @@ public static Add valueOf(AggregationExpression expression) { * Creates new {@link Add}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Add}. */ public static Add valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Add(Collections.singletonList(value)); } + /** + * Add the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Add}. + */ public Add add(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Add(append(Fields.field(fieldReference))); } + /** + * Add the evaluation result of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Add}. + */ public Add add(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Add(append(expression)); } + /** + * Add the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Add}. + */ public Add add(Number value) { return new Add(append(value)); } @@ -658,11 +1126,11 @@ protected String getMongoMethod() { * Creates new {@link Ceil}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Ceil}. 
*/ public static Ceil ceilValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Ceil(Fields.field(fieldReference)); } @@ -670,11 +1138,11 @@ public static Ceil ceilValueOf(String fieldReference) { * Creates new {@link Ceil}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Ceil}. */ public static Ceil ceilValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Ceil(expression); } @@ -682,11 +1150,11 @@ public static Ceil ceilValueOf(AggregationExpression expression) { * Creates new {@link Ceil}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Ceil}. */ public static Ceil ceilValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Ceil(value); } } @@ -711,11 +1179,11 @@ protected String getMongoMethod() { * Creates new {@link Divide}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Divide}. */ public static Divide valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Divide(asFields(fieldReference)); } @@ -723,11 +1191,11 @@ public static Divide valueOf(String fieldReference) { * Creates new {@link Divide}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Divide}. 
*/ public static Divide valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Divide(Collections.singletonList(expression)); } @@ -735,26 +1203,44 @@ public static Divide valueOf(AggregationExpression expression) { * Creates new {@link Divide}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Divide}. */ public static Divide valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Divide(Collections.singletonList(value)); } + /** + * Divide by the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Divide}. + */ public Divide divideBy(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Divide(append(Fields.field(fieldReference))); } + /** + * Divide by the evaluation results of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Divide}. + */ public Divide divideBy(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Divide(append(expression)); } + /** + * Divide by the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Divide}. + */ public Divide divideBy(Number value) { return new Divide(append(value)); } @@ -780,11 +1266,11 @@ protected String getMongoMethod() { * Creates new {@link Exp}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Exp}. 
*/ public static Exp expValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Exp(Fields.field(fieldReference)); } @@ -792,11 +1278,11 @@ public static Exp expValueOf(String fieldReference) { * Creates new {@link Exp}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Exp}. */ public static Exp expValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Exp(expression); } @@ -804,11 +1290,11 @@ public static Exp expValueOf(AggregationExpression expression) { * Creates new {@link Exp}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Exp}. */ public static Exp expValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Exp(value); } } @@ -833,11 +1319,11 @@ protected String getMongoMethod() { * Creates new {@link Floor}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Floor}. */ public static Floor floorValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Floor(Fields.field(fieldReference)); } @@ -845,11 +1331,11 @@ public static Floor floorValueOf(String fieldReference) { * Creates new {@link Floor}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Floor}. 
*/ public static Floor floorValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Floor(expression); } @@ -857,11 +1343,11 @@ public static Floor floorValueOf(AggregationExpression expression) { * Creates new {@link Floor}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Floor}. */ public static Floor floorValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Floor(value); } } @@ -886,11 +1372,11 @@ protected String getMongoMethod() { * Creates new {@link Ln}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Ln}. */ public static Ln lnValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Ln(Fields.field(fieldReference)); } @@ -898,11 +1384,11 @@ public static Ln lnValueOf(String fieldReference) { * Creates new {@link Ln}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Ln}. */ public static Ln lnValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Ln(expression); } @@ -910,11 +1396,11 @@ public static Ln lnValueOf(AggregationExpression expression) { * Creates new {@link Ln}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Ln}. */ public static Ln lnValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Ln(value); } } @@ -939,11 +1425,11 @@ protected String getMongoMethod() { * Creates new {@link Min}. 
* * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Log}. */ public static Log valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Log(asFields(fieldReference)); } @@ -951,11 +1437,11 @@ public static Log valueOf(String fieldReference) { * Creates new {@link Log}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Log}. */ public static Log valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Log(Collections.singletonList(expression)); } @@ -967,22 +1453,40 @@ public static Log valueOf(AggregationExpression expression) { */ public static Log valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Log(Collections.singletonList(value)); } + /** + * Use the value stored at the given field as log base. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Log}. + */ public Log log(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Log(append(Fields.field(fieldReference))); } + /** + * Use the evaluated value of the given {@link AggregationExpression} as log base. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Log}. + */ public Log log(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Log(append(expression)); } + /** + * Use the given value as log base. + * + * @param base must not be {@literal null}. 
+ * @return new instance of {@link Log}. + */ public Log log(Number base) { return new Log(append(base)); } @@ -1008,11 +1512,11 @@ protected String getMongoMethod() { * Creates new {@link Log10}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Log10}. */ public static Log10 log10ValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Log10(Fields.field(fieldReference)); } @@ -1020,11 +1524,11 @@ public static Log10 log10ValueOf(String fieldReference) { * Creates new {@link Log10}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Log10}. */ public static Log10 log10ValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Log10(expression); } @@ -1032,11 +1536,11 @@ public static Log10 log10ValueOf(AggregationExpression expression) { * Creates new {@link Log10}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Log10}. */ public static Log10 log10ValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Log10(value); } } @@ -1061,11 +1565,11 @@ protected String getMongoMethod() { * Creates new {@link Mod}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Mod}. */ public static Mod valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Mod(asFields(fieldReference)); } @@ -1073,11 +1577,11 @@ public static Mod valueOf(String fieldReference) { * Creates new {@link Mod}. * * @param expression must not be {@literal null}. 
- * @return + * @return new instance of {@link Mod}. */ public static Mod valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Mod(Collections.singletonList(expression)); } @@ -1085,26 +1589,44 @@ public static Mod valueOf(AggregationExpression expression) { * Creates new {@link Mod}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Mod}. */ public static Mod valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Mod(Collections.singletonList(value)); } + /** + * Use the value stored at the given field as mod base. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Mod}. + */ public Mod mod(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Mod(append(Fields.field(fieldReference))); } + /** + * Use evaluated value of the given {@link AggregationExpression} as mod base. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Mod}. + */ public Mod mod(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Mod(append(expression)); } + /** + * Use the given value as mod base. + * + * @param base must not be {@literal null}. + * @return new instance of {@link Mod}. + */ public Mod mod(Number base) { return new Mod(append(base)); } @@ -1130,11 +1652,11 @@ protected String getMongoMethod() { * Creates new {@link Multiply}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Multiply}. 
*/ public static Multiply valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Multiply(asFields(fieldReference)); } @@ -1142,11 +1664,11 @@ public static Multiply valueOf(String fieldReference) { * Creates new {@link Multiply}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Multiply}. */ public static Multiply valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Multiply(Collections.singletonList(expression)); } @@ -1154,26 +1676,44 @@ public static Multiply valueOf(AggregationExpression expression) { * Creates new {@link Multiply}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Multiply}. */ public static Multiply valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Multiply(Collections.singletonList(value)); } + /** + * Multiply by the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Multiply}. + */ public Multiply multiplyBy(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Multiply(append(Fields.field(fieldReference))); } + /** + * Multiply by the evaluated value of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Multiply}. 
+ */ public Multiply multiplyBy(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Multiply(append(expression)); } + /** + * Multiply by the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Multiply}. + */ public Multiply multiplyBy(Number value) { return new Multiply(append(value)); } @@ -1203,7 +1743,7 @@ protected String getMongoMethod() { */ public static Pow valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Pow(asFields(fieldReference)); } @@ -1215,7 +1755,7 @@ public static Pow valueOf(String fieldReference) { */ public static Pow valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Pow(Collections.singletonList(expression)); } @@ -1227,22 +1767,40 @@ public static Pow valueOf(AggregationExpression expression) { */ public static Pow valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Pow(Collections.singletonList(value)); } + /** + * Pow by the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Pow}. + */ public Pow pow(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Pow(append(Fields.field(fieldReference))); } + /** + * Pow by the evaluated value of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Pow}. 
+ */ public Pow pow(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Pow(append(expression)); } + /** + * Pow by the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Pow}. + */ public Pow pow(Number value) { return new Pow(append(value)); } @@ -1268,11 +1826,11 @@ protected String getMongoMethod() { * Creates new {@link Sqrt}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Sqrt}. */ public static Sqrt sqrtOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Sqrt(Fields.field(fieldReference)); } @@ -1280,11 +1838,11 @@ public static Sqrt sqrtOf(String fieldReference) { * Creates new {@link Sqrt}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Sqrt}. */ public static Sqrt sqrtOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Sqrt(expression); } @@ -1292,11 +1850,11 @@ public static Sqrt sqrtOf(AggregationExpression expression) { * Creates new {@link Sqrt}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Sqrt}. */ public static Sqrt sqrtOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Sqrt(value); } } @@ -1321,11 +1879,11 @@ protected String getMongoMethod() { * Creates new {@link Subtract}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Subtract}. 
*/ public static Subtract valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Subtract(asFields(fieldReference)); } @@ -1333,11 +1891,11 @@ public static Subtract valueOf(String fieldReference) { * Creates new {@link Subtract}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Subtract}. */ public static Subtract valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Subtract(Collections.singletonList(expression)); } @@ -1345,26 +1903,44 @@ public static Subtract valueOf(AggregationExpression expression) { * Creates new {@link Subtract}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Subtract}. */ public static Subtract valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Subtract(Collections.singletonList(value)); } + /** + * Subtract the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Pow}. + */ public Subtract subtract(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Subtract(append(Fields.field(fieldReference))); } + /** + * Subtract the evaluated value of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Pow}. 
+ */ public Subtract subtract(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Subtract(append(expression)); } + /** + * Subtract the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Pow}. + */ public Subtract subtract(Number value) { return new Subtract(append(value)); } @@ -1390,11 +1966,11 @@ protected String getMongoMethod() { * Creates new {@link Trunc}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Trunc}. */ public static Trunc truncValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Trunc(Fields.field(fieldReference)); } @@ -1402,11 +1978,11 @@ public static Trunc truncValueOf(String fieldReference) { * Creates new {@link Trunc}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Trunc}. */ public static Trunc truncValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Trunc(expression); } @@ -1414,12 +1990,1232 @@ public static Trunc truncValueOf(AggregationExpression expression) { * Creates new {@link Trunc}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Trunc}. */ public static Trunc truncValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Trunc(value); } } + + /** + * {@link Round} rounds a number to a whole integer or to a specified decimal place. + *

+	 * <ul>
+	 * <li>If {@link Round#place(int)} resolves to a positive integer, {@code $round} rounds to the given decimal
+	 * places.</li>
+	 * <li>If {@link Round#place(int)} resolves to a negative integer, {@code $round} rounds to the left of the
+	 * decimal.</li>
+	 * <li>If {@link Round#place(int)} resolves to a zero, {@code $round} rounds using the first digit to the right of the
+	 * decimal.</li>
+	 * </ul>
          + * + * @since 3.0 + */ + public static class Round extends AbstractAggregationExpression { + + private Round(Object value) { + super(value); + } + + /** + * Round the value of the field that resolves to an integer, double, decimal, or long. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Round}. + */ + public static Round roundValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Round(Collections.singletonList(Fields.field(fieldReference))); + } + + /** + * Round the outcome of the given expression hat resolves to an integer, double, decimal, or long. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Round}. + */ + public static Round roundValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Round(Collections.singletonList(expression)); + } + + /** + * Round the given numeric (integer, double, decimal, or long) value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Round}. + */ + public static Round round(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Round(Collections.singletonList(value)); + } + + /** + * The place to round to. Can be between -20 and 100, exclusive. + * + * @param place value between -20 and 100, exclusive. + * @return new instance of {@link Round}. + */ + public Round place(int place) { + return new Round(append(place)); + } + + /** + * The place to round to defined by an expression that resolves to an integer between -20 and 100, exclusive. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Round}. 
+ */ + public Round placeOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Round(append(expression)); + } + + /** + * The place to round to defined by via a field reference that resolves to an integer between -20 and 100, + * exclusive. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Round}. + */ + public Round placeOf(String fieldReference) { + + Assert.notNull(fieldReference, "fieldReference must not be null"); + return new Round(append(Fields.field(fieldReference))); + } + + @Override + protected String getMongoMethod() { + return "$round"; + } + } + + /** + * Value object to represent an {@link AggregationExpression expression} that calculates the average rate of change + * within the specified window. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Derivative extends AbstractAggregationExpression { + + private Derivative(Object value) { + super(value); + } + + /** + * Create a new instance of {@link Derivative} for the value stored at the given field holding a numeric value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Derivative}. + */ + public static Derivative derivativeOf(String fieldReference) { + return new Derivative(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new instance of {@link Derivative} for the value provided by the given expression that resolves to a + * numeric value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Derivative}. 
+ */ + public static Derivative derivativeOf(AggregationExpression expression) { + return new Derivative(Collections.singletonMap("input", expression)); + } + + public static Derivative derivativeOfValue(Number value) { + return new Derivative(Collections.singletonMap("input", value)); + } + + public Derivative unit(String unit) { + return new Derivative(append("unit", unit)); + } + + @Override + protected String getMongoMethod() { + return "$derivative"; + } + } + + /** + * Value object to represent an {@link AggregationExpression expression} that calculates the approximation for the + * mathematical integral value. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Integral extends AbstractAggregationExpression { + + private Integral(Object value) { + super(value); + } + + /** + * Create a new instance of {@link Integral} for the value stored at the given field holding a numeric value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Integral}. + */ + public static Integral integralOf(String fieldReference) { + return new Integral(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new instance of {@link Integral} for the value provided by the given expression that resolves to a + * numeric value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Integral}. + */ + public static Integral integralOf(AggregationExpression expression) { + return new Integral(Collections.singletonMap("input", expression)); + } + + /** + * Set the unit of measure. + * + * @param unit the unit of measure. + * @return new instance of {@link Integral}. + */ + public Integral unit(String unit) { + return new Integral(append("unit", unit)); + } + + @Override + protected String getMongoMethod() { + return "$integral"; + } + } + + /** + * The unit of measure for computations that operate upon angles. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ + public enum AngularUnit { + RADIANS, DEGREES + } + + /** + * An {@link AggregationExpression expression} that calculates the sine of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Sin extends AbstractAggregationExpression { + + private Sin(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularUnit#RADIANS radians}. + *
          + * Use {@code sinhOf("angle", DEGREES)} as shortcut for + * + *
+	 * <pre>
+	 * { $sin : { $degreesToRadians : "$angle" } }
+	 * </pre>
          + * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(String fieldReference) { + return sinOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(String fieldReference, AngularUnit unit) { + return sin(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(AggregationExpression expression) { + return sinOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(AggregationExpression expression, AngularUnit unit) { + return sin(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Sin}. 
+ */ + public static Sin sin(Object value) { + return sin(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sin(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Sin(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Sin(value); + } + + @Override + protected String getMongoMethod() { + return "$sin"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Sinh extends AbstractAggregationExpression { + + private Sinh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(String fieldReference) { + return sinhOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularUnit unit}. + *
          + * Use {@code sinhOf("angle", DEGREES)} as shortcut for + * + *
          +		 * { $sinh : { $degreesToRadians : "$angle" } }
          +		 * 
          + * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sinh}. + */ + public static Sinh sinhOf(String fieldReference, AngularUnit unit) { + return sinh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + *
          + * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. + * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Sinh}. + */ + public static Sinh sinhOf(AggregationExpression expression) { + return sinhOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sinh}. + */ + public static Sinh sinhOf(AggregationExpression expression, AngularUnit unit) { + return sinh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Sinh}. + */ + public static Sinh sinh(Object value) { + return sinh(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sinh}. 
+ */ + public static Sinh sinh(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Sinh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Sinh(value); + } + + @Override + protected String getMongoMethod() { + return "$sinh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse sine of a value. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ASin extends AbstractAggregationExpression { + + private ASin(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ASin(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + *
          + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(AggregationExpression expression) { + return new ASin(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(Number value) { + return new ASin(value); + } + + @Override + protected String getMongoMethod() { + return "$asin"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic sine of a value + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ASinh extends AbstractAggregationExpression { + + private ASinh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(String fieldReference) { + return new ASinh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + *
          + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(AggregationExpression expression) { + return new ASinh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(Object value) { + return new ASinh(value); + } + + @Override + protected String getMongoMethod() { + return "$asinh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the cosine of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Cos extends AbstractAggregationExpression { + + private Cos(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularUnit#RADIANS radians}. + *
          + * Use {@code cosOf("angle", DEGREES)} as shortcut for + * + *
          +		 * { $cos : { $degreesToRadians : "$angle" } }
          +		 * 
          + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(String fieldReference) { + return cosOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(String fieldReference, AngularUnit unit) { + return cos(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(AggregationExpression expression) { + return cosOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(AggregationExpression expression, AngularUnit unit) { + return cos(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Cos}. 
+ */ + public static Cos cos(Object value) { + return cos(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cos(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Cos(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Cos(value); + } + + @Override + protected String getMongoMethod() { + return "$cos"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Cosh extends AbstractAggregationExpression { + + private Cosh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(String fieldReference) { + return coshOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularUnit unit}. + *
          + * Use {@code coshOf("angle", DEGREES)} as shortcut for + * + *
          +		 * { $cosh : { $degreesToRadians : "$angle" } }
          +		 * 
          + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(String fieldReference, AngularUnit unit) { + return cosh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + *
          + * Use {@code coshOf("angle", DEGREES)} as shortcut for eg. + * {@code coshOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(AggregationExpression expression) { + return coshOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(AggregationExpression expression, AngularUnit unit) { + return cosh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh cosh(Object value) { + return cosh(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. 
+ */ + public static Cosh cosh(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Cosh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Cosh(value); + } + + @Override + protected String getMongoMethod() { + return "$cosh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse cosine of a value. + * + * @author Divya Srivastava + * @since 3.4 + */ + public static class ACos extends AbstractAggregationExpression { + + private ACos(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse cosine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ACos}. + */ + public static ACos acosOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ACos(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse cosine of a value. + *
          + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ACos}. + */ + public static ACos acosOf(AggregationExpression expression) { + return new ACos(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse cosine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ACos}. + */ + public static ACos acosOf(Number value) { + return new ACos(value); + } + + @Override + protected String getMongoMethod() { + return "$acos"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic cosine of a value. + * + * @author Divya Srivastava + * @since 3.4 + */ + public static class ACosh extends AbstractAggregationExpression { + + private ACosh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ACosh}. + */ + public static ACosh acoshOf(String fieldReference) { + return new ACosh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a value. + *
          + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ACosh}. + */ + public static ACosh acoshOf(AggregationExpression expression) { + return new ACosh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ACosh}. + */ + public static ACosh acoshOf(Object value) { + return new ACosh(value); + } + + @Override + protected String getMongoMethod() { + return "$acosh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the tangent of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Tan extends AbstractAggregationExpression { + + private Tan(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularUnit#RADIANS radians}. + *
          + * Use {@code tanOf("angle", DEGREES)} as shortcut for + * + *
          +		 * { $tan : { $degreesToRadians : "$angle" } }
          +		 * 
          + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(String fieldReference) { + return tanOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(String fieldReference, AngularUnit unit) { + return tan(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(AggregationExpression expression) { + return tanOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(AggregationExpression expression, AngularUnit unit) { + return tan(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Tan}. 
+ */ + public static Tan tan(Object value) { + return tan(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tan(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Tan(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Tan(value); + } + + @Override + protected String getMongoMethod() { + return "$tan"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse tangent of a value. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ATan extends AbstractAggregationExpression { + + private ATan(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ATan(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(AggregationExpression expression) { + return new ATan(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. 
 + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(Number value) { + return new ATan(value); + } + + @Override + protected String getMongoMethod() { + return "$atan"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse tangent of y / x, where y and x are the + * first and second values passed to the expression respectively. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ATan2 extends AbstractAggregationExpression { + + private ATan2(List value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ATan2}. + */ + public static ATan2 valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ATan2(asFields(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATan2}. + */ + public static ATan2 valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ATan2((Collections.singletonList(expression))); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of y / x, where y and x are + * the first and second values passed to the expression respectively. 
 + * + * @param fieldReference anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ATan2(append(Fields.field(fieldReference))); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param expression anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ATan2(append(expression)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param value of type {@link Number} + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(Number value) { + return new ATan2(append(value)); + } + + @Override + protected String getMongoMethod() { + return "$atan2"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Tanh extends AbstractAggregationExpression { + + private Tanh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Tanh}. 
+ */ + public static Tanh tanhOf(String fieldReference) { + return tanhOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularUnit unit}. + *
          + * Use {@code tanhOf("angle", DEGREES)} as shortcut for + * + *
          +		 * { $tanh : { $degreesToRadians : "$angle" } }
          +		 * 
          + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(String fieldReference, AngularUnit unit) { + return tanh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + *
          + * Use {@code tanhOf("angle", DEGREES)} as shortcut for eg. + * {@code tanhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(AggregationExpression expression) { + return tanhOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(AggregationExpression expression, AngularUnit unit) { + return tanh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanh(Object value) { + return tanh(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. 
+ */ + public static Tanh tanh(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Tanh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Tanh(value); + } + + @Override + protected String getMongoMethod() { + return "$tanh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic tangent of a value + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ATanh extends AbstractAggregationExpression { + + private ATanh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * hyperbolic tangent of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a + * numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhOf(String fieldReference) { + return new ATanh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a value. + *
          + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhOf(AggregationExpression expression) { + return new ATanh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * hyperbolic tangent of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression + * expression}, ...) that resolves to a numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhOf(Object value) { + return new ATanh(value); + } + + @Override + protected String getMongoMethod() { + return "$atanh"; + } + } + + /** + * {@link Rand} returns a floating value between 0 and 1. + * + * @author Mushtaq Ahmed + * @since 3.3 + */ + public static class Rand implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$rand", new Document()); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java index 81adc4035c..a8cb58d17c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,11 +17,14 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import org.bson.Document; import org.springframework.data.domain.Range; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.AsBuilder; import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce.PropertyExpression; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; @@ -33,6 +36,8 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Shashank Sharma + * @author Divya Srivastava * @since 1.0 */ public class ArrayOperators { @@ -41,7 +46,7 @@ public class ArrayOperators { * Take the array referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ArrayOperatorFactory}. */ public static ArrayOperatorFactory arrayOf(String fieldReference) { return new ArrayOperatorFactory(fieldReference); @@ -51,19 +56,31 @@ public static ArrayOperatorFactory arrayOf(String fieldReference) { * Take the array referenced resulting from the given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ArrayOperatorFactory}. */ public static ArrayOperatorFactory arrayOf(AggregationExpression expression) { return new ArrayOperatorFactory(expression); } + /** + * Take the given {@link Collection values} {@link AggregationExpression}. + * + * @param values must not be {@literal null}. 
+ * @return new instance of {@link ArrayOperatorFactory}. + * @since 2.2 + */ + public static ArrayOperatorFactory arrayOf(Collection values) { + return new ArrayOperatorFactory(values); + } + /** * @author Christoph Strobl */ public static class ArrayOperatorFactory { - private final String fieldReference; - private final AggregationExpression expression; + private final @Nullable String fieldReference; + private final @Nullable AggregationExpression expression; + private final @Nullable Collection values; /** * Creates new {@link ArrayOperatorFactory} for given {@literal fieldReference}. @@ -72,9 +89,10 @@ public static class ArrayOperatorFactory { */ public ArrayOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; + this.values = null; } /** @@ -84,17 +102,32 @@ public ArrayOperatorFactory(String fieldReference) { */ public ArrayOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; + this.values = null; + } + + /** + * Creates new {@link ArrayOperatorFactory} for given values. + * + * @param values must not be {@literal null}. + * @since 2.2 + */ + public ArrayOperatorFactory(Collection values) { + + Assert.notNull(values, "Values must not be null"); + this.fieldReference = null; + this.expression = null; + this.values = values; } /** * Creates new {@link AggregationExpression} that takes the associated array and returns the element at the * specified array {@literal position}. * - * @param position - * @return + * @param position the element index. + * @return new instance of {@link ArrayElemAt}. 
*/ public ArrayElemAt elementAt(int position) { return createArrayElemAt().elementAt(position); @@ -105,11 +138,11 @@ public ArrayElemAt elementAt(int position) { * resulting form the given {@literal expression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ArrayElemAt}. */ public ArrayElemAt elementAt(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createArrayElemAt().elementAt(expression); } @@ -118,16 +151,21 @@ public ArrayElemAt elementAt(AggregationExpression expression) { * defined by the referenced {@literal field}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ArrayElemAt}. */ public ArrayElemAt elementAt(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createArrayElemAt().elementAt(fieldReference); } private ArrayElemAt createArrayElemAt() { - return usesFieldRef() ? ArrayElemAt.arrayOf(fieldReference) : ArrayElemAt.arrayOf(expression); + + if (usesFieldRef()) { + return ArrayElemAt.arrayOf(fieldReference); + } + + return usesExpression() ? ArrayElemAt.arrayOf(expression) : ArrayElemAt.arrayOf(values); } /** @@ -135,11 +173,11 @@ private ArrayElemAt createArrayElemAt() { * {@literal arrayFieldReference} to it. * * @param arrayFieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ConcatArrays}. 
*/ public ConcatArrays concat(String arrayFieldReference) { - Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null!"); + Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null"); return createConcatArrays().concat(arrayFieldReference); } @@ -148,53 +186,81 @@ public ConcatArrays concat(String arrayFieldReference) { * the given {@literal expression} to it. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ConcatArrays}. */ public ConcatArrays concat(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createConcatArrays().concat(expression); } private ConcatArrays createConcatArrays() { - return usesFieldRef() ? ConcatArrays.arrayOf(fieldReference) : ConcatArrays.arrayOf(expression); + + if (usesFieldRef()) { + return ConcatArrays.arrayOf(fieldReference); + } + + return usesExpression() ? ConcatArrays.arrayOf(expression) : ConcatArrays.arrayOf(values); } /** * Creates new {@link AggregationExpression} that takes the associated array and selects a subset of the array to * return based on the specified condition. * - * @return + * @return new instance of {@link AsBuilder} to create a {@link Filter}. */ public AsBuilder filter() { - return Filter.filter(fieldReference); + + if (usesFieldRef()) { + return Filter.filter(fieldReference); + } + + if (usesExpression()) { + return Filter.filter(expression); + } + + Assert.state(values != null, "Values must not be null"); + return Filter.filter(new ArrayList<>(values)); } /** * Creates new {@link AggregationExpression} that takes the associated array and an check if its an array. * - * @return + * @return new instance of {@link IsArray}. */ public IsArray isArray() { + + Assert.state(values == null, "Does it make sense to call isArray on an array; Maybe just skip it"); + return usesFieldRef() ? 
IsArray.isArray(fieldReference) : IsArray.isArray(expression); } /** * Creates new {@link AggregationExpression} that takes the associated array and retrieves its length. * - * @return + * @return new instance of {@link Size}. */ public Size length() { - return usesFieldRef() ? Size.lengthOfArray(fieldReference) : Size.lengthOfArray(expression); + + if (usesFieldRef()) { + return Size.lengthOfArray(fieldReference); + } + + return usesExpression() ? Size.lengthOfArray(expression) : Size.lengthOfArray(values); } /** * Creates new {@link AggregationExpression} that takes the associated array and selects a subset from it. * - * @return + * @return new instance of {@link Slice}. */ public Slice slice() { - return usesFieldRef() ? Slice.sliceArrayOf(fieldReference) : Slice.sliceArrayOf(expression); + + if (usesFieldRef()) { + return Slice.sliceArrayOf(fieldReference); + } + + return usesExpression() ? Slice.sliceArrayOf(expression) : Slice.sliceArrayOf(values); } /** @@ -202,20 +268,31 @@ public Slice slice() { * value and returns the array index (zero-based) of the first occurrence. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfArray}. */ public IndexOfArray indexOf(Object value) { - return usesFieldRef() ? IndexOfArray.arrayOf(fieldReference).indexOf(value) - : IndexOfArray.arrayOf(expression).indexOf(value); + + if (usesFieldRef()) { + return IndexOfArray.arrayOf(fieldReference).indexOf(value); + } + + return usesExpression() ? IndexOfArray.arrayOf(expression).indexOf(value) + : IndexOfArray.arrayOf(values).indexOf(value); } /** * Creates new {@link AggregationExpression} that returns an array with the elements in reverse order. * - * @return + * @return new instance of {@link ReverseArray}. */ public ReverseArray reverse() { - return usesFieldRef() ? 
ReverseArray.reverseArrayOf(fieldReference) : ReverseArray.reverseArrayOf(expression); + + if (usesFieldRef()) { + return ReverseArray.reverseArrayOf(fieldReference); + } + + return usesExpression() ? ReverseArray.reverseArrayOf(expression) + : ReverseArray.reverseArrayOf(Collections.singletonList(values)); } /** @@ -223,7 +300,7 @@ public ReverseArray reverse() { * an array and combines them into a single value. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ReduceInitialValueBuilder} to create {@link Reduce}. */ public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(AggregationExpression expression) { @@ -235,8 +312,8 @@ public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(AggregationExpressi * Start creating new {@link AggregationExpression} that applies an {@link AggregationExpression} to each element in * an array and combines them into a single value. * - * @param expressions - * @return + * @param expressions must not be {@literal null}. + * @return new instance of {@link ReduceInitialValueBuilder} to create {@link Reduce}. */ public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(PropertyExpression... expressions) { @@ -244,16 +321,53 @@ public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(PropertyExpression. .withInitialValue(initialValue).reduce(expressions); } + /** + * Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given {@link Sort + * order}. + * + * @return new instance of {@link SortArray}. + * @since 4.0 + */ + public SortArray sort(Sort sort) { + + if (usesFieldRef()) { + return SortArray.sortArrayOf(fieldReference).by(sort); + } + + return (usesExpression() ? SortArray.sortArrayOf(expression) : SortArray.sortArray(values)).by(sort); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given + * {@link Direction order}. 
+ * + * @return new instance of {@link SortArray}. + * @since 4.5 + */ + public SortArray sort(Direction direction) { + + if (usesFieldRef()) { + return SortArray.sortArrayOf(fieldReference).direction(direction); + } + + return (usesExpression() ? SortArray.sortArrayOf(expression) : SortArray.sortArray(values)).direction(direction); + } + /** * Creates new {@link AggregationExpression} that transposes an array of input arrays so that the first element of * the output array would be an array containing, the first element of the first input array, the first element of * the second input array, etc. * * @param arrays must not be {@literal null}. - * @return + * @return new instance of {@link Zip}. */ public Zip zipWith(Object... arrays) { - return (usesFieldRef() ? Zip.arrayOf(fieldReference) : Zip.arrayOf(expression)).zip(arrays); + + if (usesFieldRef()) { + return Zip.arrayOf(fieldReference).zip(arrays); + } + + return (usesExpression() ? Zip.arrayOf(expression) : Zip.arrayOf(values)).zip(arrays); } /** @@ -261,10 +375,63 @@ public Zip zipWith(Object... arrays) { * associated array. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link In}. */ public In containsValue(Object value) { - return (usesFieldRef() ? In.arrayOf(fieldReference) : In.arrayOf(expression)).containsValue(value); + + if (usesFieldRef()) { + return In.arrayOf(fieldReference).containsValue(value); + } + + return (usesExpression() ? In.arrayOf(expression) : In.arrayOf(values)).containsValue(value); + } + + /** + * Creates new {@link AggregationExpression} that converts the associated expression into an object. + * NOTE: Requires MongoDB 3.6 or later. + * + * @return new instance of {@link ArrayToObject}. + * @since 2.1 + */ + public ArrayToObject toObject() { + + if (usesFieldRef()) { + return ArrayToObject.arrayValueOfToObject(fieldReference); + } + + return usesExpression() ? 
ArrayToObject.arrayValueOfToObject(expression) : ArrayToObject.arrayToObject(values); + } + + /** + * Creates new {@link AggregationExpression} that return the first element in the associated array. + * NOTE: Requires MongoDB 4.4 or later. + * + * @return new instance of {@link First}. + * @since 3.4 + */ + public First first() { + + if (usesFieldRef()) { + return First.firstOf(fieldReference); + } + + return usesExpression() ? First.firstOf(expression) : First.first(values); + } + + /** + * Creates new {@link AggregationExpression} that return the last element in the given array. NOTE: + * Requires MongoDB 4.4 or later. + * + * @return new instance of {@link Last}. + * @since 3.4 + */ + public Last last() { + + if (usesFieldRef()) { + return Last.lastOf(fieldReference); + } + + return usesExpression() ? Last.lastOf(expression) : Last.last(values); } /** @@ -281,9 +448,20 @@ public interface ReduceInitialValueBuilder { Reduce startingWith(Object initialValue); } + /** + * @return {@literal true} if {@link #fieldReference} is not {@literal null}. + */ private boolean usesFieldRef() { return fieldReference != null; } + + /** + * @return {@literal true} if {@link #expression} is not {@literal null}. + * @since 2.2 + */ + private boolean usesExpression() { + return expression != null; + } } /** @@ -306,11 +484,11 @@ protected String getMongoMethod() { * Creates new {@link ArrayElemAt}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ArrayElemAt}. */ public static ArrayElemAt arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new ArrayElemAt(asFields(fieldReference)); } @@ -318,27 +496,58 @@ public static ArrayElemAt arrayOf(String fieldReference) { * Creates new {@link ArrayElemAt}. * * @param expression must not be {@literal null}. 
- * @return + * @return new instance of {@link ArrayElemAt}. */ public static ArrayElemAt arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ArrayElemAt(Collections.singletonList(expression)); } + /** + * Creates new {@link ArrayElemAt}. + * + * @param values The array members. Must not be {@literal null}. + * @return new instance of {@link ArrayElemAt}. + * @since 2.2 + */ + public static ArrayElemAt arrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new ArrayElemAt(Collections.singletonList(values)); + } + + /** + * Use the element with given index number. + * + * @param index the index number + * @return new instance of {@link ArrayElemAt}. + */ public ArrayElemAt elementAt(int index) { return new ArrayElemAt(append(index)); } + /** + * Use the element at the index number evaluated from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ArrayElemAt}. + */ public ArrayElemAt elementAt(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ArrayElemAt(append(expression)); } + /** + * Use the element at the index number taken from the given field. + * + * @param arrayFieldReference the field name. + * @return new instance of {@link ArrayElemAt}. + */ public ArrayElemAt elementAt(String arrayFieldReference) { - Assert.notNull(arrayFieldReference, "ArrayReference must not be null!"); + Assert.notNull(arrayFieldReference, "ArrayReference must not be null"); return new ArrayElemAt(append(Fields.field(arrayFieldReference))); } } @@ -363,11 +572,11 @@ protected String getMongoMethod() { * Creates new {@link ConcatArrays}. * * @param fieldReference must not be {@literal null}. 
- * @return + * @return new instance of {@link ConcatArrays}. */ public static ConcatArrays arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new ConcatArrays(asFields(fieldReference)); } @@ -375,23 +584,48 @@ public static ConcatArrays arrayOf(String fieldReference) { * Creates new {@link ConcatArrays}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ConcatArrays}. */ public static ConcatArrays arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ConcatArrays(Collections.singletonList(expression)); } + /** + * Creates new {@link ConcatArrays}. + * + * @param values The array members. Must not be {@literal null}. + * @return new instance of {@link ConcatArrays}. + * @since 2.2 + */ + public static ConcatArrays arrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new ConcatArrays(Collections.singletonList(values)); + } + + /** + * Concat with the array stored at the given field. + * + * @param arrayFieldReference must not be {@literal null}. + * @return new instance of {@link ConcatArrays}. + */ public ConcatArrays concat(String arrayFieldReference) { - Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null!"); + Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null"); return new ConcatArrays(append(Fields.field(arrayFieldReference))); } + /** + * Concat with the array resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ConcatArrays}. 
+ */ public ConcatArrays concat(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ConcatArrays(append(expression)); } } @@ -421,7 +655,7 @@ private Filter() { */ public static AsBuilder filter(String field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return filter(Fields.field(field)); } @@ -433,26 +667,35 @@ public static AsBuilder filter(String field) { */ public static AsBuilder filter(Field field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return new FilterExpressionBuilder().filter(field); } + /** + * Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. + * @since 4.2 + */ + public static AsBuilder filter(AggregationExpression expression) { + + Assert.notNull(expression, "Field must not be null"); + return new FilterExpressionBuilder().filter(expression); + } + /** * Set the {@literal values} to apply the {@code $filter} to. * * @param values must not be {@literal null}. - * @return + * @return new instance of {@link AsBuilder} to create the {@link Filter}. 
*/ public static AsBuilder filter(List values) { - Assert.notNull(values, "Values must not be null!"); + Assert.notNull(values, "Values must not be null"); return new FilterExpressionBuilder().filter(values); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(final AggregationOperationContext context) { return toFilter(ExposedFields.from(as), context); @@ -461,8 +704,7 @@ public Document toDocument(final AggregationOperationContext context) { private Document toFilter(ExposedFields exposedFields, AggregationOperationContext context) { Document filterExpression = new Document(); - InheritingExposedFieldsAggregationOperationContext operationContext = new InheritingExposedFieldsAggregationOperationContext( - exposedFields, context); + AggregationOperationContext operationContext = context.inheritAndExpose(exposedFields); filterExpression.putAll(context.getMappedObject(new Document("input", getMappedInput(context)))); filterExpression.put("as", as.getTarget()); @@ -473,18 +715,27 @@ private Document toFilter(ExposedFields exposedFields, AggregationOperationConte } private Object getMappedInput(AggregationOperationContext context) { - return input instanceof Field ? 
context.getReference((Field) input).toString() : input; + + if (input instanceof Field field) { + return context.getReference(field).toString(); + } + + if (input instanceof AggregationExpression expression) { + return expression.toDocument(context); + } + + return input; } private Object getMappedCondition(AggregationOperationContext context) { - if (!(condition instanceof AggregationExpression)) { + if (!(condition instanceof AggregationExpression aggregationExpression)) { return condition; } NestedDelegatingExpressionAggregationOperationContext nea = new NestedDelegatingExpressionAggregationOperationContext( - context); - return ((AggregationExpression) condition).toDocument(nea); + context, Collections.singleton(as)); + return aggregationExpression.toDocument(nea); } /** @@ -507,6 +758,15 @@ public interface InputBuilder { * @return */ AsBuilder filter(Field field); + + /** + * Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to. + * + * @param expression must not be {@literal null}. + * @return + * @since 4.1.1 + */ + AsBuilder filter(AggregationExpression expression); } /** @@ -518,7 +778,7 @@ public interface AsBuilder { * Set the {@literal variableName} for the elements in the input array. * * @param variableName must not be {@literal null}. - * @return + * @return never {@literal null}. */ ConditionBuilder as(String variableName); } @@ -532,7 +792,7 @@ public interface ConditionBuilder { * Set the {@link AggregationExpression} that determines whether to include the element in the resulting array. * * @param expression must not be {@literal null}. - * @return + * @return never {@literal null}. */ Filter by(AggregationExpression expression); @@ -540,7 +800,7 @@ public interface ConditionBuilder { * Set the {@literal expression} that determines whether to include the element in the resulting array. * * @param expression must not be {@literal null}. - * @return + * @return never {@literal null}. 
*/ Filter by(String expression); @@ -548,7 +808,7 @@ public interface ConditionBuilder { * Set the {@literal expression} that determines whether to include the element in the resulting array. * * @param expression must not be {@literal null}. - * @return + * @return never {@literal null}. */ Filter by(Document expression); } @@ -567,80 +827,64 @@ static final class FilterExpressionBuilder implements InputBuilder, AsBuilder, C /** * Creates new {@link InputBuilder}. * - * @return + * @return new instance of {@link FilterExpressionBuilder}. */ public static InputBuilder newBuilder() { return new FilterExpressionBuilder(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.InputBuilder#filter(java.util.List) - */ @Override public AsBuilder filter(List array) { - Assert.notNull(array, "Array must not be null!"); - filter.input = new ArrayList(array); + Assert.notNull(array, "Array must not be null"); + filter.input = new ArrayList<>(array); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.InputBuilder#filter(org.springframework.data.mongodb.core.aggregation.Field) - */ @Override public AsBuilder filter(Field field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); filter.input = field; return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.AsBuilder#as(java.lang.String) - */ + @Override + public AsBuilder filter(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + filter.input = expression; + return this; + } + @Override public ConditionBuilder as(String variableName) { - Assert.notNull(variableName, "Variable name must not be null!"); + Assert.notNull(variableName, "Variable name must not be null"); filter.as = new ExposedField(variableName, true); return this; } - /* - * 
(non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.ConditionBuilder#by(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public Filter by(AggregationExpression condition) { - Assert.notNull(condition, "Condition must not be null!"); + Assert.notNull(condition, "Condition must not be null"); filter.condition = condition; return filter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.ConditionBuilder#by(java.lang.String) - */ @Override public Filter by(String expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); filter.condition = expression; return filter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.ConditionBuilder#by(org.bson.Document) - */ @Override public Filter by(Document expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); filter.condition = expression; return filter; } @@ -667,11 +911,11 @@ protected String getMongoMethod() { * Creates new {@link IsArray}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link IsArray}. */ public static IsArray isArray(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new IsArray(Fields.field(fieldReference)); } @@ -679,11 +923,11 @@ public static IsArray isArray(String fieldReference) { * Creates new {@link IsArray}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link IsArray}. 
*/ public static IsArray isArray(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new IsArray(expression); } } @@ -708,11 +952,11 @@ protected String getMongoMethod() { * Creates new {@link Size}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Size}. */ public static Size lengthOfArray(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Size(Fields.field(fieldReference)); } @@ -720,13 +964,26 @@ public static Size lengthOfArray(String fieldReference) { * Creates new {@link Size}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Size}. */ public static Size lengthOfArray(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Size(expression); } + + /** + * Creates new {@link Size}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link Size}. + * @since 2.2 + */ + public static Size lengthOfArray(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new Size(Collections.singletonList(values)); + } } /** @@ -749,11 +1006,11 @@ protected String getMongoMethod() { * Creates new {@link Slice}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Slice}. */ public static Slice sliceArrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Slice(asFields(fieldReference)); } @@ -761,41 +1018,101 @@ public static Slice sliceArrayOf(String fieldReference) { * Creates new {@link Slice}. 
* * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Slice}. */ public static Slice sliceArrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Slice(Collections.singletonList(expression)); } - public Slice itemCount(int nrElements) { - return new Slice(append(nrElements)); + /** + * Creates new {@link Slice}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link Slice}. + * @since 2.2 + */ + public static Slice sliceArrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new Slice(Collections.singletonList(values)); } - public SliceElementsBuilder offset(final int position) { + /** + * Slice the number of elements. + * + * @param count number of elements to slice. + * @return new instance of {@link Slice}. + */ + public Slice itemCount(int count) { + return new Slice(append(count)); + } - return new SliceElementsBuilder() { + /** + * Slice the number of elements. + * + * @param count an {@link AggregationExpression} that evaluates to a numeric value used as number of elements to + * slice. + * @return new instance of {@link Slice}. + * @since 4.5 + */ + public Slice itemCount(AggregationExpression count) { + return new Slice(append(count)); + } - @Override - public Slice itemCount(int nrElements) { - return new Slice(append(position)).itemCount(nrElements); - } - }; + /** + * Slice using offset and count. + * + * @param position the start position + * @return new instance of {@link SliceElementsBuilder} to create {@link Slice}. + */ + public SliceElementsBuilder offset(int position) { + return new SliceElementsBuilder(position); + } + + /** + * Slice using offset and count. + * + * @param position the start position + * @return new instance of {@link SliceElementsBuilder} to create {@link Slice}. 
+ */ + public SliceElementsBuilder offset(AggregationExpression position) { + return new SliceElementsBuilder(position); } /** * @author Christoph Strobl */ - public interface SliceElementsBuilder { + public class SliceElementsBuilder { + + private final Object position; + + SliceElementsBuilder(Object position) { + this.position = position; + } /** - * Set the number of elements given {@literal nrElements}. + * Set the number of elements given {@literal count}. * - * @param nrElements - * @return + * @param count number of elements to slice. + * @return new instance of {@link Slice}. */ - Slice itemCount(int nrElements); + public Slice itemCount(int count) { + return new Slice(append(position)).itemCount(count); + } + + /** + * Slice the number of elements. + * + * @param count an {@link AggregationExpression} that evaluates to a numeric value used as number of elements to + * slice. + * @return new instance of {@link Slice}. + * @since 4.5 + */ + public Slice itemCount(AggregationExpression count) { + return new Slice(append(position)).itemCount(count); + } } } @@ -819,11 +1136,11 @@ protected String getMongoMethod() { * Start creating new {@link IndexOfArray}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfArray}. */ public static IndexOfArrayBuilder arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new IndexOfArrayBuilder(Fields.field(fieldReference)); } @@ -831,14 +1148,33 @@ public static IndexOfArrayBuilder arrayOf(String fieldReference) { * Start creating new {@link IndexOfArray}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfArray}. 
*/ public static IndexOfArrayBuilder arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new IndexOfArrayBuilder(expression); } + /** + * Start creating new {@link IndexOfArray}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link IndexOfArrayBuilder} to create {@link IndexOfArray}. + * @since 2.2 + */ + public static IndexOfArrayBuilder arrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new IndexOfArrayBuilder(values); + } + + /** + * Lookup within a given range. + * + * @param range the lookup range. + * @return new instance of {@link IndexOfArray}. + */ public IndexOfArray within(Range range) { return new IndexOfArray(append(AggregationUtils.toRangeValues(range))); } @@ -858,11 +1194,11 @@ private IndexOfArrayBuilder(Object targetArray) { * Set the {@literal value} to check for its index in the array. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfArray}. */ public IndexOfArray indexOf(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new IndexOfArray(Arrays.asList(targetArray, value)); } } @@ -888,7 +1224,7 @@ protected String getMongoMethod() { * Start creating new {@link RangeOperator}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link RangeOperatorBuilder} to create {@link RangeOperator}. */ public static RangeOperatorBuilder rangeStartingAt(String fieldReference) { return new RangeOperatorBuilder(Fields.field(fieldReference)); @@ -898,7 +1234,7 @@ public static RangeOperatorBuilder rangeStartingAt(String fieldReference) { * Start creating new {@link RangeOperator}. * * @param expression must not be {@literal null}. 
- * @return + * @return new instance of {@link RangeOperatorBuilder} to create {@link RangeOperator}. */ public static RangeOperatorBuilder rangeStartingAt(AggregationExpression expression) { return new RangeOperatorBuilder(expression); @@ -908,7 +1244,7 @@ public static RangeOperatorBuilder rangeStartingAt(AggregationExpression express * Start creating new {@link RangeOperator}. * * @param value - * @return + * @return new instance of {@link RangeOperator}. */ public static RangeOperatorBuilder rangeStartingAt(long value) { return new RangeOperatorBuilder(value); @@ -930,7 +1266,7 @@ private RangeOperatorBuilder(Object startPoint) { * Creates new {@link RangeOperator}. * * @param index - * @return + * @return new instance of {@link RangeOperator}. */ public RangeOperator to(long index) { return new RangeOperator(Arrays.asList(startPoint, index)); @@ -940,7 +1276,7 @@ public RangeOperator to(long index) { * Creates new {@link RangeOperator}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link RangeOperator}. */ public RangeOperator to(AggregationExpression expression) { return new RangeOperator(Arrays.asList(startPoint, expression)); @@ -950,7 +1286,7 @@ public RangeOperator to(AggregationExpression expression) { * Creates new {@link RangeOperator}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link RangeOperator}. */ public RangeOperator to(String fieldReference) { return new RangeOperator(Arrays.asList(startPoint, Fields.field(fieldReference))); @@ -978,7 +1314,7 @@ protected String getMongoMethod() { * Creates new {@link ReverseArray} given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ReverseArray}. 
*/ public static ReverseArray reverseArrayOf(String fieldReference) { return new ReverseArray(Fields.field(fieldReference)); @@ -988,11 +1324,22 @@ public static ReverseArray reverseArrayOf(String fieldReference) { * Creates new {@link ReverseArray} given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ReverseArray}. */ public static ReverseArray reverseArrayOf(AggregationExpression expression) { return new ReverseArray(expression); } + + /** + * Creates new {@link ReverseArray}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link ReverseArray}. + * @since 2.2 + */ + public static ReverseArray reverseArrayOf(Collection values) { + return new ReverseArray(values); + } } /** @@ -1013,9 +1360,6 @@ private Reduce(Object input, Object initialValue, List re this.reduceExpressions = reduceExpressions; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -1043,10 +1387,10 @@ private Object getMappedValue(Object value, AggregationOperationContext context) if (value instanceof Document) { return value; } - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); - } else if (value instanceof Field) { - return context.getReference(((Field) value)).toString(); + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } else if (value instanceof Field field) { + return context.getReference(field).toString(); } else { return context.getMappedObject(new Document("###val###", value)).get("###val###"); } @@ -1056,7 +1400,7 @@ private Object getMappedValue(Object value, AggregationOperationContext context) * Start creating new {@link 
Reduce}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link InitialValueBuilder} to create {@link Reduce}. */ public static InitialValueBuilder arrayOf(final String fieldReference) { @@ -1095,7 +1439,7 @@ public Reduce reduce(PropertyExpression... expressions) { * Start creating new {@link Reduce}. * * @param arrayValueExpression must not be {@literal null}. - * @return + * @return new instance of {@link InitialValueBuilder} to create {@link Reduce}. */ public static InitialValueBuilder arrayOf(final AggregationExpression arrayValueExpression) { @@ -1135,7 +1479,7 @@ public interface InitialValueBuilder { * Define the initial cumulative value set before in is applied to the first element of the input array. * * @param initialValue must not be {@literal null}. - * @return + * @return never {@literal null}. */ ReduceBuilder withInitialValue(Object initialValue); } @@ -1152,7 +1496,7 @@ public interface ReduceBuilder { * {@link Variable#VALUE} are available. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Reduce}. */ Reduce reduce(AggregationExpression expression); @@ -1163,7 +1507,7 @@ public interface ReduceBuilder { * {@link Variable#VALUE} are available. * * @param expressions must not be {@literal null}. - * @return + * @return new instance of {@link Reduce}. */ Reduce reduce(PropertyExpression... 
expressions); } @@ -1178,8 +1522,8 @@ public static class PropertyExpression implements AggregationExpression { protected PropertyExpression(String propertyName, AggregationExpression aggregationExpression) { - Assert.notNull(propertyName, "Property name must not be null!"); - Assert.notNull(aggregationExpression, "AggregationExpression must not be null!"); + Assert.notNull(propertyName, "Property name must not be null"); + Assert.notNull(aggregationExpression, "AggregationExpression must not be null"); this.propertyName = propertyName; this.aggregationExpression = aggregationExpression; @@ -1189,7 +1533,7 @@ protected PropertyExpression(String propertyName, AggregationExpression aggregat * Define a result property for an {@link AggregationExpression} used in {@link Reduce}. * * @param name must not be {@literal null}. - * @return + * @return new instance of {@link AsBuilder} to create {@link Reduce}. */ public static AsBuilder property(final String name) { @@ -1202,9 +1546,6 @@ public PropertyExpression definedAs(AggregationExpression expression) { }; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return new Document(propertyName, aggregationExpression.toDocument(context)); @@ -1219,30 +1560,21 @@ public interface AsBuilder { * Set the {@link AggregationExpression} resulting in the properties value. * * @param expression must not be {@literal null}. - * @return + * @return never {@literal null}. 
*/ PropertyExpression definedAs(AggregationExpression expression); } } - public enum Variable implements Field { + public enum Variable implements AggregationVariable { THIS { - @Override - public String getName() { - return "$$this"; - } @Override public String getTarget() { return "$$this"; } - @Override - public boolean isAliased() { - return false; - } - @Override public String toString() { return getName(); @@ -1250,33 +1582,29 @@ public String toString() { }, VALUE { - @Override - public String getName() { - return "$$value"; - } @Override public String getTarget() { return "$$value"; } - @Override - public boolean isAliased() { - return false; - } - @Override public String toString() { return getName(); } }; + @Override + public boolean isInternal() { + return true; + } + /** * Create a {@link Field} reference to a given {@literal property} prefixed with the {@link Variable} identifier. * eg. {@code $$value.product} * * @param property must not be {@literal null}. - * @return + * @return never {@literal null}. */ public Field referringTo(final String property) { @@ -1302,6 +1630,16 @@ public String toString() { } }; } + + public static boolean isVariable(Field field) { + + for (Variable var : values()) { + if (field.getTarget().startsWith(var.getTarget())) { + return true; + } + } + return false; + } } } @@ -1325,11 +1663,11 @@ protected String getMongoMethod() { * Start creating new {@link Zip}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ZipBuilder} to create {@link Zip}. */ public static ZipBuilder arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new ZipBuilder(Fields.field(fieldReference)); } @@ -1337,18 +1675,31 @@ public static ZipBuilder arrayOf(String fieldReference) { * Start creating new {@link Zip}. * * @param expression must not be {@literal null}. 
- * @return + * @return new instance of {@link ZipBuilder} to create {@link Zip}. */ public static ZipBuilder arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ZipBuilder(expression); } + /** + * Start creating new {@link Zip}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link Zip}. + * @since 2.2 + */ + public static ZipBuilder arrayOf(Collection values) { + + Assert.notNull(values, "Expression must not be null"); + return new ZipBuilder(values); + } + /** * Create new {@link Zip} and set the {@code useLongestLength} property to {@literal true}. * - * @return + * @return new instance of {@link Zip}. */ public Zip useLongestLength() { return new Zip(append("useLongestLength", true)); @@ -1358,11 +1709,11 @@ public Zip useLongestLength() { * Optionally provide a default value. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Zip}. */ public Zip defaultTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Zip(append("defaults", Fields.field(fieldReference))); } @@ -1370,11 +1721,11 @@ public Zip defaultTo(String fieldReference) { * Optionally provide a default value. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Zip}. */ public Zip defaultTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Zip(append("defaults", expression)); } @@ -1382,11 +1733,11 @@ public Zip defaultTo(AggregationExpression expression) { * Optionally provide a default value. * * @param array must not be {@literal null}. - * @return + * @return new instance of {@link Zip}. 
*/ public Zip defaultTo(Object[] array) { - Assert.notNull(array, "Array must not be null!"); + Assert.notNull(array, "Array must not be null"); return new Zip(append("defaults", Arrays.asList(array))); } @@ -1396,7 +1747,7 @@ public static class ZipBuilder { private ZipBuilder(Object sourceArray) { - this.sourceArrays = new ArrayList(); + this.sourceArrays = new ArrayList<>(); this.sourceArrays.add(sourceArray); } @@ -1406,21 +1757,21 @@ private ZipBuilder(Object sourceArray) { * array, etc. * * @param arrays arrays to zip the referenced one with. must not be {@literal null}. - * @return + * @return new instance of {@link Zip}. */ public Zip zip(Object... arrays) { - Assert.notNull(arrays, "Arrays must not be null!"); + Assert.notNull(arrays, "Arrays must not be null"); for (Object value : arrays) { - if (value instanceof String) { - sourceArrays.add(Fields.field((String) value)); + if (value instanceof String stringValue) { + sourceArrays.add(Fields.field(stringValue)); } else { sourceArrays.add(value); } } - return new Zip(Collections. singletonMap("inputs", sourceArrays)); + return new Zip(Collections.singletonMap("inputs", sourceArrays)); } } } @@ -1429,6 +1780,10 @@ public Zip zip(Object... arrays) { * {@link AggregationExpression} for {@code $in}. * * @author Christoph Strobl + * @author Shashank Sharma + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/in/ + * @since 2.2 */ public static class In extends AbstractAggregationExpression { @@ -1445,20 +1800,16 @@ protected String getMongoMethod() { * Start creating {@link In}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link InBuilder} to create {@link In}. 
*/ - public static InBuilder arrayOf(final String fieldReference) { - - Assert.notNull(fieldReference, "FieldReference must not be null!"); + public static InBuilder arrayOf(String fieldReference) { - return new InBuilder() { + Assert.notNull(fieldReference, "FieldReference must not be null"); - @Override - public In containsValue(Object value) { + return value -> { - Assert.notNull(value, "Value must not be null!"); - return new In(Arrays.asList(value, Fields.field(fieldReference))); - } + Assert.notNull(value, "Value must not be null"); + return new In(Arrays.asList(value, Fields.field(fieldReference))); }; } @@ -1466,20 +1817,36 @@ public In containsValue(Object value) { * Start creating {@link In}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link InBuilder} to create {@link In}. */ - public static InBuilder arrayOf(final AggregationExpression expression) { + public static InBuilder arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); - return new InBuilder() { + return value -> { - @Override - public In containsValue(Object value) { + Assert.notNull(value, "Value must not be null"); - Assert.notNull(value, "Value must not be null!"); - return new In(Arrays.asList(value, expression)); - } + return new In(Arrays.asList(value, expression)); + }; + } + + /** + * Support for Aggregation In Search an Element in List of Objects to Filter Start creating {@link In}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link InBuilder}. 
+ * @since 2.2 + */ + public static InBuilder arrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + + return value -> { + + Assert.notNull(value, "Value must not be null"); + + return new In(Arrays.asList(value, values)); }; } @@ -1492,9 +1859,251 @@ public interface InBuilder { * Set the {@literal value} to check for existence in the array. * * @param value must not be {@literal value}. - * @return + * @return new instance of {@link In}. */ In containsValue(Object value); } } + + /** + * {@link AggregationExpression} for {@code $arrayToObject} that transforms an array into a single document.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/ + * @since 2.1 + */ + public static class ArrayToObject extends AbstractAggregationExpression { + + private ArrayToObject(Object value) { + super(value); + } + + /** + * Converts the given array (e.g. an array of two-element arrays, a field reference to an array,...) to an object. + * + * @param array must not be {@literal null}. + * @return new instance of {@link ArrayToObject}. + */ + public static ArrayToObject arrayToObject(Object array) { + return new ArrayToObject(array); + } + + /** + * Converts the array pointed to by the given {@link Field field reference} to an object. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ArrayToObject}. + */ + public static ArrayToObject arrayValueOfToObject(String fieldReference) { + return new ArrayToObject(Fields.field(fieldReference)); + } + + /** + * Converts the result array of the given {@link AggregationExpression expression} to an object. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ArrayToObject}. + */ + public static ArrayToObject arrayValueOfToObject(AggregationExpression expression) { + return new ArrayToObject(expression); + } + + @Override + protected String getMongoMethod() { + return "$arrayToObject"; + } + } + + /** + * {@link AggregationExpression} for {@code $first} that returns the first element in an array.
          + * NOTE: Requires MongoDB 4.4 or later. + * + * @author Divya Srivastava + * @author Christoph Strobl + * @since 3.4 + */ + public static class First extends AbstractAggregationExpression { + + private First(Object value) { + super(value); + } + + /** + * Returns the first element in the given array. + * + * @param array must not be {@literal null}. + * @return new instance of {@link First}. + */ + public static First first(Object array) { + return new First(array); + } + + /** + * Returns the first element in the array pointed to by the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link First}. + */ + public static First firstOf(String fieldReference) { + return new First(Fields.field(fieldReference)); + } + + /** + * Returns the first element of the array computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link First}. + */ + public static First firstOf(AggregationExpression expression) { + return new First(expression); + } + + @Override + protected String getMongoMethod() { + return "$first"; + } + } + + /** + * {@link AggregationExpression} for {@code $last} that returns the last element in an array.
          + * NOTE: Requires MongoDB 4.4 or later. + * + * @author Divya Srivastava + * @author Christoph Strobl + * @since 3.4 + */ + public static class Last extends AbstractAggregationExpression { + + private Last(Object value) { + super(value); + } + + /** + * Returns the last element in the given array. + * + * @param array must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public static Last last(Object array) { + return new Last(array); + } + + /** + * Returns the last element in the array pointed to by the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public static Last lastOf(String fieldReference) { + return new Last(Fields.field(fieldReference)); + } + + /** + * Returns the last element of the array computed buy the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public static Last lastOf(AggregationExpression expression) { + return new Last(expression); + } + + @Override + protected String getMongoMethod() { + return "$last"; + } + } + + /** + * {@link AggregationExpression} for {@code $sortArray} that sorts elements in an array.
          + * + * @author Christoph Strobl + * @since 4.0 + */ + public static class SortArray extends AbstractAggregationExpression { + + private SortArray(Object value) { + super(value); + } + + /** + * Returns the given array. + * + * @param array must not be {@literal null}. + * @return new instance of {@link SortArray}. + */ + public static SortArray sortArray(Object array) { + return new SortArray(Collections.singletonMap("input", array)); + } + + /** + * Sorts the elements in the array pointed to by the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link SortArray}. + */ + public static SortArray sortArrayOf(String fieldReference) { + return sortArray(Fields.field(fieldReference)); + } + + /** + * Sorts the elements of the array computed buy the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SortArray}. + */ + public static SortArray sortArrayOf(AggregationExpression expression) { + return sortArray(expression); + } + + /** + * Set the order to put elements in. + * + * @param sort must not be {@literal null}. + * @return new instance of {@link SortArray}. + */ + public SortArray by(Sort sort) { + return new SortArray(append("sortBy", sort)); + } + + /** + * Order the values for the array in the given direction. + * + * @param direction must not be {@literal null}. + * @return new instance of {@link SortArray}. + * @since 4.5 + */ + public SortArray direction(Direction direction) { + return new SortArray(append("sortBy", direction.isAscending() ? 1 : -1)); + } + + /** + * Sort the array elements by their values in ascending order. Suitable for arrays of simple types (e.g., integers, + * strings). + * + * @return new instance of {@link SortArray}. 
+ * @since 4.5 + */ + public SortArray byValueAscending() { + return direction(Direction.ASC); + } + + /** + * Sort the array elements by their values in descending order. Suitable for arrays of simple types (e.g., integers, + * strings). + * + * @return new instance of {@link SortArray}. + * @since 4.5 + */ + public SortArray byValueDescending() { + return direction(Direction.DESC); + } + + @Override + protected String getMongoMethod() { + return "$sortArray"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperation.java new file mode 100644 index 0000000000..4d321c4715 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperation.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.bson.conversions.Bson; + +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.util.ObjectUtils; + +/** + * {@link AggregationOperation} implementation that can return a {@link Document} from a {@link Bson} or {@link String} + * document. 
+ * + * @author Christoph Strobl + * @since 4.0 + */ +record BasicAggregationOperation(Object value) implements AggregationOperation { + + @Override + public Document toDocument(AggregationOperationContext context) { + + if (value instanceof Bson bson) { + return BsonUtils.asDocument(bson, context.getCodecRegistry()); + } + + if (value instanceof String json && BsonUtils.isJsonDocument(json)) { + return BsonUtils.parse(json, context); + } + + throw new IllegalStateException( + String.format("%s cannot be converted to org.bson.Document", ObjectUtils.nullSafeClassName(value))); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java index df93fd8919..69689908c9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -34,7 +34,7 @@ public class BooleanOperators { * Take the array referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link BooleanOperatorFactory}. 
*/ public static BooleanOperatorFactory valueOf(String fieldReference) { return new BooleanOperatorFactory(fieldReference); @@ -44,7 +44,7 @@ public static BooleanOperatorFactory valueOf(String fieldReference) { * Take the value resulting of the given {@link AggregationExpression}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link BooleanOperatorFactory}. */ public static BooleanOperatorFactory valueOf(AggregationExpression fieldReference) { return new BooleanOperatorFactory(fieldReference); @@ -55,7 +55,7 @@ public static BooleanOperatorFactory valueOf(AggregationExpression fieldReferenc * opposite boolean value. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Not}. */ public static Not not(String fieldReference) { return Not.not(fieldReference); @@ -66,7 +66,7 @@ public static Not not(String fieldReference) { * and returns the opposite boolean value. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Not}. */ public static Not not(AggregationExpression expression) { return Not.not(expression); @@ -87,7 +87,7 @@ public static class BooleanOperatorFactory { */ public BooleanOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; } @@ -99,7 +99,7 @@ public BooleanOperatorFactory(String fieldReference) { */ public BooleanOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; } @@ -109,11 +109,11 @@ public BooleanOperatorFactory(AggregationExpression expression) { * all of the expressions are {@literal true}. * * @param expression must not be {@literal null}. 
- * @return + * @return new instance of {@link And}. */ public And and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createAnd().andExpression(expression); } @@ -122,11 +122,11 @@ public And and(AggregationExpression expression) { * all of the expressions are {@literal true}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link And}. */ public And and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createAnd().andField(fieldReference); } @@ -139,11 +139,11 @@ private And createAnd() { * any of the expressions are {@literal true}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Or}. */ public Or or(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createOr().orExpression(expression); } @@ -152,11 +152,11 @@ public Or or(AggregationExpression expression) { * any of the expressions are {@literal true}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Or}. */ public Or or(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createOr().orField(fieldReference); } @@ -167,7 +167,7 @@ private Or createOr() { /** * Creates new {@link AggregationExpression} that evaluates a boolean and returns the opposite boolean value. * - * @return + * @return new instance of {@link Not}. */ public Not not() { return usesFieldRef() ? 
Not.not(fieldReference) : Not.not(expression); @@ -198,8 +198,8 @@ protected String getMongoMethod() { * Creates new {@link And} that evaluates one or more expressions and returns {@literal true} if all of the * expressions are {@literal true}. * - * @param expressions - * @return + * @param expressions must not be {@literal null}. + * @return new instance of {@link And}. */ public static And and(Object... expressions) { return new And(Arrays.asList(expressions)); @@ -209,11 +209,11 @@ public static And and(Object... expressions) { * Creates new {@link And} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link And}. */ public And andExpression(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new And(append(expression)); } @@ -221,11 +221,11 @@ public And andExpression(AggregationExpression expression) { * Creates new {@link And} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link And}. */ public And andField(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new And(append(Fields.field(fieldReference))); } @@ -233,11 +233,11 @@ public And andField(String fieldReference) { * Creates new {@link And} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link And}. */ public And andValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new And(append(value)); } } @@ -263,11 +263,11 @@ protected String getMongoMethod() { * expressions are {@literal true}. 
* * @param expressions must not be {@literal null}. - * @return + * @return new instance of {@link Or}. */ public static Or or(Object... expressions) { - Assert.notNull(expressions, "Expressions must not be null!"); + Assert.notNull(expressions, "Expressions must not be null"); return new Or(Arrays.asList(expressions)); } @@ -275,11 +275,11 @@ public static Or or(Object... expressions) { * Creates new {@link Or} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Or}. */ public Or orExpression(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Or(append(expression)); } @@ -287,11 +287,11 @@ public Or orExpression(AggregationExpression expression) { * Creates new {@link Or} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Or}. */ public Or orField(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Or(append(Fields.field(fieldReference))); } @@ -299,11 +299,11 @@ public Or orField(String fieldReference) { * Creates new {@link Or} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Or}. */ public Or orValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Or(append(value)); } } @@ -329,11 +329,11 @@ protected String getMongoMethod() { * value. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Not}. 
*/ public static Not not(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Not(asFields(fieldReference)); } @@ -342,11 +342,11 @@ public static Not not(String fieldReference) { * returns the opposite boolean value. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Not}. */ public static Not not(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Not(Collections.singletonList(expression)); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java index 15b4cff218..36492e2a81 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,11 @@ */ package org.springframework.data.mongodb.core.aggregation; +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.BucketAutoOperationOutputBuilder; import org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder; import org.springframework.util.Assert; -import org.bson.Document; - /** * Encapsulates the aggregation framework {@code $bucketAuto}-operation.
          * Bucket stage is typically used with {@link Aggregation} and {@code $facet}. Categorizes incoming documents into a @@ -29,8 +28,7 @@ * We recommend to use the static factory method {@link Aggregation#bucketAuto(String, int)} instead of creating * instances of this class directly. * - * @see https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/ + * @see https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/ * @see BucketOperationSupport * @author Mark Paluch * @author Christoph Strobl @@ -52,7 +50,7 @@ public BucketAutoOperation(Field groupByField, int buckets) { super(groupByField); - Assert.isTrue(buckets > 0, "Number of buckets must be greater 0!"); + Assert.isTrue(buckets > 0, "Number of buckets must be greater 0"); this.buckets = buckets; this.granularity = null; @@ -68,7 +66,7 @@ public BucketAutoOperation(AggregationExpression groupByExpression, int buckets) super(groupByExpression); - Assert.isTrue(buckets > 0, "Number of buckets must be greater 0!"); + Assert.isTrue(buckets > 0, "Number of buckets must be greater 0"); this.buckets = buckets; this.granularity = null; @@ -90,9 +88,6 @@ private BucketAutoOperation(BucketAutoOperation bucketOperation, int buckets, St this.granularity = granularity; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -106,18 +101,23 @@ public Document toDocument(AggregationOperationContext context) { options.putAll(super.toDocument(context)); - return new Document("$bucketAuto", options); + return new Document(getOperator(), options); + } + + @Override + public String getOperator() { + return "$bucketAuto"; } /** * Configures a number of bucket {@literal buckets} and return a new {@link BucketAutoOperation}. * * @param buckets must be a positive number. 
- * @return + * @return new instance of {@link BucketAutoOperation}. */ public BucketAutoOperation withBuckets(int buckets) { - Assert.isTrue(buckets > 0, "Number of buckets must be greater 0!"); + Assert.isTrue(buckets > 0, "Number of buckets must be greater 0"); return new BucketAutoOperation(this, buckets, granularity); } @@ -128,42 +128,30 @@ public BucketAutoOperation withBuckets(int buckets) { * Use either predefined {@link Granularities} or provide a own one. * * @param granularity must not be {@literal null}. - * @return + * @return new instance of {@link BucketAutoOperation}. */ public BucketAutoOperation withGranularity(Granularity granularity) { - Assert.notNull(granularity, "Granularity must not be null!"); + Assert.notNull(granularity, "Granularity must not be null"); return new BucketAutoOperation(this, buckets, granularity.getMongoRepresentation()); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#newBucketOperation(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Outputs) - */ @Override protected BucketAutoOperation newBucketOperation(Outputs outputs) { return new BucketAutoOperation(this, outputs); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutputExpression(java.lang.String, java.lang.Object[]) - */ @Override public ExpressionBucketAutoOperationBuilder andOutputExpression(String expression, Object... 
params) { return new ExpressionBucketAutoOperationBuilder(expression, this, params); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public BucketAutoOperationOutputBuilder andOutput(AggregationExpression expression) { return new BucketAutoOperationOutputBuilder(expression, this); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(java.lang.String) - */ @Override public BucketAutoOperationOutputBuilder andOutput(String fieldName) { return new BucketAutoOperationOutputBuilder(Fields.field(fieldName), this); @@ -185,9 +173,6 @@ protected BucketAutoOperationOutputBuilder(Object value, BucketAutoOperation ope super(value, operation); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) - */ @Override protected BucketAutoOperationOutputBuilder apply(OperationOutput operationOutput) { return new BucketAutoOperationOutputBuilder(operationOutput, this.operation); @@ -209,16 +194,13 @@ public static class ExpressionBucketAutoOperationBuilder * * @param expression must not be {@literal null}. * @param operation must not be {@literal null}. - * @param parameters + * @param parameters must not be {@literal null}. 
*/ protected ExpressionBucketAutoOperationBuilder(String expression, BucketAutoOperation operation, Object[] parameters) { super(expression, operation, parameters); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) - */ @Override protected BucketAutoOperationOutputBuilder apply(OperationOutput operationOutput) { return new BucketAutoOperationOutputBuilder(operationOutput, this.operation); @@ -240,8 +222,7 @@ public interface Granularity { /** * Supported MongoDB granularities. * - * @see https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity * @author Mark Paluch */ public enum Granularities implements Granularity { @@ -264,9 +245,6 @@ public enum Granularities implements Granularity { this.granularity = granularity; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.GranularitytoMongoGranularity() - */ @Override public String getMongoRepresentation() { return granularity; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java index 7ee57da27c..525789e628 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,17 +20,14 @@ import java.util.Collections; import java.util.List; +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.BucketOperation.BucketOperationOutputBuilder; import org.springframework.util.Assert; -import org.bson.Document; - /** * Encapsulates the aggregation framework {@code $bucket}-operation.
          - * * Bucket stage is typically used with {@link Aggregation} and {@code $facet}. Categorizes incoming documents into * groups, called buckets, based on a specified expression and bucket boundaries.
          - * * We recommend to use the static factory method {@link Aggregation#bucket(String)} instead of creating instances of * this class directly. * @@ -83,13 +80,10 @@ private BucketOperation(BucketOperation bucketOperation, List boundaries super(bucketOperation); - this.boundaries = new ArrayList(boundaries); + this.boundaries = new ArrayList<>(boundaries); this.defaultBucket = defaultBucket; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -103,18 +97,23 @@ public Document toDocument(AggregationOperationContext context) { options.putAll(super.toDocument(context)); - return new Document("$bucket", options); + return new Document(getOperator(), options); + } + + @Override + public String getOperator() { + return "$bucket"; } /** * Configures a default bucket {@literal literal} and return a new {@link BucketOperation}. * * @param literal must not be {@literal null}. - * @return + * @return new instance of {@link BucketOperation}. */ public BucketOperation withDefaultBucket(Object literal) { - Assert.notNull(literal, "Default bucket literal must not be null!"); + Assert.notNull(literal, "Default bucket literal must not be null"); return new BucketOperation(this, boundaries, literal); } @@ -123,47 +122,35 @@ public BucketOperation withDefaultBucket(Object literal) { * preserved and the new {@literal boundaries} are appended. * * @param boundaries must not be {@literal null}. - * @return + * @return new instance of {@link BucketOperation}. */ public BucketOperation withBoundaries(Object... 
boundaries) { - Assert.notNull(boundaries, "Boundaries must not be null!"); - Assert.noNullElements(boundaries, "Boundaries must not contain null values!"); + Assert.notNull(boundaries, "Boundaries must not be null"); + Assert.noNullElements(boundaries, "Boundaries must not contain null values"); - List newBoundaries = new ArrayList(this.boundaries.size() + boundaries.length); + List newBoundaries = new ArrayList<>(this.boundaries.size() + boundaries.length); newBoundaries.addAll(this.boundaries); newBoundaries.addAll(Arrays.asList(boundaries)); return new BucketOperation(this, newBoundaries, defaultBucket); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#newBucketOperation(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Outputs) - */ @Override protected BucketOperation newBucketOperation(Outputs outputs) { return new BucketOperation(this, outputs); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutputExpression(java.lang.String, java.lang.Object[]) - */ @Override public ExpressionBucketOperationBuilder andOutputExpression(String expression, Object... 
params) { return new ExpressionBucketOperationBuilder(expression, this, params); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public BucketOperationOutputBuilder andOutput(AggregationExpression expression) { return new BucketOperationOutputBuilder(expression, this); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(java.lang.String) - */ @Override public BucketOperationOutputBuilder andOutput(String fieldName) { return new BucketOperationOutputBuilder(Fields.field(fieldName), this); @@ -185,9 +172,6 @@ protected BucketOperationOutputBuilder(Object value, BucketOperation operation) super(value, operation); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) - */ @Override protected BucketOperationOutputBuilder apply(OperationOutput operationOutput) { return new BucketOperationOutputBuilder(operationOutput, this.operation); @@ -204,20 +188,17 @@ public static class ExpressionBucketOperationBuilder extends ExpressionBucketOperationBuilderSupport { /** - * Creates a new {@link ExpressionBucketOperationBuilderSupport} for the given value, {@link BucketOperation} - * and parameters. + * Creates a new {@link ExpressionBucketOperationBuilderSupport} for the given value, {@link BucketOperation} and + * parameters. * * @param expression must not be {@literal null}. * @param operation must not be {@literal null}. - * @param parameters + * @param parameters must not be {@literal null}. 
*/ protected ExpressionBucketOperationBuilder(String expression, BucketOperation operation, Object[] parameters) { super(expression, operation, parameters); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) - */ @Override protected BucketOperationOutputBuilder apply(OperationOutput operationOutput) { return new BucketOperationOutputBuilder(operationOutput, this.operation); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java index f8eec4ddbb..e19ad59a3f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,14 +21,13 @@ import java.util.Collections; import java.util.List; +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder; import org.springframework.expression.spel.ast.Projection; import org.springframework.util.Assert; -import org.bson.Document; - /** * Base class for bucket operations that support output expressions the aggregation framework.
          * Bucket stages collect documents into buckets and can contribute output fields.
          @@ -52,7 +51,7 @@ public abstract class BucketOperationSupport operationSupport) */ protected BucketOperationSupport(BucketOperationSupport operationSupport, Outputs outputs) { - Assert.notNull(operationSupport, "BucketOperationSupport must not be null!"); - Assert.notNull(outputs, "Outputs must not be null!"); + Assert.notNull(operationSupport, "BucketOperationSupport must not be null"); + Assert.notNull(outputs, "Outputs must not be null"); this.groupByField = operationSupport.groupByField; this.groupByExpression = operationSupport.groupByExpression; @@ -104,7 +103,7 @@ protected BucketOperationSupport(BucketOperationSupport operationSupport, * * @param expression the SpEL expression, must not be {@literal null} or empty. * @param params must not be {@literal null} - * @return + * @return new instance of {@link ExpressionBucketOperationBuilderSupport} to create {@link BucketOperation}. */ public abstract ExpressionBucketOperationBuilderSupport andOutputExpression(String expression, Object... params); @@ -114,7 +113,7 @@ public abstract ExpressionBucketOperationBuilderSupport andOutputExpressio * resulting bucket documents. * * @param expression the SpEL expression, must not be {@literal null} or empty. - * @return + * @return never {@literal null}. */ public abstract B andOutput(AggregationExpression expression); @@ -124,14 +123,14 @@ public abstract ExpressionBucketOperationBuilderSupport andOutputExpressio * {@literal fieldName}. * * @param fieldName must not be {@literal null} or empty. - * @return + * @return never {@literal null}. */ public abstract B andOutput(String fieldName); /** * Creates a new {@link BucketOperationSupport} given to add a count field to the resulting bucket documents. * - * @return + * @return never {@literal null}. 
*/ public B andOutputCount() { return andOutput(new AggregationExpression() { @@ -142,9 +141,6 @@ public Document toDocument(AggregationOperationContext context) { }); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -160,9 +156,6 @@ public Document toDocument(AggregationOperationContext context) { return document; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() - */ @Override public ExposedFields getFields() { return outputs.asExposedFields(); @@ -220,8 +213,8 @@ public abstract static class OutputBuilder, T exte */ protected OutputBuilder(Object value, T operation) { - Assert.notNull(value, "Value must not be null or empty!"); - Assert.notNull(operation, "ProjectionOperation must not be null!"); + Assert.notNull(value, "Value must not be null or empty"); + Assert.notNull(operation, "ProjectionOperation must not be null"); this.value = value; this.operation = operation; @@ -231,7 +224,7 @@ protected OutputBuilder(Object value, T operation) { * Generates a builder for a {@code $sum}-expression.
          * Count expressions are emulated via {@code $sum: 1}. * - * @return + * @return never {@literal null}. */ public B count() { return sum(1); @@ -240,7 +233,7 @@ public B count() { /** * Generates a builder for a {@code $sum}-expression for the current value. * - * @return + * @return never {@literal null}. */ public B sum() { return apply(Accumulators.SUM); @@ -249,8 +242,8 @@ public B sum() { /** * Generates a builder for a {@code $sum}-expression for the given {@literal value}. * - * @param value - * @return + * @param value must not be {@literal null}. + * @return never {@literal null}. */ public B sum(Number value) { return apply(new OperationOutput(Accumulators.SUM.getMongoOperator(), Collections.singleton(value))); @@ -259,7 +252,7 @@ public B sum(Number value) { /** * Generates a builder for an {@code $last}-expression for the current value.. * - * @return + * @return never {@literal null}. */ public B last() { return apply(Accumulators.LAST); @@ -268,7 +261,7 @@ public B last() { /** * Generates a builder for a {@code $first}-expression the current value. * - * @return + * @return never {@literal null}. */ public B first() { return apply(Accumulators.FIRST); @@ -277,8 +270,7 @@ public B first() { /** * Generates a builder for an {@code $avg}-expression for the current value. * - * @param reference - * @return + * @return never {@literal null}. */ public B avg() { return apply(Accumulators.AVG); @@ -287,7 +279,7 @@ public B avg() { /** * Generates a builder for an {@code $min}-expression for the current value. * - * @return + * @return never {@literal null}. */ public B min() { return apply(Accumulators.MIN); @@ -296,7 +288,7 @@ public B min() { /** * Generates a builder for an {@code $max}-expression for the current value. * - * @return + * @return never {@literal null}. */ public B max() { return apply(Accumulators.MAX); @@ -305,7 +297,7 @@ public B max() { /** * Generates a builder for an {@code $push}-expression for the current value. 
* - * @return + * @return never {@literal null}. */ public B push() { return apply(Accumulators.PUSH); @@ -314,7 +306,7 @@ public B push() { /** * Generates a builder for an {@code $addToSet}-expression for the current value. * - * @return + * @return never {@literal null}. */ public B addToSet() { return apply(Accumulators.ADDTOSET); @@ -325,14 +317,14 @@ public B addToSet() { * * @param operation the operation name, must not be {@literal null} or empty. * @param values must not be {@literal null}. - * @return + * @return never {@literal null}. */ public B apply(String operation, Object... values) { - Assert.hasText(operation, "Operation must not be empty or null!"); - Assert.notNull(value, "Values must not be null!"); + Assert.hasText(operation, "Operation must not be empty or null"); + Assert.notNull(value, "Values must not be null"); - List objects = new ArrayList(values.length + 1); + List objects = new ArrayList<>(values.length + 1); objects.add(value); objects.addAll(Arrays.asList(values)); return apply(new OperationOutput(operation, objects)); @@ -342,7 +334,7 @@ public B apply(String operation, Object... values) { * Apply an {@link OperationOutput} to this output. * * @param operationOutput must not be {@literal null}. - * @return + * @return never {@literal null}. */ protected abstract B apply(OperationOutput operationOutput); @@ -354,16 +346,16 @@ private B apply(Accumulators operation) { * Returns the finally to be applied {@link BucketOperation} with the given alias. * * @param alias will never be {@literal null} or empty. - * @return + * @return never {@literal null}. */ public T as(String alias) { - if (value instanceof OperationOutput) { - return this.operation.andOutput(((OperationOutput) this.value).withAlias(alias)); + if (value instanceof OperationOutput operationOutput) { + return this.operation.andOutput(operationOutput.withAlias(alias)); } if (value instanceof Field) { - throw new IllegalStateException("Cannot add a field as top-level output. 
Use accumulator expressions."); + throw new IllegalStateException("Cannot add a field as top-level output; Use accumulator expressions"); } return this.operation @@ -376,7 +368,7 @@ private enum Accumulators { SUM("$sum"), AVG("$avg"), FIRST("$first"), LAST("$last"), MAX("$max"), MIN("$min"), PUSH("$push"), ADDTOSET( "$addToSet"); - private String mongoOperator; + private final String mongoOperator; Accumulators(String mongoOperator) { this.mongoOperator = mongoOperator; @@ -396,7 +388,7 @@ protected static class Outputs implements AggregationExpression { protected static final Outputs EMPTY = new Outputs(); - private List outputs; + private final List outputs; /** * Creates a new, empty {@link Outputs}. @@ -445,7 +437,7 @@ protected ExposedFields asExposedFields() { */ protected Outputs and(Output output) { - Assert.notNull(output, "BucketOutput must not be null!"); + Assert.notNull(output, "BucketOutput must not be null"); return new Outputs(this.outputs, output); } @@ -456,9 +448,6 @@ protected boolean isEmpty() { return outputs.isEmpty(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -491,7 +480,7 @@ protected abstract static class Output implements AggregationExpression { */ protected Output(Field field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); this.field = new ExposedField(field, true); } @@ -527,11 +516,11 @@ public OperationOutput(String operation, Collection values) { super(Fields.field(operation)); - Assert.hasText(operation, "Operation must not be null or empty!"); - Assert.notNull(values, "Values must not be null!"); + Assert.hasText(operation, "Operation must not be null or empty"); + Assert.notNull(values, "Values must not be null"); this.operation = operation; - 
this.values = new ArrayList(values); + this.values = new ArrayList<>(values); } private OperationOutput(Field field, OperationOutput operationOutput) { @@ -542,32 +531,27 @@ private OperationOutput(Field field, OperationOutput operationOutput) { this.values = operationOutput.values; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { List operationArguments = getOperationArguments(context); - return new Document(operation, - operationArguments.size() == 1 ? operationArguments.get(0) : operationArguments); + return new Document(operation, operationArguments.size() == 1 ? operationArguments.get(0) : operationArguments); } protected List getOperationArguments(AggregationOperationContext context) { - List result = new ArrayList(values != null ? values.size() : 1); + List result = new ArrayList<>(values != null ? values.size() : 1); for (Object element : values) { - if (element instanceof Field) { - result.add(context.getReference((Field) element).toString()); - } else if (element instanceof Fields) { - for (Field field : (Fields) element) { + if (element instanceof Field field) { + result.add(context.getReference(field).toString()); + } else if (element instanceof Fields fields) { + for (Field field : fields) { result.add(context.getReference(field).toString()); } - } else if (element instanceof AggregationExpression) { - result.add(((AggregationExpression) element).toDocument(context)); + } else if (element instanceof AggregationExpression aggregationExpression) { + result.add(aggregationExpression.toDocument(context)); } else { result.add(element); } @@ -579,7 +563,7 @@ protected List getOperationArguments(AggregationOperationContext context /** * Returns the field that holds the {@link ProjectionOperationBuilder.OperationProjection}. 
* - * @return + * @return never {@literal null}. */ protected Field getField() { return getExposedField(); @@ -589,7 +573,7 @@ protected Field getField() { * Creates a new instance of this {@link OperationOutput} with the given alias. * * @param alias the alias to set - * @return + * @return new instance of {@link OperationOutput}. */ public OperationOutput withAlias(String alias) { @@ -632,16 +616,13 @@ public SpelExpressionOutput(String expression, Object[] parameters) { super(Fields.field(expression)); - Assert.hasText(expression, "Expression must not be null!"); - Assert.notNull(parameters, "Parameters must not be null!"); + Assert.hasText(expression, "Expression must not be null"); + Assert.notNull(parameters, "Parameters must not be null"); this.expression = expression; this.params = parameters.clone(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Output#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return (Document) TRANSFORMER.transform(expression, context, params); @@ -658,8 +639,8 @@ private static class AggregationExpressionOutput extends Output { /** * Creates a new {@link AggregationExpressionOutput}. * - * @param field - * @param expression + * @param field must not be {@literal null}. + * @param expression must not be {@literal null}. 
*/ protected AggregationExpressionOutput(Field field, AggregationExpression expression) { @@ -668,9 +649,6 @@ protected AggregationExpressionOutput(Field field, AggregationExpression express this.expression = expression; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Output#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return expression.toDocument(context); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java index 112afddbd0..f27b7f16cb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -32,7 +32,7 @@ public class ComparisonOperators { * Take the field referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ComparisonOperatorFactory}. 
*/ public static ComparisonOperatorFactory valueOf(String fieldReference) { return new ComparisonOperatorFactory(fieldReference); @@ -42,7 +42,7 @@ public static ComparisonOperatorFactory valueOf(String fieldReference) { * Take the value resulting from the given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ComparisonOperatorFactory}. */ public static ComparisonOperatorFactory valueOf(AggregationExpression expression) { return new ComparisonOperatorFactory(expression); @@ -60,7 +60,7 @@ public static class ComparisonOperatorFactory { */ public ComparisonOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; } @@ -72,7 +72,7 @@ public ComparisonOperatorFactory(String fieldReference) { */ public ComparisonOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; } @@ -81,7 +81,7 @@ public ComparisonOperatorFactory(AggregationExpression expression) { * Creates new {@link AggregationExpression} that compares two values. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. */ public Cmp compareTo(String fieldReference) { return createCmp().compareTo(fieldReference); @@ -91,7 +91,7 @@ public Cmp compareTo(String fieldReference) { * Creates new {@link AggregationExpression} that compares two values. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. 
*/ public Cmp compareTo(AggregationExpression expression) { return createCmp().compareTo(expression); @@ -101,7 +101,7 @@ public Cmp compareTo(AggregationExpression expression) { * Creates new {@link AggregationExpression} that compares two values. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. */ public Cmp compareToValue(Object value) { return createCmp().compareToValue(value); @@ -116,7 +116,7 @@ private Cmp createCmp() { * value is equal to the value of the referenced field. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ public Eq equalTo(String fieldReference) { return createEq().equalTo(fieldReference); @@ -127,7 +127,7 @@ public Eq equalTo(String fieldReference) { * value is equal to the expression result. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ public Eq equalTo(AggregationExpression expression) { return createEq().equalTo(expression); @@ -138,7 +138,7 @@ public Eq equalTo(AggregationExpression expression) { * value is equal to the given value. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ public Eq equalToValue(Object value) { return createEq().equalToValue(value); @@ -153,7 +153,7 @@ private Eq createEq() { * value is greater than the value of the referenced field. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. */ public Gt greaterThan(String fieldReference) { return createGt().greaterThan(fieldReference); @@ -164,7 +164,7 @@ public Gt greaterThan(String fieldReference) { * value is greater than the expression result. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. 
*/ public Gt greaterThan(AggregationExpression expression) { return createGt().greaterThan(expression); @@ -175,7 +175,7 @@ public Gt greaterThan(AggregationExpression expression) { * value is greater than the given value. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. */ public Gt greaterThanValue(Object value) { return createGt().greaterThanValue(value); @@ -190,7 +190,7 @@ private Gt createGt() { * value is greater than or equivalent to the value of the referenced field. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. */ public Gte greaterThanEqualTo(String fieldReference) { return createGte().greaterThanEqualTo(fieldReference); @@ -201,7 +201,7 @@ public Gte greaterThanEqualTo(String fieldReference) { * value is greater than or equivalent to the expression result. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. */ public Gte greaterThanEqualTo(AggregationExpression expression) { return createGte().greaterThanEqualTo(expression); @@ -212,7 +212,7 @@ public Gte greaterThanEqualTo(AggregationExpression expression) { * value is greater than or equivalent to the given value. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. */ public Gte greaterThanEqualToValue(Object value) { return createGte().greaterThanEqualToValue(value); @@ -227,7 +227,7 @@ private Gte createGte() { * value is less than the value of the referenced field. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. */ public Lt lessThan(String fieldReference) { return createLt().lessThan(fieldReference); @@ -238,7 +238,7 @@ public Lt lessThan(String fieldReference) { * value is less than the expression result. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. 
*/ public Lt lessThan(AggregationExpression expression) { return createLt().lessThan(expression); @@ -249,7 +249,7 @@ public Lt lessThan(AggregationExpression expression) { * value is less than to the given value. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. */ public Lt lessThanValue(Object value) { return createLt().lessThanValue(value); @@ -264,7 +264,7 @@ private Lt createLt() { * value is less than or equivalent to the value of the referenced field. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. */ public Lte lessThanEqualTo(String fieldReference) { return createLte().lessThanEqualTo(fieldReference); @@ -275,7 +275,7 @@ public Lte lessThanEqualTo(String fieldReference) { * value is less than or equivalent to the expression result. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. */ public Lte lessThanEqualTo(AggregationExpression expression) { return createLte().lessThanEqualTo(expression); @@ -285,8 +285,8 @@ public Lte lessThanEqualTo(AggregationExpression expression) { * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first * value is less than or equivalent to the given value. * - * @param value - * @return + * @param value must not be {@literal null}. + * @return new instance of {@link Lte}. */ public Lte lessThanEqualToValue(Object value) { return createLte().lessThanEqualToValue(value); @@ -301,7 +301,7 @@ private Lte createLte() { * are not equivalent. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. */ public Ne notEqualTo(String fieldReference) { return createNe().notEqualTo(fieldReference); @@ -312,7 +312,7 @@ public Ne notEqualTo(String fieldReference) { * are not equivalent. * * @param expression must not be {@literal null}. 
- * @return + * @return new instance of {@link Ne}. */ public Ne notEqualTo(AggregationExpression expression) { return createNe().notEqualTo(expression); @@ -323,7 +323,7 @@ public Ne notEqualTo(AggregationExpression expression) { * are not equivalent. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. */ public Ne notEqualToValue(Object value) { return createNe().notEqualToValue(value); @@ -358,11 +358,11 @@ protected String getMongoMethod() { * Creates new {@link Cmp}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. */ public static Cmp valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Cmp(asFields(fieldReference)); } @@ -370,11 +370,11 @@ public static Cmp valueOf(String fieldReference) { * Creates new {@link Cmp}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. */ public static Cmp valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Cmp(Collections.singletonList(expression)); } @@ -382,11 +382,11 @@ public static Cmp valueOf(AggregationExpression expression) { * Creates new {@link Cmp} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. */ public Cmp compareTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Cmp(append(Fields.field(fieldReference))); } @@ -394,11 +394,11 @@ public Cmp compareTo(String fieldReference) { * Creates new {@link Cmp} with all previously added arguments appending the given one. 
* * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. */ public Cmp compareTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Cmp(append(expression)); } @@ -406,12 +406,12 @@ public Cmp compareTo(AggregationExpression expression) { * Creates new {@link Cmp} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. */ public Cmp compareToValue(Object value) { - Assert.notNull(value, "Value must not be null!"); - return new Cmp(append(value)); + Assert.notNull(value, "Value must not be null"); + return new Cmp(append(value, Expand.KEEP_SOURCE)); } } @@ -435,11 +435,11 @@ protected String getMongoMethod() { * Creates new {@link Eq}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ public static Eq valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Eq(asFields(fieldReference)); } @@ -447,11 +447,11 @@ public static Eq valueOf(String fieldReference) { * Creates new {@link Eq}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ public static Eq valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Eq(Collections.singletonList(expression)); } @@ -459,11 +459,11 @@ public static Eq valueOf(AggregationExpression expression) { * Creates new {@link Eq} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. 
*/ public Eq equalTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Eq(append(Fields.field(fieldReference))); } @@ -471,11 +471,11 @@ public Eq equalTo(String fieldReference) { * Creates new {@link Eq} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ public Eq equalTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Eq(append(expression)); } @@ -483,12 +483,12 @@ public Eq equalTo(AggregationExpression expression) { * Creates new {@link Eq} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ public Eq equalToValue(Object value) { - Assert.notNull(value, "Value must not be null!"); - return new Eq(append(value)); + Assert.notNull(value, "Value must not be null"); + return new Eq(append(value, Expand.KEEP_SOURCE)); } } @@ -512,11 +512,11 @@ protected String getMongoMethod() { * Creates new {@link Gt}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. */ public static Gt valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Gt(asFields(fieldReference)); } @@ -524,11 +524,11 @@ public static Gt valueOf(String fieldReference) { * Creates new {@link Gt}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. 
*/ public static Gt valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Gt(Collections.singletonList(expression)); } @@ -536,11 +536,11 @@ public static Gt valueOf(AggregationExpression expression) { * Creates new {@link Gt} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. */ public Gt greaterThan(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Gt(append(Fields.field(fieldReference))); } @@ -548,11 +548,11 @@ public Gt greaterThan(String fieldReference) { * Creates new {@link Gt} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. */ public Gt greaterThan(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Gt(append(expression)); } @@ -560,11 +560,11 @@ public Gt greaterThan(AggregationExpression expression) { * Creates new {@link Gt} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. */ public Gt greaterThanValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Gt(append(value)); } } @@ -589,11 +589,11 @@ protected String getMongoMethod() { * Creates new {@link Lt}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. 
*/ public static Lt valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Lt(asFields(fieldReference)); } @@ -601,11 +601,11 @@ public static Lt valueOf(String fieldReference) { * Creates new {@link Lt}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. */ public static Lt valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Lt(Collections.singletonList(expression)); } @@ -613,11 +613,11 @@ public static Lt valueOf(AggregationExpression expression) { * Creates new {@link Lt} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. */ public Lt lessThan(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Lt(append(Fields.field(fieldReference))); } @@ -625,11 +625,11 @@ public Lt lessThan(String fieldReference) { * Creates new {@link Lt} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. */ public Lt lessThan(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Lt(append(expression)); } @@ -637,11 +637,11 @@ public Lt lessThan(AggregationExpression expression) { * Creates new {@link Lt} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. 
*/ public Lt lessThanValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Lt(append(value)); } } @@ -666,11 +666,11 @@ protected String getMongoMethod() { * Creates new {@link Gte}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. */ public static Gte valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Gte(asFields(fieldReference)); } @@ -678,11 +678,11 @@ public static Gte valueOf(String fieldReference) { * Creates new {@link Gte}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. */ public static Gte valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Gte(Collections.singletonList(expression)); } @@ -690,11 +690,11 @@ public static Gte valueOf(AggregationExpression expression) { * Creates new {@link Gte} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. */ public Gte greaterThanEqualTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Gte(append(Fields.field(fieldReference))); } @@ -702,11 +702,11 @@ public Gte greaterThanEqualTo(String fieldReference) { * Creates new {@link Gte} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. 
*/ public Gte greaterThanEqualTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Gte(append(expression)); } @@ -714,11 +714,11 @@ public Gte greaterThanEqualTo(AggregationExpression expression) { * Creates new {@link Gte} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. */ public Gte greaterThanEqualToValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Gte(append(value)); } } @@ -743,11 +743,11 @@ protected String getMongoMethod() { * Creates new {@link Lte}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. */ public static Lte valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Lte(asFields(fieldReference)); } @@ -755,11 +755,11 @@ public static Lte valueOf(String fieldReference) { * Creates new {@link Lte}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. */ public static Lte valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Lte(Collections.singletonList(expression)); } @@ -767,11 +767,11 @@ public static Lte valueOf(AggregationExpression expression) { * Creates new {@link Lte} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. 
*/ public Lte lessThanEqualTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Lte(append(Fields.field(fieldReference))); } @@ -779,11 +779,11 @@ public Lte lessThanEqualTo(String fieldReference) { * Creates new {@link Lte} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. */ public Lte lessThanEqualTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Lte(append(expression)); } @@ -791,11 +791,11 @@ public Lte lessThanEqualTo(AggregationExpression expression) { * Creates new {@link Lte} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. */ public Lte lessThanEqualToValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Lte(append(value)); } } @@ -820,11 +820,11 @@ protected String getMongoMethod() { * Creates new {@link Ne}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. */ public static Ne valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Ne(asFields(fieldReference)); } @@ -832,11 +832,11 @@ public static Ne valueOf(String fieldReference) { * Creates new {@link Ne}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. 
*/ public static Ne valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Ne(Collections.singletonList(expression)); } @@ -844,11 +844,11 @@ public static Ne valueOf(AggregationExpression expression) { * Creates new {@link Ne} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. */ public Ne notEqualTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Ne(append(Fields.field(fieldReference))); } @@ -856,11 +856,11 @@ public Ne notEqualTo(String fieldReference) { * Creates new {@link Ne} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. */ public Ne notEqualTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Ne(append(expression)); } @@ -868,12 +868,12 @@ public Ne notEqualTo(AggregationExpression expression) { * Creates new {@link Eq} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. 
*/ public Ne notEqualToValue(Object value) { - Assert.notNull(value, "Value must not be null!"); - return new Ne(append(value)); + Assert.notNull(value, "Value must not be null"); + return new Ne(append(value, Expand.KEEP_SOURCE)); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java index 18cd232473..323a11895b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,6 +17,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; @@ -43,7 +44,7 @@ public class ConditionalOperators { * Take the field referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ConditionalOperatorFactory}. */ public static ConditionalOperatorFactory when(String fieldReference) { return new ConditionalOperatorFactory(fieldReference); @@ -53,7 +54,7 @@ public static ConditionalOperatorFactory when(String fieldReference) { * Take the value resulting from the given {@literal expression}. 
* * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ConditionalOperatorFactory}. */ public static ConditionalOperatorFactory when(AggregationExpression expression) { return new ConditionalOperatorFactory(expression); @@ -63,7 +64,7 @@ public static ConditionalOperatorFactory when(AggregationExpression expression) * Take the value resulting from the given {@literal criteriaDefinition}. * * @param criteriaDefinition must not be {@literal null}. - * @return + * @return new instance of {@link ConditionalOperatorFactory}. */ public static ConditionalOperatorFactory when(CriteriaDefinition criteriaDefinition) { return new ConditionalOperatorFactory(criteriaDefinition); @@ -75,11 +76,11 @@ public static ConditionalOperatorFactory when(CriteriaDefinition criteriaDefinit * instances of undefined values or missing fields, returns the value of the replacement expression. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link IfNull.ThenBuilder} to create {@link IfNull}. */ public static IfNull.ThenBuilder ifNull(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return IfNull.ifNull(fieldReference); } @@ -89,11 +90,11 @@ public static IfNull.ThenBuilder ifNull(String fieldReference) { * instances of undefined values or missing fields, returns the value of the replacement expression. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link IfNull.ThenBuilder} to create {@link IfNull}. 
*/ public static IfNull.ThenBuilder ifNull(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return IfNull.ifNull(expression); } @@ -103,7 +104,7 @@ public static IfNull.ThenBuilder ifNull(AggregationExpression expression) { * out of the control flow. * * @param conditions must not be {@literal null}. - * @return + * @return new instance of {@link Switch}. */ public static Switch switchCases(CaseOperator... conditions) { return Switch.switchCases(conditions); @@ -115,7 +116,7 @@ public static Switch switchCases(CaseOperator... conditions) { * out of the control flow. * * @param conditions must not be {@literal null}. - * @return + * @return new instance of {@link Switch}. */ public static Switch switchCases(List conditions) { return Switch.switchCases(conditions); @@ -136,7 +137,7 @@ public static class ConditionalOperatorFactory { */ public ConditionalOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; @@ -150,7 +151,7 @@ public ConditionalOperatorFactory(String fieldReference) { */ public ConditionalOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; @@ -164,7 +165,7 @@ public ConditionalOperatorFactory(AggregationExpression expression) { */ public ConditionalOperatorFactory(CriteriaDefinition criteriaDefinition) { - Assert.notNull(criteriaDefinition, "CriteriaDefinition must not be null!"); + Assert.notNull(criteriaDefinition, "CriteriaDefinition must not be null"); this.fieldReference = null; this.expression = null; @@ -176,11 +177,11 @@ public ConditionalOperatorFactory(CriteriaDefinition 
criteriaDefinition) { * return expressions. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link OtherwiseBuilder} to create {@link Cond}. */ public OtherwiseBuilder then(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createThenBuilder().then(value); } @@ -189,11 +190,11 @@ public OtherwiseBuilder then(Object value) { * return expressions. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link OtherwiseBuilder} to create {@link Cond}. */ public OtherwiseBuilder thenValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createThenBuilder().then(expression); } @@ -202,12 +203,12 @@ public OtherwiseBuilder thenValueOf(AggregationExpression expression) { * return expressions. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link OtherwiseBuilder} to create {@link Cond}. */ public OtherwiseBuilder thenValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return createThenBuilder().then(fieldReference); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createThenBuilder().thenValueOf(fieldReference); } private ThenBuilder createThenBuilder() { @@ -233,9 +234,9 @@ private boolean usesCriteriaDefinition() { * field references}, {@link AggregationExpression expressions}, values of simple MongoDB types or values that can be * converted to a simple MongoDB type. * + * @author Mark Paluch * @see https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/ - * @author Mark Paluch */ public static class IfNull implements AggregationExpression { @@ -251,13 +252,13 @@ private IfNull(Object condition, Object value) { /** * Creates new {@link IfNull}. 
* - * @param fieldReference the field to check for a {@literal null} value, field reference must not be {@literal null} - * . - * @return + * @param fieldReference the field to check for a {@literal null} value, field reference must not be + * {@literal null}. + * @return never {@literal null}. */ public static ThenBuilder ifNull(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new IfNullOperatorBuilder().ifNull(fieldReference); } @@ -266,42 +267,48 @@ public static ThenBuilder ifNull(String fieldReference) { * * @param expression the expression to check for a {@literal null} value, field reference must not be * {@literal null}. - * @return + * @return never {@literal null}. */ public static ThenBuilder ifNull(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new IfNullOperatorBuilder().ifNull(expression); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { - List list = new ArrayList(); + List list = new ArrayList<>(); - if (condition instanceof Field) { - list.add(context.getReference((Field) condition).toString()); - } else if (condition instanceof AggregationExpression) { - list.add(((AggregationExpression) condition).toDocument(context)); + if (condition instanceof Collection collection) { + for (Object val : collection) { + list.add(mapCondition(val, context)); + } } else { - list.add(condition); + list.add(mapCondition(condition, context)); } list.add(resolve(value, context)); - return new Document("$ifNull", list); } + private Object mapCondition(Object condition, AggregationOperationContext context) { 
+ + if (condition instanceof Field field) { + return context.getReference(field).toString(); + } else if (condition instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } else { + return condition; + } + } + private Object resolve(Object value, AggregationOperationContext context) { - if (value instanceof Field) { - return context.getReference((Field) value).toString(); - } else if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); + if (value instanceof Field field) { + return context.getReference(field).toString(); + } else if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); } else if (value instanceof Document) { return value; } @@ -324,33 +331,53 @@ public interface IfNullBuilder { /** * @param expression the expression to check for a {@literal null} value, field name must not be {@literal null} * or empty. - * @return the {@link ThenBuilder} + * @return the {@link ThenBuilder}. */ ThenBuilder ifNull(AggregationExpression expression); } + /** + * @author Christoph Strobl + * @since 3.3 + */ + public interface OrBuilder { + + /** + * @param fieldReference the field to check for a {@literal null} value, field reference must not be + * {@literal null}. + * @return the {@link ThenBuilder} + */ + ThenBuilder orIfNull(String fieldReference); + + /** + * @param expression the expression to check for a {@literal null} value, + * @return the {@link ThenBuilder}. + */ + ThenBuilder orIfNull(AggregationExpression expression); + } + /** * @author Mark Paluch */ - public interface ThenBuilder { + public interface ThenBuilder extends OrBuilder { /** * @param value the value to be used if the {@code $ifNull} condition evaluates {@literal true}. 
Can be a * {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB * representation but must not be {@literal null}. - * @return + * @return new instance of {@link IfNull}. */ IfNull then(Object value); /** * @param fieldReference the field holding the replacement value, must not be {@literal null}. - * @return + * @return new instance of {@link IfNull}. */ IfNull thenValueOf(String fieldReference); /** * @param expression the expression yielding to the replacement value, must not be {@literal null}. - * @return + * @return new instance of {@link IfNull}. */ IfNull thenValueOf(AggregationExpression expression); } @@ -362,9 +389,11 @@ public interface ThenBuilder { */ static final class IfNullOperatorBuilder implements IfNullBuilder, ThenBuilder { - private @Nullable Object condition; + private @Nullable List conditions; - private IfNullOperatorBuilder() {} + private IfNullOperatorBuilder() { + conditions = new ArrayList<>(); + } /** * Creates a new builder for {@link IfNull}. 
@@ -375,50 +404,45 @@ public static IfNullOperatorBuilder newBuilder() { return new IfNullOperatorBuilder(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.IfNullBuilder#ifNull(java.lang.String) - */ public ThenBuilder ifNull(String fieldReference) { - Assert.hasText(fieldReference, "FieldReference name must not be null or empty!"); - this.condition = Fields.field(fieldReference); + Assert.hasText(fieldReference, "FieldReference name must not be null or empty"); + this.conditions.add(Fields.field(fieldReference)); return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.IfNullBuilder#ifNull(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public ThenBuilder ifNull(AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression name must not be null or empty!"); - this.condition = expression; + Assert.notNull(expression, "AggregationExpression name must not be null or empty"); + this.conditions.add(expression); return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#then(java.lang.Object) - */ + @Override + public ThenBuilder orIfNull(String fieldReference) { + return ifNull(fieldReference); + } + + @Override + public ThenBuilder orIfNull(AggregationExpression expression) { + return ifNull(expression); + } + public IfNull then(Object value) { - return new IfNull(condition, value); + return new IfNull(conditions, value); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#thenValueOf(java.lang.String) - */ public IfNull thenValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new IfNull(condition, Fields.field(fieldReference)); + Assert.notNull(fieldReference, "FieldReference must 
not be null"); + return new IfNull(conditions, Fields.field(fieldReference)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#thenValueOf(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ public IfNull thenValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new IfNull(condition, expression); + Assert.notNull(expression, "Expression must not be null"); + return new IfNull(conditions, expression); } } } @@ -446,7 +470,7 @@ protected String getMongoMethod() { */ public static Switch switchCases(CaseOperator... conditions) { - Assert.notNull(conditions, "Conditions must not be null!"); + Assert.notNull(conditions, "Conditions must not be null"); return switchCases(Arrays.asList(conditions)); } @@ -457,10 +481,16 @@ public static Switch switchCases(CaseOperator... conditions) { */ public static Switch switchCases(List conditions) { - Assert.notNull(conditions, "Conditions must not be null!"); - return new Switch(Collections. singletonMap("branches", new ArrayList(conditions))); + Assert.notNull(conditions, "Conditions must not be null"); + return new Switch(Collections.singletonMap("branches", new ArrayList<>(conditions))); } + /** + * Set the default value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Switch}. 
+ */ public Switch defaultTo(Object value) { return new Switch(append("default", value)); } @@ -481,31 +511,28 @@ private CaseOperator(AggregationExpression when, Object then) { public static ThenBuilder when(final AggregationExpression condition) { - Assert.notNull(condition, "Condition must not be null!"); + Assert.notNull(condition, "Condition must not be null"); return new ThenBuilder() { @Override public CaseOperator then(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new CaseOperator(condition, value); } }; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { Document dbo = new Document("case", when.toDocument(context)); - if (then instanceof AggregationExpression) { - dbo.put("then", ((AggregationExpression) then).toDocument(context)); - } else if (then instanceof Field) { - dbo.put("then", context.getReference((Field) then).toString()); + if (then instanceof AggregationExpression aggregationExpression) { + dbo.put("then", aggregationExpression.toDocument(context)); + } else if (then instanceof Field field) { + dbo.put("then", context.getReference(field).toString()); } else { dbo.put("then", then); } @@ -522,7 +549,7 @@ public interface ThenBuilder { * Set the then {@literal value}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link CaseOperator}. */ CaseOperator then(Object value); } @@ -536,10 +563,10 @@ public interface ThenBuilder { * {@link AggregationExpression expressions}, values of simple MongoDB types or values that can be converted to a * simple MongoDB type. 
* - * @see https://docs.mongodb.com/manual/reference/operator/aggregation/cond/ * @author Mark Paluch * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/cond/ */ public static class Cond implements AggregationExpression { @@ -571,9 +598,9 @@ private Cond(CriteriaDefinition condition, Object thenValue, Object otherwiseVal private Cond(Object condition, Object thenValue, Object otherwiseValue) { - Assert.notNull(condition, "Condition must not be null!"); - Assert.notNull(thenValue, "Then value must not be null!"); - Assert.notNull(otherwiseValue, "Otherwise value must not be null!"); + Assert.notNull(condition, "Condition must not be null"); + Assert.notNull(thenValue, "Then value must not be null"); + Assert.notNull(otherwiseValue, "Otherwise value must not be null"); assertNotBuilder(condition, "Condition"); assertNotBuilder(thenValue, "Then value"); @@ -584,10 +611,6 @@ private Cond(Object condition, Object thenValue, Object otherwiseValue) { this.otherwiseValue = otherwiseValue; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -606,8 +629,8 @@ private Object resolveValue(AggregationOperationContext context, Object value) { return resolve(context, value); } - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); } return context.getMappedObject(new Document("$set", value)).get("$set"); @@ -619,27 +642,24 @@ private Object resolveCriteria(AggregationOperationContext context, Object value return resolve(context, value); } - if (value instanceof AggregationExpression) { - return ((AggregationExpression) 
value).toDocument(context); + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); } - if (value instanceof CriteriaDefinition) { - - Document mappedObject = context.getMappedObject(((CriteriaDefinition) value).getCriteriaObject()); - List clauses = new ArrayList(); - - clauses.addAll(getClauses(context, mappedObject)); + if (value instanceof CriteriaDefinition criteriaDefinition) { + Document mappedObject = context.getMappedObject(criteriaDefinition.getCriteriaObject()); + List clauses = getClauses(context, mappedObject); return clauses.size() == 1 ? clauses.get(0) : clauses; } throw new InvalidDataAccessApiUsageException( - String.format("Invalid value in condition. Supported: Document, Field references, Criteria, got: %s", value)); + String.format("Invalid value in condition; Supported: Document, Field references, Criteria, got: %s", value)); } private List getClauses(AggregationOperationContext context, Document mappedObject) { - List clauses = new ArrayList(); + List clauses = new ArrayList<>(); for (String key : mappedObject.keySet()) { @@ -652,22 +672,20 @@ private List getClauses(AggregationOperationContext context, Document ma private List getClauses(AggregationOperationContext context, String key, Object predicate) { - List clauses = new ArrayList(); + List clauses = new ArrayList<>(); + + if (predicate instanceof List predicates) { - if (predicate instanceof List) { + List args = new ArrayList<>(predicates.size()); - List args = new ArrayList(); - for (Object clause : (List) predicate) { - if (clause instanceof Document) { - args.addAll(getClauses(context, (Document) clause)); + for (Object clause : predicates) { + if (clause instanceof Document document) { + args.addAll(getClauses(context, document)); } } clauses.add(new Document(key, args)); - - } else if (predicate instanceof Document) { - - Document nested = (Document) predicate; + } else if (predicate instanceof Document nested) { 
for (String s : nested.keySet()) { @@ -675,15 +693,14 @@ private List getClauses(AggregationOperationContext context, String key, continue; } - List args = new ArrayList(); + List args = new ArrayList<>(2); args.add("$" + key); args.add(nested.get(s)); clauses.add(new Document(s, args)); } - } else if (!isKeyword(key)) { - List args = new ArrayList(); + List args = new ArrayList<>(2); args.add("$" + key); args.add(predicate); clauses.add(new Document("$eq", args)); @@ -704,8 +721,8 @@ private boolean isKeyword(String candidate) { private Object resolve(AggregationOperationContext context, Object value) { - if (value instanceof Document) { - return context.getMappedObject((Document) value); + if (value instanceof Document document) { + return context.getMappedObject(document); } return context.getReference((Field) value).toString(); @@ -868,111 +885,81 @@ public static ConditionalExpressionBuilder newBuilder() { return new ConditionalExpressionBuilder(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.WhenBuilder#when(org.bson.Document) - */ @Override public ConditionalExpressionBuilder when(Document booleanExpression) { - Assert.notNull(booleanExpression, "'Boolean expression' must not be null!"); + Assert.notNull(booleanExpression, "'Boolean expression' must not be null"); this.condition = booleanExpression; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.WhenBuilder#when(org.springframework.data.mongodb.core.query.CriteriaDefinition) - */ @Override public ThenBuilder when(CriteriaDefinition criteria) { - Assert.notNull(criteria, "Criteria must not be null!"); + Assert.notNull(criteria, "Criteria must not be null"); this.condition = criteria; return this; } - /* (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.WhenBuilder#when(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public ThenBuilder when(AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression field must not be null!"); + Assert.notNull(expression, "AggregationExpression field must not be null"); this.condition = expression; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.WhenBuilder#when(java.lang.String) - */ @Override public ThenBuilder when(String booleanField) { - Assert.hasText(booleanField, "Boolean field name must not be null or empty!"); + Assert.hasText(booleanField, "Boolean field name must not be null or empty"); this.condition = Fields.field(booleanField); return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder#then(java.lang.Object) - */ @Override public OtherwiseBuilder then(Object thenValue) { - Assert.notNull(thenValue, "Then-value must not be null!"); + Assert.notNull(thenValue, "Then-value must not be null"); this.thenValue = thenValue; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder#thenValueOf(java.lang.String) - */ @Override public OtherwiseBuilder thenValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.thenValue = Fields.field(fieldReference); return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder#thenValueOf(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public OtherwiseBuilder thenValueOf(AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression must 
not be null!"); + Assert.notNull(expression, "AggregationExpression must not be null"); this.thenValue = expression; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.OtherwiseBuilder#otherwise(java.lang.Object) - */ @Override public Cond otherwise(Object otherwiseValue) { - Assert.notNull(otherwiseValue, "Value must not be null!"); + Assert.notNull(otherwiseValue, "Value must not be null"); return new Cond(condition, thenValue, otherwiseValue); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.OtherwiseBuilder#otherwiseValueOf(java.lang.String) - */ @Override public Cond otherwiseValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Cond(condition, thenValue, Fields.field(fieldReference)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.OtherwiseBuilder#otherwiseValueOf(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public Cond otherwiseValueOf(AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression must not be null!"); + Assert.notNull(expression, "AggregationExpression must not be null"); return new Cond(condition, thenValue, expression); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java new file mode 100644 index 0000000000..aa085b2a29 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java @@ -0,0 +1,754 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collections; + +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Gateway to {@literal convert} aggregation operations. + * + * @author Christoph Strobl + * @since 2.1 + */ +public class ConvertOperators { + + /** + * Take the field referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return + */ + public static ConvertOperatorFactory valueOf(String fieldReference) { + return new ConvertOperatorFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return + */ + public static ConvertOperatorFactory valueOf(AggregationExpression expression) { + return new ConvertOperatorFactory(expression); + } + + /** + * @author Christoph Strobl + */ + public static class ConvertOperatorFactory { + + private final @Nullable String fieldReference; + private final @Nullable AggregationExpression expression; + + /** + * Creates new {@link ConvertOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. 
+ */ + public ConvertOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + this.fieldReference = fieldReference; + this.expression = null; + } + + /** + * Creates new {@link ConvertOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public ConvertOperatorFactory(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + this.fieldReference = null; + this.expression = expression; + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the given {@code stringTypeIdentifier}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param stringTypeIdentifier must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertTo(String stringTypeIdentifier) { + return createConvert().to(stringTypeIdentifier); + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the given {@code numericTypeIdentifier}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param numericTypeIdentifier must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertTo(int numericTypeIdentifier) { + return createConvert().to(numericTypeIdentifier); + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the given {@link Type}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param type must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertTo(Type type) { + return createConvert().to(type); + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the value of the given {@link Field field reference}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertToTypeOf(String fieldReference) { + return createConvert().toTypeOf(fieldReference); + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the given {@link AggregationExpression expression}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertToTypeOf(AggregationExpression expression) { + return createConvert().toTypeOf(expression); + } + + /** + * Creates new {@link ToBool aggregation expression} for {@code $toBool} that converts a value to boolean. Shorthand + * for {@link #convertTo(String) #convertTo("bool")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToBool}. + */ + public ToBool convertToBoolean() { + return ToBool.toBoolean(valueObject()); + } + + /** + * Creates new {@link ToDate aggregation expression} for {@code $toDate} that converts a value to a date. Shorthand + * for {@link #convertTo(String) #convertTo("date")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToDate}. + */ + public ToDate convertToDate() { + return ToDate.toDate(valueObject()); + } + + /** + * Creates new {@link ToDecimal aggregation expression} for {@code $toDecimal} that converts a value to a decimal. + * Shorthand for {@link #convertTo(String) #convertTo("decimal")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToDecimal}. + */ + public ToDecimal convertToDecimal() { + return ToDecimal.toDecimal(valueObject()); + } + + /** + * Creates new {@link ToDouble aggregation expression} for {@code $toDouble} that converts a value to a double. + * Shorthand for {@link #convertTo(String) #convertTo("double")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToDouble}. + */ + public ToDouble convertToDouble() { + return ToDouble.toDouble(valueObject()); + } + + /** + * Creates new {@link ToInt aggregation expression} for {@code $toInt} that converts a value to an int. Shorthand + * for {@link #convertTo(String) #convertTo("int")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToInt}. + */ + public ToInt convertToInt() { + return ToInt.toInt(valueObject()); + } + + /** + * Creates new {@link ToLong aggregation expression} for {@code $toLong} that converts a value to a long. Shorthand + * for {@link #convertTo(String) #convertTo("long")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToLong}. + */ + public ToLong convertToLong() { + return ToLong.toLong(valueObject()); + } + + /** + * Creates new {@link ToObjectId aggregation expression} for {@code $toObjectId} that converts a value to an objectId. Shorthand + * for {@link #convertTo(String) #convertTo("objectId")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToObjectId}. + */ + public ToObjectId convertToObjectId() { + return ToObjectId.toObjectId(valueObject()); + } + + /** + * Creates new {@link ToString aggregation expression} for {@code $toString} that converts a value to a string. Shorthand + * for {@link #convertTo(String) #convertTo("string")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToString}. + */ + public ToString convertToString() { + return ToString.toString(valueObject()); + } + + /** + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to + * radians. + * + * @return new instance of {@link DegreesToRadians}. + * @since 3.3 + */ + public DegreesToRadians convertDegreesToRadians() { + return DegreesToRadians.degreesToRadians(valueObject()); + } + + private Convert createConvert() { + return usesFieldRef() ? Convert.convertValueOf(fieldReference) : Convert.convertValueOf(expression); + } + + private Object valueObject() { + return usesFieldRef() ? Fields.field(fieldReference) : expression; + } + + private boolean usesFieldRef() { + return fieldReference != null; + } + } + + /** + * {@link AggregationExpression} for {@code $convert} that converts a value to a specified type.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/convert/ + * @since 2.1 + */ + public static class Convert extends AbstractAggregationExpression { + + private Convert(Object value) { + super(value); + } + + /** + * Creates new {@link Convert} using the given value for the {@literal input} attribute. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public static Convert convertValue(Object value) { + return new Convert(Collections.singletonMap("input", value)); + } + + /** + * Creates new {@link Convert} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public static Convert convertValueOf(String fieldReference) { + return convertValue(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Convert} using the result of the provided {@link AggregationExpression expression} as + * {@literal input} value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public static Convert convertValueOf(AggregationExpression expression) { + return convertValue(expression); + } + + /** + * Specify the conversion target type via its {@link String} representation. + *
            + *
          • double
          • + *
          • string
          • + *
          • objectId
          • + *
          • bool
          • + *
          • date
          • + *
          • int
          • + *
          • long
          • + *
          • decimal
          • + *
          + * + * @param stringTypeIdentifier must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert to(String stringTypeIdentifier) { + return new Convert(append("to", stringTypeIdentifier)); + } + + /** + * Specify the conversion target type via its numeric representation. + *
          + *
          1
          + *
          double
          + *
          2
          + *
          string
          + *
          7
          + *
          objectId
          + *
          8
          + *
          bool
          + *
          9
          + *
          date
          + *
          16
          + *
          int
          + *
          18
          + *
          long
          + *
          19
          + *
          decimal
          + *
          + * + * @param numericTypeIdentifier must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert to(int numericTypeIdentifier) { + return new Convert(append("to", numericTypeIdentifier)); + } + + /** + * Specify the conversion target type. + * + * @param type must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert to(Type type) { + + String typeString = Type.BOOLEAN.equals(type) ? "bool" : type.value().toString(); + return to(typeString); + } + + /** + * Specify the conversion target type via the value of the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert toTypeOf(String fieldReference) { + return new Convert(append("to", Fields.field(fieldReference))); + } + + /** + * Specify the conversion target type via the value of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert toTypeOf(AggregationExpression expression) { + return new Convert(append("to", expression)); + } + + /** + * Optionally specify the value to return on encountering an error during conversion. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onErrorReturn(Object value) { + return new Convert(append("onError", value)); + } + + /** + * Optionally specify the field holding the value to return on encountering an error during conversion. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onErrorReturnValueOf(String fieldReference) { + return onErrorReturn(Fields.field(fieldReference)); + } + + /** + * Optionally specify the expression to evaluate and return on encountering an error during conversion. + * + * @param expression must not be {@literal null}. 
+ * @return new instance of {@link Convert}. + */ + public Convert onErrorReturnValueOf(AggregationExpression expression) { + return onErrorReturn(expression); + } + + /** + * Optionally specify the value to return when the input is {@literal null} or missing. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onNullReturn(Object value) { + return new Convert(append("onNull", value)); + } + + /** + * Optionally specify the field holding the value to return when the input is {@literal null} or missing. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onNullReturnValueOf(String fieldReference) { + return onNullReturn(Fields.field(fieldReference)); + } + + /** + * Optionally specify the expression to evaluate and return when the input is {@literal null} or missing. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onNullReturnValueOf(AggregationExpression expression) { + return onNullReturn(expression); + } + + @Override + protected String getMongoMethod() { + return "$convert"; + } + } + + /** + * {@link AggregationExpression} for {@code $toBool} that converts a value to {@literal boolean}. Shorthand for + * {@link Convert#to(String) Convert#to("bool")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toBool/ + * @since 2.1 + */ + public static class ToBool extends AbstractAggregationExpression { + + private ToBool(Object value) { + super(value); + } + + /** + * Creates new {@link ToBool} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToBool}. + */ + public static ToBool toBoolean(Object value) { + return new ToBool(value); + } + + @Override + protected String getMongoMethod() { + return "$toBool"; + } + } + + /** + * {@link AggregationExpression} for {@code $toDate} that converts a value to {@literal date}. Shorthand for + * {@link Convert#to(String) Convert#to("date")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toDate/ + * @since 2.1 + */ + public static class ToDate extends AbstractAggregationExpression { + + private ToDate(Object value) { + super(value); + } + + /** + * Creates new {@link ToDate} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToDate}. + */ + public static ToDate toDate(Object value) { + return new ToDate(value); + } + + @Override + protected String getMongoMethod() { + return "$toDate"; + } + } + + /** + * {@link AggregationExpression} for {@code $toDecimal} that converts a value to {@literal decimal}. Shorthand for + * {@link Convert#to(String) Convert#to("decimal")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toDecimal/ + * @since 2.1 + */ + public static class ToDecimal extends AbstractAggregationExpression { + + private ToDecimal(Object value) { + super(value); + } + + /** + * Creates new {@link ToDecimal} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToDecimal}. + */ + public static ToDecimal toDecimal(Object value) { + return new ToDecimal(value); + } + + @Override + protected String getMongoMethod() { + return "$toDecimal"; + } + } + + /** + * {@link AggregationExpression} for {@code $toDouble} that converts a value to {@literal double}. Shorthand for + * {@link Convert#to(String) Convert#to("double")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toDouble/ + * @since 2.1 + */ + public static class ToDouble extends AbstractAggregationExpression { + + private ToDouble(Object value) { + super(value); + } + + /** + * Creates new {@link ToDouble} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToDouble}. + */ + public static ToDouble toDouble(Object value) { + return new ToDouble(value); + } + + @Override + protected String getMongoMethod() { + return "$toDouble"; + } + } + + /** + * {@link AggregationExpression} for {@code $toInt} that converts a value to {@literal integer}. Shorthand for + * {@link Convert#to(String) Convert#to("int")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toInt/ + * @since 2.1 + */ + public static class ToInt extends AbstractAggregationExpression { + + private ToInt(Object value) { + super(value); + } + + /** + * Creates new {@link ToInt} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToInt}. + */ + public static ToInt toInt(Object value) { + return new ToInt(value); + } + + @Override + protected String getMongoMethod() { + return "$toInt"; + } + } + + /** + * {@link AggregationExpression} for {@code $toLong} that converts a value to {@literal long}. Shorthand for + * {@link Convert#to(String) Convert#to("long")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toLong/ + * @since 2.1 + */ + public static class ToLong extends AbstractAggregationExpression { + + private ToLong(Object value) { + super(value); + } + + /** + * Creates new {@link ToLong} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToLong}. + */ + public static ToLong toLong(Object value) { + return new ToLong(value); + } + + @Override + protected String getMongoMethod() { + return "$toLong"; + } + } + + /** + * {@link AggregationExpression} for {@code $toObjectId} that converts a value to {@literal objectId}. Shorthand for + * {@link Convert#to(String) Convert#to("objectId")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toObjectId/ + * @since 2.1 + */ + public static class ToObjectId extends AbstractAggregationExpression { + + private ToObjectId(Object value) { + super(value); + } + + /** + * Creates new {@link ToObjectId} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToObjectId}. + */ + public static ToObjectId toObjectId(Object value) { + return new ToObjectId(value); + } + + @Override + protected String getMongoMethod() { + return "$toObjectId"; + } + } + + /** + * {@link AggregationExpression} for {@code $toString} that converts a value to {@literal string}. Shorthand for + * {@link Convert#to(String) Convert#to("string")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toString/ + * @since 2.1 + */ + public static class ToString extends AbstractAggregationExpression { + + private ToString(Object value) { + super(value); + } + + /** + * Creates new {@link ToString} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToString}. + */ + public static ToString toString(Object value) { + return new ToString(value); + } + + @Override + protected String getMongoMethod() { + return "$toString"; + } + } + + /** + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DegreesToRadians extends AbstractAggregationExpression { + + private DegreesToRadians(Object value) { + super(value); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the value of the given field, measured in degrees, to radians. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadiansOf(String fieldName) { + return degreesToRadians(Fields.field(fieldName)); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the result of the given {@link AggregationExpression expression}, measured in degrees, to radians. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadiansOf(AggregationExpression expression) { + return degreesToRadians(expression); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the given value, measured in degrees, to radians. + * + * @param value must not be {@literal null}. 
+ * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadians(Object value) { + return new DegreesToRadians(value); + } + + @Override + protected String getMongoMethod() { + return "$degreesToRadians"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java index 8a1c953440..6a6108f832 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,8 +24,7 @@ * We recommend to use the static factory method {@link Aggregation#count()} instead of creating instances of this class * directly. * - * @see https://docs.mongodb.com/manual/reference/operator/aggregation/count/ + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/count/ * @author Mark Paluch * @since 1.10 */ @@ -34,27 +33,26 @@ public class CountOperation implements FieldsExposingAggregationOperation { private final String fieldName; /** - * Creates a new {@link CountOperation} given the {@link fieldName} field name. + * Creates a new {@link CountOperation} given the {@literal fieldName} field name. * - * @param asFieldName must not be {@literal null} or empty. 
+ * @param fieldName must not be {@literal null} or empty. */ public CountOperation(String fieldName) { - Assert.hasText(fieldName, "Field name must not be null or empty!"); + Assert.hasText(fieldName, "Field name must not be null or empty"); this.fieldName = fieldName; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { - return new Document("$count", fieldName); + return new Document(getOperator(), fieldName); + } + + @Override + public String getOperator() { + return "$count"; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() - */ @Override public ExposedFields getFields() { return ExposedFields.from(new ExposedField(fieldName, true)); @@ -71,7 +69,7 @@ public static class CountOperationBuilder { * Returns the finally to be applied {@link CountOperation} with the given alias. * * @param fieldName must not be {@literal null} or empty. - * @return + * @return new instance of {@link CountOperation}. */ public CountOperation as(String fieldName) { return new CountOperation(fieldName); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java index 9a83753a17..26a85bf2c3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -30,7 +30,7 @@ public class DataTypeOperators { * Return the BSON data type of the given {@literal field}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Type}. */ public static Type typeOf(String fieldReference) { return Type.typeOf(fieldReference); @@ -56,11 +56,11 @@ protected String getMongoMethod() { * Creates new {@link Type}. * * @param field must not be {@literal null}. - * @return + * @return new instance of {@link Type}. */ public static Type typeOf(String field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return new Type(Fields.field(field)); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java index 8e12773785..ff6ed7e983 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,27 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoUnit; +import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; +import java.util.Locale; +import java.util.Map; +import java.util.TimeZone; +import java.util.concurrent.TimeUnit; -import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators.ArithmeticOperatorFactory; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; /** * Gateway to {@literal Date} aggregation operations. * * @author Christoph Strobl + * @author Matt Morrissette * @since 1.10 */ public class DateOperators { @@ -32,806 +44,3371 @@ public class DateOperators { * Take the date referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link DateOperatorFactory}. */ public static DateOperatorFactory dateOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new DateOperatorFactory(fieldReference); } + /** + * Take the date referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. 
+ * @since 3.3 + */ + public static DateOperatorFactory zonedDateOf(String fieldReference, Timezone timezone) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new DateOperatorFactory(fieldReference).withTimezone(timezone); + } + /** * Take the date resulting from the given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link DateOperatorFactory}. */ public static DateOperatorFactory dateOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new DateOperatorFactory(expression); } + /** + * Take the date resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + * @since 3.3 + */ + public static DateOperatorFactory zonedDateOf(AggregationExpression expression, Timezone timezone) { + + Assert.notNull(expression, "Expression must not be null"); + return new DateOperatorFactory(expression).withTimezone(timezone); + } + + /** + * Take the given value as date.
          + * This can be one of: + *
            + *
          • {@link java.util.Date}
          • + *
          • {@link java.util.Calendar}
          • + *
          • {@link java.time.Instant}
          • + *
          • {@link java.time.ZonedDateTime}
          • + *
          • {@link java.lang.Long}
          • + *
          • {@link Field}
          • + *
          • {@link AggregationExpression}
          • + *
          + * + * @param value must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + * @since 2.1 + */ + public static DateOperatorFactory dateValue(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new DateOperatorFactory(value); + } + + /** + * Construct a Date object by providing the date’s constituent properties.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @return new instance of {@link DateFromPartsOperatorFactory}. + * @since 2.1 + */ + public static DateFromPartsOperatorFactory dateFromParts() { + return new DateFromPartsOperatorFactory(Timezone.none()); + } + + /** + * Construct a Date object from the given date {@link String}.
          + * To use a {@link Field field reference} or {@link AggregationExpression} as source of the date string consider + * {@link DateOperatorFactory#fromString()} or {@link DateFromString#fromStringOf(AggregationExpression)}.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @return new instance of {@link DateFromPartsOperatorFactory}. + * @since 2.1 + */ + public static DateFromString dateFromString(String value) { + return DateFromString.fromString(value); + } + + /** + * Timezone represents a MongoDB timezone abstraction which can be represented with a timezone ID or offset as a + * {@link String}. Also accepts a {@link AggregationExpression} or {@link Field} that resolves to a {@link String} of + * either Olson Timezone Identifier or a UTC Offset.
          + * + * + * + * + * + * + * + * + * + * + * + * + * + *
          FormatExample
          Olson Timezone Identifier"America/New_York"
          + * "Europe/London"
          + * "GMT"
          UTC Offset+/-[hh]:[mm], e.g. "+04:45"
          + * -[hh][mm], e.g. "-0530"
          + * +/-[hh], e.g. "+03"
          + * NOTE: Support for timezones in aggregations Requires MongoDB 3.6 or later. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ + public static class Timezone { + + private static final Timezone NONE = new Timezone(null); + + private final @Nullable Object value; + + private Timezone(@Nullable Object value) { + this.value = value; + } + + /** + * Return an empty {@link Timezone}. + * + * @return never {@literal null}. + */ + public static Timezone none() { + return NONE; + } + + /** + * Create a {@link Timezone} for the given value which must be a valid expression that resolves to a {@link String} + * representing an Olson Timezone Identifier or UTC Offset. + * + * @param value the plain timezone {@link String}, a {@link Field} holding the timezone or an + * {@link AggregationExpression} resulting in the timezone. + * @return new instance of {@link Timezone}. + */ + public static Timezone valueOf(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new Timezone(value); + } + + /** + * Create a {@link Timezone} for the given {@link TimeZone} rendering the offset as UTC offset. + * + * @param timeZone {@link TimeZone} rendering the offset as UTC offset. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromOffset(TimeZone timeZone) { + + Assert.notNull(timeZone, "TimeZone must not be null"); + + return fromOffset( + ZoneOffset.ofTotalSeconds(Math.toIntExact(TimeUnit.MILLISECONDS.toSeconds(timeZone.getRawOffset())))); + } + + /** + * Create a {@link Timezone} for the given {@link ZoneOffset} rendering the offset as UTC offset. + * + * @param offset {@link ZoneOffset} rendering the offset as UTC offset. + * @return new instance of {@link Timezone}. 
+ * @since 3.3 + */ + public static Timezone fromOffset(ZoneOffset offset) { + + Assert.notNull(offset, "ZoneOffset must not be null"); + return new Timezone(offset.toString()); + } + + /** + * Create a {@link Timezone} for the given {@link TimeZone} rendering the offset as UTC offset. + * + * @param timeZone {@link Timezone} rendering the offset as zone identifier. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromZone(TimeZone timeZone) { + + Assert.notNull(timeZone, "TimeZone must not be null"); + + return valueOf(timeZone.getID()); + } + + /** + * Create a {@link Timezone} for the given {@link java.time.ZoneId} rendering the offset as UTC offset. + * + * @param zoneId {@link ZoneId} rendering the offset as zone identifier. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromZone(ZoneId zoneId) { + + Assert.notNull(zoneId, "ZoneId must not be null"); + return new Timezone(zoneId.toString()); + } + + /** + * Create a {@link Timezone} for the {@link Field} reference holding the Olson Timezone Identifier or UTC Offset. + * + * @param fieldReference the {@link Field} holding the timezone. + * @return new instance of {@link Timezone}. + */ + public static Timezone ofField(String fieldReference) { + return valueOf(Fields.field(fieldReference)); + } + + /** + * Create a {@link Timezone} for the {@link AggregationExpression} resulting in the Olson Timezone Identifier or UTC + * Offset. + * + * @param expression the {@link AggregationExpression} resulting in the timezone. + * @return new instance of {@link Timezone}. 
+ */ + public static Timezone ofExpression(AggregationExpression expression) { + return valueOf(expression); + } + + @Nullable + Object getValue() { + return value; + } + } + /** * @author Christoph Strobl + * @author Matt Morrissette */ public static class DateOperatorFactory { - private final String fieldReference; - private final AggregationExpression expression; + private final @Nullable String fieldReference; + private final @Nullable Object dateValue; + private final @Nullable AggregationExpression expression; + private final Timezone timezone; + + /** + * @param fieldReference + * @param expression + * @param value + * @param timezone + * @since 2.1 + */ + private DateOperatorFactory(@Nullable String fieldReference, @Nullable AggregationExpression expression, + @Nullable Object value, Timezone timezone) { + + this.fieldReference = fieldReference; + this.expression = expression; + this.dateValue = value; + this.timezone = timezone; + } + + /** + * Creates new {@link DateOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + */ + public DateOperatorFactory(String fieldReference) { + + this(fieldReference, null, null, Timezone.none()); + + Assert.notNull(fieldReference, "FieldReference must not be null"); + } + + /** + * Creates new {@link DateOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public DateOperatorFactory(AggregationExpression expression) { + + this(null, expression, null, Timezone.none()); + + Assert.notNull(expression, "Expression must not be null"); + } + + /** + * Creates new {@link DateOperatorFactory} for given {@code value} that resolves to a Date.
          + *
            + *
          • {@link java.util.Date}
          • + *
          • {@link java.util.Calendar}
          • + *
          • {@link java.time.Instant}
          • + *
          • {@link java.time.ZonedDateTime}
          • + *
          • {@link java.lang.Long}
          • + *
          + * + * @param value must not be {@literal null}. + * @since 2.1 + */ + public DateOperatorFactory(Object value) { + + this(null, null, value, Timezone.none()); + + Assert.notNull(value, "Value must not be null"); + } + + /** + * Create a new {@link DateOperatorFactory} bound to a given {@link Timezone}.
          + * NOTE: Requires Mongo 3.6 or later. + * + * @param timezone must not be {@literal null}. Use {@link Timezone#none()} instead. + * @return new instance of {@link DateOperatorFactory}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + public DateOperatorFactory withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new DateOperatorFactory(fieldReference, expression, dateValue, timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression + * expression} (in {@literal units}). + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateAdd addValueOf(AggregationExpression expression, String unit) { + return applyTimezone(DateAdd.addValueOf(expression, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression + * expression} (in {@literal units}). + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateAdd addValueOf(AggregationExpression expression, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + return applyTimezone(DateAdd.addValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), + timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in + * {@literal units}). + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. 
@since 3.3 + */ + public DateAdd addValueOf(String fieldReference, String unit) { + return applyTimezone(DateAdd.addValueOf(fieldReference, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in + * {@literal units}). + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateAdd addValueOf(String fieldReference, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateAdd.addValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the given value (in {@literal units}). + * + * @param value must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return + * @since 3.3 new instance of {@link DateAdd}. + */ + public DateAdd add(Object value, String unit) { + return applyTimezone(DateAdd.addValue(value, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the given value (in {@literal units}). + * + * @param value must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return + * @since 3.3 new instance of {@link DateAdd}. + */ + public DateAdd add(Object value, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone(DateAdd.addValue(value, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), + timezone); + } + + /** + * Creates new {@link AggregationExpression} that subtracts the value of the given {@link AggregationExpression + * expression} (in {@literal units}). + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. 
Must not be {@literal null}. + * @return new instance of {@link DateSubtract}. + * @since 4.0 + */ + public DateSubtract subtractValueOf(AggregationExpression expression, String unit) { + return applyTimezone(DateSubtract.subtractValueOf(expression, unit).fromDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that subtracts the value of the given {@link AggregationExpression + * expression} (in {@literal units}). + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateSubtract}. + * @since 4.0 + */ + public DateSubtract subtractValueOf(AggregationExpression expression, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + return applyTimezone( + DateSubtract.subtractValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).fromDate(dateReference()), + timezone); + } + + /** + * Creates new {@link AggregationExpression} that subtracts the value stored at the given {@literal field} (in + * {@literal units}). + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateSubtract}. + * @since 4.0 + */ + public DateSubtract subtractValueOf(String fieldReference, String unit) { + return applyTimezone(DateSubtract.subtractValueOf(fieldReference, unit).fromDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that subtracts the value stored at the given {@literal field} (in + * {@literal units}). + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateSubtract}. 
+ * @since 4.0 + */ + public DateSubtract subtractValueOf(String fieldReference, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateSubtract.subtractValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).fromDate(dateReference()), + timezone); + } + + /** + * Creates new {@link AggregationExpression} that subtracts the given value (in {@literal units}). + * + * @param value must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateSubtract}. + * @since 4.0 + */ + public DateSubtract subtract(Object value, String unit) { + return applyTimezone(DateSubtract.subtractValue(value, unit).fromDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that subtracts the given value (in {@literal units}). + * + * @param value must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateSubtract}. + * @since 4.0 + */ + public DateSubtract subtract(Object value, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateSubtract.subtractValue(value, unit.name().toLowerCase(Locale.ROOT)).fromDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that truncates a date to the given {@literal unit}. + * + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + * @since 4.0 + */ + public DateTrunc truncate(String unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + return applyTimezone(DateTrunc.truncateValue(dateReference()).to(unit), timezone); + } + + /** + * Creates new {@link AggregationExpression} that truncates a date to the given {@literal unit}. + * + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateTrunc}. 
+ * @since 4.0 + */ + public DateTrunc truncate(TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + return truncate(unit.name().toLowerCase(Locale.ROOT)); + } + + /** + * Creates new {@link AggregationExpression} that returns the day of the year for a date as a number between 1 and + * 366. + * + * @return new instance of {@link DayOfYear}. + */ + public DayOfYear dayOfYear() { + return applyTimezone(DayOfYear.dayOfYear(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the day of the month for a date as a number between 1 and + * 31. + * + * @return new instance of {@link DayOfMonth}. + */ + public DayOfMonth dayOfMonth() { + return applyTimezone(DayOfMonth.dayOfMonth(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the day of the week for a date as a number between 1 + * (Sunday) and 7 (Saturday). + * + * @return new instance of {@link DayOfWeek}. + */ + public DayOfWeek dayOfWeek() { + return applyTimezone(DayOfWeek.dayOfWeek(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date + * computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateDiff diffValueOf(AggregationExpression expression, String unit) { + return applyTimezone(DateDiff.diffValueOf(expression, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date + * computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. 
+ * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateDiff diffValueOf(AggregationExpression expression, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateDiff.diffValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date stored + * at the given {@literal field}. + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateDiff diffValueOf(String fieldReference, String unit) { + return applyTimezone(DateDiff.diffValueOf(fieldReference, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date stored + * at the given {@literal field}. + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateDiff diffValueOf(String fieldReference, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateDiff.diffValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date given + * {@literal value}. + * + * @param value anything the resolves to a valid date. Must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. 
@since 3.3 + */ + public DateDiff diff(Object value, String unit) { + return applyTimezone(DateDiff.diffValue(value, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date given + * {@literal value}. + * + * @param value anything the resolves to a valid date. Must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateDiff diff(Object value, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone(DateDiff.diffValue(value, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), + timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the year portion of a date. + * + * @return new instance of {@link Year}. + */ + public Year year() { + return applyTimezone(Year.year(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the month of a date as a number between 1 and 12. + * + * @return new instance of {@link Month}. + */ + public Month month() { + return applyTimezone(Month.month(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the week of the year for a date as a number between 0 and + * 53. + * + * @return new instance of {@link Week}. + */ + public Week week() { + return applyTimezone(Week.week(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the hour portion of a date as a number between 0 and 23. + * + * @return new instance of {@link Hour}. + */ + public Hour hour() { + return applyTimezone(Hour.hour(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the minute portion of a date as a number between 0 and 59. + * + * @return new instance of {@link Minute}. 
+ */ + public Minute minute() { + return applyTimezone(Minute.minute(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the second portion of a date as a number between 0 and 59, + * but can be 60 to account for leap seconds. + * + * @return new instance of {@link Second}. + */ + public Second second() { + return applyTimezone(Second.second(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the millisecond portion of a date as an integer between 0 + * and 999. + * + * @return new instance of {@link Millisecond}. + */ + public Millisecond millisecond() { + return applyTimezone(Millisecond.millisecond(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that converts a date object to a string according to a user-specified + * {@literal format}. + * + * @param format must not be {@literal null}. + * @return new instance of {@link DateToString}. + */ + public DateToString toString(String format) { + return applyTimezone(DateToString.dateToString(dateReference()).toString(format), timezone); + } + + /** + * Creates new {@link AggregationExpression} that converts a date object to a string according to the server default + * format. + * + * @return new instance of {@link DateToString}. + * @since 2.1 + */ + public DateToString toStringWithDefaultFormat() { + return applyTimezone(DateToString.dateToString(dateReference()).defaultFormat(), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the weekday number in ISO 8601-2018 format, ranging from 1 + * (for Monday) to 7 (for Sunday). + * + * @return new instance of {@link IsoDayOfWeek}. + */ + public IsoDayOfWeek isoDayOfWeek() { + return applyTimezone(IsoDayOfWeek.isoDayWeek(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the week number in ISO 8601-2018 format, ranging from 1 to + * 53. 
+ * + * @return new instance of {@link IsoWeek}. + */ + public IsoWeek isoWeek() { + return applyTimezone(IsoWeek.isoWeek(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the year number in ISO 8601-2018 format. + * + * @return new instance of {@link IsoWeekYear}. + */ + public IsoWeekYear isoWeekYear() { + return applyTimezone(IsoWeekYear.isoWeekYear(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns a document containing the constituent parts of the date as + * individual properties.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @return new instance of {@link DateToParts}. + * @since 2.1 + */ + public DateToParts toParts() { + return applyTimezone(DateToParts.dateToParts(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that converts a date/time string to a date object.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @return new instance of {@link DateFromString}. + * @since 2.1 + */ + public DateFromString fromString() { + return applyTimezone(DateFromString.fromString(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that returns the incrementing ordinal from a timestamp. + * + * @return new instance of {@link TsIncrement}. + * @since 4.0 + */ + public TsIncrement tsIncrement() { + + if (timezone != null && !Timezone.none().equals(timezone)) { + throw new IllegalArgumentException("$tsIncrement does not support timezones"); + } + + return TsIncrement.tsIncrement(dateReference()); + } + + /** + * Creates new {@link AggregationExpression} that returns the seconds from a timestamp. + * + * @return new instance of {@link TsIncrement}. + * @since 4.0 + */ + public TsSecond tsSecond() { + + if (timezone != null && !Timezone.none().equals(timezone)) { + throw new IllegalArgumentException("$tsSecond does not support timezones"); + } + + return TsSecond.tsSecond(dateReference()); + } + + private Object dateReference() { + + if (usesFieldRef()) { + return Fields.field(fieldReference); + } + + return usesExpression() ? expression : dateValue; + } + + private boolean usesFieldRef() { + return fieldReference != null; + } + + private boolean usesExpression() { + return expression != null; + } + } + + /** + * @author Matt Morrissette + * @author Christoph Strobl + * @since 2.1 + */ + public static class DateFromPartsOperatorFactory { + + private final Timezone timezone; + + private DateFromPartsOperatorFactory(Timezone timezone) { + this.timezone = timezone; + } + + /** + * Set the {@literal week date year} to the given value which must resolve to a weekday in range {@code 0 - 9999}. + * Can be a simple value, {@link Field field reference} or {@link AggregationExpression expression}. + * + * @param isoWeekYear must not be {@literal null}. 
+ * @return new instance of {@link IsoDateFromParts} with {@link Timezone} if set. + * @throws IllegalArgumentException if given {@literal isoWeekYear} is {@literal null}. + */ + public IsoDateFromParts isoWeekYear(Object isoWeekYear) { + return applyTimezone(IsoDateFromParts.dateFromParts().isoWeekYear(isoWeekYear), timezone); + } + + /** + * Set the {@literal week date year} to the value resolved by following the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IsoDateFromParts} with {@link Timezone} if set. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + public IsoDateFromParts isoWeekYearOf(String fieldReference) { + return isoWeekYear(Fields.field(fieldReference)); + } + + /** + * Set the {@literal week date year} to the result of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IsoDateFromParts} with {@link Timezone} if set. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + public IsoDateFromParts isoWeekYearOf(AggregationExpression expression) { + return isoWeekYear(expression); + } + + /** + * Set the {@literal year} to the given value which must resolve to a calendar year. Can be a simple value, + * {@link Field field reference} or {@link AggregationExpression expression}. + * + * @param year must not be {@literal null}. + * @return new instance of {@link DateFromParts} with {@link Timezone} if set. + * @throws IllegalArgumentException if given {@literal year} is {@literal null} + */ + public DateFromParts year(Object year) { + return applyTimezone(DateFromParts.dateFromParts().year(year), timezone); + } + + /** + * Set the {@literal year} to the value resolved by following the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. 
+ * @return new instance of {@link DateFromParts} with {@link Timezone} if set. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + public DateFromParts yearOf(String fieldReference) { + return year(Fields.field(fieldReference)); + } + + /** + * Set the {@literal year} to the result of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateFromParts} with {@link Timezone} if set. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + public DateFromParts yearOf(AggregationExpression expression) { + return year(expression); + } + + /** + * Create a new {@link DateFromPartsOperatorFactory} bound to a given {@link Timezone}.
          + * + * @param timezone must not be {@literal null}. Use {@link Timezone#none()} instead. + * @return new instance of {@link DateFromPartsOperatorFactory}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + */ + public DateFromPartsOperatorFactory withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new DateFromPartsOperatorFactory(timezone); + } + } + + /** + * {@link AggregationExpression} capable of setting a given {@link Timezone}. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static abstract class TimezonedDateAggregationExpression extends AbstractAggregationExpression { + + protected TimezonedDateAggregationExpression(Object value) { + super(value); + } + + /** + * Append the {@code timezone} to a given source. The source itself can be a {@link Map} of already set properties + * or a single value. In case of single value {@code source} the value will be added as {@code date} property. + * + * @param source must not be {@literal null}. + * @param timezone must not be {@literal null} use {@link Timezone#none()} instead. + * @return + */ + protected static java.util.Map appendTimezone(Object source, Timezone timezone) { + + java.util.Map args; + + if (source instanceof Map map) { + args = new LinkedHashMap<>(map); + } else { + args = new LinkedHashMap<>(2); + args.put("date", source); + } + + if (!ObjectUtils.nullSafeEquals(Timezone.none(), timezone)) { + args.put("timezone", timezone.value); + } else if (args.containsKey("timezone")) { + args.remove("timezone"); + } + + return args; + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + */ + protected abstract TimezonedDateAggregationExpression withTimezone(Timezone timezone); + + protected boolean hasTimezone() { + return contains("timezone"); + } + } + + /** + * {@link AggregationExpression} for {@code $dayOfYear}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class DayOfYear extends TimezonedDateAggregationExpression { + + private DayOfYear(Object value) { + super(value); + } + + /** + * Creates new {@link DayOfYear}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link DayOfYear}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static DayOfYear dayOfYear(Object value) { + + Assert.notNull(value, "value must not be null"); + return new DayOfYear(value); + } + + /** + * Creates new {@link DayOfYear}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DayOfYear}. + */ + public static DayOfYear dayOfYear(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return dayOfYear(Fields.field(fieldReference)); + } + + /** + * Creates new {@link DayOfYear}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DayOfYear}. + */ + public static DayOfYear dayOfYear(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return dayOfYear((Object) expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DayOfYear}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public DayOfYear withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new DayOfYear(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$dayOfYear"; + } + } + + /** + * {@link AggregationExpression} for {@code $dayOfMonth}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class DayOfMonth extends TimezonedDateAggregationExpression { + + private DayOfMonth(Object value) { + super(value); + } + + /** + * Creates new {@link DayOfMonth}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link DayOfMonth}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static DayOfMonth dayOfMonth(Object value) { + + Assert.notNull(value, "value must not be null"); + return new DayOfMonth(value); + } + + /** + * Creates new {@link DayOfMonth}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DayOfMonth}. + */ + public static DayOfMonth dayOfMonth(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return dayOfMonth(Fields.field(fieldReference)); + } + + /** + * Creates new {@link DayOfMonth}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DayOfMonth}. 
+ */ + public static DayOfMonth dayOfMonth(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return dayOfMonth((Object) expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DayOfMonth}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public DayOfMonth withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new DayOfMonth(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$dayOfMonth"; + } + } + + /** + * {@link AggregationExpression} for {@code $dayOfWeek}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class DayOfWeek extends TimezonedDateAggregationExpression { + + private DayOfWeek(Object value) { + super(value); + } + + /** + * Creates new {@link DayOfWeek}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link DayOfWeek}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static DayOfWeek dayOfWeek(Object value) { + + Assert.notNull(value, "value must not be null"); + return new DayOfWeek(value); + } + + /** + * Creates new {@link DayOfWeek}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DayOfWeek}. + */ + public static DayOfWeek dayOfWeek(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return dayOfWeek(Fields.field(fieldReference)); + } + + /** + * Creates new {@link DayOfWeek}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DayOfWeek}. 
+ */ + public static DayOfWeek dayOfWeek(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return dayOfWeek((Object) expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DayOfWeek}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public DayOfWeek withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new DayOfWeek(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$dayOfWeek"; + } + } + + /** + * {@link AggregationExpression} for {@code $year}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Year extends TimezonedDateAggregationExpression { + + private Year(Object value) { + super(value); + } + + /** + * Creates new {@link Year}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Year}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Year year(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Year(value); + } + + /** + * Creates new {@link Year}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Year}. + */ + public static Year yearOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return year(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Year}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Year}. + */ + public static Year yearOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return year(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Year}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public Year withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new Year(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$year"; + } + } + + /** + * {@link AggregationExpression} for {@code $month}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Month extends TimezonedDateAggregationExpression { + + private Month(Object value) { + super(value); + } + + /** + * Creates new {@link Month}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Month}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Month month(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Month(value); + } + + /** + * Creates new {@link Month}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Month}. + */ + public static Month monthOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return month(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Month}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Month}. + */ + public static Month monthOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return month(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Month}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public Month withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new Month(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$month"; + } + } + + /** + * {@link AggregationExpression} for {@code $week}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Week extends TimezonedDateAggregationExpression { + + private Week(Object value) { + super(value); + } + + /** + * Creates new {@link Week}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Week}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Week week(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Week(value); + } + + /** + * Creates new {@link Week}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Week}. + */ + public static Week weekOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return week(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Week}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Week}. + */ + public static Week weekOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return week(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Week}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public Week withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new Week(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$week"; + } + } + + /** + * {@link AggregationExpression} for {@code $hour}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Hour extends TimezonedDateAggregationExpression { + + private Hour(Object value) { + super(value); + } + + /** + * Creates new {@link Hour}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Hour}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Hour hour(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Hour(value); + } + + /** + * Creates new {@link Hour}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Hour}. + */ + public static Hour hourOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return hour(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Hour}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Hour}. + */ + public static Hour hourOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return hour(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Hour}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public Hour withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new Hour(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$hour"; + } + } + + /** + * {@link AggregationExpression} for {@code $minute}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Minute extends TimezonedDateAggregationExpression { + + private Minute(Object value) { + super(value); + } + + /** + * Creates new {@link Minute}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Minute}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Minute minute(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Minute(value); + } + + /** + * Creates new {@link Minute}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Minute}. + */ + public static Minute minuteOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return minute(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Minute}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Minute}. + */ + public static Minute minuteOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return minute(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Minute}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public Minute withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new Minute(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$minute"; + } + } + + /** + * {@link AggregationExpression} for {@code $second}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Second extends TimezonedDateAggregationExpression { + + private Second(Object value) { + super(value); + } + + /** + * Creates new {@link Second}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Second}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Second second(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Second(value); + } + + /** + * Creates new {@link Second}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Second}. + */ + public static Second secondOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return second(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Second}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Second}. + */ + public static Second secondOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return second(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Second}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public Second withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new Second(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$second"; + } + } + + /** + * {@link AggregationExpression} for {@code $millisecond}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class Millisecond extends TimezonedDateAggregationExpression { + + private Millisecond(Object value) { + super(value); + } + + /** + * Creates new {@link Millisecond}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link Millisecond}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static Millisecond millisecond(Object value) { + + Assert.notNull(value, "value must not be null"); + return new Millisecond(value); + } + + /** + * Creates new {@link Millisecond}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Millisecond}. + */ + public static Millisecond millisecondOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return millisecond(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Millisecond}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Millisecond}. 
+ */ + public static Millisecond millisecondOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return millisecond(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Millisecond}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public Millisecond withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new Millisecond(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$millisecond"; + } + } + + /** + * {@link AggregationExpression} for {@code $dateToString}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class DateToString extends TimezonedDateAggregationExpression { + + private DateToString(Object value) { + super(value); + } + + /** + * Creates new {@link FormatBuilder}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link FormatBuilder}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static FormatBuilder dateToString(Object value) { + + Assert.notNull(value, "value must not be null"); + + return new FormatBuilder() { + + @Override + public DateToString toString(String format) { + + Assert.notNull(format, "Format must not be null"); + return new DateToString(argumentMap(value, format, Timezone.none())); + } + + @Override + public DateToString defaultFormat() { + return new DateToString(argumentMap(value, null, Timezone.none())); + } + }; + } + + /** + * Creates new {@link FormatBuilder} allowing to define the date format to apply. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link FormatBuilder} to crate {@link DateToString}. 
+ */ + public static FormatBuilder dateOf(final String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return dateToString(Fields.field(fieldReference)); + } + + /** + * Creates new {@link FormatBuilder} allowing to define the date format to apply. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link FormatBuilder} to crate {@link DateToString}. + */ + public static FormatBuilder dateOf(final AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return dateToString(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link Millisecond}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public DateToString withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new DateToString(append("timezone", timezone)); + } + + /** + * Optionally specify the value to return when the date is {@literal null} or missing.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param value must not be {@literal null}. + * @return new instance of {@link DateToString}. + * @since 2.1 + */ + public DateToString onNullReturn(Object value) { + return new DateToString(append("onNull", value)); + } + + /** + * Optionally specify the field holding the value to return when the date is {@literal null} or missing.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateToString}. + * @since 2.1 + */ + public DateToString onNullReturnValueOf(String fieldReference) { + return onNullReturn(Fields.field(fieldReference)); + } + + /** + * Optionally specify the expression to evaluate and return when the date is {@literal null} or missing.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateToString}. + * @since 2.1 + */ + public DateToString onNullReturnValueOf(AggregationExpression expression) { + return onNullReturn(expression); + } + + @Override + protected String getMongoMethod() { + return "$dateToString"; + } + + private static java.util.Map argumentMap(Object date, @Nullable String format, Timezone timezone) { + + java.util.Map args = new LinkedHashMap<>(2); + + if (StringUtils.hasText(format)) { + args.put("format", format); + } + + args.put("date", date); + + if (!ObjectUtils.nullSafeEquals(timezone, Timezone.none())) { + args.put("timezone", timezone.value); + } + return args; + } + + protected java.util.Map append(String key, Object value) { + + java.util.Map clone = new LinkedHashMap<>(argumentMap()); + + if (value instanceof Timezone timezone) { + + if (ObjectUtils.nullSafeEquals(value, Timezone.none())) { + clone.remove("timezone"); + } else { + clone.put("timezone", timezone.value); + } + } else { + clone.put(key, value); + } + + return clone; + } + + public interface FormatBuilder { + + /** + * Creates new {@link DateToString} with all previously added arguments appending the given one. + * + * @param format must not be {@literal null}. + * @return + */ + DateToString toString(String format); + + /** + * Creates new {@link DateToString} using the server default string format ({@code %Y-%m-%dT%H:%M:%S.%LZ}) for + * dates.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link DateToString}. + * @since 2.1 + */ + DateToString defaultFormat(); + } + } + + /** + * {@link AggregationExpression} for {@code $isoDayOfWeek}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class IsoDayOfWeek extends TimezonedDateAggregationExpression { + + private IsoDayOfWeek(Object value) { + super(value); + } + + /** + * Creates new {@link IsoDayOfWeek}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link IsoDayOfWeek}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static IsoDayOfWeek isoDayWeek(Object value) { + + Assert.notNull(value, "value must not be null"); + return new IsoDayOfWeek(value); + } + + /** + * Creates new {@link IsoDayOfWeek}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IsoDayOfWeek}. + */ + public static IsoDayOfWeek isoDayOfWeek(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return isoDayWeek(Fields.field(fieldReference)); + } + + /** + * Creates new {@link IsoDayOfWeek}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IsoDayOfWeek}. + */ + public static IsoDayOfWeek isoDayOfWeek(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return isoDayWeek(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link IsoDayOfWeek}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public IsoDayOfWeek withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new IsoDayOfWeek(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$isoDayOfWeek"; + } + } + + /** + * {@link AggregationExpression} for {@code $isoWeek}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class IsoWeek extends TimezonedDateAggregationExpression { + + private IsoWeek(Object value) { + super(value); + } + + /** + * Creates new {@link IsoWeek}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link IsoWeek}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static IsoWeek isoWeek(Object value) { + + Assert.notNull(value, "value must not be null"); + return new IsoWeek(value); + } + + /** + * Creates new {@link IsoWeek}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IsoWeek}. + */ + public static IsoWeek isoWeekOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return isoWeek(Fields.field(fieldReference)); + } + + /** + * Creates new {@link IsoWeek}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IsoWeek}. + */ + public static IsoWeek isoWeekOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return isoWeek(expression); + } + + /** + * Optionally set the {@link Timezone} to use. 
If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link IsoWeek}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public IsoWeek withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new IsoWeek(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$isoWeek"; + } + } + + /** + * {@link AggregationExpression} for {@code $isoWeekYear}. + * + * @author Christoph Strobl + * @author Matt Morrissette + */ + public static class IsoWeekYear extends TimezonedDateAggregationExpression { + + private IsoWeekYear(Object value) { + super(value); + } + + /** + * Creates new {@link IsoWeekYear}. + * + * @param value must not be {@literal null} and resolve to field, expression or object that represents a date. + * @return new instance of {@link IsoWeekYear}. + * @throws IllegalArgumentException if given value is {@literal null}. + * @since 2.1 + */ + public static IsoWeekYear isoWeekYear(Object value) { + + Assert.notNull(value, "value must not be null"); + return new IsoWeekYear(value); + } + + /** + * Creates new {@link IsoWeekYear}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link IsoWeekYear}. + */ + public static IsoWeekYear isoWeekYearOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return isoWeekYear(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Millisecond}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link IsoWeekYear}. 
+ */ + public static IsoWeekYear isoWeekYearOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return isoWeekYear(expression); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link IsoWeekYear}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + * @since 2.1 + */ + @Override + public IsoWeekYear withTimezone(Timezone timezone) { + + Assert.notNull(timezone, "Timezone must not be null"); + return new IsoWeekYear(appendTimezone(values().iterator().next(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$isoWeekYear"; + } + } + + /** + * @author Christoph Strobl + * @since 2.1 + */ + public interface DateParts> { + + /** + * Set the {@literal hour} to the given value which must resolve to a value in range of {@code 0 - 23}. Can be a + * simple value, {@link Field field reference} or {@link AggregationExpression expression}. + * + * @param hour must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal hour} is {@literal null} + */ + T hour(Object hour); + + /** + * Set the {@literal hour} to the value resolved by following the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + default T hourOf(String fieldReference) { + return hour(Fields.field(fieldReference)); + } + + /** + * Set the {@literal hour} to the result of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + default T hourOf(AggregationExpression expression) { + return hour(expression); + } + + /** + * Set the {@literal minute} to the given value which must resolve to a value in range {@code 0 - 59}. 
Can be a + * simple value, {@link Field field reference} or {@link AggregationExpression expression}. + * + * @param minute must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal minute} is {@literal null} + */ + T minute(Object minute); + + /** + * Set the {@literal minute} to the value resolved by following the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + default T minuteOf(String fieldReference) { + return minute(Fields.field(fieldReference)); + } + + /** + * Set the {@literal minute} to the result of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + default T minuteOf(AggregationExpression expression) { + return minute(expression); + } + + /** + * Set the {@literal second} to the given value which must resolve to a value in range {@code 0 - 59}. Can be a + * simple value, {@link Field field reference} or {@link AggregationExpression expression}. + * + * @param second must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal second} is {@literal null} + */ + T second(Object second); + + /** + * Set the {@literal second} to the value resolved by following the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + default T secondOf(String fieldReference) { + return second(Fields.field(fieldReference)); + } + + /** + * Set the {@literal second} to the result of the given {@link AggregationExpression expression}. 
+ * + * @param expression must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + default T secondOf(AggregationExpression expression) { + return second(expression); + } + + /** + * Set the {@literal millisecond} to the given value which must resolve to a value in range {@code 0 - 999}. Can be + * a simple value, {@link Field field reference} or {@link AggregationExpression expression}. + * + * @param millisecond must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal millisecond} is {@literal null} + * @since 3.2 + */ + T millisecond(Object millisecond); + + /** + * Set the {@literal millisecond} to the value resolved by following the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + * @since 3.2 + */ + default T millisecondOf(String fieldReference) { + return millisecond(Fields.field(fieldReference)); + } + + /** + * Set the {@literal milliseconds} to the result of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + * @since 3.2 + */ + default T millisecondOf(AggregationExpression expression) { + return millisecond(expression); + } + } + + /** + * {@link AggregationExpression} for {@code $dateFromParts}.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @author Matt Morrissette + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/ + * @since 2.1 + */ + public static class DateFromParts extends TimezonedDateAggregationExpression implements DateParts { + + private DateFromParts(Object value) { + super(value); + } /** - * Creates new {@link ArithmeticOperatorFactory} for given {@literal fieldReference}. + * Creates new {@link DateFromPartsWithYear}. * - * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateFromPartsWithYear}. + * @since 2.1 */ - public DateOperatorFactory(String fieldReference) { - - Assert.notNull(fieldReference, "FieldReference must not be null!"); - this.fieldReference = fieldReference; - this.expression = null; + public static DateFromPartsWithYear dateFromParts() { + return year -> new DateFromParts(Collections.singletonMap("year", year)); } /** - * Creates new {@link ArithmeticOperatorFactory} for given {@link AggregationExpression}. + * Set the {@literal month} to the given value which must resolve to a calendar month in range {@code 1 - 12}. Can + * be a simple value, {@link Field field reference} or {@link AggregationExpression expression}. * - * @param expression must not be {@literal null}. + * @param month must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal month} is {@literal null}. */ - public DateOperatorFactory(AggregationExpression expression) { - - Assert.notNull(expression, "Expression must not be null!"); - this.fieldReference = null; - this.expression = expression; + public DateFromParts month(Object month) { + return new DateFromParts(append("month", month)); } /** - * Creates new {@link AggregationExpression} that returns the day of the year for a date as a number between 1 and - * 366. 
+ * Set the {@literal month} to the value resolved by following the given {@link Field field reference}. * - * @return + * @param fieldReference must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. */ - public DayOfYear dayOfYear() { - return usesFieldRef() ? DayOfYear.dayOfYear(fieldReference) : DayOfYear.dayOfYear(expression); + public DateFromParts monthOf(String fieldReference) { + return month(Fields.field(fieldReference)); } /** - * Creates new {@link AggregationExpression} that returns the day of the month for a date as a number between 1 and - * 31. + * Set the {@literal month} to the result of the given {@link AggregationExpression expression}. * - * @return + * @param expression must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. */ - public DayOfMonth dayOfMonth() { - return usesFieldRef() ? DayOfMonth.dayOfMonth(fieldReference) : DayOfMonth.dayOfMonth(expression); + public DateFromParts monthOf(AggregationExpression expression) { + return month(expression); } /** - * Creates new {@link AggregationExpression} that returns the day of the week for a date as a number between 1 - * (Sunday) and 7 (Saturday). + * Set the {@literal day} to the given value which must resolve to a calendar day in range {@code 1 - 31}. Can be a + * simple value, {@link Field field reference} or {@link AggregationExpression expression}. * - * @return + * @param day must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal day} is {@literal null}. */ - public DayOfWeek dayOfWeek() { - return usesFieldRef() ? 
DayOfWeek.dayOfWeek(fieldReference) : DayOfWeek.dayOfWeek(expression); + public DateFromParts day(Object day) { + return new DateFromParts(append("day", day)); } /** - * Creates new {@link AggregationExpression} that returns the year portion of a date. + * Set the {@literal day} to the value resolved by following the given {@link Field field reference}. * - * @return + * @param fieldReference must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. */ - public Year year() { - return usesFieldRef() ? Year.yearOf(fieldReference) : Year.yearOf(expression); + public DateFromParts dayOf(String fieldReference) { + return day(Fields.field(fieldReference)); } /** - * Creates new {@link AggregationExpression} that returns the month of a date as a number between 1 and 12. + * Set the {@literal day} to the result of the given {@link AggregationExpression expression}. * - * @return + * @param expression must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. */ - public Month month() { - return usesFieldRef() ? Month.monthOf(fieldReference) : Month.monthOf(expression); + public DateFromParts dayOf(AggregationExpression expression) { + return day(expression); + } + + @Override + public DateFromParts hour(Object hour) { + return new DateFromParts(append("hour", hour)); + } + + @Override + public DateFromParts minute(Object minute) { + return new DateFromParts(append("minute", minute)); + } + + @Override + public DateFromParts second(Object second) { + return new DateFromParts(append("second", second)); + } + + @Override + public DateFromParts millisecond(Object millisecond) { + return new DateFromParts(append("millisecond", millisecond)); } /** - * Creates new {@link AggregationExpression} that returns the week of the year for a date as a number between 0 and - * 53. 
+ * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. * - * @return + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateFromParts}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. */ - public Week week() { - return usesFieldRef() ? Week.weekOf(fieldReference) : Week.weekOf(expression); + @Override + public DateFromParts withTimezone(Timezone timezone) { + return new DateFromParts(appendTimezone(argumentMap(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$dateFromParts"; } /** - * Creates new {@link AggregationExpression} that returns the hour portion of a date as a number between 0 and 23. - * - * @return + * @author Christoph Strobl */ - public Hour hour() { - return usesFieldRef() ? Hour.hourOf(fieldReference) : Hour.hourOf(expression); + public interface DateFromPartsWithYear { + + /** + * Set the {@literal year} to the given value which must resolve to a calendar year. Can be a simple value, + * {@link Field field reference} or {@link AggregationExpression expression}. + * + * @param year must not be {@literal null}. + * @return new instance of {@link DateFromParts}. + * @throws IllegalArgumentException if given {@literal year} is {@literal null} + */ + DateFromParts year(Object year); + + /** + * Set the {@literal year} to the value resolved by following the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateFromParts}. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. + */ + default DateFromParts yearOf(String fieldReference) { + + Assert.hasText(fieldReference, "Field reference must not be null nor empty"); + return year(Fields.field(fieldReference)); + } + + /** + * Set the {@literal year} to the result of the given {@link AggregationExpression expression}. 
+ * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateFromParts}. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + default DateFromParts yearOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return year(expression); + } + } + } + + /** + * {@link AggregationExpression} for {@code $dateFromParts} using ISO week date.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @author Matt Morrissette + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/ + * @since 2.1 + */ + public static class IsoDateFromParts extends TimezonedDateAggregationExpression + implements DateParts { + + private IsoDateFromParts(Object value) { + super(value); } /** - * Creates new {@link AggregationExpression} that returns the minute portion of a date as a number between 0 and 59. + * Creates new {@link IsoDateFromPartsWithYear}. * - * @return + * @return new instance of {@link IsoDateFromPartsWithYear}. + * @since 2.1 */ - public Minute minute() { - return usesFieldRef() ? Minute.minuteOf(fieldReference) : Minute.minuteOf(expression); + public static IsoDateFromPartsWithYear dateFromParts() { + return year -> new IsoDateFromParts(Collections.singletonMap("isoWeekYear", year)); } /** - * Creates new {@link AggregationExpression} that returns the second portion of a date as a number between 0 and 59, - * but can be 60 to account for leap seconds. + * Set the {@literal week of year} to the given value which must resolve to a calendar week in range {@code 1 - 53}. + * Can be a simple value, {@link Field field reference} or {@link AggregationExpression expression}. * - * @return + * @param isoWeek must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal isoWeek} is {@literal null}. */ - public Second second() { - return usesFieldRef() ? Second.secondOf(fieldReference) : Second.secondOf(expression); + public IsoDateFromParts isoWeek(Object isoWeek) { + return new IsoDateFromParts(append("isoWeek", isoWeek)); } /** - * Creates new {@link AggregationExpression} that returns the millisecond portion of a date as an integer between 0 - * and 999. + * Set the {@literal week of year} to the value resolved by following the given {@link Field field reference}. 
* - * @return + * @param fieldReference must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. */ - public Millisecond millisecond() { - return usesFieldRef() ? Millisecond.millisecondOf(fieldReference) : Millisecond.millisecondOf(expression); + public IsoDateFromParts isoWeekOf(String fieldReference) { + return isoWeek(Fields.field(fieldReference)); } /** - * Creates new {@link AggregationExpression} that converts a date object to a string according to a user-specified - * {@literal format}. + * Set the {@literal week of year} to the result of the given {@link AggregationExpression expression}. * - * @param format must not be {@literal null}. - * @return + * @param expression must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. */ - public DateToString toString(String format) { - return (usesFieldRef() ? DateToString.dateOf(fieldReference) : DateToString.dateOf(expression)).toString(format); + public IsoDateFromParts isoWeekOf(AggregationExpression expression) { + return isoWeek(expression); } /** - * Creates new {@link AggregationExpression} that returns the weekday number in ISO 8601-2018 format, ranging from 1 (for - * Monday) to 7 (for Sunday). + * Set the {@literal day of week} to the given value which must resolve to a weekday in range {@code 1 - 7}. Can be + * a simple value, {@link Field field reference} or {@link AggregationExpression expression}. * - * @return + * @param day must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal isoWeek} is {@literal null}. */ - public IsoDayOfWeek isoDayOfWeek() { - return usesFieldRef() ? 
IsoDayOfWeek.isoDayOfWeek(fieldReference) : IsoDayOfWeek.isoDayOfWeek(expression); + public IsoDateFromParts isoDayOfWeek(Object day) { + return new IsoDateFromParts(append("isoDayOfWeek", day)); } /** - * Creates new {@link AggregationExpression} that returns the week number in ISO 8601-2018 format, ranging from 1 to 53. + * Set the {@literal day of week} to the value resolved by following the given {@link Field field reference}. * - * @return + * @param fieldReference must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. */ - public IsoWeek isoWeek() { - return usesFieldRef() ? IsoWeek.isoWeekOf(fieldReference) : IsoWeek.isoWeekOf(expression); + public IsoDateFromParts isoDayOfWeekOf(String fieldReference) { + return isoDayOfWeek(Fields.field(fieldReference)); } /** - * Creates new {@link AggregationExpression} that returns the year number in ISO 8601-2018 format. + * Set the {@literal day of week} to the result of the given {@link AggregationExpression expression}. * - * @return + * @param expression must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. */ - public IsoWeekYear isoWeekYear() { - return usesFieldRef() ? IsoWeekYear.isoWeekYearOf(fieldReference) : IsoWeekYear.isoWeekYearOf(expression); + public IsoDateFromParts isoDayOfWeekOf(AggregationExpression expression) { + return isoDayOfWeek(expression); } - private boolean usesFieldRef() { - return fieldReference != null; + @Override + public IsoDateFromParts hour(Object hour) { + return new IsoDateFromParts(append("hour", hour)); } - } - /** - * {@link AggregationExpression} for {@code $dayOfYear}. 
- * - * @author Christoph Strobl - */ - public static class DayOfYear extends AbstractAggregationExpression { + @Override + public IsoDateFromParts minute(Object minute) { + return new IsoDateFromParts(append("minute", minute)); + } - private DayOfYear(Object value) { - super(value); + @Override + public IsoDateFromParts second(Object second) { + return new IsoDateFromParts(append("second", second)); } @Override - protected String getMongoMethod() { - return "$dayOfYear"; + public IsoDateFromParts millisecond(Object millisecond) { + return new IsoDateFromParts(append("millisecond", millisecond)); } /** - * Creates new {@link DayOfYear}. + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. * - * @param fieldReference must not be {@literal null}. - * @return + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link IsoDateFromParts}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. */ - public static DayOfYear dayOfYear(String fieldReference) { + @Override + public IsoDateFromParts withTimezone(Timezone timezone) { + return new IsoDateFromParts(appendTimezone(argumentMap(), timezone)); + } - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new DayOfYear(Fields.field(fieldReference)); + @Override + protected String getMongoMethod() { + return "$dateFromParts"; } /** - * Creates new {@link DayOfYear}. - * - * @param expression must not be {@literal null}. - * @return + * @author Christoph Strobl */ - public static DayOfYear dayOfYear(AggregationExpression expression) { + public interface IsoDateFromPartsWithYear { + + /** + * Set the {@literal week date year} to the given value which must resolve to a weekday in range {@code 0 - 9999}. + * Can be a simple value, {@link Field field reference} or {@link AggregationExpression expression}. + * + * @param isoWeekYear must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal isoWeekYear} is {@literal null}. + */ + IsoDateFromParts isoWeekYear(Object isoWeekYear); + + /** + * Set the {@literal week date year} to the value resolved by following the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. 
+ */ + default IsoDateFromParts isoWeekYearOf(String fieldReference) { - Assert.notNull(expression, "Expression must not be null!"); - return new DayOfYear(expression); + Assert.hasText(fieldReference, "Field reference must not be null nor empty"); + return isoWeekYear(Fields.field(fieldReference)); + } + + /** + * Set the {@literal week date year} to the result of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. + */ + default IsoDateFromParts isoWeekYearOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return isoWeekYear(expression); + } } } /** - * {@link AggregationExpression} for {@code $dayOfMonth}. + * {@link AggregationExpression} for {@code $dateToParts}.
          + * NOTE: Requires MongoDB 3.6 or later. * + * @author Matt Morrissette * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/ + * @since 2.1 */ - public static class DayOfMonth extends AbstractAggregationExpression { + public static class DateToParts extends TimezonedDateAggregationExpression { - private DayOfMonth(Object value) { + private DateToParts(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$dayOfMonth"; + /** + * Creates new {@link DateToParts}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link DateToParts}. + * @throws IllegalArgumentException if given {@literal value} is {@literal null}. + */ + public static DateToParts dateToParts(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new DateToParts(Collections.singletonMap("date", value)); } /** - * Creates new {@link DayOfMonth}. + * Creates new {@link DateToParts}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link DateToParts}. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. */ - public static DayOfMonth dayOfMonth(String fieldReference) { + public static DateToParts datePartsOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new DayOfMonth(Fields.field(fieldReference)); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return dateToParts(Fields.field(fieldReference)); } /** - * Creates new {@link DayOfMonth}. + * Creates new {@link DateToParts}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link DateToParts}. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. 
*/ - public static DayOfMonth dayOfMonth(AggregationExpression expression) { - - Assert.notNull(expression, "Expression must not be null!"); - return new DayOfMonth(expression); - } - } - - /** - * {@link AggregationExpression} for {@code $dayOfWeek}. - * - * @author Christoph Strobl - */ - public static class DayOfWeek extends AbstractAggregationExpression { - - private DayOfWeek(Object value) { - super(value); - } - - @Override - protected String getMongoMethod() { - return "$dayOfWeek"; + public static DateToParts datePartsOf(AggregationExpression expression) { + return dateToParts(expression); } /** - * Creates new {@link DayOfWeek}. + * Use ISO week date fields in the resulting document. * - * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link DateToParts}. */ - public static DayOfWeek dayOfWeek(String fieldReference) { - - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new DayOfWeek(Fields.field(fieldReference)); + public DateToParts iso8601() { + return new DateToParts(append("iso8601", true)); } /** - * Creates new {@link DayOfWeek}. + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. * - * @param expression must not be {@literal null}. - * @return + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateFromParts}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. */ - public static DayOfWeek dayOfWeek(AggregationExpression expression) { + @Override + public DateToParts withTimezone(Timezone timezone) { + return new DateToParts(appendTimezone(argumentMap(), timezone)); + } - Assert.notNull(expression, "Expression must not be null!"); - return new DayOfWeek(expression); + @Override + protected String getMongoMethod() { + return "$dateToParts"; } } /** - * {@link AggregationExpression} for {@code $year}. + * {@link AggregationExpression} for {@code $dateFromString}.
          + * NOTE: Requires MongoDB 3.6 or later. * + * @author Matt Morrissette * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/ + * @since 2.1 */ - public static class Year extends AbstractAggregationExpression { + public static class DateFromString extends TimezonedDateAggregationExpression { - private Year(Object value) { + private DateFromString(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$year"; + /** + * Creates new {@link DateFromString}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link DateFromString}. + * @throws IllegalArgumentException if given {@literal value} is {@literal null}. + */ + public static DateFromString fromString(Object value) { + return new DateFromString(Collections.singletonMap("dateString", value)); } /** - * Creates new {@link Year}. + * Creates new {@link DateFromString}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link DateFromString}. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. */ - public static Year yearOf(String fieldReference) { - - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Year(Fields.field(fieldReference)); + public static DateFromString fromStringOf(String fieldReference) { + return fromString(Fields.field(fieldReference)); } /** - * Creates new {@link Year}. + * Creates new {@link DateFromString}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link DateFromString}. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. */ - public static Year yearOf(AggregationExpression expression) { + public static DateFromString fromStringOf(AggregationExpression expression) { + return fromString(expression); + } + + /** + * Optionally set the {@link Timezone} to use. 
If not specified {@literal UTC} is used.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateFromString}. + * @throws IllegalArgumentException if given {@literal timezone} is {@literal null}. + */ + @Override + public DateFromString withTimezone(Timezone timezone) { + return new DateFromString(appendTimezone(argumentMap(), timezone)); + } - Assert.notNull(expression, "Expression must not be null!"); - return new Year(expression); + /** + * Optionally set the date format to use. If not specified {@code %Y-%m-%dT%H:%M:%S.%LZ} is used.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param format must not be {@literal null}. + * @return new instance of {@link DateFromString}. + * @throws IllegalArgumentException if given {@literal format} is {@literal null}. + */ + public DateFromString withFormat(String format) { + + Assert.notNull(format, "Format must not be null"); + return new DateFromString(append("format", format)); + } + + @Override + protected String getMongoMethod() { + return "$dateFromString"; } } /** - * {@link AggregationExpression} for {@code $month}. + * {@link AggregationExpression} for {@code $dateAdd}.
          + * NOTE: Requires MongoDB 5.0 or later. * * @author Christoph Strobl + * @since 3.3 */ - public static class Month extends AbstractAggregationExpression { + public static class DateAdd extends TimezonedDateAggregationExpression { - private Month(Object value) { + private DateAdd(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$month"; + /** + * Add the number of {@literal units} of the result of the given {@link AggregationExpression expression} to a + * {@link #toDate(Object) start date}. + * + * @param expression must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateAdd addValueOf(AggregationExpression expression, String unit) { + return addValue(expression, unit); } /** - * Creates new {@link Month}. + * Add the number of {@literal units} from a {@literal field} to a {@link #toDate(Object) start date}. * * @param fieldReference must not be {@literal null}. - * @return + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. */ - public static Month monthOf(String fieldReference) { - - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Month(Fields.field(fieldReference)); + public static DateAdd addValueOf(String fieldReference, String unit) { + return addValue(Fields.field(fieldReference), unit); } /** - * Creates new {@link Month}. + * Add the number of {@literal units} to a {@link #toDate(Object) start date}. * - * @param expression must not be {@literal null}. - * @return + * @param value must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. */ - public static Month monthOf(AggregationExpression expression) { - - Assert.notNull(expression, "Expression must not be null!"); - return new Month(expression); - } - } - - /** - * {@link AggregationExpression} for {@code $week}. 
- * - * @author Christoph Strobl - */ - public static class Week extends AbstractAggregationExpression { + public static DateAdd addValue(Object value, String unit) { - private Week(Object value) { - super(value); + Map args = new HashMap<>(); + args.put("unit", unit); + args.put("amount", value); + return new DateAdd(args); } - @Override - protected String getMongoMethod() { - return "$week"; + /** + * Define the start date, in UTC, for the addition operation. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateAdd toDateOf(AggregationExpression expression) { + return toDate(expression); } /** - * Creates new {@link Week}. + * Define the start date, in UTC, for the addition operation. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link DateAdd}. */ - public static Week weekOf(String fieldReference) { + public DateAdd toDateOf(String fieldReference) { + return toDate(Fields.field(fieldReference)); + } - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Week(Fields.field(fieldReference)); + /** + * Define the start date, in UTC, for the addition operation. + * + * @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateAdd toDate(Object dateExpression) { + return new DateAdd(append("startDate", dateExpression)); } /** - * Creates new {@link Week}. + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used. * - * @param expression must not be {@literal null}. - * @return + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateAdd}. 
*/ - public static Week weekOf(AggregationExpression expression) { + public DateAdd withTimezone(Timezone timezone) { + return new DateAdd(appendTimezone(argumentMap(), timezone)); + } - Assert.notNull(expression, "Expression must not be null!"); - return new Week(expression); + @Override + protected String getMongoMethod() { + return "$dateAdd"; } } /** - * {@link AggregationExpression} for {@code $hour}. + * {@link AggregationExpression} for {@code $dateSubtract}.
          + * NOTE: Requires MongoDB 5.0 or later. * * @author Christoph Strobl + * @since 4.0 */ - public static class Hour extends AbstractAggregationExpression { + public static class DateSubtract extends TimezonedDateAggregationExpression { - private Hour(Object value) { + private DateSubtract(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$hour"; + /** + * Subtract the number of {@literal units} of the result of the given {@link AggregationExpression expression} from + * a {@link #fromDate(Object) start date}. + * + * @param expression must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateSubtract}. + */ + public static DateSubtract subtractValueOf(AggregationExpression expression, String unit) { + return subtractValue(expression, unit); } /** - * Creates new {@link Hour}. + * Subtract the number of {@literal units} from a {@literal field} from a {@link #fromDate(Object) start date}. * * @param fieldReference must not be {@literal null}. - * @return + * @param unit must not be {@literal null}. + * @return new instance of {@link DateSubtract}. */ - public static Hour hourOf(String fieldReference) { - - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Hour(Fields.field(fieldReference)); + public static DateSubtract subtractValueOf(String fieldReference, String unit) { + return subtractValue(Fields.field(fieldReference), unit); } /** - * Creates new {@link Hour}. + * Subtract the number of {@literal units} from a {@link #fromDate(Object) start date}. * - * @param expression must not be {@literal null}. - * @return + * @param value must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateSubtract}. 
*/ - public static Hour hourOf(AggregationExpression expression) { - - Assert.notNull(expression, "Expression must not be null!"); - return new Hour(expression); - } - } - - /** - * {@link AggregationExpression} for {@code $minute}. - * - * @author Christoph Strobl - */ - public static class Minute extends AbstractAggregationExpression { + public static DateSubtract subtractValue(Object value, String unit) { - private Minute(Object value) { - super(value); + Map args = new HashMap<>(); + args.put("unit", unit); + args.put("amount", value); + return new DateSubtract(args); } - @Override - protected String getMongoMethod() { - return "$minute"; + /** + * Define the start date, in UTC, for the subtraction operation. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateSubtract}. + */ + public DateSubtract fromDateOf(AggregationExpression expression) { + return fromDate(expression); } /** - * Creates new {@link Minute}. + * Define the start date, in UTC, for the subtraction operation. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link DateSubtract}. */ - public static Minute minuteOf(String fieldReference) { + public DateSubtract fromDateOf(String fieldReference) { + return fromDate(Fields.field(fieldReference)); + } - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Minute(Fields.field(fieldReference)); + /** + * Define the start date, in UTC, for the subtraction operation. + * + * @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}. + * @return new instance of {@link DateSubtract}. + */ + public DateSubtract fromDate(Object dateExpression) { + return new DateSubtract(append("startDate", dateExpression)); } /** - * Creates new {@link Minute}. + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used. * - * @param expression must not be {@literal null}. 
- * @return + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateSubtract}. */ - public static Minute minuteOf(AggregationExpression expression) { + public DateSubtract withTimezone(Timezone timezone) { + return new DateSubtract(appendTimezone(argumentMap(), timezone)); + } - Assert.notNull(expression, "Expression must not be null!"); - return new Minute(expression); + @Override + protected String getMongoMethod() { + return "$dateSubtract"; } } /** - * {@link AggregationExpression} for {@code $second}. + * {@link AggregationExpression} for {@code $dateDiff}.
          + * NOTE: Requires MongoDB 5.0 or later. * * @author Christoph Strobl + * @since 3.3 */ - public static class Second extends AbstractAggregationExpression { + public static class DateDiff extends TimezonedDateAggregationExpression { - private Second(Object value) { + private DateDiff(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$second"; + /** + * Add the number of {@literal units} of the result of the given {@link AggregationExpression expression} to a + * {@link #toDate(Object) start date}. + * + * @param expression must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateDiff diffValueOf(AggregationExpression expression, String unit) { + return diffValue(expression, unit); } /** - * Creates new {@link Second}. + * Add the number of {@literal units} from a {@literal field} to a {@link #toDate(Object) start date}. * * @param fieldReference must not be {@literal null}. - * @return + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. */ - public static Second secondOf(String fieldReference) { - - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Second(Fields.field(fieldReference)); + public static DateDiff diffValueOf(String fieldReference, String unit) { + return diffValue(Fields.field(fieldReference), unit); } /** - * Creates new {@link Second}. + * Add the number of {@literal units} to a {@link #toDate(Object) start date}. * - * @param expression must not be {@literal null}. - * @return + * @param value must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. 
*/ - public static Second secondOf(AggregationExpression expression) { + public static DateDiff diffValue(Object value, String unit) { - Assert.notNull(expression, "Expression must not be null!"); - return new Second(expression); + Map args = new HashMap<>(); + args.put("unit", unit); + args.put("endDate", value); + return new DateDiff(args); } - } - - /** - * {@link AggregationExpression} for {@code $millisecond}. - * - * @author Christoph Strobl - */ - public static class Millisecond extends AbstractAggregationExpression { - private Millisecond(Object value) { - super(value); + /** + * Define the start date, in UTC, for the addition operation. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateDiff toDateOf(AggregationExpression expression) { + return toDate(expression); } - @Override - protected String getMongoMethod() { - return "$millisecond"; + /** + * Define the start date, in UTC, for the addition operation. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateDiff toDateOf(String fieldReference) { + return toDate(Fields.field(fieldReference)); } /** - * Creates new {@link Millisecond}. + * Define the start date, in UTC, for the addition operation. * - * @param fieldReference must not be {@literal null}. - * @return + * @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. */ - public static Millisecond millisecondOf(String fieldReference) { + public DateDiff toDate(Object dateExpression) { + return new DateDiff(append("startDate", dateExpression)); + } - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Millisecond(Fields.field(fieldReference)); + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used. + * + * @param timezone must not be {@literal null}. 
Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateAdd}. + */ + public DateDiff withTimezone(Timezone timezone) { + return new DateDiff(appendTimezone(argumentMap(), timezone)); } /** - * Creates new {@link Millisecond}. + * Set the start day of the week if the unit if measure is set to {@literal week}. Uses {@literal Sunday} by + * default. * - * @param expression must not be {@literal null}. - * @return + * @param day must not be {@literal null}. + * @return new instance of {@link DateDiff}. */ - public static Millisecond millisecondOf(AggregationExpression expression) { + public DateDiff startOfWeek(Object day) { + return new DateDiff(append("startOfWeek", day)); + } - Assert.notNull(expression, "Expression must not be null!"); - return new Millisecond(expression); + @Override + protected String getMongoMethod() { + return "$dateDiff"; } } /** - * {@link AggregationExpression} for {@code $dateToString}. + * {@link AggregationExpression} for {@code $dateTrunc}.
          + * NOTE: Requires MongoDB 5.0 or later. * * @author Christoph Strobl + * @since 4.0 */ - public static class DateToString extends AbstractAggregationExpression { + public static class DateTrunc extends TimezonedDateAggregationExpression { - private DateToString(Object value) { + private DateTrunc(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$dateToString"; + /** + * Truncates the date value of computed by the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public static DateTrunc truncateValueOf(AggregationExpression expression) { + return truncateValue(expression); } /** - * Creates new {@link FormatBuilder} allowing to define the date format to apply. + * Truncates the date value of the referenced {@literal field}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link DateTrunc}. */ - public static FormatBuilder dateOf(final String fieldReference) { - - Assert.notNull(fieldReference, "FieldReference must not be null!"); - - return new FormatBuilder() { - - @Override - public DateToString toString(String format) { + public static DateTrunc truncateValueOf(String fieldReference) { + return truncateValue(Fields.field(fieldReference)); + } - Assert.notNull(format, "Format must not be null!"); - return new DateToString(argumentMap(Fields.field(fieldReference), format)); - } - }; + /** + * Truncates the date value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public static DateTrunc truncateValue(Object value) { + return new DateTrunc(Collections.singletonMap("date", value)); } /** - * Creates new {@link FormatBuilder} allowing to define the date format to apply. + * Define the unit of time. * - * @param expression must not be {@literal null}. - * @return + * @param unit must not be {@literal null}. 
+ * @return new instance of {@link DateTrunc}. */ - public static FormatBuilder dateOf(final AggregationExpression expression) { + public DateTrunc to(String unit) { + return new DateTrunc(append("unit", unit)); + } - Assert.notNull(expression, "Expression must not be null!"); + /** + * Define the unit of time via an {@link AggregationExpression}. + * + * @param unit must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc to(AggregationExpression unit) { + return new DateTrunc(append("unit", unit)); + } - return new FormatBuilder() { + /** + * Define the weeks starting day if {@link #to(String)} resolves to {@literal week}. + * + * @param day must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc startOfWeek(java.time.DayOfWeek day) { + return startOfWeek(day.name().toLowerCase(Locale.US)); + } - @Override - public DateToString toString(String format) { + /** + * Define the weeks starting day if {@link #to(String)} resolves to {@literal week}. + * + * @param day must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc startOfWeek(String day) { + return new DateTrunc(append("startOfWeek", day)); + } - Assert.notNull(format, "Format must not be null!"); - return new DateToString(argumentMap(expression, format)); - } - }; + /** + * Define the numeric time value. + * + * @param binSize must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc binSize(int binSize) { + return binSize((Object) binSize); } - private static java.util.Map argumentMap(Object date, String format) { + /** + * Define the numeric time value via an {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateTrunc}. 
+ */ + public DateTrunc binSize(AggregationExpression expression) { + return binSize((Object) expression); + } - java.util.Map args = new LinkedHashMap(2); - args.put("format", format); - args.put("date", date); - return args; + /** + * Define the numeric time value. + * + * @param binSize must not be {@literal null}. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc binSize(Object binSize) { + return new DateTrunc(append("binSize", binSize)); } - public interface FormatBuilder { + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateTrunc}. + */ + public DateTrunc withTimezone(Timezone timezone) { + return new DateTrunc(appendTimezone(argumentMap(), timezone)); + } - /** - * Creates new {@link DateToString} with all previously added arguments appending the given one. - * - * @param format must not be {@literal null}. - * @return - */ - DateToString toString(String format); + @Override + protected String getMongoMethod() { + return "$dateTrunc"; } } /** - * {@link AggregationExpression} for {@code $isoDayOfWeek}. + * {@link AggregationExpression} for {@code $tsIncrement}. * * @author Christoph Strobl + * @since 4.0 */ - public static class IsoDayOfWeek extends AbstractAggregationExpression { + public static class TsIncrement extends AbstractAggregationExpression { - private IsoDayOfWeek(Object value) { + private TsIncrement(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$isoDayOfWeek"; + /** + * Creates new {@link TsIncrement} that returns the incrementing ordinal from a timestamp. + * + * @param value must not be {@literal null}. + * @return new instance of {@link TsIncrement}. + * @throws IllegalArgumentException if given {@literal value} is {@literal null}. 
+ */ + public static TsIncrement tsIncrement(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new TsIncrement(value); } /** - * Creates new {@link IsoDayOfWeek}. + * Creates new {@link TsIncrement} that returns the incrementing ordinal from a timestamp. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link TsIncrement}. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. */ - public static IsoDayOfWeek isoDayOfWeek(String fieldReference) { + public static TsIncrement tsIncrementValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new IsoDayOfWeek(Fields.field(fieldReference)); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return tsIncrement(Fields.field(fieldReference)); } /** - * Creates new {@link IsoDayOfWeek}. + * Creates new {@link TsIncrement}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link TsIncrement}. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. */ - public static IsoDayOfWeek isoDayOfWeek(AggregationExpression expression) { + public static TsIncrement tsIncrementValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return tsIncrement(expression); + } - Assert.notNull(expression, "Expression must not be null!"); - return new IsoDayOfWeek(expression); + @Override + protected String getMongoMethod() { + return "$tsIncrement"; } } /** - * {@link AggregationExpression} for {@code $isoWeek}. + * {@link AggregationExpression} for {@code $tsSecond}. 
* * @author Christoph Strobl + * @since 4.0 */ - public static class IsoWeek extends AbstractAggregationExpression { + public static class TsSecond extends AbstractAggregationExpression { - private IsoWeek(Object value) { + private TsSecond(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$isoWeek"; + /** + * Creates new {@link TsSecond} that returns the incrementing ordinal from a timestamp. + * + * @param value must not be {@literal null}. + * @return new instance of {@link TsSecond}. + * @throws IllegalArgumentException if given {@literal value} is {@literal null}. + */ + public static TsSecond tsSecond(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new TsSecond(value); } /** - * Creates new {@link IsoWeek}. + * Creates new {@link TsSecond} that returns the incrementing ordinal from a timestamp. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link TsSecond}. + * @throws IllegalArgumentException if given {@literal fieldReference} is {@literal null}. */ - public static IsoWeek isoWeekOf(String fieldReference) { + public static TsSecond tsSecondValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new IsoWeek(Fields.field(fieldReference)); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return tsSecond(Fields.field(fieldReference)); } /** - * Creates new {@link IsoWeek}. + * Creates new {@link TsSecond}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link TsSecond}. + * @throws IllegalArgumentException if given {@literal expression} is {@literal null}. 
*/ - public static IsoWeek isoWeekOf(AggregationExpression expression) { + public static TsSecond tsSecondValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return tsSecond(expression); + } - Assert.notNull(expression, "Expression must not be null!"); - return new IsoWeek(expression); + @Override + protected String getMongoMethod() { + return "$tsSecond"; } } /** - * {@link AggregationExpression} for {@code $isoWeekYear}. + * Interface defining a temporal unit for date operators. * - * @author Christoph Strobl + * @author Mark Paluch + * @since 3.3 */ - public static class IsoWeekYear extends AbstractAggregationExpression { + public interface TemporalUnit { - private IsoWeekYear(Object value) { - super(value); - } - - @Override - protected String getMongoMethod() { - return "$isoWeekYear"; - } + String name(); /** - * Creates new {@link IsoWeekYear}. + * Converts the given time unit into a {@link TemporalUnit}. Supported units are: days, hours, minutes, seconds, and + * milliseconds. * - * @param fieldReference must not be {@literal null}. + * @param timeUnit the time unit to convert, must not be {@literal null}. * @return + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. 
*/ - public static IsoWeekYear isoWeekYearOf(String fieldReference) { + static TemporalUnit from(TimeUnit timeUnit) { + + Assert.notNull(timeUnit, "TimeUnit must not be null"); - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new IsoWeekYear(Fields.field(fieldReference)); + switch (timeUnit) { + case DAYS: + return TemporalUnits.DAY; + case HOURS: + return TemporalUnits.HOUR; + case MINUTES: + return TemporalUnits.MINUTE; + case SECONDS: + return TemporalUnits.SECOND; + case MILLISECONDS: + return TemporalUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create TemporalUnit from %s", timeUnit)); } /** - * Creates new {@link Millisecond}. + * Converts the given chrono unit into a {@link TemporalUnit}. Supported units are: years, weeks, months, days, + * hours, minutes, seconds, and millis. * - * @param expression must not be {@literal null}. + * @param chronoUnit the chrono unit to convert, must not be {@literal null}. * @return + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. */ - public static IsoWeekYear isoWeekYearOf(AggregationExpression expression) { - - Assert.notNull(expression, "Expression must not be null!"); - return new IsoWeekYear(expression); + static TemporalUnit from(ChronoUnit chronoUnit) { + + switch (chronoUnit) { + case YEARS: + return TemporalUnits.YEAR; + case WEEKS: + return TemporalUnits.WEEK; + case MONTHS: + return TemporalUnits.MONTH; + case DAYS: + return TemporalUnits.DAY; + case HOURS: + return TemporalUnits.HOUR; + case MINUTES: + return TemporalUnits.MINUTE; + case SECONDS: + return TemporalUnits.SECOND; + case MILLIS: + return TemporalUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create TemporalUnit from %s", chronoUnit)); } } + + /** + * Supported temporal units. 
+ */ + enum TemporalUnits implements TemporalUnit { + YEAR, QUARTER, WEEK, MONTH, DAY, HOUR, MINUTE, SECOND, MILLISECOND + + } + + @SuppressWarnings("unchecked") + private static T applyTimezone(T instance, Timezone timezone) { + return !ObjectUtils.nullSafeEquals(Timezone.none(), timezone) && !instance.hasTimezone() + ? (T) instance.withTimezone(timezone) + : instance; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DensifyOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DensifyOperation.java new file mode 100644 index 0000000000..0da9343ddf --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DensifyOperation.java @@ -0,0 +1,383 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Locale; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/*** + * Encapsulates the aggregation framework {@code $densify}-operation. 
+ * + * @author Christoph Strobl + * @since 4.0 + */ +public class DensifyOperation implements AggregationOperation { + + private @Nullable Field field; + private @Nullable List partitionBy; + private @Nullable Range range; + + protected DensifyOperation(@Nullable Field field, @Nullable List partitionBy, @Nullable Range range) { + + this.field = field; + this.partitionBy = partitionBy; + this.range = range; + } + + /** + * Obtain a builder to create the {@link DensifyOperation}. + * + * @return new instance of {@link DensifyOperationBuilder}. + */ + public static DensifyOperationBuilder builder() { + return new DensifyOperationBuilder(); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document densify = new Document(); + densify.put("field", context.getReference(field).getRaw()); + if (!ObjectUtils.isEmpty(partitionBy)) { + densify.put("partitionByFields", partitionBy.stream().map(it -> { + if (it instanceof Field field) { + return context.getReference(field).getRaw(); + } + if (it instanceof AggregationExpression expression) { + return expression.toDocument(context); + } + return it; + }).collect(Collectors.toList())); + } + densify.put("range", range.toDocument(context)); + return new Document("$densify", densify); + } + + /** + * The {@link Range} specifies how the data is densified. + */ + public interface Range { + + /** + * Add documents spanning the range of values within the given lower (inclusive) and upper (exclusive) bound. + * + * @param lower must not be {@literal null}. + * @param upper must not be {@literal null}. + * @return new instance of {@link DensifyRange}. + */ + static DensifyRange bounded(Object lower, Object upper) { + return new BoundedRange(lower, upper, DensifyUnits.NONE); + } + + /** + * Add documents spanning the full value range. + * + * @return new instance of {@link DensifyRange}. 
+ */ + static DensifyRange full() { + + return new DensifyRange(DensifyUnits.NONE) { + + @Override + Object getBounds(AggregationOperationContext ctx) { + return "full"; + } + }; + } + + /** + * Add documents spanning the full value range for each partition. + * + * @return new instance of {@link DensifyRange}. + */ + static DensifyRange partition() { + return new DensifyRange(DensifyUnits.NONE) { + + @Override + Object getBounds(AggregationOperationContext ctx) { + return "partition"; + } + }; + } + + /** + * Obtain the document representation of the window in a default {@link AggregationOperationContext context}. + * + * @return never {@literal null}. + */ + default Document toDocument() { + return toDocument(Aggregation.DEFAULT_CONTEXT); + } + + /** + * Obtain the document representation of the window in the given {@link AggregationOperationContext context}. + * + * @return never {@literal null}. + */ + Document toDocument(AggregationOperationContext ctx); + } + + /** + * Base {@link Range} implementation. + * + * @author Christoph Strobl + */ + public static abstract class DensifyRange implements Range { + + private @Nullable DensifyUnit unit; + private Number step; + + public DensifyRange(DensifyUnit unit) { + this.unit = unit; + } + + @Override + public Document toDocument(AggregationOperationContext ctx) { + + Document range = new Document("step", step); + if (unit != null && !DensifyUnits.NONE.equals(unit)) { + range.put("unit", unit.name().toLowerCase(Locale.US)); + } + range.put("bounds", getBounds(ctx)); + return range; + } + + /** + * Set the increment for the value. + * + * @param step must not be {@literal null}. + * @return this. + */ + public DensifyRange incrementBy(Number step) { + this.step = step; + return this; + } + + /** + * Set the increment for the value. + * + * @param step must not be {@literal null}. + * @return this. 
+ */ + public DensifyRange incrementBy(Number step, DensifyUnit unit) { + this.step = step; + return unit(unit); + } + + /** + * Set the {@link DensifyUnit unit} for the step field. + * + * @param unit + * @return this. + */ + public DensifyRange unit(DensifyUnit unit) { + + this.unit = unit; + return this; + } + + abstract Object getBounds(AggregationOperationContext ctx); + } + + /** + * {@link Range} implementation holding lower and upper bound values. + * + * @author Christoph Strobl + */ + public static class BoundedRange extends DensifyRange { + + private List bounds; + + protected BoundedRange(Object lower, Object upper, DensifyUnit unit) { + + super(unit); + this.bounds = Arrays.asList(lower, upper); + } + + @Override + List getBounds(AggregationOperationContext ctx) { + return bounds.stream().map(it -> { + if (it instanceof AggregationExpression expression) { + return expression.toDocument(ctx); + } + return it; + }).collect(Collectors.toList()); + } + } + + /** + * The actual time unit to apply to a {@link Range}. + */ + public interface DensifyUnit { + + String name(); + + /** + * Converts the given time unit into a {@link DensifyUnit}. Supported units are: days, hours, minutes, seconds, and + * milliseconds. + * + * @param timeUnit the time unit to convert, must not be {@literal null}. + * @return + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. + */ + static DensifyUnit from(TimeUnit timeUnit) { + + Assert.notNull(timeUnit, "TimeUnit must not be null"); + + switch (timeUnit) { + case DAYS: + return DensifyUnits.DAY; + case HOURS: + return DensifyUnits.HOUR; + case MINUTES: + return DensifyUnits.MINUTE; + case SECONDS: + return DensifyUnits.SECOND; + case MILLISECONDS: + return DensifyUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create DensifyUnit from %s", timeUnit)); + } + + /** + * Converts the given chrono unit into a {@link DensifyUnit}. 
Supported units are: years, weeks, months, days, + * hours, minutes, seconds, and millis. + * + * @param chronoUnit the chrono unit to convert, must not be {@literal null}. + * @return + * @throws IllegalArgumentException if the {@link ChronoUnit} is {@literal null} or not supported for conversion. + */ + static DensifyUnits from(ChronoUnit chronoUnit) { + + switch (chronoUnit) { + case YEARS: + return DensifyUnits.YEAR; + case WEEKS: + return DensifyUnits.WEEK; + case MONTHS: + return DensifyUnits.MONTH; + case DAYS: + return DensifyUnits.DAY; + case HOURS: + return DensifyUnits.HOUR; + case MINUTES: + return DensifyUnits.MINUTE; + case SECONDS: + return DensifyUnits.SECOND; + case MILLIS: + return DensifyUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create DensifyUnit from %s", chronoUnit)); + } + } + + /** + * Quick access to available {@link DensifyUnit units}. + */ + public enum DensifyUnits implements DensifyUnit { + NONE, YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, MILLISECOND + } + + public static class DensifyOperationBuilder { + + DensifyOperation target; + + public DensifyOperationBuilder() { + this.target = new DensifyOperation(null, Collections.emptyList(), null); + } + + /** + * Set the field to densify. + * + * @param fieldname must not be {@literal null}. + * @return this. + */ + public DensifyOperationBuilder densify(String fieldname) { + this.target.field = Fields.field(fieldname); + return this; + } + + /** + * Set the fields used for grouping documents. + * + * @param fields must not be {@literal null}. + * @return this. + */ + public DensifyOperationBuilder partitionBy(String... fields) { + target.partitionBy = Fields.fields(fields).asList(); + return this; + } + + /** + * Set the operational range. + * + * @param range must not be {@literal null}. + * @return this. 
+ */ + public DensifyOperationBuilder range(Range range) { + + target.range = range; + return this; + } + + /** + * Operate on full range. + * + * @param consumer + * @return this. + */ + public DensifyOperationBuilder fullRange(Consumer consumer) { + + Assert.notNull(consumer, "Consumer must not be null"); + + DensifyRange range = Range.full(); + consumer.accept(range); + + return range(range); + } + + /** + * Operate on full range. + * + * @param consumer + * @return this. + */ + public DensifyOperationBuilder partitionRange(Consumer consumer) { + + DensifyRange range = Range.partition(); + consumer.accept(range); + + return range(range); + } + + public DensifyOperation build() { + return new DensifyOperation(target.field, target.partitionBy, target.range); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentEnhancingOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentEnhancingOperation.java new file mode 100644 index 0000000000..7f260c3785 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentEnhancingOperation.java @@ -0,0 +1,162 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; +import org.springframework.util.Assert; + +/** + * Base class for common tasks required by {@link SetOperation} and {@link AddFieldsOperation}. + * + * @author Christoph Strobl + * @since 3.0 + */ +abstract class DocumentEnhancingOperation implements InheritsFieldsAggregationOperation { + + private final Map valueMap; + + private ExposedFields exposedFields = ExposedFields.empty(); + + protected DocumentEnhancingOperation(Map source) { + + this.valueMap = new LinkedHashMap<>(source); + for (Object key : source.keySet()) { + this.exposedFields = add(key); + } + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + AggregationOperationContext operationContext = context.inheritAndExpose(exposedFields); + + if (valueMap.size() == 1) { + return context.getMappedObject( + new Document(mongoOperator(), toSetEntry(valueMap.entrySet().iterator().next(), operationContext))); + } + + Document $set = new Document(); + valueMap.entrySet().stream().map(it -> toSetEntry(it, operationContext)).forEach($set::putAll); + return context.getMappedObject(new Document(mongoOperator(), $set)); + } + + /** + * @return the String representation of the native MongoDB operator. 
+ */ + protected abstract String mongoOperator(); + + @Override + public String getOperator() { + return mongoOperator(); + } + + /** + * @return the raw value map + */ + protected Map getValueMap() { + return this.valueMap; + } + + @Override + public ExposedFields getFields() { + return exposedFields; + } + + private ExposedFields add(Object fieldValue) { + + if (fieldValue instanceof Field field) { + return exposedFields.and(new ExposedField(field, true)); + } + if (fieldValue instanceof String fieldName) { + return exposedFields.and(new ExposedField(Fields.field(fieldName), true)); + } + + throw new IllegalArgumentException(String.format("Expected %s to be a field/property", fieldValue)); + } + + private static Document toSetEntry(Entry entry, AggregationOperationContext context) { + + String field = entry.getKey() instanceof String key ? context.getReference(key).getRaw() + : context.getReference((Field) entry.getKey()).getRaw(); + + Object value = computeValue(entry.getValue(), context); + + return new Document(field, value); + } + + private static Object computeValue(Object value, AggregationOperationContext context) { + + if (value instanceof Field field) { + return context.getReference(field).toString(); + } + + if (value instanceof ExpressionProjection expressionProjection) { + return expressionProjection.toExpression(context); + } + + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } + + if (value instanceof Collection collection) { + return collection.stream().map(it -> computeValue(it, context)).collect(Collectors.toList()); + } + + return value; + } + + /** + * A {@link AggregationExpression} based on a SpEL expression. 
+ * + * @author Mark Paluch + */ + static class ExpressionProjection { + + private static final SpelExpressionTransformer TRANSFORMER = new SpelExpressionTransformer(); + + private final String expression; + private final Object[] params; + + /** + * Creates a new {@link ProjectionOperation.ExpressionProjectionOperationBuilder.ExpressionProjection} for the given + * field, SpEL expression and parameters. + * + * @param expression must not be {@literal null} or empty. + * @param parameters must not be {@literal null}. + */ + ExpressionProjection(String expression, Object[] parameters) { + + Assert.notNull(expression, "Expression must not be null"); + Assert.notNull(parameters, "Parameters must not be null"); + + this.expression = expression; + this.params = parameters.clone(); + } + + Object toExpression(AggregationOperationContext context) { + return TRANSFORMER.transform(expression, context, params); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java new file mode 100644 index 0000000000..ff63ad834d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java @@ -0,0 +1,222 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collections; + +import org.bson.Document; + +/** + * Gateway to {@literal document expressions} such as {@literal $rank, $documentNumber, etc.} + * + * @author Christoph Strobl + * @since 3.3 + */ +public class DocumentOperators { + + /** + * Obtain the document position (including gaps) relative to others (rank). + * + * @return new instance of {@link Rank}. + * @since 3.3 + */ + public static Rank rank() { + return new Rank(); + } + + /** + * Obtain the document position (without gaps) relative to others (rank). + * + * @return new instance of {@link DenseRank}. + * @since 3.3 + */ + public static DenseRank denseRank() { + return new DenseRank(); + } + + /** + * Take the field referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DocumentOperatorsFactory}. + */ + public static DocumentOperatorsFactory valueOf(String fieldReference) { + return new DocumentOperatorsFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DocumentOperatorsFactory}. + */ + public static DocumentOperatorsFactory valueOf(AggregationExpression expression) { + return new DocumentOperatorsFactory(expression); + } + + /** + * Obtain the current document position. + * + * @return new instance of {@link DocumentNumber}. + * @since 3.3 + */ + public static DocumentNumber documentNumber() { + return new DocumentNumber(); + } + + /** + * @author Christoph Strobl + */ + public static class DocumentOperatorsFactory { + + private final Object target; + + public DocumentOperatorsFactory(Object target) { + this.target = target; + } + + /** + * Creates new {@link AggregationExpression} that applies the expression to a document at specified position + * relative to the current document. 
+ * + * @param by the value to add to the current position. + * @return new instance of {@link Shift}. + */ + public Shift shift(int by) { + + Shift shift = usesExpression() ? Shift.shift((AggregationExpression) target) : Shift.shift(target.toString()); + return shift.by(by); + } + + private boolean usesExpression() { + return target instanceof AggregationExpression; + } + } + + /** + * {@link Rank} resolves the current document position (the rank) relative to other documents. If multiple documents + * occupy the same rank, {@literal $rank} places the document with the subsequent value at a rank with a gap. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Rank implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$rank", new Document()); + } + } + + /** + * {@link DenseRank} resolves the current document position (the rank) relative to other documents. If multiple + * documents occupy the same rank, {@literal $denseRank} places the document with the subsequent value at the next + * rank without any gaps. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DenseRank implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$denseRank", new Document()); + } + } + + /** + * {@link DocumentNumber} resolves the current document position. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DocumentNumber implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$documentNumber", new Document()); + } + } + + /** + * Shift applies an expression to a document in a specified position relative to the current document. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Shift extends AbstractAggregationExpression { + + private Shift(Object value) { + super(value); + } + + /** + * Specifies the field to evaluate and return. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Shift}. + */ + public static Shift shift(String fieldReference) { + return new Shift(Collections.singletonMap("output", Fields.field(fieldReference))); + } + + /** + * Specifies the {@link AggregationExpression expression} to evaluate and return. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Shift}. + */ + public static Shift shift(AggregationExpression expression) { + return new Shift(Collections.singletonMap("output", expression)); + } + + /** + * Shift the document position relative to the current. Use a positive value for follow up documents (eg. 1 for the + * next) or a negative value for the predecessor documents (eg. -1 for the previous). + * + * @param shiftBy value to add to the current position. + * @return new instance of {@link Shift}. + */ + public Shift by(int shiftBy) { + return new Shift(append("by", shiftBy)); + } + + /** + * Define the default value if the target document is out of range. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Shift}. + */ + public Shift defaultTo(Object value) { + return new Shift(append("default", value)); + } + + /** + * Define the {@link AggregationExpression expression} to evaluate if the target document is out of range. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Shift}. 
+ */ + public Shift defaultToValueOf(AggregationExpression expression) { + return defaultTo(expression); + } + + @Override + protected String getMongoMethod() { + return "$shift"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java new file mode 100644 index 0000000000..56f20dde17 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java @@ -0,0 +1,206 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.util.Assert; + +/** + * Gateway to {@literal evaluation operators} such as {@literal $expr}. + * + * @author Divya Srivastava + * @author Christoph Strobl + * @since 3.3 + */ +public class EvaluationOperators { + + /** + * Take the value resulting from the given fieldReference. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link EvaluationOperatorFactory}. 
*/ + public static EvaluationOperatorFactory valueOf(String fieldReference) { + return new EvaluationOperatorFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link EvaluationOperatorFactory}. + */ + public static EvaluationOperatorFactory valueOf(AggregationExpression expression) { + return new EvaluationOperatorFactory(expression); + } + + public static class EvaluationOperatorFactory { + + private final String fieldReference; + private final AggregationExpression expression; + + /** + * Creates new {@link EvaluationOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + */ + public EvaluationOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + this.fieldReference = fieldReference; + this.expression = null; + } + + /** + * Creates new {@link EvaluationOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public EvaluationOperatorFactory(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + this.fieldReference = null; + this.expression = expression; + } + + /** + * Creates new {@link AggregationExpression} that is a valid aggregation expression. + * + * @return new instance of {@link Expr}. + */ + public Expr expr() { + return usesFieldRef() ? Expr.valueOf(fieldReference) : Expr.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that sets {@literal null} and missing values to the last non-null value. + * + * @return new instance of {@link LastObservationCarriedForward}. + */ + public LastObservationCarriedForward locf() { + return usesFieldRef() ? 
LastObservationCarriedForward.locfValueOf(fieldReference) + : LastObservationCarriedForward.locfValueOf(expression); + } + + private boolean usesFieldRef() { + return fieldReference != null; + } + } + + /** + * Allows the use of aggregation expressions within the query language. + */ + public static class Expr extends AbstractAggregationExpression { + + private Expr(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$expr"; + } + + /** + * Creates new {@link Expr}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Expr}. + */ + public static Expr valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Expr(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Expr}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Expr}. + */ + public static Expr valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Expr(expression); + } + + /** + * Creates {@code $expr} as {@link CriteriaDefinition}. + * + * @return the {@link CriteriaDefinition} from this expression. + */ + public CriteriaDefinition toCriteriaDefinition(AggregationOperationContext context) { + + Document criteriaObject = toDocument(context); + + return new CriteriaDefinition() { + @Override + public Document getCriteriaObject() { + return criteriaObject; + } + + @Override + public String getKey() { + return getMongoMethod(); + } + }; + } + } + + /** + * Sets {@literal null} and missing values to the last non-null value. + * + * @since 4.0 + */ + public static class LastObservationCarriedForward extends AbstractAggregationExpression { + + private LastObservationCarriedForward(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$locf"; + } + + /** + * Creates new {@link LastObservationCarriedForward}. 
+ * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link LastObservationCarriedForward}. + */ + public static LastObservationCarriedForward locfValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new LastObservationCarriedForward(Fields.field(fieldReference)); + } + + /** + * Creates new {@link LastObservationCarriedForward}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link LastObservationCarriedForward}. + */ + public static LastObservationCarriedForward locfValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new LastObservationCarriedForward(expression); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFields.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFields.java index f777f3a543..458bc43437 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFields.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFields.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -46,7 +46,7 @@ public final class ExposedFields implements Iterable { /** * Returns an empty {@link ExposedFields} instance. * - * @return + * @return never {@literal null}. 
* @since 2.0 */ public static ExposedFields empty() { @@ -57,7 +57,7 @@ public static ExposedFields empty() { * Creates a new {@link ExposedFields} instance from the given {@link ExposedField}s. * * @param fields must not be {@literal null}. - * @return + * @return never {@literal null}. */ public static ExposedFields from(ExposedField... fields) { return from(Arrays.asList(fields)); @@ -67,7 +67,7 @@ public static ExposedFields from(ExposedField... fields) { * Creates a new {@link ExposedFields} instance from the given {@link ExposedField}s. * * @param fields must not be {@literal null}. - * @return + * @return never {@literal null}. */ private static ExposedFields from(List fields) { @@ -84,7 +84,7 @@ private static ExposedFields from(List fields) { * Creates synthetic {@link ExposedFields} from the given {@link Fields}. * * @param fields must not be {@literal null}. - * @return + * @return never {@literal null}. */ public static ExposedFields synthetic(Fields fields) { return createFields(fields, true); @@ -94,7 +94,7 @@ public static ExposedFields synthetic(Fields fields) { * Creates non-synthetic {@link ExposedFields} from the given {@link Fields}. * * @param fields must not be {@literal null}. - * @return + * @return never {@literal null}. */ public static ExposedFields nonSynthetic(Fields fields) { return createFields(fields, false); @@ -105,12 +105,12 @@ public static ExposedFields nonSynthetic(Fields fields) { * * @param fields must not be {@literal null}. * @param synthetic - * @return + * @return never {@literal null}. 
*/ private static ExposedFields createFields(Fields fields, boolean synthetic) { - Assert.notNull(fields, "Fields must not be null!"); - List result = new ArrayList(); + Assert.notNull(fields, "Fields must not be null"); + List result = new ArrayList(fields.size()); for (Field field : fields) { result.add(new ExposedField(field, synthetic)); @@ -135,13 +135,13 @@ private ExposedFields(List originals, List synthetic * Creates a new {@link ExposedFields} adding the given {@link ExposedField}. * * @param field must not be {@literal null}. - * @return + * @return new instance of {@link ExposedFields}. */ public ExposedFields and(ExposedField field) { - Assert.notNull(field, "Exposed field must not be null!"); + Assert.notNull(field, "Exposed field must not be null"); - ArrayList result = new ArrayList(); + ArrayList result = new ArrayList<>(); result.addAll(field.synthetic ? syntheticFields : originalFields); result.add(field); @@ -151,8 +151,8 @@ public ExposedFields and(ExposedField field) { /** * Returns the field with the given name or {@literal null} if no field with the given name is available. * - * @param name - * @return + * @param name must not be {@literal null}. + * @return can be {@literal null}. 
*/ @Nullable public ExposedField getField(String name) { @@ -209,10 +209,6 @@ private int exposedFieldsCount() { return originalFields.size() + syntheticFields.size(); } - /* - * (non-Javadoc) - * @see java.lang.Iterable#iterator() - */ @Override public Iterator iterator() { @@ -260,28 +256,16 @@ public ExposedField(Field delegate, boolean synthetic) { this.synthetic = synthetic; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Field#getKey() - */ @Override public String getName() { return field.getName(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Field#getTarget() - */ @Override public String getTarget() { return field.getTarget(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Field#isAliased() - */ @Override public boolean isAliased() { return field.isAliased(); @@ -304,39 +288,25 @@ public boolean canBeReferredToBy(String name) { return getName().equals(name) || getTarget().equals(name); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return String.format("AggregationField: %s, synthetic: %s", field, synthetic); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof ExposedField)) { + if (!(obj instanceof ExposedField that)) { return false; } - ExposedField that = (ExposedField) obj; - return this.field.equals(that.field) && this.synthetic == that.synthetic; } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -389,33 +359,21 @@ static class DirectFieldReference implements FieldReference { */ public DirectFieldReference(ExposedField field) { - Assert.notNull(field, "ExposedField must not be null!"); + Assert.notNull(field, "ExposedField must not be null"); this.field = 
field; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference#getRaw() - */ public String getRaw() { String target = field.getTarget(); return field.synthetic ? target : String.format("%s.%s", Fields.UNDERSCORE_ID, target); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference#getReferenceValue() - */ public Object getReferenceValue() { return field.synthetic && !field.isAliased() ? 1 : toString(); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { @@ -426,30 +384,20 @@ public String toString() { return String.format("$%s", getRaw()); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof DirectFieldReference)) { + if (!(obj instanceof DirectFieldReference fieldReference)) { return false; } - DirectFieldReference that = (DirectFieldReference) obj; - - return this.field.equals(that.field); + return this.field.equals(fieldReference.field); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return field.hashCode(); @@ -475,19 +423,11 @@ public ExpressionFieldReference(FieldReference field) { delegate = field; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference#getRaw() - */ @Override public String getRaw() { return delegate.getRaw(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference#getReferenceValue() - */ @Override public Object getReferenceValue() { return delegate.getReferenceValue(); @@ -510,18 +450,17 @@ public String toString() { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { 
return true; } - if (!(obj instanceof ExpressionFieldReference)) { + if (!(obj instanceof ExpressionFieldReference fieldReference)) { return false; } - ExpressionFieldReference that = (ExpressionFieldReference) obj; - return ObjectUtils.nullSafeEquals(this.delegate, that.delegate); + return ObjectUtils.nullSafeEquals(this.delegate, fieldReference.delegate); } @Override diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsAggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsAggregationOperationContext.java index e8866a550e..131fa8a845 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsAggregationOperationContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsAggregationOperationContext.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,6 +16,8 @@ package org.springframework.data.mongodb.core.aggregation; import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; + import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; @@ -36,6 +38,7 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo private final ExposedFields exposedFields; private final AggregationOperationContext rootContext; + private final FieldLookupPolicy lookupPolicy; /** * Creates a new {@link ExposedFieldsAggregationOperationContext} from the given {@link ExposedFields}. Uses the given @@ -43,44 +46,45 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo * * @param exposedFields must not be {@literal null}. * @param rootContext must not be {@literal null}. + * @param lookupPolicy must not be {@literal null}. 
*/ - public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields, - AggregationOperationContext rootContext) { + public ExposedFieldsAggregationOperationContext(ExposedFields exposedFields, AggregationOperationContext rootContext, + FieldLookupPolicy lookupPolicy) { - Assert.notNull(exposedFields, "ExposedFields must not be null!"); - Assert.notNull(rootContext, "RootContext must not be null!"); + Assert.notNull(exposedFields, "ExposedFields must not be null"); + Assert.notNull(rootContext, "RootContext must not be null"); + Assert.notNull(lookupPolicy, "FieldLookupPolicy must not be null"); this.exposedFields = exposedFields; this.rootContext = rootContext; + this.lookupPolicy = lookupPolicy; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document) - */ @Override - public Document getMappedObject(Document document) { - return rootContext.getMappedObject(document); + public Document getMappedObject(Document document, @Nullable Class type) { + return rootContext.getMappedObject(document, type); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField) - */ @Override public FieldReference getReference(Field field) { + + if (field.isInternal()) { + return new DirectFieldReference(new ExposedField(field, true)); + } + return getReference(field, field.getTarget()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String) - */ @Override public FieldReference getReference(String name) { return getReference(null, name); } + @Override + public Fields getFields(Class type) { + return rootContext.getFields(type); + } + /** * Returns a {@link FieldReference} to the given {@link Field} with the given {@code name}. 
* @@ -90,14 +94,22 @@ public FieldReference getReference(String name) { */ private FieldReference getReference(@Nullable Field field, String name) { - Assert.notNull(name, "Name must not be null!"); + Assert.notNull(name, "Name must not be null"); FieldReference exposedField = resolveExposedField(field, name); if (exposedField != null) { return exposedField; } - throw new IllegalArgumentException(String.format("Invalid reference '%s'!", name)); + if (lookupPolicy.isStrict()) { + throw new IllegalArgumentException(String.format("Invalid reference '%s'", name)); + } + + if (field != null) { + return new DirectFieldReference(new ExposedField(field, true)); + } + + return new DirectFieldReference(new ExposedField(name, true)); } /** @@ -135,4 +147,37 @@ protected FieldReference resolveExposedField(@Nullable Field field, String name) } return null; } + + /** + * @return obtain the root context used to resolve references. + * @since 3.1 + */ + AggregationOperationContext getRootContext() { + return rootContext; + } + + @Override + public CodecRegistry getCodecRegistry() { + return getRootContext().getCodecRegistry(); + } + + @Override + @Deprecated(since = "4.3.1", forRemoval = true) + public AggregationOperationContext continueOnMissingFieldReference() { + if (!lookupPolicy.isStrict()) { + return this; + } + return new ExposedFieldsAggregationOperationContext(exposedFields, rootContext, FieldLookupPolicy.relaxed()); + } + + @Override + public AggregationOperationContext expose(ExposedFields fields) { + return new ExposedFieldsAggregationOperationContext(fields, this, lookupPolicy); + } + + @Override + public AggregationOperationContext inheritAndExpose(ExposedFields fields) { + return new InheritingExposedFieldsAggregationOperationContext(fields, this, lookupPolicy); + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FacetOperation.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FacetOperation.java index 92fe9f9432..f5c73dd09c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FacetOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FacetOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,12 +20,11 @@ import java.util.Collections; import java.util.List; +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Output; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.util.Assert; -import org.bson.Document; - /** * Encapsulates the aggregation framework {@code $facet}-operation.
          * Facet of {@link AggregationOperation}s to be used in an {@link Aggregation}. Processes multiple @@ -73,23 +72,22 @@ private FacetOperation(Facets facets) { */ public FacetOperationBuilder and(AggregationOperation... operations) { - Assert.notNull(operations, "AggregationOperations must not be null!"); - Assert.notEmpty(operations, "AggregationOperations must not be empty!"); + Assert.notNull(operations, "AggregationOperations must not be null"); + Assert.notEmpty(operations, "AggregationOperations must not be empty"); return new FacetOperationBuilder(facets, Arrays.asList(operations)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { - return new Document("$facet", facets.toDocument(context)); + return new Document(getOperator(), facets.toDocument(context)); + } + + @Override + public String getOperator() { + return "$facet"; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() - */ @Override public ExposedFields getFields() { return facets.asExposedFields(); @@ -120,7 +118,7 @@ private FacetOperationBuilder(Facets current, List operati */ public FacetOperation as(String fieldName) { - Assert.hasText(fieldName, "FieldName must not be null or empty!"); + Assert.hasText(fieldName, "FieldName must not be null or empty"); return new FacetOperation(current.and(fieldName, operations)); } @@ -180,8 +178,8 @@ protected Document toDocument(AggregationOperationContext context) { */ Facets and(String fieldName, List operations) { - Assert.hasText(fieldName, "FieldName must not be null or empty!"); - Assert.notNull(operations, "AggregationOperations must not be null!"); + Assert.hasText(fieldName, "FieldName must not be null or empty"); + Assert.notNull(operations, 
"AggregationOperations must not be null"); List facets = new ArrayList(this.facets.size() + 1); facets.addAll(this.facets); @@ -209,8 +207,8 @@ private static class Facet { */ Facet(ExposedField exposedField, List operations) { - Assert.notNull(exposedField, "ExposedField must not be null!"); - Assert.notNull(operations, "AggregationOperations must not be null!"); + Assert.notNull(exposedField, "ExposedField must not be null"); + Assert.notNull(operations, "AggregationOperations must not be null"); this.exposedField = exposedField; this.operations = operations; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Field.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Field.java index b0145cbd19..a6737dc574 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Field.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Field.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -43,4 +43,12 @@ public interface Field { * @return */ boolean isAliased(); + + /** + * @return true if the field name references a local value such as {@code $$this}. 
+ * @since 2.2 + */ + default boolean isInternal() { + return false; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldLookupPolicy.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldLookupPolicy.java new file mode 100644 index 0000000000..b438be3f31 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldLookupPolicy.java @@ -0,0 +1,64 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +/** + * Lookup policy for aggregation fields. Allows strict lookups that fail if the field is absent or relaxed ones that + * pass-thru the requested field even if we have to assume that the field isn't present because of the limited scope of + * our input. + * + * @author Mark Paluch + * @since 4.3.1 + */ +public abstract class FieldLookupPolicy { + + private static final FieldLookupPolicy STRICT = new FieldLookupPolicy() { + @Override + boolean isStrict() { + return true; + } + }; + + private static final FieldLookupPolicy RELAXED = new FieldLookupPolicy() { + @Override + boolean isStrict() { + return false; + } + }; + + private FieldLookupPolicy() {} + + /** + * @return a relaxed lookup policy. 
+ */ + public static FieldLookupPolicy relaxed() { + return RELAXED; + } + + /** + * @return a strict lookup policy. + */ + public static FieldLookupPolicy strict() { + return STRICT; + } + + /** + * @return {@code true} if the policy uses a strict lookup; {@code false} to allow references to fields that cannot be + * determined to be exactly present. + */ + abstract boolean isStrict(); + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Fields.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Fields.java index 065d9b63a8..83fc7c2b87 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Fields.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Fields.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,6 +23,7 @@ import java.util.List; import java.util.Map; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; @@ -37,10 +38,10 @@ */ public final class Fields implements Iterable { - private static final String AMBIGUOUS_EXCEPTION = "Found two fields both using '%s' as name: %s and %s! 
Please " - + "customize your field definitions to get to unique field names!"; + private static final String AMBIGUOUS_EXCEPTION = "Found two fields both using '%s' as name: %s and %s; Please " + + "customize your field definitions to get to unique field names"; - public static final String UNDERSCORE_ID = "_id"; + public static final String UNDERSCORE_ID = FieldName.ID.name(); public static final String UNDERSCORE_ID_REF = "$_id"; private final List fields; @@ -53,7 +54,7 @@ public final class Fields implements Iterable { */ public static Fields from(Field... fields) { - Assert.notNull(fields, "Fields must not be null!"); + Assert.notNull(fields, "Fields must not be null"); return new Fields(Arrays.asList(fields)); } @@ -65,9 +66,9 @@ public static Fields from(Field... fields) { */ public static Fields fields(String... names) { - Assert.notNull(names, "Field names must not be null!"); + Assert.notNull(names, "Field names must not be null"); - List fields = new ArrayList(); + List fields = new ArrayList<>(); for (String name : names) { fields.add(field(name)); @@ -96,7 +97,7 @@ public static Field field(String name) { * @return */ public static Field field(String name, String target) { - Assert.hasText(target, "Target must not be null or empty!"); + Assert.hasText(target, "Target must not be null or empty"); return new AggregationField(name, target); } @@ -107,14 +108,14 @@ public static Field field(String name, String target) { */ private Fields(List fields) { - Assert.notNull(fields, "Fields must not be null!"); + Assert.notNull(fields, "Fields must not be null"); this.fields = verify(fields); } - private static final List verify(List fields) { + private static List verify(List fields) { - Map reference = new HashMap(); + Map reference = new HashMap<>(); for (Field field : fields) { @@ -133,7 +134,7 @@ private static final List verify(List fields) { private Fields(Fields existing, Field tail) { - this.fields = new ArrayList(existing.fields.size() + 1); + 
this.fields = new ArrayList<>(existing.fields.size() + 1); this.fields.addAll(existing.fields); this.fields.add(tail); } @@ -167,6 +168,10 @@ public Fields and(Fields fields) { return result; } + public int size() { + return fields.size(); + } + @Nullable public Field getField(String name) { @@ -179,10 +184,6 @@ public Field getField(String name) { return null; } - /* - * (non-Javadoc) - * @see java.lang.Iterable#iterator() - */ @Override public Iterator iterator() { return fields.iterator(); @@ -232,7 +233,7 @@ public AggregationField(String name, @Nullable String target) { String nameToSet = name != null ? cleanUp(name) : null; String targetToSet = target != null ? cleanUp(target) : null; - Assert.hasText(nameToSet, "AggregationField name must not be null or empty!"); + Assert.hasText(nameToSet, "AggregationField name must not be null or empty"); if (target == null && name.contains(".")) { this.name = nameToSet.substring(nameToSet.indexOf('.') + 1); @@ -245,7 +246,7 @@ public AggregationField(String name, @Nullable String target) { private static String cleanUp(String source) { - if (Aggregation.SystemVariable.isReferingToSystemVariable(source)) { + if (AggregationVariable.isVariable(source)) { return source; } @@ -253,36 +254,31 @@ private static String cleanUp(String source) { return dollarIndex == -1 ? source : source.substring(dollarIndex + 1); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Field#getKey() - */ + @Override public String getName() { return name; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Field#getAlias() - */ + @Override public String getTarget() { - if (isLocalVar()) { + if (isLocalVar() || pointsToDBRefId()) { return this.getRaw(); } return StringUtils.hasText(this.target) ? 
this.target : this.name; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Field#isAliased() - */ @Override public boolean isAliased() { return !getName().equals(getTarget()); } + @Override + public boolean isInternal() { + return getRaw().endsWith("$$this") || getRaw().endsWith("$$value"); + } + /** * @return {@literal true} in case the field name starts with {@code $$}. * @since 1.10 @@ -291,6 +287,10 @@ public boolean isLocalVar() { return raw.startsWith("$$") && !raw.startsWith("$$$"); } + protected boolean pointsToDBRefId() { // see https://jira.mongodb.org/browse/SERVER-14466 + return raw.endsWith(".$id"); + } + /** * @return * @since 1.10 @@ -299,39 +299,25 @@ public String getRaw() { return raw; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return String.format("AggregationField - name: %s, target: %s", name, target); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof AggregationField)) { + if (!(obj instanceof AggregationField field)) { return false; } - AggregationField that = (AggregationField) obj; - - return this.name.equals(that.name) && ObjectUtils.nullSafeEquals(this.target, that.target); + return this.name.equals(field.name) && ObjectUtils.nullSafeEquals(this.target, field.target); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldsExposingAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldsExposingAggregationOperation.java index 7f168f4868..4fdea92dde 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldsExposingAggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/FieldsExposingAggregationOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -33,9 +33,22 @@ public interface FieldsExposingAggregationOperation extends AggregationOperation */ ExposedFields getFields(); + /** + * @return {@literal true} to conditionally inherit fields from previous operations. + * @since 2.0.6 + */ + default boolean inheritsFields() { + return false; + } + /** * Marker interface for {@link AggregationOperation} that inherits fields from previous operations. 
*/ - interface InheritsFieldsAggregationOperation extends FieldsExposingAggregationOperation {} + interface InheritsFieldsAggregationOperation extends FieldsExposingAggregationOperation { + @Override + default boolean inheritsFields() { + return true; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperation.java index b654cf09dc..f4a5fb4498 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,14 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.util.ArrayList; +import java.util.List; + import org.bson.Document; import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; /** * Represents a {@code geoNear} aggregation operation. 
@@ -28,38 +33,98 @@ * @author Thomas Darimont * @author Christoph Strobl * @since 1.3 + * @see MongoDB Aggregation Framework: + * $geoNear */ public class GeoNearOperation implements AggregationOperation { private final NearQuery nearQuery; private final String distanceField; + private final @Nullable String indexKey; /** * Creates a new {@link GeoNearOperation} from the given {@link NearQuery} and the given distance field. The * {@code distanceField} defines output field that contains the calculated distance. * - * @param query must not be {@literal null}. + * @param nearQuery must not be {@literal null}. * @param distanceField must not be {@literal null}. */ public GeoNearOperation(NearQuery nearQuery, String distanceField) { + this(nearQuery, distanceField, null); + } + + /** + * Creates a new {@link GeoNearOperation} from the given {@link NearQuery} and the given distance field. The + * {@code distanceField} defines output field that contains the calculated distance. + * + * @param nearQuery must not be {@literal null}. + * @param distanceField must not be {@literal null}. + * @param indexKey can be {@literal null}; + * @since 2.1 + */ + private GeoNearOperation(NearQuery nearQuery, String distanceField, @Nullable String indexKey) { - Assert.notNull(nearQuery, "NearQuery must not be null."); - Assert.hasLength(distanceField, "Distance field must not be null or empty."); + Assert.notNull(nearQuery, "NearQuery must not be null"); + Assert.hasLength(distanceField, "Distance field must not be null or empty"); this.nearQuery = nearQuery; this.distanceField = distanceField; + this.indexKey = indexKey; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Optionally specify the geospatial index to use via the field to use in the calculation.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param key the geospatial index field to use when calculating the distance. + * @return new instance of {@link GeoNearOperation}. + * @since 2.1 */ + public GeoNearOperation useIndex(String key) { + return new GeoNearOperation(nearQuery, distanceField, key); + } + @Override public Document toDocument(AggregationOperationContext context) { Document command = context.getMappedObject(nearQuery.toDocument()); + + if (command.containsKey("query")) { + command.replace("query", context.getMappedObject(command.get("query", Document.class))); + } + + command.remove("collation"); command.put("distanceField", distanceField); - return new Document("$geoNear", command); + if (StringUtils.hasText(indexKey)) { + command.put("key", indexKey); + } + + return new Document(getOperator(), command); + } + + @Override + public String getOperator() { + return "$geoNear"; + } + + @Override + public List toPipelineStages(AggregationOperationContext context) { + + Document command = toDocument(context); + Number limit = (Number) command.get("$geoNear", Document.class).remove("num"); + + List stages = new ArrayList<>(3); + stages.add(command); + + if (nearQuery.getSkip() != null && nearQuery.getSkip() > 0) { + stages.add(new Document("$skip", nearQuery.getSkip())); + } + + if (limit != null) { + stages.add(new Document("$limit", limit.longValue())); + } + + return stages; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperation.java index 2f0ed70569..72a917c599 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author 
or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,7 +17,6 @@ import java.util.ArrayList; import java.util.Arrays; -import java.util.HashSet; import java.util.List; import java.util.Set; @@ -36,16 +35,14 @@ * We recommend to use the static factory method {@link Aggregation#graphLookup(String)} instead of creating instances * of this class directly. * - * @see https://docs.mongodb.org/manual/reference/aggregation/graphLookup/ + * @see https://docs.mongodb.org/manual/reference/aggregation/graphLookup/ * @author Mark Paluch * @author Christoph Strobl * @since 1.10 */ public class GraphLookupOperation implements InheritsFieldsAggregationOperation { - private static final Set> ALLOWED_START_TYPES = new HashSet>( - Arrays.> asList(AggregationExpression.class, String.class, Field.class, Document.class)); + private static final Set> ALLOWED_START_TYPES = Set.of(AggregationExpression.class, String.class, Field.class, Document.class); private final String from; private final List startWith; @@ -78,9 +75,6 @@ public static FromBuilder builder() { return new GraphLookupOperationFromBuilder(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -88,14 +82,14 @@ public Document toDocument(AggregationOperationContext context) { graphLookup.put("from", from); - List mappedStartWith = new ArrayList(startWith.size()); + List 
mappedStartWith = new ArrayList<>(startWith.size()); for (Object startWithElement : startWith) { - if (startWithElement instanceof AggregationExpression) { - mappedStartWith.add(((AggregationExpression) startWithElement).toDocument(context)); - } else if (startWithElement instanceof Field) { - mappedStartWith.add(context.getReference((Field) startWithElement).toString()); + if (startWithElement instanceof AggregationExpression aggregationExpression) { + mappedStartWith.add(aggregationExpression.toDocument(context)); + } else if (startWithElement instanceof Field field) { + mappedStartWith.add(context.getReference(field).toString()); } else { mappedStartWith.add(startWithElement); } @@ -103,8 +97,8 @@ public Document toDocument(AggregationOperationContext context) { graphLookup.put("startWith", mappedStartWith.size() == 1 ? mappedStartWith.iterator().next() : mappedStartWith); - graphLookup.put("connectFromField", connectFrom.getName()); - graphLookup.put("connectToField", connectTo.getName()); + graphLookup.put("connectFromField", connectFrom.getTarget()); + graphLookup.put("connectToField", connectTo.getTarget()); graphLookup.put("as", as.getName()); if (maxDepth != null) { @@ -112,23 +106,30 @@ public Document toDocument(AggregationOperationContext context) { } if (depthField != null) { - graphLookup.put("depthField", depthField.getName()); + graphLookup.put("depthField", depthField.getTarget()); } if (restrictSearchWithMatch != null) { graphLookup.put("restrictSearchWithMatch", context.getMappedObject(restrictSearchWithMatch.getCriteriaObject())); } - return new Document("$graphLookup", graphLookup); + return new Document(getOperator(), graphLookup); + } + + @Override + public String getOperator() { + return "$graphLookup"; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() - */ @Override public ExposedFields getFields() { - return ExposedFields.from(new ExposedField(as, true)); + + 
List fields = new ArrayList<>(2); + fields.add(new ExposedField(as, true)); + if(depthField != null) { + fields.add(new ExposedField(depthField, true)); + } + return ExposedFields.from(fields.toArray(new ExposedField[0])); } /** @@ -140,7 +141,7 @@ public interface FromBuilder { * Set the {@literal collectionName} to apply the {@code $graphLookup} to. * * @param collectionName must not be {@literal null} or empty. - * @return + * @return never {@literal null}. */ StartWithBuilder from(String collectionName); } @@ -155,7 +156,7 @@ public interface StartWithBuilder { * Set the startWith {@literal fieldReferences} to apply the {@code $graphLookup} to. * * @param fieldReferences must not be {@literal null}. - * @return + * @return never {@literal null}. */ ConnectFromBuilder startWith(String... fieldReferences); @@ -163,7 +164,7 @@ public interface StartWithBuilder { * Set the startWith {@literal expressions} to apply the {@code $graphLookup} to. * * @param expressions must not be {@literal null}. - * @return + * @return never {@literal null}. */ ConnectFromBuilder startWith(AggregationExpression... expressions); @@ -172,7 +173,7 @@ public interface StartWithBuilder { * {@link AggregationExpression} to apply the {@code $graphLookup} to. * * @param expressions must not be {@literal null}. - * @return + * @return never {@literal null}. * @throws IllegalArgumentException */ ConnectFromBuilder startWith(Object... expressions); @@ -187,7 +188,7 @@ public interface ConnectFromBuilder { * Set the connectFrom {@literal fieldName} to apply the {@code $graphLookup} to. * * @param fieldName must not be {@literal null} or empty. - * @return + * @return never {@literal null}. */ ConnectToBuilder connectFrom(String fieldName); } @@ -201,7 +202,7 @@ public interface ConnectToBuilder { * Set the connectTo {@literal fieldName} to apply the {@code $graphLookup} to. * * @param fieldName must not be {@literal null} or empty. - * @return + * @return never {@literal null}. 
*/ GraphLookupOperationBuilder connectTo(String fieldName); } @@ -219,28 +220,22 @@ static final class GraphLookupOperationFromBuilder private @Nullable List startWith; private @Nullable String connectFrom; - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.FromBuilder#from(java.lang.String) - */ @Override public StartWithBuilder from(String collectionName) { - Assert.hasText(collectionName, "CollectionName must not be null or empty!"); + Assert.hasText(collectionName, "CollectionName must not be null or empty"); this.from = collectionName; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.StartWithBuilder#startWith(java.lang.String[]) - */ @Override public ConnectFromBuilder startWith(String... fieldReferences) { - Assert.notNull(fieldReferences, "FieldReferences must not be null!"); - Assert.noNullElements(fieldReferences, "FieldReferences must not contain null elements!"); + Assert.notNull(fieldReferences, "FieldReferences must not be null"); + Assert.noNullElements(fieldReferences, "FieldReferences must not contain null elements"); - List fields = new ArrayList(fieldReferences.length); + List fields = new ArrayList<>(fieldReferences.length); for (String fieldReference : fieldReferences) { fields.add(Fields.field(fieldReference)); @@ -250,14 +245,11 @@ public ConnectFromBuilder startWith(String... fieldReferences) { return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.StartWithBuilder#startWith(org.springframework.data.mongodb.core.aggregation.AggregationExpression[]) - */ @Override public ConnectFromBuilder startWith(AggregationExpression... 
expressions) { - Assert.notNull(expressions, "AggregationExpressions must not be null!"); - Assert.noNullElements(expressions, "AggregationExpressions must not contain null elements!"); + Assert.notNull(expressions, "AggregationExpressions must not be null"); + Assert.noNullElements(expressions, "AggregationExpressions must not contain null elements"); this.startWith = Arrays.asList(expressions); return this; @@ -266,8 +258,8 @@ public ConnectFromBuilder startWith(AggregationExpression... expressions) { @Override public ConnectFromBuilder startWith(Object... expressions) { - Assert.notNull(expressions, "Expressions must not be null!"); - Assert.noNullElements(expressions, "Expressions must not contain null elements!"); + Assert.notNull(expressions, "Expressions must not be null"); + Assert.noNullElements(expressions, "Expressions must not contain null elements"); this.startWith = verifyAndPotentiallyTransformStartsWithTypes(expressions); return this; @@ -275,14 +267,14 @@ public ConnectFromBuilder startWith(Object... expressions) { private List verifyAndPotentiallyTransformStartsWithTypes(Object... 
expressions) { - List expressionsToUse = new ArrayList(expressions.length); + List expressionsToUse = new ArrayList<>(expressions.length); for (Object expression : expressions) { assertStartWithType(expression); - if (expression instanceof String) { - expressionsToUse.add(Fields.field((String) expression)); + if (expression instanceof String stringValue) { + expressionsToUse.add(Fields.field(stringValue)); } else { expressionsToUse.add(expression); } @@ -304,25 +296,19 @@ private void assertStartWithType(Object expression) { String.format("Expression must be any of %s but was %s", ALLOWED_START_TYPES, expression.getClass())); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.ConnectFromBuilder#connectFrom(java.lang.String) - */ @Override public ConnectToBuilder connectFrom(String fieldName) { - Assert.hasText(fieldName, "ConnectFrom must not be null or empty!"); + Assert.hasText(fieldName, "ConnectFrom must not be null or empty"); this.connectFrom = fieldName; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.ConnectToBuilder#connectTo(java.lang.String) - */ @Override public GraphLookupOperationBuilder connectTo(String fieldName) { - Assert.hasText(fieldName, "ConnectTo must not be null or empty!"); + Assert.hasText(fieldName, "ConnectTo must not be null or empty"); return new GraphLookupOperationBuilder(from, startWith, connectFrom, fieldName); } @@ -341,11 +327,11 @@ public static final class GraphLookupOperationBuilder { private @Nullable Field depthField; private @Nullable CriteriaDefinition restrictSearchWithMatch; - protected GraphLookupOperationBuilder(String from, List startWith, String connectFrom, + private GraphLookupOperationBuilder(String from, List startWith, String connectFrom, String connectTo) { this.from = from; - this.startWith = new ArrayList(startWith); + this.startWith = new ArrayList<>(startWith); this.connectFrom = 
Fields.field(connectFrom); this.connectTo = Fields.field(connectTo); } @@ -354,11 +340,11 @@ protected GraphLookupOperationBuilder(String from, List startW * Optionally limit the number of recursions. * * @param numberOfRecursions must be greater or equal to zero. - * @return + * @return this. */ public GraphLookupOperationBuilder maxDepth(long numberOfRecursions) { - Assert.isTrue(numberOfRecursions >= 0, "Max depth must be >= 0!"); + Assert.isTrue(numberOfRecursions >= 0, "Max depth must be >= 0"); this.maxDepth = numberOfRecursions; return this; @@ -368,11 +354,11 @@ public GraphLookupOperationBuilder maxDepth(long numberOfRecursions) { * Optionally add a depth field {@literal fieldName} to each traversed document in the search path. * * @param fieldName must not be {@literal null} or empty. - * @return + * @return this. */ public GraphLookupOperationBuilder depthField(String fieldName) { - Assert.hasText(fieldName, "Depth field name must not be null or empty!"); + Assert.hasText(fieldName, "Depth field name must not be null or empty"); this.depthField = Fields.field(fieldName); return this; @@ -386,7 +372,7 @@ public GraphLookupOperationBuilder depthField(String fieldName) { */ public GraphLookupOperationBuilder restrict(CriteriaDefinition criteriaDefinition) { - Assert.notNull(criteriaDefinition, "CriteriaDefinition must not be null!"); + Assert.notNull(criteriaDefinition, "CriteriaDefinition must not be null"); this.restrictSearchWithMatch = criteriaDefinition; return this; @@ -401,7 +387,7 @@ public GraphLookupOperationBuilder restrict(CriteriaDefinition criteriaDefinitio */ public GraphLookupOperation as(String fieldName) { - Assert.hasText(fieldName, "As field name must not be null or empty!"); + Assert.hasText(fieldName, "As field name must not be null or empty"); return new GraphLookupOperation(from, startWith, connectFrom, connectTo, Fields.field(fieldName), maxDepth, depthField, restrictSearchWithMatch); diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GroupOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GroupOperation.java index c7de8cc1fc..10d58a7682 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GroupOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/GroupOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,13 +16,13 @@ package org.springframework.data.mongodb.core.aggregation; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.data.mongodb.core.aggregation.ScriptOperators.Accumulator; import org.springframework.lang.Nullable; import org.springframework.util.Assert; @@ -79,8 +79,8 @@ protected GroupOperation(GroupOperation groupOperation) { */ private GroupOperation(GroupOperation groupOperation, List nextOperations) { - Assert.notNull(groupOperation, "GroupOperation must not be null!"); - Assert.notNull(nextOperations, "NextOperations must not be null!"); + Assert.notNull(groupOperation, "GroupOperation must not be null"); + Assert.notNull(nextOperations, "NextOperations must not be 
null"); this.idFields = groupOperation.idFields; this.operations = new ArrayList(nextOperations.size() + 1); @@ -95,7 +95,7 @@ private GroupOperation(GroupOperation groupOperation, List nextOperat * @return */ protected GroupOperation and(Operation operation) { - return new GroupOperation(this, Arrays.asList(operation)); + return new GroupOperation(this, List.of(operation)); } /** @@ -116,8 +116,8 @@ public static final class GroupOperationBuilder { */ private GroupOperationBuilder(GroupOperation groupOperation, Operation operation) { - Assert.notNull(groupOperation, "GroupOperation must not be null!"); - Assert.notNull(operation, "Operation must not be null!"); + Assert.notNull(groupOperation, "GroupOperation must not be null"); + Assert.notNull(operation, "Operation must not be null"); this.groupOperation = groupOperation; this.operation = operation; @@ -138,7 +138,7 @@ public GroupOperation as(String alias) { * Generates an {@link GroupOperationBuilder} for a {@code $sum}-expression. *

          * Count expressions are emulated via {@code $sum: 1}. - *

          + *

          * * @return */ @@ -167,7 +167,7 @@ public GroupOperationBuilder sum(String reference) { */ public GroupOperationBuilder sum(AggregationExpression expr) { - Assert.notNull(expr, "Expr must not be null!"); + Assert.notNull(expr, "Expr must not be null"); return newBuilder(GroupOps.SUM, null, expr); } @@ -375,14 +375,32 @@ public GroupOperationBuilder stdDevPop(AggregationExpression expr) { return newBuilder(GroupOps.STD_DEV_POP, null, expr); } + /** + * Generates an {@link GroupOperationBuilder} for an {@code $accumulator}-expression. + * + * @param accumulator must not be {@literal null}. + * @return never {@literal null}. + * @since 3.2 + */ + public GroupOperationBuilder accumulate(Accumulator accumulator) { + return new GroupOperationBuilder(this, new Operation(accumulator)); + } + + /** + * Adds a computed field to the {@link GroupOperation}. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. + * @since 4.0 + */ + public GroupOperation and(String fieldName, AggregationExpression expression) { + return new GroupOperationBuilder(this, new Operation(expression)).as(fieldName); + } + private GroupOperationBuilder newBuilder(Keyword keyword, @Nullable String reference, @Nullable Object value) { return new GroupOperationBuilder(this, new Operation(keyword, null, reference, value)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getFields() - */ @Override public ExposedFields getFields() { @@ -395,10 +413,6 @@ public ExposedFields getFields() { return fields; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -429,7 +443,12 @@ public Document toDocument(AggregationOperationContext context) { 
operationObject.putAll(operation.toDocument(context)); } - return new Document("$group", operationObject); + return new Document(getOperator(), operationObject); + } + + @Override + public String getOperator() { + return "$group"; } interface Keyword { @@ -437,12 +456,12 @@ interface Keyword { String toString(); } - private static enum GroupOps implements Keyword { + private enum GroupOps implements Keyword { SUM("$sum"), LAST("$last"), FIRST("$first"), PUSH("$push"), AVG("$avg"), MIN("$min"), MAX("$max"), ADD_TO_SET( "$addToSet"), STD_DEV_POP("$stdDevPop"), STD_DEV_SAMP("$stdDevSamp"); - private String mongoOperator; + private final String mongoOperator; GroupOps(String mongoOperator) { this.mongoOperator = mongoOperator; @@ -456,12 +475,16 @@ public String toString() { static class Operation implements AggregationOperation { - private final Keyword op; + private final @Nullable Keyword op; private final @Nullable String key; private final @Nullable String reference; private final @Nullable Object value; - public Operation(Keyword op, @Nullable String key, @Nullable String reference, @Nullable Object value) { + Operation(AggregationExpression expression) { + this(null, null, null, expression); + } + + public Operation(@Nullable Keyword op, @Nullable String key, @Nullable String reference, @Nullable Object value) { this.op = op; this.key = key; @@ -478,21 +501,26 @@ public ExposedField asField() { } public Document toDocument(AggregationOperationContext context) { - return new Document(key, new Document(op.toString(), getValue(context))); + + Object value = getValue(context); + if(op == null && value instanceof Document) { + return new Document(key, value); + } + return new Document(key, new Document(op.toString(), value)); } public Object getValue(AggregationOperationContext context) { if (reference == null) { - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); + if (value instanceof AggregationExpression 
aggregationExpression) { + return aggregationExpression.toDocument(context); } return value; } - if (Aggregation.SystemVariable.isReferingToSystemVariable(reference)) { + if (SystemVariable.isReferingToSystemVariable(reference)) { return reference; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/InheritingExposedFieldsAggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/InheritingExposedFieldsAggregationOperationContext.java index 52c39c5237..ca6a2e2754 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/InheritingExposedFieldsAggregationOperationContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/InheritingExposedFieldsAggregationOperationContext.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,16 @@ */ package org.springframework.data.mongodb.core.aggregation; +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.lang.Nullable; /** * {@link ExposedFieldsAggregationOperationContext} that inherits fields from its parent * {@link AggregationOperationContext}. 
* * @author Mark Paluch + * @author Christoph Strobl * @since 1.9 */ class InheritingExposedFieldsAggregationOperationContext extends ExposedFieldsAggregationOperationContext { @@ -34,21 +37,23 @@ class InheritingExposedFieldsAggregationOperationContext extends ExposedFieldsAg * * @param exposedFields must not be {@literal null}. * @param previousContext must not be {@literal null}. + * @param lookupPolicy must not be {@literal null}. */ public InheritingExposedFieldsAggregationOperationContext(ExposedFields exposedFields, - AggregationOperationContext previousContext) { + AggregationOperationContext previousContext, FieldLookupPolicy lookupPolicy) { - super(exposedFields, previousContext); + super(exposedFields, previousContext, lookupPolicy); this.previousContext = previousContext; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ExposedFieldsAggregationOperationContext#resolveExposedField(org.springframework.data.mongodb.core.aggregation.Field, java.lang.String) - */ @Override - protected FieldReference resolveExposedField(Field field, String name) { + public Document getMappedObject(Document document) { + return previousContext.getMappedObject(document); + } + + @Override + protected FieldReference resolveExposedField(@Nullable Field field, String name) { FieldReference fieldReference = super.resolveExposedField(field, name); if (fieldReference != null) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LimitOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LimitOperation.java index 76e005f1a3..e73dba1b9f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LimitOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LimitOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. 
+ * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -39,16 +39,17 @@ public class LimitOperation implements AggregationOperation { */ public LimitOperation(long maxElements) { - Assert.isTrue(maxElements >= 0, "Maximum number of elements must be greater or equal to zero!"); + Assert.isTrue(maxElements >= 0, "Maximum number of elements must be greater or equal to zero"); this.maxElements = maxElements; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { - return new Document("$limit", Long.valueOf(maxElements)); + return new Document(getOperator(), maxElements); + } + + @Override + public String getOperator() { + return "$limit"; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LiteralOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LiteralOperators.java index e54ab14134..e277539315 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LiteralOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LiteralOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -33,7 +33,7 @@ public class LiteralOperators { */ public static LiteralOperatorFactory valueOf(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new LiteralOperatorFactory(value); } @@ -51,7 +51,7 @@ public static class LiteralOperatorFactory { */ public LiteralOperatorFactory(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); this.value = value; } @@ -89,7 +89,7 @@ protected String getMongoMethod() { */ public static Literal asLiteral(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Literal(value); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LookupOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LookupOperation.java index be30406aec..282ffbd9e0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LookupOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/LookupOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,44 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.util.function.Supplier; + import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; +import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let; +import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable; import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** - * Encapsulates the aggregation framework {@code $lookup}-operation. We recommend to use the static factory method - * {@link Aggregation#lookup(String, String, String, String)} instead of creating instances of this class directly. + * Encapsulates the aggregation framework {@code $lookup}-operation. We recommend to use the builder provided via + * {@link #newLookup()} instead of creating instances of this class directly. 
* * @author Alessio Fachechi * @author Christoph Strobl * @author Mark Paluch + * @author Sangyong Choi * @since 1.9 * @see MongoDB Aggregation Framework: * $lookup */ public class LookupOperation implements FieldsExposingAggregationOperation, InheritsFieldsAggregationOperation { - private final Field from; + private final String from; + + @Nullable // private final Field localField; + + @Nullable // private final Field foreignField; + + @Nullable // + private final Let let; + + @Nullable // + private final AggregationPipeline pipeline; + private final ExposedField as; /** @@ -48,86 +64,193 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe * @param as must not be {@literal null}. */ public LookupOperation(Field from, Field localField, Field foreignField, Field as) { + this(((Supplier) () -> { + + Assert.notNull(from, "From must not be null"); + return from.getTarget(); + }).get(), localField, foreignField, null, null, as); + } + + /** + * Creates a new {@link LookupOperation} for the given combination of {@link Field}s and {@link AggregationPipeline + * pipeline}. + * + * @param from must not be {@literal null}. + * @param let must not be {@literal null}. + * @param as must not be {@literal null}. + * @since 4.1 + */ + public LookupOperation(String from, @Nullable Let let, AggregationPipeline pipeline, Field as) { + this(from, null, null, let, pipeline, as); + } + + /** + * Creates a new {@link LookupOperation} for the given combination of {@link Field}s and {@link AggregationPipeline + * pipeline}. + * + * @param from must not be {@literal null}. + * @param localField can be {@literal null} if {@literal pipeline} is present. + * @param foreignField can be {@literal null} if {@literal pipeline} is present. + * @param let can be {@literal null} if {@literal localField} and {@literal foreignField} are present. + * @param as must not be {@literal null}. 
+ * @since 4.1 + */ + public LookupOperation(String from, @Nullable Field localField, @Nullable Field foreignField, @Nullable Let let, + @Nullable AggregationPipeline pipeline, Field as) { - Assert.notNull(from, "From must not be null!"); - Assert.notNull(localField, "LocalField must not be null!"); - Assert.notNull(foreignField, "ForeignField must not be null!"); - Assert.notNull(as, "As must not be null!"); + Assert.notNull(from, "From must not be null"); + if (pipeline == null) { + Assert.notNull(localField, "LocalField must not be null"); + Assert.notNull(foreignField, "ForeignField must not be null"); + } else if (localField == null && foreignField == null) { + Assert.notNull(pipeline, "Pipeline must not be null"); + } + Assert.notNull(as, "As must not be null"); this.from = from; this.localField = localField; this.foreignField = foreignField; this.as = new ExposedField(as, true); + this.let = let; + this.pipeline = pipeline; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() - */ @Override public ExposedFields getFields() { return ExposedFields.from(as); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { Document lookupObject = new Document(); - lookupObject.append("from", from.getTarget()); - lookupObject.append("localField", localField.getTarget()); - lookupObject.append("foreignField", foreignField.getTarget()); + lookupObject.append("from", from); + if (localField != null) { + lookupObject.append("localField", localField.getTarget()); + } + if (foreignField != null) { + lookupObject.append("foreignField", foreignField.getTarget()); + } + if (let != null) { + lookupObject.append("let", let.toDocument(context).get("$let", Document.class).get("vars")); + } + 
if (pipeline != null) { + lookupObject.append("pipeline", pipeline.toDocuments(context)); + } + lookupObject.append("as", as.getTarget()); - return new Document("$lookup", lookupObject); + return new Document(getOperator(), lookupObject); + } + + @Override + public String getOperator() { + return "$lookup"; } /** * Get a builder that allows creation of {@link LookupOperation}. * - * @return + * @return never {@literal null}. */ public static FromBuilder newLookup() { return new LookupOperationBuilder(); } - public static interface FromBuilder { + public interface FromBuilder { /** * @param name the collection in the same database to perform the join with, must not be {@literal null} or empty. - * @return + * @return never {@literal null}. */ LocalFieldBuilder from(String name); } - public static interface LocalFieldBuilder { + public interface LocalFieldBuilder extends PipelineBuilder { /** * @param name the field from the documents input to the {@code $lookup} stage, must not be {@literal null} or * empty. - * @return + * @return never {@literal null}. */ ForeignFieldBuilder localField(String name); } - public static interface ForeignFieldBuilder { + public interface ForeignFieldBuilder { /** * @param name the field from the documents in the {@code from} collection, must not be {@literal null} or empty. - * @return + * @return never {@literal null}. */ AsBuilder foreignField(String name); } - public static interface AsBuilder { + /** + * @since 4.1 + * @author Christoph Strobl + */ + public interface LetBuilder { + + /** + * Specifies {@link Let#getVariableNames() variables) that can be used in the + * {@link PipelineBuilder#pipeline(AggregationOperation...) pipeline stages}. + * + * @param let must not be {@literal null}. + * @return never {@literal null}. + * @see PipelineBuilder + */ + PipelineBuilder let(Let let); + + /** + * Specifies {@link Let#getVariableNames() variables) that can be used in the + * {@link PipelineBuilder#pipeline(AggregationOperation...) 
pipeline stages}. + * + * @param variables must not be {@literal null}. + * @return never {@literal null}. + * @see PipelineBuilder + */ + default PipelineBuilder let(ExpressionVariable... variables) { + return let(Let.just(variables)); + } + } + + /** + * @since 4.1 + * @author Christoph Strobl + */ + public interface PipelineBuilder extends LetBuilder { + + /** + * Specifies the {@link AggregationPipeline pipeline} that determines the resulting documents. + * + * @param pipeline must not be {@literal null}. + * @return never {@literal null}. + */ + AsBuilder pipeline(AggregationPipeline pipeline); + + /** + * Specifies the {@link AggregationPipeline#getOperations() stages} that determine the resulting documents. + * + * @param stages must not be {@literal null} can be empty. + * @return never {@literal null}. + */ + default AsBuilder pipeline(AggregationOperation... stages) { + return pipeline(AggregationPipeline.of(stages)); + } /** * @param name the name of the new array field to add to the input documents, must not be {@literal null} or empty. - * @return + * @return new instance of {@link LookupOperation}. + */ + LookupOperation as(String name); + } + + public interface AsBuilder extends PipelineBuilder { + + /** + * @param name the name of the new array field to add to the input documents, must not be {@literal null} or empty. + * @return new instance of {@link LookupOperation}. */ LookupOperation as(String name); } @@ -141,10 +264,12 @@ public static interface AsBuilder { public static final class LookupOperationBuilder implements FromBuilder, LocalFieldBuilder, ForeignFieldBuilder, AsBuilder { - private @Nullable Field from; + private @Nullable String from; private @Nullable Field localField; private @Nullable Field foreignField; private @Nullable ExposedField as; + private @Nullable Let let; + private @Nullable AggregationPipeline pipeline; /** * Creates new builder for {@link LookupOperation}. 
@@ -158,24 +283,15 @@ public static FromBuilder newBuilder() { @Override public LocalFieldBuilder from(String name) { - Assert.hasText(name, "'From' must not be null or empty!"); - from = Fields.field(name); + Assert.hasText(name, "'From' must not be null or empty"); + from = name; return this; } - @Override - public LookupOperation as(String name) { - - Assert.hasText(name, "'As' must not be null or empty!"); - as = new ExposedField(Fields.field(name), true); - return new LookupOperation(from, localField, foreignField, - as); - } - @Override public AsBuilder foreignField(String name) { - Assert.hasText(name, "'ForeignField' must not be null or empty!"); + Assert.hasText(name, "'ForeignField' must not be null or empty"); foreignField = Fields.field(name); return this; } @@ -183,9 +299,33 @@ public AsBuilder foreignField(String name) { @Override public ForeignFieldBuilder localField(String name) { - Assert.hasText(name, "'LocalField' must not be null or empty!"); + Assert.hasText(name, "'LocalField' must not be null or empty"); localField = Fields.field(name); return this; } + + @Override + public PipelineBuilder let(Let let) { + + Assert.notNull(let, "Let must not be null"); + this.let = let; + return this; + } + + @Override + public AsBuilder pipeline(AggregationPipeline pipeline) { + + Assert.notNull(pipeline, "Pipeline must not be null"); + this.pipeline = pipeline; + return this; + } + + @Override + public LookupOperation as(String name) { + + Assert.hasText(name, "'As' must not be null or empty"); + as = new ExposedField(Fields.field(name), true); + return new LookupOperation(from, localField, foreignField, let, pipeline, as); + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java index 27861a2d7b..da1dbfc027 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,6 +16,7 @@ package org.springframework.data.mongodb.core.aggregation; import org.bson.Document; + import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.util.Assert; @@ -29,12 +30,15 @@ * @author Sebastian Herold * @author Thomas Darimont * @author Oliver Gierke + * @author Divya Srivastava * @since 1.3 - * @see MongoDB Aggregation Framework: $match + * @see MongoDB Aggregation Framework: + * $match */ public class MatchOperation implements AggregationOperation { private final CriteriaDefinition criteriaDefinition; + private final AggregationExpression expression; /** * Creates a new {@link MatchOperation} for the given {@link CriteriaDefinition}. 
@@ -43,16 +47,35 @@ public class MatchOperation implements AggregationOperation { */ public MatchOperation(CriteriaDefinition criteriaDefinition) { - Assert.notNull(criteriaDefinition, "Criteria must not be null!"); + Assert.notNull(criteriaDefinition, "Criteria must not be null"); + this.criteriaDefinition = criteriaDefinition; + this.expression = null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Creates a new {@link MatchOperation} for the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @since 3.3 */ + public MatchOperation(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + this.criteriaDefinition = null; + this.expression = expression; + } + @Override public Document toDocument(AggregationOperationContext context) { - return new Document("$match", context.getMappedObject(criteriaDefinition.getCriteriaObject())); + + return new Document(getOperator(), + context.getMappedObject(expression != null ? expression.toDocument() : criteriaDefinition.getCriteriaObject())); + } + + @Override + public String getOperator() { + return "$match"; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MergeOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MergeOperation.java new file mode 100644 index 0000000000..314f83fc7c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MergeOperation.java @@ -0,0 +1,587 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; +import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * Encapsulates the {@code $merge}-operation. + *

          + * We recommend to use the {@link MergeOperationBuilder builder} via {@link MergeOperation#builder()} instead of + * creating instances of this class directly. + * + * @see MongoDB Documentation + * @author Christoph Strobl + * @since 3.0 + */ +public class MergeOperation implements FieldsExposingAggregationOperation, InheritsFieldsAggregationOperation { + + private final MergeOperationTarget into; + private final UniqueMergeId on; + private final @Nullable Let let; + private final @Nullable WhenDocumentsMatch whenMatched; + private final @Nullable WhenDocumentsDontMatch whenNotMatched; + + /** + * Create new instance of {@link MergeOperation}. + * + * @param into the target (collection and database) + * @param on the unique identifier. Can be {@literal null}. + * @param let exposed variables for {@link WhenDocumentsMatch#updateWith(Aggregation)}. Can be {@literal null}. + * @param whenMatched behavior if a result document matches an existing one in the target collection. Can be + * {@literal null}. + * @param whenNotMatched behavior if a result document does not match an existing one in the target collection. Can be + * {@literal null}. + */ + public MergeOperation(MergeOperationTarget into, UniqueMergeId on, @Nullable Let let, + @Nullable WhenDocumentsMatch whenMatched, @Nullable WhenDocumentsDontMatch whenNotMatched) { + + Assert.notNull(into, "Into must not be null Please provide a target collection"); + Assert.notNull(on, "On must not be null Use UniqueMergeId.id() instead"); + + this.into = into; + this.on = on; + this.let = let; + this.whenMatched = whenMatched; + this.whenNotMatched = whenNotMatched; + } + + /** + * Simplified form to apply all default options for {@code $merge} (including writing to a collection in the same + * database). + * + * @param collection the output collection within the same database. + * @return new instance of {@link MergeOperation}. 
+ */ + public static MergeOperation mergeInto(String collection) { + return builder().intoCollection(collection).build(); + } + + /** + * Access the {@link MergeOperationBuilder builder API} to create a new instance of {@link MergeOperation}. + * + * @return new instance of {@link MergeOperationBuilder}. + */ + public static MergeOperationBuilder builder() { + return new MergeOperationBuilder(); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + if (isJustCollection()) { + return new Document(getOperator(), into.collection); + } + + Document $merge = new Document(); + $merge.putAll(into.toDocument(context)); + + if (!on.isJustIdField()) { + $merge.putAll(on.toDocument(context)); + } + + if (let != null) { + $merge.append("let", let.toDocument(context).get("$let", Document.class).get("vars")); + } + + if (whenMatched != null) { + $merge.putAll(whenMatched.toDocument(context)); + } + + if (whenNotMatched != null) { + $merge.putAll(whenNotMatched.toDocument(context)); + } + + return new Document(getOperator(), $merge); + } + + @Override + public String getOperator() { + return "$merge"; + } + + @Override + public ExposedFields getFields() { + + if (let == null) { + return ExposedFields.from(); + } + + return ExposedFields.synthetic(Fields.fields(let.getVariableNames())); + } + + @Override + public boolean inheritsFields() { + return true; + } + + /** + * @return true if nothing more than the collection is specified. + */ + private boolean isJustCollection() { + return into.isTargetingSameDatabase() && on.isJustIdField() && let == null && whenMatched == null + && whenNotMatched == null; + } + + /** + * Value object representing the unique id used during the merge operation to identify duplicates in the target + * collection. 
+ * + * @author Christoph Strobl + */ + public static class UniqueMergeId { + + private static final UniqueMergeId ID = new UniqueMergeId(Collections.emptyList()); + + private final Collection uniqueIdentifier; + + private UniqueMergeId(Collection uniqueIdentifier) { + this.uniqueIdentifier = uniqueIdentifier; + } + + public static UniqueMergeId ofIdFields(String... fields) { + + Assert.noNullElements(fields, "Fields must not contain null values"); + + if (ObjectUtils.isEmpty(fields)) { + return id(); + } + + return new UniqueMergeId(Arrays.asList(fields)); + } + + /** + * Merge Documents by using the MongoDB {@literal _id} field. + * + * @return never {@literal null}. + */ + public static UniqueMergeId id() { + return ID; + } + + boolean isJustIdField() { + return this.equals(ID); + } + + Document toDocument(AggregationOperationContext context) { + + List mappedOn = uniqueIdentifier.stream().map(context::getReference).map(FieldReference::getRaw) + .collect(Collectors.toList()); + return new Document("on", mappedOn.size() == 1 ? mappedOn.iterator().next() : mappedOn); + } + } + + /** + * Value Object representing the {@code into} field of a {@code $merge} aggregation stage.
          + * If not stated explicitly via {@link MergeOperationTarget#inDatabase(String)} the {@literal collection} is created + * in the very same {@literal database}. In this case {@code into} is just a single String holding the collection + * name.
          + * + *

          +	 *     into: "target-collection-name"
          +	 * 
          + * + * If the collection needs to be in a different database {@code into} will be a {@link Document} like the following + * + *
          +	 * {
          +	 * 	into: {}
          +	 * }
          +	 * 
          + * + * @author Christoph Strobl + * @since 2.3 + */ + public static class MergeOperationTarget { + + private final @Nullable String database; + private final String collection; + + private MergeOperationTarget(@Nullable String database, String collection) { + + Assert.hasText(collection, "Collection must not be null nor empty"); + + this.database = database; + this.collection = collection; + } + + /** + * @param collection The output collection results will be stored in. Must not be {@literal null}. + * @return new instance of {@link MergeOperationTarget}. + */ + public static MergeOperationTarget collection(String collection) { + return new MergeOperationTarget(null, collection); + } + + /** + * Optionally specify the target database if different from the source one. + * + * @param database must not be {@literal null}. + * @return new instance of {@link MergeOperationTarget}. + */ + public MergeOperationTarget inDatabase(String database) { + return new MergeOperationTarget(database, collection); + } + + boolean isTargetingSameDatabase() { + return !StringUtils.hasText(database); + } + + Document toDocument(AggregationOperationContext context) { + + return new Document("into", + !StringUtils.hasText(database) ? collection : new Document("db", database).append("coll", collection)); + } + } + + /** + * Value Object specifying how to deal with a result document that matches an existing document in the collection + * based on the fields of the {@code on} property describing the unique identifier. + * + * @author Christoph Strobl + * @since 2.3 + */ + public static class WhenDocumentsMatch { + + private final Object value; + + private WhenDocumentsMatch(Object value) { + this.value = value; + } + + public static WhenDocumentsMatch whenMatchedOf(String value) { + return new WhenDocumentsMatch(value); + } + + /** + * Replace the existing document in the output collection with the matching results document. 
+ * + * @return new instance of {@link WhenDocumentsMatch}. + */ + public static WhenDocumentsMatch replaceDocument() { + return whenMatchedOf("replace"); + } + + /** + * Keep the existing document in the output collection. + * + * @return new instance of {@link WhenDocumentsMatch}. + */ + public static WhenDocumentsMatch keepExistingDocument() { + return whenMatchedOf("keepExisting"); + } + + /** + * Merge the matching documents. Please see the MongoDB reference documentation for details. + * + * @return new instance of {@link WhenDocumentsMatch}. + */ + public static WhenDocumentsMatch mergeDocuments() { + return whenMatchedOf("merge"); + } + + /** + * Stop and fail the aggregation operation. Does not revert already performed changes on previous documents. + * + * @return new instance of {@link WhenDocumentsMatch}. + */ + public static WhenDocumentsMatch failOnMatch() { + return whenMatchedOf("fail"); + } + + /** + * Use an {@link Aggregation} to update the document in the collection. Please see the MongoDB reference + * documentation for details. + * + * @param aggregation must not be {@literal null}. + * @return new instance of {@link WhenDocumentsMatch}. + */ + public static WhenDocumentsMatch updateWith(Aggregation aggregation) { + return new WhenDocumentsMatch(aggregation); + } + + /** + * Use an aggregation pipeline to update the document in the collection. Please see the MongoDB reference + * documentation for details. + * + * @param aggregationPipeline must not be {@literal null}. + * @return new instance of {@link WhenDocumentsMatch}. 
+ */ + public static WhenDocumentsMatch updateWith(List aggregationPipeline) { + return new WhenDocumentsMatch(aggregationPipeline); + } + + Document toDocument(AggregationOperationContext context) { + + if (value instanceof Aggregation aggregation) { + return new Document("whenMatched", aggregation.toPipeline(context)); + } + + return new Document("whenMatched", value); + } + } + + /** + * Value Object specifying how to deal with a result document that do not match an existing document in the collection + * based on the fields of the {@code on} property describing the unique identifier. + * + * @author Christoph Strobl + * @since 2.3 + */ + public static class WhenDocumentsDontMatch { + + private final String value; + + private WhenDocumentsDontMatch(String value) { + + Assert.notNull(value, "Value must not be null"); + + this.value = value; + } + + /** + * Factory method creating {@link WhenDocumentsDontMatch} from a {@code value} literal. + * + * @param value must not be {@literal null}. + * @return new instance of {@link WhenDocumentsDontMatch}. + */ + public static WhenDocumentsDontMatch whenNotMatchedOf(String value) { + return new WhenDocumentsDontMatch(value); + } + + /** + * Insert the document into the output collection. + * + * @return new instance of {@link WhenDocumentsDontMatch}. + */ + public static WhenDocumentsDontMatch insertNewDocument() { + return whenNotMatchedOf("insert"); + } + + /** + * Discard the document - do not insert the document into the output collection. + * + * @return new instance of {@link WhenDocumentsDontMatch}. + */ + public static WhenDocumentsDontMatch discardDocument() { + return whenNotMatchedOf("discard"); + } + + /** + * Stop and fail the aggregation operation. Does not revert already performed changes on previous documents. + * + * @return new instance of {@link WhenDocumentsDontMatch}. 
+ */ + public static WhenDocumentsDontMatch failWhenNotMatch() { + return whenNotMatchedOf("fail"); + } + + public Document toDocument(AggregationOperationContext context) { + return new Document("whenNotMatched", value); + } + } + + /** + * Builder API to construct a {@link MergeOperation}. + * + * @author Christoph Strobl + * @since 2.3 + */ + public static class MergeOperationBuilder { + + private String collection; + private @Nullable String database; + private UniqueMergeId id = UniqueMergeId.id(); + private @Nullable Let let; + private @Nullable WhenDocumentsMatch whenMatched; + private @Nullable WhenDocumentsDontMatch whenNotMatched; + + public MergeOperationBuilder() {} + + /** + * Required output collection name to store results to. + * + * @param collection must not be {@literal null} nor empty. + * @return this. + */ + public MergeOperationBuilder intoCollection(String collection) { + + Assert.hasText(collection, "Collection must not be null nor empty"); + + this.collection = collection; + return this; + } + + /** + * Optionally define a target database if different from the current one. + * + * @param database must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder inDatabase(String database) { + + this.database = database; + return this; + } + + /** + * Define the target to store results in. + * + * @param into must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder into(MergeOperationTarget into) { + + this.database = into.database; + this.collection = into.collection; + return this; + } + + /** + * Define the target to store results in. + * + * @param target must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder target(MergeOperationTarget target) { + return into(target); + } + + /** + * Appends a single field or multiple fields that act as a unique identifier for a document. 
The identifier + * determines if a results document matches an already existing document in the output collection.
          + * The aggregation results documents must contain the field(s) specified via {@code on}, unless it's the {@code _id} + * field. + * + * @param fields must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder on(String... fields) { + return id(UniqueMergeId.ofIdFields(fields)); + } + + /** + * Set the identifier that determines if a results document matches an already existing document in the output + * collection. + * + * @param id must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder id(UniqueMergeId id) { + + this.id = id; + return this; + } + + /** + * Expose the variables defined by {@link Let} to the {@link WhenDocumentsMatch#updateWith(Aggregation) update + * aggregation}. + * + * @param let the variable expressions + * @return this. + */ + public MergeOperationBuilder let(Let let) { + + this.let = let; + return this; + } + + /** + * Expose the variables defined by {@link Let} to the {@link WhenDocumentsMatch#updateWith(Aggregation) update + * aggregation}. + * + * @param let the variable expressions + * @return this. + */ + public MergeOperationBuilder exposeVariablesOf(Let let) { + return let(let); + } + + /** + * The action to take place when documents already exist in the target collection. + * + * @param whenMatched must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder whenMatched(WhenDocumentsMatch whenMatched) { + + this.whenMatched = whenMatched; + return this; + } + + /** + * The action to take place when documents already exist in the target collection. + * + * @param whenMatched must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder whenDocumentsMatch(WhenDocumentsMatch whenMatched) { + return whenMatched(whenMatched); + } + + /** + * The {@link Aggregation action} to take place when documents already exist in the target collection. + * + * @param aggregation must not be {@literal null}. + * @return this. 
+ */ + public MergeOperationBuilder whenDocumentsMatchApply(Aggregation aggregation) { + return whenMatched(WhenDocumentsMatch.updateWith(aggregation)); + } + + /** + * The action to take place when documents do not already exist in the target collection. + * + * @param whenNotMatched must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder whenNotMatched(WhenDocumentsDontMatch whenNotMatched) { + + this.whenNotMatched = whenNotMatched; + return this; + } + + /** + * The action to take place when documents do not already exist in the target collection. + * + * @param whenNotMatched must not be {@literal null}. + * @return this. + */ + public MergeOperationBuilder whenDocumentsDontMatch(WhenDocumentsDontMatch whenNotMatched) { + return whenNotMatched(whenNotMatched); + } + + /** + * @return new instance of {@link MergeOperation}. + */ + public MergeOperation build() { + return new MergeOperation(new MergeOperationTarget(database, collection), id, let, whenMatched, whenNotMatched); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/NestedDelegatingExpressionAggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/NestedDelegatingExpressionAggregationOperationContext.java index 3b40d19056..c553a7be02 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/NestedDelegatingExpressionAggregationOperationContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/NestedDelegatingExpressionAggregationOperationContext.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,13 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.util.Collection; + import org.bson.Document; -import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; + +import org.bson.codecs.configuration.CodecRegistry; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExpressionFieldReference; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; import org.springframework.util.Assert; /** @@ -26,47 +30,72 @@ * variable. * * @author Christoph Strobl + * @author Mark Paluch * @since 1.10 */ class NestedDelegatingExpressionAggregationOperationContext implements AggregationOperationContext { private final AggregationOperationContext delegate; + private final Collection inners; /** * Creates new {@link NestedDelegatingExpressionAggregationOperationContext}. * * @param referenceContext must not be {@literal null}. 
*/ - public NestedDelegatingExpressionAggregationOperationContext(AggregationOperationContext referenceContext) { + NestedDelegatingExpressionAggregationOperationContext(AggregationOperationContext referenceContext, + Collection inners) { - Assert.notNull(referenceContext, "Reference context must not be null!"); + Assert.notNull(referenceContext, "Reference context must not be null"); this.delegate = referenceContext; + this.inners = inners; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document) - */ @Override public Document getMappedObject(Document document) { return delegate.getMappedObject(document); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.Field) - */ + @Override + public Document getMappedObject(Document document, Class type) { + return delegate.getMappedObject(document, type); + } + @Override public FieldReference getReference(Field field) { - return new ExpressionFieldReference(delegate.getReference(field)); + + FieldReference reference = delegate.getReference(field); + return isInnerVariableReference(field) ? 
new ExpressionFieldReference(delegate.getReference(field)) : reference; + } + + private boolean isInnerVariableReference(Field field) { + + if (inners.isEmpty()) { + return false; + } + + for (Field inner : inners) { + if (inner.getName().equals(field.getName()) + || (field.getTarget().contains(".") && field.getTarget().startsWith(inner.getName()))) { + return true; + } + } + + return false; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String) - */ @Override public FieldReference getReference(String name) { return new ExpressionFieldReference(delegate.getReference(name)); } + + @Override + public Fields getFields(Class type) { + return delegate.getFields(type); + } + + @Override + public CodecRegistry getCodecRegistry() { + return delegate.getCodecRegistry(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ObjectOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ObjectOperators.java new file mode 100644 index 0000000000..25189241b7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ObjectOperators.java @@ -0,0 +1,532 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + +import org.bson.Document; +import org.springframework.util.Assert; + +/** + * Gateway for + * object + * expression operators. + * + * @author Christoph Strobl + * @since 2.1 + */ +public class ObjectOperators { + + /** + * Take the value referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ObjectOperatorFactory}. + */ + public static ObjectOperatorFactory valueOf(String fieldReference) { + return new ObjectOperatorFactory(Fields.field(fieldReference)); + } + + /** + * Take the value provided by the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ObjectOperatorFactory}. + */ + public static ObjectOperatorFactory valueOf(AggregationExpression expression) { + return new ObjectOperatorFactory(expression); + } + + /** + * Use the value from the given {@link SystemVariable} as input for the target {@link AggregationExpression expression}. + * + * @param variable the {@link SystemVariable} to use (eg. {@link SystemVariable#ROOT}. + * @return new instance of {@link ObjectOperatorFactory}. + * @since 4.2 + */ + public static ObjectOperatorFactory valueOf(SystemVariable variable) { + return new ObjectOperatorFactory(Fields.field(variable.getName(), variable.getTarget())); + } + + /** + * Get the value of the field with given name from the {@literal $$CURRENT} object. + * Short version for {@code ObjectOperators.valueOf("$$CURRENT").getField(fieldName)}. + * + * @param fieldName the field name. + * @return new instance of {@link AggregationExpression}. 
+ * @since 4.2 + */ + public static AggregationExpression getValueOf(String fieldName) { + return new ObjectOperatorFactory(SystemVariable.CURRENT).getField(fieldName); + } + + /** + * Set the value of the field with given name on the {@literal $$CURRENT} object. + * Short version for {@code ObjectOperators.valueOf($$CURRENT).setField(fieldName).toValue(value)}. + * + * @param fieldName the field name. + * @return new instance of {@link AggregationExpression}. + * @since 4.2 + */ + public static AggregationExpression setValueTo(String fieldName, Object value) { + return new ObjectOperatorFactory(SystemVariable.CURRENT).setField(fieldName).toValue(value); + } + + /** + * @author Christoph Strobl + */ + public static class ObjectOperatorFactory { + + private final Object value; + + /** + * Creates new {@link ObjectOperatorFactory} for given {@literal value}. + * + * @param value must not be {@literal null}. + */ + public ObjectOperatorFactory(Object value) { + + Assert.notNull(value, "Value must not be null"); + + this.value = value; + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes the associated value and uses + * {@literal $mergeObjects} as an accumulator within the {@literal $group} stage.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link MergeObjects}. + */ + public MergeObjects merge() { + return MergeObjects.merge(value); + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes the associated value and combines it with the + * given values (documents or mapped objects) into a single document.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link MergeObjects}. + */ + public MergeObjects mergeWith(Object... values) { + return merge().mergeWith(values); + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes the associated value and combines it with the + * values of the given {@link Field field references} into a single document.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link MergeObjects}. + */ + public MergeObjects mergeWithValuesOf(String... fieldReferences) { + return merge().mergeWithValuesOf(fieldReferences); + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes the associated value and combines it with the + * result values of the given {@link Aggregation expressions} into a single document.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link MergeObjects}. + */ + public MergeObjects mergeWithValuesOf(AggregationExpression... expression) { + return merge().mergeWithValuesOf(expression); + } + + /** + * Creates new {@link ObjectToArray aggregation expression} that takes the associated value and converts it to an + * array of {@link Document documents} that contain two fields {@literal k} and {@literal v} each.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @since 2.1 + */ + public ObjectToArray toArray() { + return ObjectToArray.toArray(value); + } + + /** + * Creates new {@link GetField aggregation expression} that takes the associated value and obtains the value of the + * field with matching name. + * + * @since 4.0 + */ + public GetField getField(String fieldName) { + return GetField.getField(Fields.field(fieldName)).of(value); + } + + /** + * Creates new {@link SetField aggregation expression} that takes the associated value and sets the value of the + * field with matching name. + * + * @since 4.0 + */ + public SetField setField(String fieldName) { + return SetField.field(Fields.field(fieldName)).input(value); + } + + /** + * Creates new {@link SetField aggregation expression} that takes the associated value and removes the field with + * matching name. + * + * @since 4.0 + */ + public AggregationExpression removeField(String fieldName) { + return SetField.field(fieldName).input(value).toValue(SystemVariable.REMOVE); + } + } + + /** + * {@link AggregationExpression} for {@code $mergeObjects} that combines multiple documents into a single document. + *
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/mergeObjects/ + * @since 2.1 + */ + public static class MergeObjects extends AbstractAggregationExpression { + + private MergeObjects(Object value) { + super(value); + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes given values and combines them into a single + * document.
          + * + * @param values must not be {@literal null}. + * @return new instance of {@link MergeObjects}. + */ + public static MergeObjects merge(Object... values) { + return new MergeObjects(Arrays.asList(values)); + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes the given {@link Field field references} and + * combines them into a single document. + * + * @param fieldReferences must not be {@literal null}. + * @return new instance of {@link MergeObjects}. + */ + public static MergeObjects mergeValuesOf(String... fieldReferences) { + return merge(Arrays.stream(fieldReferences).map(Fields::field).toArray()); + } + + /** + * Creates new {@link MergeObjects aggregation expression} that takes the result of the given {@link Aggregation + * expressions} and combines them into a single document. + * + * @param expressions must not be {@literal null}. + * @return new instance of {@link MergeObjects}. + */ + public static MergeObjects mergeValuesOf(AggregationExpression... expressions) { + return merge(expressions); + } + + /** + * Creates new {@link MergeObjects aggregation expression} by adding the given {@link Field field references}. + * + * @param fieldReferences must not be {@literal null}. + * @return new instance of {@link MergeObjects}. + */ + public MergeObjects mergeWithValuesOf(String... fieldReferences) { + return mergeWith(Arrays.stream(fieldReferences).map(Fields::field).toArray()); + } + + /** + * Creates new {@link MergeObjects aggregation expression} by adding the given {@link AggregationExpression + * expressions}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link MergeObjects}. + */ + public MergeObjects mergeWithValuesOf(AggregationExpression... expression) { + return mergeWith(expression); + } + + /** + * Creates new {@link MergeObjects aggregation expression} by adding the given values (documents or mapped objects). + * + * @param values must not be {@literal null}. 
+ * @return new instance of {@link MergeObjects}. + */ + public MergeObjects mergeWith(Object... values) { + return new MergeObjects(append(Arrays.asList(values))); + } + + @Override + public Document toDocument(Object value, AggregationOperationContext context) { + return super.toDocument(potentiallyExtractSingleValue(value), context); + } + + @SuppressWarnings("unchecked") + private Object potentiallyExtractSingleValue(Object value) { + + if (value instanceof Collection collection && collection.size() == 1) { + return collection.iterator().next(); + } + return value; + } + + @Override + protected String getMongoMethod() { + return "$mergeObjects"; + } + } + + /** + * {@link AggregationExpression} for {@code $objectToArray} that converts a document to an array of {@link Document + * documents} that each contains two fields {@literal k} and {@literal v}.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/objectToArray/ + * @since 2.1 + */ + public static class ObjectToArray extends AbstractAggregationExpression { + + private ObjectToArray(Object value) { + super(value); + } + + /** + * Creates new {@link ObjectToArray aggregation expression} that takes the value pointed to by given {@link Field + * fieldReference} and converts it to an array. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ObjectToArray}. + */ + public static ObjectToArray valueOfToArray(String fieldReference) { + return toArray(Fields.field(fieldReference)); + } + + /** + * Creates new {@link ObjectToArray aggregation expression} that takes the result value of the given + * {@link AggregationExpression expression} and converts it to an array. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ObjectToArray}. + */ + public static ObjectToArray valueOfToArray(AggregationExpression expression) { + return toArray(expression); + } + + /** + * Creates new {@link ObjectToArray aggregation expression} that takes the given value and converts it to an array. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ObjectToArray}. + */ + public static ObjectToArray toArray(Object value) { + return new ObjectToArray(value); + } + + @Override + protected String getMongoMethod() { + return "$objectToArray"; + } + } + + /** + * {@link AggregationExpression} for {@code $getField}. + * + * @author Christoph Strobl + * @since 4.0 + */ + public static class GetField extends AbstractAggregationExpression { + + protected GetField(Object value) { + super(value); + } + + /** + * Creates new {@link GetField aggregation expression} that takes the value pointed to by given {@code fieldName}. + * + * @param fieldName must not be {@literal null}. 
+ * @return new instance of {@link GetField}. + */ + public static GetField getField(String fieldName) { + return new GetField(Collections.singletonMap("field", fieldName)); + } + + /** + * Creates new {@link GetField aggregation expression} that takes the value pointed to by given {@link Field}. + * + * @param field must not be {@literal null}. + * @return new instance of {@link GetField}. + */ + public static GetField getField(Field field) { + return new GetField(Collections.singletonMap("field", field)); + } + + /** + * Creates new {@link GetField aggregation expression} that takes the value pointed to by given + * {@code field reference}. + * + * @param fieldRef must not be {@literal null}. + * @return new instance of {@link GetField}. + */ + public GetField of(String fieldRef) { + return of(Fields.field(fieldRef)); + } + + /** + * Creates new {@link GetField aggregation expression} that takes the value pointed to by given + * {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link GetField}. + */ + public GetField of(AggregationExpression expression) { + return of((Object) expression); + } + + private GetField of(Object fieldRef) { + return new GetField(append("input", fieldRef)); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + if(isArgumentMap() && get("field") instanceof Field field) { + return new GetField(append("field", context.getReference(field).getRaw())).toDocument(context); + } + return super.toDocument(context); + } + + @Override + protected String getMongoMethod() { + return "$getField"; + } + } + + /** + * {@link AggregationExpression} for {@code $setField}. 
+ * + * @author Christoph Strobl + * @since 4.0 + */ + public static class SetField extends AbstractAggregationExpression { + + protected SetField(Object value) { + super(value); + } + + /** + * Creates new {@link SetField aggregation expression} that takes the value pointed to by given input + * {@code fieldName}. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link SetField}. + */ + public static SetField field(String fieldName) { + return new SetField(Collections.singletonMap("field", fieldName)); + } + + /** + * Creates new {@link SetField aggregation expression} that takes the value pointed to by given input {@link Field}. + * + * @param field must not be {@literal null}. + * @return new instance of {@link SetField}. + */ + public static SetField field(Field field) { + return new SetField(Collections.singletonMap("field", field)); + } + + /** + * Creates new {@link GetField aggregation expression} that takes the value pointed to by given input + * {@code field reference}. + * + * @param fieldRef must not be {@literal null}. + * @return new instance of {@link GetField}. + */ + public SetField input(String fieldRef) { + return input(Fields.field(fieldRef)); + } + + /** + * Creates new {@link SetField aggregation expression} that takes the value pointed to by given input + * {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetField}. + */ + public SetField input(AggregationExpression expression) { + return input((Object) expression); + } + + /** + * Creates new {@link SetField aggregation expression} that takes the value pointed to by given input + * {@code field reference}. + * + * @param fieldRef must not be {@literal null}. + * @return new instance of {@link SetField}. 
+ */ + private SetField input(Object fieldRef) { + return new SetField(append("input", fieldRef)); + } + + /** + * Creates new {@link SetField aggregation expression} providing the {@code value} using {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link SetField}. + */ + public SetField toValueOf(String fieldReference) { + return toValue(Fields.field(fieldReference)); + } + + /** + * Creates new {@link SetField aggregation expression} providing the {@code value} using + * {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SetField}. + */ + public SetField toValueOf(AggregationExpression expression) { + return toValue(expression); + } + + /** + * Creates new {@link SetField aggregation expression} providing the {@code value}. + * + * @param value + * @return new instance of {@link SetField}. + */ + public SetField toValue(Object value) { + return new SetField(append("value", value)); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + if(get("field") instanceof Field field) { + return new SetField(append("field", context.getReference(field).getRaw())).toDocument(context); + } + return super.toDocument(context); + } + + @Override + protected String getMongoMethod() { + return "$setField"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/OutOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/OutOperation.java index f054485ea2..51520f0868 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/OutOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/OutOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +16,10 @@ package org.springframework.data.mongodb.core.aggregation; import org.bson.Document; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; /** * Encapsulates the {@code $out}-operation. @@ -26,26 +29,226 @@ * * @author Nikolay Bogdanov * @author Christoph Strobl - * @see MongoDB Aggregation Framework: $out + * @see MongoDB Aggregation Framework: + * $out */ public class OutOperation implements AggregationOperation { + private final @Nullable String databaseName; private final String collectionName; + private final @Nullable Document uniqueKey; + private final @Nullable OutMode mode; /** * @param outCollectionName Collection name to export the results. Must not be {@literal null}. */ public OutOperation(String outCollectionName) { - Assert.notNull(outCollectionName, "Collection name must not be null!"); - this.collectionName = outCollectionName; + this(null, outCollectionName, null, null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * @param databaseName Optional database name the target collection is located in. Can be {@literal null}. + * @param collectionName Collection name to export the results. Must not be {@literal null}. Can be {@literal null}. 
+ * @param uniqueKey Optional unique key spec to identify a document in the target collection for replacement or merge. + * @param mode The mode for merging the aggregation pipeline output with the target collection. Can be + * {@literal null}. + * @since 2.2 + */ + private OutOperation(@Nullable String databaseName, String collectionName, @Nullable Document uniqueKey, + @Nullable OutMode mode) { + + Assert.notNull(collectionName, "Collection name must not be null"); + + this.databaseName = databaseName; + this.collectionName = collectionName; + this.uniqueKey = uniqueKey; + this.mode = mode; + } + + /** + * Optionally specify the database of the target collection.
          + * NOTE: Requires MongoDB 4.2 or later. + * + * @param database can be {@literal null}. Defaulted to aggregation target database. + * @return new instance of {@link OutOperation}. + * @since 2.2 + */ + public OutOperation in(@Nullable String database) { + return new OutOperation(database, collectionName, uniqueKey, mode); + } + + /** + * Optionally specify the field that uniquely identifies a document in the target collection.
          + * For convenience the given {@literal key} can either be a single field name or the Json representation of a key + * {@link Document}. + * + *
          +	 *
          +	 * // {
          +	 * //    "field-1" : 1
          +	 * // }
          +	 * .uniqueKey("field-1")
          +	 *
          +	 * // {
          +	 * //    "field-1" : 1,
          +	 * //    "field-2" : 1
          +	 * // }
          +	 * .uniqueKey("{ 'field-1' : 1, 'field-2' : 1}")
          +	 * 
          + * + * NOTE: Requires MongoDB 4.2 or later. + * + * @param key can be {@literal null}. Server uses {@literal _id} when {@literal null}. + * @return new instance of {@link OutOperation}. + * @since 2.2 + */ + public OutOperation uniqueKey(@Nullable String key) { + + Document uniqueKey = key == null ? null : BsonUtils.toDocumentOrElse(key, it -> new Document(it, 1)); + return new OutOperation(databaseName, collectionName, uniqueKey, mode); + } + + /** + * Optionally specify the fields that uniquely identify a document in the target collection.
          + * + *
          +	 *
          +	 * // {
          +	 * //    "field-1" : 1
          +	 * //    "field-2" : 1
          +	 * // }
          +	 * .uniqueKeyOf(Arrays.asList("field-1", "field-2"))
          +	 * 
          + * + * NOTE: Requires MongoDB 4.2 or later. + * + * @param fields must not be {@literal null}. + * @return new instance of {@link OutOperation}. + * @since 2.2 + */ + public OutOperation uniqueKeyOf(Iterable fields) { + + Assert.notNull(fields, "Fields must not be null"); + + Document uniqueKey = new Document(); + fields.forEach(it -> uniqueKey.append(it, 1)); + + return new OutOperation(databaseName, collectionName, uniqueKey, mode); + } + + /** + * Specify how to merge the aggregation output with the target collection.
          + * NOTE: Requires MongoDB 4.2 or later. + * + * @param mode must not be {@literal null}. + * @return new instance of {@link OutOperation}. + * @since 2.2 */ + public OutOperation mode(OutMode mode) { + + Assert.notNull(mode, "Mode must not be null"); + return new OutOperation(databaseName, collectionName, uniqueKey, mode); + } + + /** + * Replace the target collection.
          + * NOTE: Requires MongoDB 4.2 or later. + * + * @return new instance of {@link OutOperation}. + * @see OutMode#REPLACE_COLLECTION + * @since 2.2 + */ + public OutOperation replaceCollection() { + return mode(OutMode.REPLACE_COLLECTION); + } + + /** + * Replace/Upsert documents in the target collection.
          + * NOTE: Requires MongoDB 4.2 or later. + * + * @return new instance of {@link OutOperation}. + * @see OutMode#REPLACE + * @since 2.2 + */ + public OutOperation replaceDocuments() { + return mode(OutMode.REPLACE); + } + + /** + * Insert documents to the target collection.
          + * NOTE: Requires MongoDB 4.2 or later. + * + * @return new instance of {@link OutOperation}. + * @see OutMode#INSERT + * @since 2.2 + */ + public OutOperation insertDocuments() { + return mode(OutMode.INSERT); + } + @Override public Document toDocument(AggregationOperationContext context) { - return new Document("$out", collectionName); + + if (!requiresMongoDb42Format()) { + return new Document("$out", collectionName); + } + + Assert.state(mode != null, "Mode must not be null"); + + Document $out = new Document("to", collectionName) // + .append("mode", mode.getMongoMode()); + + if (StringUtils.hasText(databaseName)) { + $out.append("db", databaseName); + } + + if (uniqueKey != null) { + $out.append("uniqueKey", uniqueKey); + } + + return new Document(getOperator(), $out); + } + + @Override + public String getOperator() { + return "$out"; + } + + private boolean requiresMongoDb42Format() { + return StringUtils.hasText(databaseName) || mode != null || uniqueKey != null; + } + + /** + * The mode for merging the aggregation pipeline output. + * + * @author Christoph Strobl + * @since 2.2 + */ + public enum OutMode { + + /** + * Write documents to the target collection. Errors if a document same uniqueKey already exists. + */ + INSERT("insertDocuments"), + + /** + * Update on any document in the target collection with the same uniqueKey. + */ + REPLACE("replaceDocuments"), + + /** + * Replaces the to collection with the output from the aggregation pipeline. Cannot be in a different database. 
+ */ + REPLACE_COLLECTION("replaceCollection"); + + private final String mode; + + OutMode(String mode) { + this.mode = mode; + } + + public String getMongoMode() { + return mode; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/PrefixingDelegatingAggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/PrefixingDelegatingAggregationOperationContext.java index 11edfb58f6..9524171fed 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/PrefixingDelegatingAggregationOperationContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/PrefixingDelegatingAggregationOperationContext.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,13 +24,15 @@ import java.util.Set; import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.lang.Nullable; /** * {@link AggregationOperationContext} implementation prefixing non-command keys on root level with the given prefix. * Useful when mapping fields to domain specific types while having to prefix keys for query purpose. - *

          - * Fields to be excluded from prefixing my be added to a {@literal blacklist}. + *
          + * Fields to be excluded from prefixing my be added to a {@literal denylist}. * * @author Christoph Strobl * @author Mark Paluch @@ -40,47 +42,50 @@ public class PrefixingDelegatingAggregationOperationContext implements Aggregati private final AggregationOperationContext delegate; private final String prefix; - private final Set blacklist; + private final Set denylist; public PrefixingDelegatingAggregationOperationContext(AggregationOperationContext delegate, String prefix) { this(delegate, prefix, Collections.emptySet()); } public PrefixingDelegatingAggregationOperationContext(AggregationOperationContext delegate, String prefix, - Collection blacklist) { + Collection denylist) { this.delegate = delegate; this.prefix = prefix; - this.blacklist = new HashSet<>(blacklist); + this.denylist = new HashSet<>(denylist); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document) - */ @Override public Document getMappedObject(Document document) { return doPrefix(delegate.getMappedObject(document)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.Field) - */ + @Override + public Document getMappedObject(Document document, @Nullable Class type) { + return doPrefix(delegate.getMappedObject(document, type)); + } + @Override public FieldReference getReference(Field field) { return delegate.getReference(field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String) - */ @Override public FieldReference getReference(String name) { return delegate.getReference(name); } + @Override + public Fields getFields(Class type) { + return delegate.getFields(type); + } + + @Override + public CodecRegistry getCodecRegistry() { + return delegate.getCodecRegistry(); + } + 
@SuppressWarnings("unchecked") private Document doPrefix(Document source) { @@ -102,7 +107,7 @@ private Document doPrefix(Document source) { } private String prefixKey(String key) { - return (key.startsWith("$") || blacklist.contains(key)) ? key : (prefix + "." + key); + return (key.startsWith("$") || isDenied(key)) ? key : (prefix + "." + key); } private Object prefixCollection(Collection sourceCollection) { @@ -110,8 +115,8 @@ private Object prefixCollection(Collection sourceCollection) { List prefixed = new ArrayList<>(sourceCollection.size()); for (Object o : sourceCollection) { - if (o instanceof Document) { - prefixed.add(doPrefix((Document) o)); + if (o instanceof Document document) { + prefixed.add(doPrefix(document)); } else { prefixed.add(o); } @@ -119,4 +124,23 @@ private Object prefixCollection(Collection sourceCollection) { return prefixed; } + + private boolean isDenied(String key) { + + if (denylist.contains(key)) { + return true; + } + + if (!key.contains(".")) { + return false; + } + + for (String denied : denylist) { + if (key.startsWith(denied + ".")) { + return true; + } + } + + return false; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperation.java index baf44ceca6..35db2214f5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,6 +20,7 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.stream.Collectors; import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond; @@ -52,8 +53,8 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation { private static final List NONE = Collections.emptyList(); - private static final String EXCLUSION_ERROR = "Exclusion of field %s not allowed. Projections by the mongodb " - + "aggregation framework only support the exclusion of the %s field!"; + private static final String EXCLUSION_ERROR = "Exclusion of field %s not allowed; Projections by the mongodb" + + " aggregation framework only support the exclusion of the %s field"; private final List projections; @@ -73,6 +74,16 @@ public ProjectionOperation(Fields fields) { this(NONE, ProjectionOperationBuilder.FieldProjection.from(fields)); } + /** + * Creates a new {@link ProjectionOperation} including all top level fields of the given {@link Class type}. + * + * @param type must not be {@literal null}. + * @since 2.2 + */ + public ProjectionOperation(Class type) { + this(NONE, Collections.singletonList(new TypeProjection(type))); + } + /** * Copy constructor to allow building up {@link ProjectionOperation} instances from already existing * {@link Projection}s. 
@@ -82,8 +93,8 @@ public ProjectionOperation(Fields fields) { */ private ProjectionOperation(List current, List projections) { - Assert.notNull(current, "Current projections must not be null!"); - Assert.notNull(projections, "Projections must not be null!"); + Assert.notNull(current, "Current projections must not be null"); + Assert.notNull(projections, "Projections must not be null"); this.projections = new ArrayList(current.size() + projections.size()); this.projections.addAll(current); @@ -140,11 +151,6 @@ public ProjectionOperationBuilder and(AggregationExpression expression) { */ public ProjectionOperation andExclude(String... fieldNames) { - for (String fieldName : fieldNames) { - Assert.isTrue(Fields.UNDERSCORE_ID.equals(fieldName), - String.format(EXCLUSION_ERROR, fieldName, Fields.UNDERSCORE_ID)); - } - List excludeProjections = FieldProjection.from(Fields.fields(fieldNames), false); return new ProjectionOperation(this.projections, excludeProjections); } @@ -171,10 +177,48 @@ public ProjectionOperation andInclude(Fields fields) { return new ProjectionOperation(this.projections, FieldProjection.from(fields, true)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() + /** + * Includes the current {@link ProjectionOperation} as an array with given name.
          + * If you want to specify array values directly use {@link #andArrayOf(Object...)}. + * + * @param name the target property name. + * @return new instance of {@link ProjectionOperation}. + * @since 2.2 */ + public ProjectionOperation asArray(String name) { + + return new ProjectionOperation(Collections.emptyList(), + Collections.singletonList(new ArrayProjection(Fields.field(name), (List) this.projections))); + } + + /** + * Includes the given values ({@link Field field references}, {@link AggregationExpression expression}, plain values) + * as an array.
          + * The target property name needs to be set via {@link ArrayProjectionOperationBuilder#as(String)}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link ArrayProjectionOperationBuilder}. + * @throws IllegalArgumentException if the required argument it {@literal null}. + * @since 2.2 + */ + public ArrayProjectionOperationBuilder andArrayOf(Object... values) { + + ArrayProjectionOperationBuilder builder = new ArrayProjectionOperationBuilder(this); + + for (Object value : values) { + + if (value instanceof Field field) { + builder.and(field); + } else if (value instanceof AggregationExpression aggregationExpression) { + builder.and(aggregationExpression); + } else { + builder.and(value); + } + } + + return builder; + } + @Override public ExposedFields getFields() { @@ -188,10 +232,14 @@ public ExposedFields getFields() { return fields != null ? fields : ExposedFields.empty(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ + @Override + public boolean inheritsFields() { + + return projections.stream().filter(FieldProjection.class::isInstance) // + .map(FieldProjection.class::cast) // + .anyMatch(FieldProjection::isExcluded); + } + @Override public Document toDocument(AggregationOperationContext context) { @@ -201,7 +249,12 @@ public Document toDocument(AggregationOperationContext context) { fieldObject.putAll(projection.toDocument(context)); } - return new Document("$project", fieldObject); + return new Document(getOperator(), fieldObject); + } + + @Override + public String getOperator() { + return "$project"; } /** @@ -222,17 +275,13 @@ private static abstract class AbstractProjectionOperationBuilder implements Aggr */ public AbstractProjectionOperationBuilder(Object value, ProjectionOperation operation) { - Assert.notNull(value, "value must not be null or empty!"); - 
Assert.notNull(operation, "ProjectionOperation must not be null!"); + Assert.notNull(value, "value must not be null or empty"); + Assert.notNull(operation, "ProjectionOperation must not be null"); this.value = value; this.operation = operation; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return this.operation.toDocument(context); @@ -281,7 +330,7 @@ public static class ExpressionProjectionOperationBuilder extends ProjectionOpera * * @param expression must not be {@literal null}. * @param operation must not be {@literal null}. - * @param parameters + * @param parameters parameters must not be {@literal null}. */ public ExpressionProjectionOperationBuilder(String expression, ProjectionOperation operation, Object[] parameters) { @@ -290,9 +339,6 @@ public ExpressionProjectionOperationBuilder(String expression, ProjectionOperati this.params = parameters.clone(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder#project(java.lang.String, java.lang.Object[]) - */ @Override public ProjectionOperationBuilder project(String operation, final Object... values) { @@ -301,7 +347,7 @@ public ProjectionOperationBuilder project(String operation, final Object... 
valu @Override protected List getOperationArguments(AggregationOperationContext context) { - List result = new ArrayList(values.length + 1); + List result = new ArrayList<>(values.length + 1); result.add(ExpressionProjection.toMongoExpression(context, ExpressionProjectionOperationBuilder.this.expression, ExpressionProjectionOperationBuilder.this.params)); result.addAll(Arrays.asList(values)); @@ -313,10 +359,6 @@ protected List getOperationArguments(AggregationOperationContext context return new ProjectionOperationBuilder(value, this.operation.and(operationProjection), operationProjection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.AbstractProjectionOperationBuilder#as(java.lang.String) - */ @Override public ProjectionOperation as(String alias) { @@ -348,17 +390,13 @@ public ExpressionProjection(Field field, String expression, Object[] parameters) super(field); - Assert.hasText(expression, "Expression must not be null!"); - Assert.notNull(parameters, "Parameters must not be null!"); + Assert.hasText(expression, "Expression must not be null"); + Assert.notNull(parameters, "Parameters must not be null"); this.expression = expression; this.params = parameters.clone(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return new Document(getExposedField().getName(), toMongoExpression(context, expression, params)); @@ -380,8 +418,8 @@ protected static Object toMongoExpression(AggregationOperationContext context, S */ public static class ProjectionOperationBuilder extends AbstractProjectionOperationBuilder { - private static final String NUMBER_NOT_NULL = "Number must not be null!"; - private static final String FIELD_REFERENCE_NOT_NULL = "Field reference must not be null!"; + 
private static final String NUMBER_NOT_NULL = "Number must not be null"; + private static final String FIELD_REFERENCE_NOT_NULL = "Field reference must not be null"; private final @Nullable String name; private final @Nullable OperationProjection previousProjection; @@ -461,25 +499,17 @@ public ProjectionOperation as(String alias) { return this.operation.and(new FieldProjection(Fields.field(alias, getRequiredName()), null)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.AbstractProjectionOperationBuilder#transform(org.springframework.data.mongodb.core.aggregation.ConditionalOperator) - */ @Override public ProjectionOperation applyCondition(Cond cond) { - Assert.notNull(cond, "ConditionalOperator must not be null!"); + Assert.notNull(cond, "ConditionalOperator must not be null"); return this.operation.and(new ExpressionProjection(Fields.field(getRequiredName()), cond)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.AbstractProjectionOperationBuilder#transform(org.springframework.data.mongodb.core.aggregation.IfNullOperator) - */ @Override public ProjectionOperation applyCondition(IfNull ifNull) { - Assert.notNull(ifNull, "IfNullOperator must not be null!"); + Assert.notNull(ifNull, "IfNullOperator must not be null"); return this.operation.and(new ExpressionProjection(Fields.field(getRequiredName()), ifNull)); } @@ -503,7 +533,7 @@ public ProjectionOperationBuilder plus(Number number) { */ public ProjectionOperationBuilder plus(String fieldReference) { - Assert.notNull(fieldReference, "Field reference must not be null!"); + Assert.notNull(fieldReference, "Field reference must not be null"); return project("add", Fields.field(fieldReference)); } @@ -515,7 +545,7 @@ public ProjectionOperationBuilder plus(String fieldReference) { */ public ProjectionOperationBuilder minus(Number number) { - Assert.notNull(number, "Number must not be null!"); + 
Assert.notNull(number, "Number must not be null"); return project("subtract", number); } @@ -542,7 +572,7 @@ public ProjectionOperationBuilder minus(String fieldReference) { */ public ProjectionOperationBuilder minus(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return project("subtract", expression); } @@ -581,7 +611,7 @@ public ProjectionOperationBuilder multiply(String fieldReference) { */ public ProjectionOperationBuilder multiply(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return project("multiply", expression); } @@ -594,7 +624,7 @@ public ProjectionOperationBuilder multiply(AggregationExpression expression) { public ProjectionOperationBuilder divide(Number number) { Assert.notNull(number, FIELD_REFERENCE_NOT_NULL); - Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!"); + Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero"); return project("divide", number); } @@ -621,7 +651,7 @@ public ProjectionOperationBuilder divide(String fieldReference) { */ public ProjectionOperationBuilder divide(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return project("divide", expression); } @@ -635,7 +665,7 @@ public ProjectionOperationBuilder divide(AggregationExpression expression) { public ProjectionOperationBuilder mod(Number number) { Assert.notNull(number, NUMBER_NOT_NULL); - Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero!"); + Assert.isTrue(Math.abs(number.intValue()) != 0, "Number must not be zero"); return project("mod", number); } @@ -662,7 +692,7 @@ public ProjectionOperationBuilder mod(String fieldReference) { */ public ProjectionOperationBuilder 
mod(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return project("mod", expression); } @@ -805,7 +835,7 @@ public ProjectionOperationBuilder filter(String as, AggregationExpression condit */ public ProjectionOperationBuilder equalsArrays(String... arrays) { - Assert.notEmpty(arrays, "Arrays must not be null or empty!"); + Assert.notEmpty(arrays, "Arrays must not be null or empty"); return project("setEquals", Fields.fields(arrays)); } @@ -819,7 +849,7 @@ public ProjectionOperationBuilder equalsArrays(String... arrays) { */ public ProjectionOperationBuilder intersectsArrays(String... arrays) { - Assert.notEmpty(arrays, "Arrays must not be null or empty!"); + Assert.notEmpty(arrays, "Arrays must not be null or empty"); return project("setIntersection", Fields.fields(arrays)); } @@ -833,7 +863,7 @@ public ProjectionOperationBuilder intersectsArrays(String... arrays) { */ public ProjectionOperationBuilder unionArrays(String... arrays) { - Assert.notEmpty(arrays, "Arrays must not be null or empty!"); + Assert.notEmpty(arrays, "Arrays must not be null or empty"); return project("setUnion", Fields.fields(arrays)); } @@ -847,7 +877,7 @@ public ProjectionOperationBuilder unionArrays(String... 
arrays) { */ public ProjectionOperationBuilder differenceToArray(String array) { - Assert.hasText(array, "Array must not be null or empty!"); + Assert.hasText(array, "Array must not be null or empty"); return project("setDifference", Fields.fields(array)); } @@ -861,7 +891,7 @@ public ProjectionOperationBuilder differenceToArray(String array) { */ public ProjectionOperationBuilder subsetOfArray(String array) { - Assert.hasText(array, "Array must not be null or empty!"); + Assert.hasText(array, "Array must not be null or empty"); return project("setIsSubset", Fields.fields(array)); } @@ -1197,6 +1227,18 @@ public ProjectionOperationBuilder dateAsFormattedString(String format) { return this.operation.and(DateOperators.DateToString.dateOf(getRequiredName()).toString(format)); } + /** + * Generates a {@code $dateToString} expression that takes the date representation of the previously mentioned field + * using the server default format.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return + * @since 2.1 + */ + public ProjectionOperationBuilder dateAsFormattedString() { + return this.operation.and(DateOperators.DateToString.dateOf(getRequiredName()).defaultFormat()); + } + /** * Generates a {@code $let} expression that binds variables for use in the specified expression, and returns the * result of the expression. @@ -1228,15 +1270,11 @@ public ProjectionOperationBuilder let(Collection variables, private String getRequiredName() { - Assert.state(name != null, "Projection field name must not be null!"); + Assert.state(name != null, "Projection field name must not be null"); return name; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return this.operation.toDocument(context); @@ -1274,10 +1312,6 @@ public PreviousOperationProjection(String name) { this.name = name; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return new Document(name, Fields.UNDERSCORE_ID_REF); @@ -1334,7 +1368,7 @@ public static List from(Fields fields) { */ public static List from(Fields fields, @Nullable Object value) { - Assert.notNull(fields, "Fields must not be null!"); + Assert.notNull(fields, "Fields must not be null"); List projections = new ArrayList(); for (Field field : fields) { @@ -1344,10 +1378,13 @@ public static List from(Fields fields, @Nullable Object value) return projections; } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * @return {@literal true} if this field is excluded. */ + public boolean isExcluded() { + return Boolean.FALSE.equals(value); + } + @Override public Document toDocument(AggregationOperationContext context) { return new Document(field.getName(), renderFieldValue(context)); @@ -1358,10 +1395,18 @@ private Object renderFieldValue(AggregationOperationContext context) { // implicit reference or explicit include? if (value == null || Boolean.TRUE.equals(value)) { - if (Aggregation.SystemVariable.isReferingToSystemVariable(field.getTarget())) { + if (SystemVariable.isReferingToSystemVariable(field.getTarget())) { return field.getTarget(); } + if (field.getTarget().equals(Fields.UNDERSCORE_ID)) { + try { + return context.getReference(field).getReferenceValue(); + } catch (java.lang.IllegalArgumentException e) { + return Fields.UNDERSCORE_ID_REF; + } + } + // check whether referenced field exists in the context return context.getReference(field).getReferenceValue(); @@ -1392,18 +1437,14 @@ static class OperationProjection extends Projection { super(field); - Assert.hasText(operation, "Operation must not be null or empty!"); - Assert.notNull(values, "Values must not be null!"); + Assert.hasText(operation, "Operation must not be null or empty"); + Assert.notNull(values, "Values must not be null"); this.field = field; this.operation = operation; this.values = Arrays.asList(values); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -1414,19 +1455,19 @@ public Document toDocument(AggregationOperationContext context) { protected List 
getOperationArguments(AggregationOperationContext context) { - List result = new ArrayList(values.size()); + List result = new ArrayList<>(values.size()); result.add(context.getReference(getField()).toString()); for (Object element : values) { - if (element instanceof Field) { - result.add(context.getReference((Field) element).toString()); - } else if (element instanceof Fields) { - for (Field field : (Fields) element) { + if (element instanceof Field field) { + result.add(context.getReference(field).toString()); + } else if (element instanceof Fields fields) { + for (Field field : fields) { result.add(context.getReference(field).toString()); } - } else if (element instanceof AggregationExpression) { - result.add(((AggregationExpression) element).toDocument(context)); + } else if (element instanceof AggregationExpression aggregationExpression) { + result.add(aggregationExpression.toDocument(context)); } else { result.add(element); } @@ -1444,10 +1485,6 @@ protected Field getField() { return field; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#getExposedField() - */ @Override public ExposedField getExposedField() { @@ -1469,9 +1506,6 @@ OperationProjection withAlias(String alias) { final Field aliasedField = Fields.field(alias, this.field.getName()); return new OperationProjection(aliasedField, operation, values.toArray()) { - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.OperationProjection#getField() - */ @Override protected Field getField() { return aliasedField; @@ -1501,10 +1535,6 @@ static class NestedFieldProjection extends Projection { this.fields = fields; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document 
toDocument(AggregationOperationContext context) { @@ -1625,7 +1655,7 @@ private static abstract class Projection { */ public Projection(Field field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); this.field = new ExposedField(field, true); } @@ -1674,4 +1704,167 @@ public Document toDocument(AggregationOperationContext context) { return new Document(field.getName(), expression.toDocument(context)); } } + + /** + * A {@link Projection} including all top level fields of the given target type mapped to include potentially + * deviating field names. + * + * @since 2.2 + * @author Christoph Strobl + */ + static class TypeProjection extends Projection { + + private final Class type; + + TypeProjection(Class type) { + + super(Fields.field(type.getSimpleName())); + this.type = type; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document projections = new Document(); + + Fields fields = context.getFields(type); + + fields.forEach(it -> projections.append(it.getTarget(), 1)); + return projections; + } + } + + /** + * A {@link Projection} including all top level fields of the given target type mapped to include potentially + * deviating field names. + * + * @since 2.2 + * @author Christoph Strobl + */ + static class FilterProjection extends Projection { + + public static String FILTER_ELEMENT = "filterElement"; + private final Object value; + + FilterProjection(String fieldReference, Object value) { + super(Fields.field(FILTER_ELEMENT + "." + fieldReference)); + this.value = value; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document(getExposedField().getName(), value); + } + } + + /** + * Builder for {@code array} projections. 
+ * + * @author Christoph Strobl + * @since 2.2 + */ + public static class ArrayProjectionOperationBuilder { + + private ProjectionOperation target; + private final List projections; + + public ArrayProjectionOperationBuilder(ProjectionOperation target) { + + this.target = target; + this.projections = new ArrayList<>(); + } + + /** + * Creates a new {@link ArrayProjectionOperationBuilder} with the current {@link Projection}s and the given one. + * + * @param expression + * @return + */ + public ArrayProjectionOperationBuilder and(AggregationExpression expression) { + + Assert.notNull(expression, "AggregationExpression must not be null"); + + this.projections.add(expression); + return this; + } + + /** + * Creates a new {@link ArrayProjectionOperationBuilder} with the current {@link Projection}s and the given one. + * + * @param field + * @return + */ + public ArrayProjectionOperationBuilder and(Field field) { + + Assert.notNull(field, "Field must not be null"); + + this.projections.add(field); + return this; + } + + /** + * Creates a new {@link ArrayProjectionOperationBuilder} with the current {@link Projection}s and the given one. + * + * @param value + * @return + */ + public ArrayProjectionOperationBuilder and(Object value) { + + this.projections.add(value); + return this; + } + + /** + * Create the {@link ProjectionOperation} for the array property with given {@literal name}. + * + * @param name The target property name. Must not be {@literal null}. + * @return new instance of {@link ArrayProjectionOperationBuilder}. 
+ */ + public ProjectionOperation as(String name) { + + return new ProjectionOperation(target.projections, + Collections.singletonList(new ArrayProjection(Fields.field(name), this.projections))); + } + } + + /** + * @author Christoph Strobl + * @since 2.2 + */ + static class ArrayProjection extends Projection { + + private final Field targetField; + private final List projections; + + public ArrayProjection(Field targetField, List projections) { + + super(targetField); + this.targetField = targetField; + this.projections = projections; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + return new Document(targetField.getName(), + projections.stream().map(it -> toArrayEntry(it, context)).collect(Collectors.toList())); + } + + private Object toArrayEntry(Object projection, AggregationOperationContext ctx) { + + if (projection instanceof Field field) { + return ctx.getReference(field).toString(); + } + + if (projection instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(ctx); + } + + if (projection instanceof FieldProjection fieldProjection) { + return ctx.getReference(fieldProjection.getExposedField().getTarget()).toString(); + } + + return projection; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/RedactOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/RedactOperation.java new file mode 100644 index 0000000000..a370016356 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/RedactOperation.java @@ -0,0 +1,243 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.util.Assert; + +/** + * {@link RedactOperation} allows to restrict the content of a {@link Document} based on information stored within + * itself. + * + *
          + * RedactOperation.builder() //
          + * 		.when(Criteria.where("level").is(5)) //
          + * 		.thenPrune() //
          + * 		.otherwiseDescend() //
          + * 		.build();
          + * 
          + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/redact/ + * @since 3.0 + */ +public class RedactOperation implements AggregationOperation { + + /** + * Return fields at the current document level. Exclude embedded ones. + */ + public static final String DESCEND = "$$DESCEND"; + + /** + * Return/Keep all fields at the current document/embedded level. + */ + public static final String KEEP = "$$KEEP"; + + /** + * Exclude all fields at this current document/embedded level. + */ + public static final String PRUNE = "$$PRUNE"; + + private final AggregationExpression condition; + + /** + * Create new {@link RedactOperation}. + * + * @param condition Any {@link AggregationExpression} that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or + * {@literal $$KEEP}. Must not be {@literal null}. + */ + public RedactOperation(AggregationExpression condition) { + + Assert.notNull(condition, "Condition must not be null"); + this.condition = condition; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document(getOperator(), condition.toDocument(context)); + } + + @Override + public String getOperator() { + return "$redact"; + } + + /** + * Obtain a new instance of {@link RedactOperationBuilder} to specify condition and outcome of the {@literal $redact} + * operation. + * + * @return new instance of {@link RedactOperationBuilder}. + */ + public static RedactOperationBuilder builder() { + return new RedactOperationBuilder(); + } + + /** + * Builder to create new instance of {@link RedactOperation}. + * + * @author Christoph Strobl + */ + public static class RedactOperationBuilder { + + private Object when; + private Object then; + private Object otherwise; + + private RedactOperationBuilder() { + + } + + /** + * Specify the evaluation condition. + * + * @param criteria must not be {@literal null}. + * @return this. 
+ */ + public RedactOperationBuilder when(CriteriaDefinition criteria) { + + this.when = criteria; + return this; + } + + /** + * Specify the evaluation condition. + * + * @param condition must not be {@literal null}. + * @return this. + */ + public RedactOperationBuilder when(AggregationExpression condition) { + + this.when = condition; + return this; + } + + /** + * Specify the evaluation condition. + * + * @param condition must not be {@literal null}. + * @return this. + */ + public RedactOperationBuilder when(Document condition) { + + this.when = condition; + return this; + } + + /** + * Return fields at the current document level and exclude embedded ones if the condition is met. + * + * @return this. + */ + public RedactOperationBuilder thenDescend() { + return then(DESCEND); + } + + /** + * Return/Keep all fields at the current document/embedded level if the condition is met. + * + * @return this. + */ + public RedactOperationBuilder thenKeep() { + return then(KEEP); + } + + /** + * Exclude all fields at this current document/embedded level if the condition is met. + * + * @return this. + */ + public RedactOperationBuilder thenPrune() { + return then(PRUNE); + } + + /** + * Define the outcome (anything that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or {@literal $$KEEP}) + * when the condition is met. + * + * @param then must not be {@literal null}. + * @return this. + */ + public RedactOperationBuilder then(Object then) { + + this.then = then; + return this; + } + + /** + * Return fields at the current document level and exclude embedded ones if the condition is not met. + * + * @return this. + */ + public RedactOperationBuilder otherwiseDescend() { + return otherwise(DESCEND); + } + + /** + * Return/Keep all fields at the current document/embedded level if the condition is not met. + * + * @return this. 
+ */ + public RedactOperationBuilder otherwiseKeep() { + return otherwise(KEEP); + } + + /** + * Exclude all fields at this current document/embedded level if the condition is not met. + * + * @return this. + */ + public RedactOperationBuilder otherwisePrune() { + return otherwise(PRUNE); + } + + /** + * Define the outcome (anything that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or {@literal $$KEEP}) + * when the condition is not met. + * + * @param otherwise must not be {@literal null}. + * @return this. + */ + public RedactOperationBuilder otherwise(Object otherwise) { + this.otherwise = otherwise; + return this; + } + + /** + * @return new instance of {@link RedactOperation}. + */ + public RedactOperation build() { + return new RedactOperation(when().then(then).otherwise(otherwise)); + } + + private ThenBuilder when() { + + if (when instanceof CriteriaDefinition criteriaDefinition) { + return ConditionalOperators.Cond.when(criteriaDefinition); + } + if (when instanceof AggregationExpression aggregationExpression) { + return ConditionalOperators.Cond.when(aggregationExpression); + } + if (when instanceof Document document) { + return ConditionalOperators.Cond.when(document); + } + + throw new IllegalArgumentException(String.format( + "Invalid Condition; Expected CriteriaDefinition, AggregationExpression or Document but was %s", when)); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/RelaxedTypeBasedAggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/RelaxedTypeBasedAggregationOperationContext.java new file mode 100644 index 0000000000..a27b9fcb45 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/RelaxedTypeBasedAggregationOperationContext.java @@ -0,0 +1,47 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.springframework.data.mapping.context.InvalidPersistentPropertyPath; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; + +/** + * A {@link TypeBasedAggregationOperationContext} with less restrictive field reference handling, suppressing + * {@link InvalidPersistentPropertyPath} exceptions when resolving mapped field names. + * + * @author Christoph Strobl + * @since 3.0 + * @deprecated since 4.3.1 + */ +@Deprecated(since = "4.3.1") +public class RelaxedTypeBasedAggregationOperationContext extends TypeBasedAggregationOperationContext { + + /** + * Creates a new {@link TypeBasedAggregationOperationContext} for the given type, {@link MappingContext} and + * {@link QueryMapper}. + * + * @param type must not be {@literal null}. + * @param mappingContext must not be {@literal null}. + * @param mapper must not be {@literal null}. 
+ */ + public RelaxedTypeBasedAggregationOperationContext(Class type, + MappingContext, MongoPersistentProperty> mappingContext, QueryMapper mapper) { + super(type, mappingContext, mapper, FieldLookupPolicy.relaxed()); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java index 015cea72d2..130182a001 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -33,7 +33,8 @@ * @author Mark Paluch * @author Christoph Strobl * @since 1.10 - * @see MongoDB Aggregation Framework: $replaceRoot + * @see MongoDB Aggregation + * Framework: $replaceRoot */ public class ReplaceRootOperation implements FieldsExposingAggregationOperation { @@ -64,7 +65,7 @@ public ReplaceRootOperation(AggregationExpression aggregationExpression) { */ public ReplaceRootOperation(Replacement replacement) { - Assert.notNull(replacement, "Replacement must not be null!"); + Assert.notNull(replacement, "Replacement must not be null"); this.replacement = replacement; } @@ -77,22 +78,31 @@ public static ReplaceRootOperationBuilder builder() { return new ReplaceRootOperationBuilder(); } - /* 
(non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { - return new Document("$replaceRoot", new Document("newRoot", replacement.toDocumentExpression(context))); + return new Document("$replaceRoot", new Document("newRoot", getReplacement().toDocumentExpression(context))); + } + + @Override + public String getOperator() { + return "$replaceRoot"; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() - */ @Override public ExposedFields getFields() { return ExposedFields.from(); } + /** + * Obtain the {@link Replacement}. + * + * @return never {@literal null}. + * @since 3.0 + */ + protected Replacement getReplacement() { + return replacement; + } + /** * Builder for {@link ReplaceRootOperation}. * @@ -141,7 +151,7 @@ public ReplaceRootDocumentOperation withDocument() { */ public ReplaceRootOperation withDocument(Document document) { - Assert.notNull(document, "Document must not be null!"); + Assert.notNull(document, "Document must not be null"); return new ReplaceRootDocumentOperation().andValuesOf(document); } @@ -155,7 +165,7 @@ public ReplaceRootOperation withDocument(Document document) { * * @author Mark Paluch */ - static class ReplaceRootDocumentOperation extends ReplaceRootOperation { + public static class ReplaceRootDocumentOperation extends ReplaceRootOperation { private final static ReplacementDocument EMPTY = new ReplacementDocument(); private final ReplacementDocument current; @@ -229,8 +239,8 @@ public static class ReplaceRootDocumentOperationBuilder { protected ReplaceRootDocumentOperationBuilder(ReplaceRootDocumentOperation currentOperation, Object value) { - Assert.notNull(currentOperation, "Current ReplaceRootDocumentOperation must not be null!"); - Assert.notNull(value, 
"Value must not be null!"); + Assert.notNull(currentOperation, "Current ReplaceRootDocumentOperation must not be null"); + Assert.notNull(value, "Value must not be null"); this.currentOperation = currentOperation; this.value = value; @@ -238,9 +248,9 @@ protected ReplaceRootDocumentOperationBuilder(ReplaceRootDocumentOperation curre public ReplaceRootDocumentOperation as(String fieldName) { - if (value instanceof AggregationExpression) { + if (value instanceof AggregationExpression aggregationExpression) { return new ReplaceRootDocumentOperation(currentOperation, - ReplacementDocument.forExpression(fieldName, (AggregationExpression) value)); + ReplacementDocument.forExpression(fieldName, aggregationExpression)); } return new ReplaceRootDocumentOperation(currentOperation, ReplacementDocument.forSingleValue(fieldName, value)); @@ -276,13 +286,10 @@ private static class AggregationExpressionReplacement implements Replacement { protected AggregationExpressionReplacement(AggregationExpression aggregationExpression) { - Assert.notNull(aggregationExpression, "AggregationExpression must not be null!"); + Assert.notNull(aggregationExpression, "AggregationExpression must not be null"); this.aggregationExpression = aggregationExpression; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.Replacement#toObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocumentExpression(AggregationOperationContext context) { return aggregationExpression.toDocument(context); @@ -303,13 +310,10 @@ private static class FieldReplacement implements Replacement { */ protected FieldReplacement(Field field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); this.field = field; } - /* (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.Replacement#toObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Object toDocumentExpression(AggregationOperationContext context) { return context.getReference(field).toString(); @@ -335,11 +339,11 @@ protected ReplacementDocument() { /** * Creates a {@link ReplacementDocument} given {@link ReplacementContributor}. * - * @param contributor + * @param contributor must not be {@literal null}. */ protected ReplacementDocument(ReplacementContributor contributor) { - Assert.notNull(contributor, "ReplacementContributor must not be null!"); + Assert.notNull(contributor, "ReplacementContributor must not be null"); replacements = Collections.singleton(contributor); } @@ -351,7 +355,7 @@ private ReplacementDocument(Collection replacements) { * Creates a {@link ReplacementDocument} given a {@literal value}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link ReplacementDocument}. */ public static ReplacementDocument valueOf(Object value) { return new ReplacementDocument(new DocumentContributor(value)); @@ -361,7 +365,7 @@ public static ReplacementDocument valueOf(Object value) { * Creates a {@link ReplacementDocument} given a single {@literal field} and {@link AggregationExpression}. * * @param aggregationExpression must not be {@literal null}. - * @return + * @return new instance of {@link ReplacementDocument}. */ public static ReplacementDocument forExpression(String field, AggregationExpression aggregationExpression) { return new ReplacementDocument(new ExpressionFieldContributor(Fields.field(field), aggregationExpression)); @@ -371,15 +375,12 @@ public static ReplacementDocument forExpression(String field, AggregationExpress * Creates a {@link ReplacementDocument} given a single {@literal field} and {@literal value}. * * @param value must not be {@literal null}. 
- * @return + * @return new instance of {@link ReplacementDocument}. */ public static ReplacementDocument forSingleValue(String field, Object value) { return new ReplacementDocument(new ValueFieldContributor(Fields.field(field), value)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.Replacement#toObject(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocumentExpression(AggregationOperationContext context) { @@ -427,8 +428,9 @@ private interface ReplacementContributor extends AggregationExpression { * {@link AggregationOperationContext}. * * @param context will never be {@literal null}. - * @return + * @return never {@literal null}. */ + @Override Document toDocument(AggregationOperationContext context); } @@ -450,13 +452,10 @@ private static class DocumentContributor implements ReplacementContributor { */ public DocumentContributor(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); this.value = value; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplacementContributor#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -483,7 +482,7 @@ private abstract static class FieldContributorSupport implements ReplacementCont */ public FieldContributorSupport(Field field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); this.field = new ExposedField(field, true); } @@ -515,14 +514,11 @@ public ValueFieldContributor(Field field, Object value) { super(field); - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); this.value = value; } - /* (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplacementContributor#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -551,14 +547,11 @@ public ExpressionFieldContributor(Field field, AggregationExpression aggregation super(field); - Assert.notNull(aggregationExpression, "AggregationExpression must not be null!"); + Assert.notNull(aggregationExpression, "AggregationExpression must not be null"); this.aggregationExpression = aggregationExpression; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplacementContributor#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return new Document(getField().getTarget(), aggregationExpression.toDocument(context)); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperation.java new file mode 100644 index 0000000000..795644ee61 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperation.java @@ -0,0 +1,91 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collection; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.util.Assert; + +/** + * Encapsulates the aggregation framework {@code $replaceRoot}-operation.
          + * The operation replaces all existing fields including the {@code id} field with @{code $replaceWith}. This way it is + * possible to promote an embedded document to the top-level or specify a new document. + * + * @author Christoph Strobl + * @since 3.0 + * @see MongoDB Aggregation + * Framework: $replaceWith + */ +public class ReplaceWithOperation extends ReplaceRootOperation { + + /** + * Creates new instance of {@link ReplaceWithOperation}. + * + * @param replacement must not be {@literal null}. + */ + public ReplaceWithOperation(Replacement replacement) { + super(replacement); + } + + /** + * Creates new instance of {@link ReplaceWithOperation}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ReplaceWithOperation}. + */ + public static ReplaceWithOperation replaceWithValue(Object value) { + return new ReplaceWithOperation((ctx) -> value); + } + + /** + * Creates new instance of {@link ReplaceWithOperation} treating a given {@link String} {@literal value} as a + * {@link Field field reference}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ReplaceWithOperation}. + */ + public static ReplaceWithOperation replaceWithValueOf(Object value) { + + Assert.notNull(value, "Value must not be null"); + return new ReplaceWithOperation(ctx -> { + + Object target = value instanceof String stringValue ? 
Fields.field(stringValue) : value; + return computeValue(target, ctx); + }); + } + + private static Object computeValue(Object value, AggregationOperationContext context) { + + if (value instanceof Field field) { + return context.getReference(field).toString(); + } + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } + if (value instanceof Collection collection) { + return collection.stream().map(it -> computeValue(it, context)).collect(Collectors.toList()); + } + + return value; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return context.getMappedObject(new Document("$replaceWith", getReplacement().toDocumentExpression(context))); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SampleOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SampleOperation.java index 5db334c90c..a366850f8a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SampleOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SampleOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -38,16 +38,17 @@ public class SampleOperation implements AggregationOperation { */ public SampleOperation(long sampleSize) { - Assert.isTrue(sampleSize > 0, "Sample size must be greater than zero!"); + Assert.isTrue(sampleSize > 0, "Sample size must be greater than zero"); this.sampleSize = sampleSize; } - /* - (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { - return new Document("$sample", new Document("size", this.sampleSize)); + return new Document(getOperator(), new Document("size", this.sampleSize)); + } + + @Override + public String getOperator() { + return "$sample"; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ScriptOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ScriptOperators.java new file mode 100644 index 0000000000..9eab041e88 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ScriptOperators.java @@ -0,0 +1,587 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.springframework.data.mongodb.core.aggregation.ScriptOperators.Accumulator.AccumulatorBuilder; +import org.springframework.data.mongodb.core.aggregation.ScriptOperators.Accumulator.AccumulatorInitBuilder; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; + +/** + * Gateway to {@literal $function} and {@literal $accumulator} aggregation operations. + *
          + * Using {@link ScriptOperators} as part of the {@link Aggregation} requires MongoDB server to have + * server-side JavaScript execution + * enabled. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.1 + */ +public class ScriptOperators { + + /** + * Create a custom aggregation + * $function in JavaScript. + * + * @param body The function definition. Must not be {@literal null}. + * @return new instance of {@link Function}. + */ + public static Function function(String body) { + return Function.function(body); + } + + /** + * Create a custom $accumulator operator + * in Javascript. + * + * @return new instance of {@link AccumulatorInitBuilder}. + */ + public static AccumulatorInitBuilder accumulatorBuilder() { + return new AccumulatorBuilder(); + } + + /** + * {@link Function} defines a custom aggregation + * $function in JavaScript. + *
          + * + * { + * $function: { + * body: ..., + * args: ..., + * lang: "js" + * } + * } + * + *
          + * {@link Function} cannot be used as part of {@link org.springframework.data.mongodb.core.schema.MongoJsonSchema + * schema} validation query expression.
          + * NOTE: Server-Side JavaScript + * execution must be + * enabled + * + * @see MongoDB Documentation: + * $function + */ + public static class Function extends AbstractAggregationExpression { + + private Function(Map values) { + super(values); + } + + /** + * Create a new {@link Function} with the given function definition. + * + * @param body must not be {@literal null}. + * @return new instance of {@link Function}. + */ + public static Function function(String body) { + + Assert.notNull(body, "Function body must not be null"); + + Map function = new LinkedHashMap<>(2); + function.put(Fields.BODY.toString(), body); + function.put(Fields.ARGS.toString(), Collections.emptyList()); + function.put(Fields.LANG.toString(), "js"); + + return new Function(function); + } + + /** + * Set the arguments passed to the function body. + * + * @param args the arguments passed to the function body. Leave empty if the function does not take any arguments. + * @return new instance of {@link Function}. + */ + public Function args(Object... args) { + return args(Arrays.asList(args)); + } + + /** + * Set the arguments passed to the function body. + * + * @param args the arguments passed to the function body. Leave empty if the function does not take any arguments. + * @return new instance of {@link Function}. + */ + public Function args(List args) { + + Assert.notNull(args, "Args must not be null Use an empty list instead"); + + return new Function(appendAt(1, Fields.ARGS.toString(), args)); + } + + /** + * The language used in the body. + * + * @param lang must not be {@literal null} nor empty. + * @return new instance of {@link Function}. 
+ */ + public Function lang(String lang) { + + Assert.hasText(lang, "Lang must not be null nor empty; The default would be 'js'"); + + return new Function(appendAt(2, Fields.LANG.toString(), lang)); + } + + @Nullable + List getArgs() { + return get(Fields.ARGS.toString()); + } + + String getBody() { + return get(Fields.BODY.toString()); + } + + String getLang() { + return get(Fields.LANG.toString()); + } + + @Override + protected String getMongoMethod() { + return "$function"; + } + + enum Fields { + + BODY, ARGS, LANG; + + @Override + public String toString() { + return name().toLowerCase(); + } + } + } + + /** + * {@link Accumulator} defines a custom aggregation + * $accumulator operator, + * one that maintains its state (e.g. totals, maximums, minimums, and related data) as documents progress through the + * pipeline, in JavaScript. + *
          + * + * { + * $accumulator: { + * init: ..., + * initArgs: ..., + * accumulate: ..., + * accumulateArgs: ..., + * merge: ..., + * finalize: ..., + * lang: "js" + * } + * } + * 
          + * {@link Accumulator} can be used as part of {@link GroupOperation $group}, {@link BucketOperation $bucket} and + * {@link BucketAutoOperation $bucketAuto} pipeline stages.
          + * NOTE: Server-Side JavaScript + * execution must be + * enabled + * + * @see MongoDB Documentation: + * $accumulator + */ + public static class Accumulator extends AbstractAggregationExpression { + + private Accumulator(Map value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$accumulator"; + } + + enum Fields { + + ACCUMULATE("accumulate"), // + ACCUMULATE_ARGS("accumulateArgs"), // + FINALIZE("finalize"), // + INIT("init"), // + INIT_ARGS("initArgs"), // + LANG("lang"), // + MERGE("merge"); // + + private final String field; + + Fields(String field) { + this.field = field; + } + + @Override + public String toString() { + return field; + } + } + + public interface AccumulatorInitBuilder { + + /** + * Define the {@code init} {@link Function} for the {@link Accumulator accumulators} initial state. The function + * receives its arguments from the {@link Function#args(Object...) initArgs} array expression. + *
          + * + * function(initArg1, initArg2, ...) { + * ... + * return initialState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + default AccumulatorAccumulateBuilder init(Function function) { + return init(function.getBody()).initArgs(function.getArgs()); + } + + /** + * Define the {@code init} function for the {@link Accumulator accumulators} initial state. The function receives + * its arguments from the {@link AccumulatorInitArgsBuilder#initArgs(Object...)} array expression. + *
          + * + * function(initArg1, initArg2, ...) { + * ... + * return initialState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + AccumulatorInitArgsBuilder init(String function); + + /** + * The language used in the {@code $accumulator} code. + * + * @param lang must not be {@literal null}. Default is {@literal js}. + * @return this. + */ + AccumulatorInitBuilder lang(String lang); + } + + public interface AccumulatorInitArgsBuilder extends AccumulatorAccumulateBuilder { + + /** + * Define the optional {@code initArgs} for the {@link AccumulatorInitBuilder#init(String)} function. + * + * @param args must not be {@literal null}. + * @return this. + */ + default AccumulatorAccumulateBuilder initArgs(Object... args) { + return initArgs(Arrays.asList(args)); + } + + /** + * Define the optional {@code initArgs} for the {@link AccumulatorInitBuilder#init(String)} function. + * + * @param args must not be {@literal null}. + * @return this. + */ + AccumulatorAccumulateBuilder initArgs(List args); + } + + public interface AccumulatorAccumulateBuilder { + + /** + * Set the {@code accumulate} {@link Function} that updates the state for each document. The functions first + * argument is the current {@code state}, additional arguments can be defined via {@link Function#args(Object...) + * accumulateArgs}. + *
          + * + * function(state, accumArg1, accumArg2, ...) { + * ... + * return newState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + default AccumulatorMergeBuilder accumulate(Function function) { + return accumulate(function.getBody()).accumulateArgs(function.getArgs()); + } + + /** + * Set the {@code accumulate} function that updates the state for each document. The functions first argument is + * the current {@code state}, additional arguments can be defined via + * {@link AccumulatorAccumulateArgsBuilder#accumulateArgs(Object...)}. + *
          + * + * function(state, accumArg1, accumArg2, ...) { + * ... + * return newState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + AccumulatorAccumulateArgsBuilder accumulate(String function); + } + + public interface AccumulatorAccumulateArgsBuilder extends AccumulatorMergeBuilder { + + /** + * Define additional {@code accumulateArgs} for the {@link AccumulatorAccumulateBuilder#accumulate(String)} + * function. + * + * @param args must not be {@literal null}. + * @return this. + */ + default AccumulatorMergeBuilder accumulateArgs(Object... args) { + return accumulateArgs(Arrays.asList(args)); + } + + /** + * Define additional {@code accumulateArgs} for the {@link AccumulatorAccumulateBuilder#accumulate(String)} + * function. + * + * @param args must not be {@literal null}. + * @return this. + */ + AccumulatorMergeBuilder accumulateArgs(List args); + } + + public interface AccumulatorMergeBuilder { + + /** + * Set the {@code merge} function used to merge two internal states.
          + * This might be required because the operation is run on a sharded cluster or when the operator exceeds its + * memory limit. + *
          + * + * function(state1, state2) { + * ... + * return newState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + AccumulatorFinalizeBuilder merge(String function); + } + + public interface AccumulatorFinalizeBuilder { + + /** + * Set the {@code finalize} function used to update the result of the accumulation when all documents have been + * processed. + *
          + * + * function(state) { + * ... + * return finalState + * } + * + * + * @param function must not be {@literal null}. + * @return new instance of {@link Accumulator}. + */ + Accumulator finalize(String function); + + /** + * Build the {@link Accumulator} object without specifying a {@link #finalize(String) finalize function}. + * + * @return new instance of {@link Accumulator}. + */ + Accumulator build(); + } + + static class AccumulatorBuilder + implements AccumulatorInitBuilder, AccumulatorInitArgsBuilder, AccumulatorAccumulateBuilder, + AccumulatorAccumulateArgsBuilder, AccumulatorMergeBuilder, AccumulatorFinalizeBuilder { + + private List initArgs; + private String initFunction; + private List accumulateArgs; + private String accumulateFunction; + private String mergeFunction; + private String finalizeFunction; + private String lang = "js"; + + /** + * Define the {@code init} function for the {@link Accumulator accumulators} initial state. The function receives + * its arguments from the {@link #initArgs(Object...)} array expression. + *
          + * + * function(initArg1, initArg2, ...) { + * ... + * return initialState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + @Override + public AccumulatorBuilder init(String function) { + + this.initFunction = function; + return this; + } + + /** + * Define the optional {@code initArgs} for the {@link #init(String)} function. + * + * @param args must not be {@literal null}. + * @return this. + */ + @Override + public AccumulatorBuilder initArgs(List args) { + + Assert.notNull(args, "Args must not be null"); + + this.initArgs = new ArrayList<>(args); + return this; + } + + /** + * Set the {@code accumulate} function that updates the state for each document. The functions first argument is + * the current {@code state}, additional arguments can be defined via {@link #accumulateArgs(Object...)}. + *
          + * + * function(state, accumArg1, accumArg2, ...) { + * ... + * return newState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + @Override + public AccumulatorBuilder accumulate(String function) { + + Assert.notNull(function, "Accumulate function must not be null"); + + this.accumulateFunction = function; + return this; + } + + /** + * Define additional {@code accumulateArgs} for the {@link #accumulate(String)} function. + * + * @param args must not be {@literal null}. + * @return this. + */ + @Override + public AccumulatorBuilder accumulateArgs(List args) { + + Assert.notNull(args, "Args must not be null"); + + this.accumulateArgs = new ArrayList<>(args); + return this; + } + + /** + * Set the {@code merge} function used to merge two internal states.
          + * This might be required because the operation is run on a sharded cluster or when the operator exceeds its + * memory limit. + *
          + * + * function(state1, state2) { + * ... + * return newState + * } + * + * + * @param function must not be {@literal null}. + * @return this. + */ + @Override + public AccumulatorBuilder merge(String function) { + + Assert.notNull(function, "Merge function must not be null"); + + this.mergeFunction = function; + return this; + } + + /** + * The language used in the {@code $accumulator} code. + * + * @param lang must not be {@literal null}. Default is {@literal js}. + * @return this. + */ + public AccumulatorBuilder lang(String lang) { + + Assert.hasText(lang, "Lang must not be null nor empty; The default would be 'js'"); + + this.lang = lang; + return this; + } + + /** + * Set the {@code finalize} function used to update the result of the accumulation when all documents have been + * processed. + *
          + * + * function(state) { + * ... + * return finalState + * } + * + * + * @param function must not be {@literal null}. + * @return new instance of {@link Accumulator}. + */ + @Override + public Accumulator finalize(String function) { + + Assert.notNull(function, "Finalize function must not be null"); + + this.finalizeFunction = function; + + Map args = createArgumentMap(); + args.put(Fields.FINALIZE.toString(), finalizeFunction); + + return new Accumulator(args); + } + + @Override + public Accumulator build() { + return new Accumulator(createArgumentMap()); + } + + private Map createArgumentMap() { + + Map args = new LinkedHashMap<>(); + args.put(Fields.INIT.toString(), initFunction); + if (!CollectionUtils.isEmpty(initArgs)) { + args.put(Fields.INIT_ARGS.toString(), initArgs); + } + args.put(Fields.ACCUMULATE.toString(), accumulateFunction); + if (!CollectionUtils.isEmpty(accumulateArgs)) { + args.put(Fields.ACCUMULATE_ARGS.toString(), accumulateArgs); + } + args.put(Fields.MERGE.toString(), mergeFunction); + args.put(Fields.LANG.toString(), lang); + + return args; + } + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SelectionOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SelectionOperators.java new file mode 100644 index 0000000000..9da80c4668 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SelectionOperators.java @@ -0,0 +1,424 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.Collections; + +import org.springframework.data.domain.Sort; + +/** + * Gateway to {@literal selection operators} such as {@literal $bottom}. + * + * @author Christoph Strobl + * @since 4.0 + */ +public class SelectionOperators { + + /** + * {@link AbstractAggregationExpression} to return the bottom element according to the specified {@link #sortBy(Sort) + * order}. + */ + public static class Bottom extends AbstractAggregationExpression { + + private Bottom(Object value) { + super(value); + } + + /** + * In case a limit value ({@literal n}) is present {@literal $bottomN} is used instead of {@literal $bottom}. + * + * @return + */ + @Override + protected String getMongoMethod() { + return get("n") == null ? "$bottom" : "$bottomN"; + } + + /** + * @return new instance of {@link Bottom}. + */ + public static Bottom bottom() { + return new Bottom(Collections.emptyMap()); + } + + /** + * @param numberOfResults Limits the number of returned elements to the given value. + * @return new instance of {@link Bottom}. + */ + public static Bottom bottom(int numberOfResults) { + return bottom().limit(numberOfResults); + } + + /** + * Limits the number of returned elements to the given value. + * + * @param numberOfResults + * @return new instance of {@link Bottom}. 
+ */ + public Bottom limit(int numberOfResults) { + return limit((Object) numberOfResults); + } + + /** + * Limits the number of returned elements to the value defined by the given {@link AggregationExpression + * expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Bottom}. + */ + public Bottom limit(AggregationExpression expression) { + return limit((Object) expression); + } + + private Bottom limit(Object value) { + return new Bottom(append("n", value)); + } + + /** + * Define result ordering. + * + * @param sort must not be {@literal null}. + * @return new instance of {@link Bottom}. + */ + public Bottom sortBy(Sort sort) { + return new Bottom(append("sortBy", sort)); + } + + /** + * Define result ordering. + * + * @param out must not be {@literal null}. + * @return new instance of {@link Bottom}. + */ + public Bottom output(Fields out) { + return new Bottom(append("output", out)); + } + + /** + * Define fields included in the output for each element. + * + * @param fieldNames must not be {@literal null}. + * @return new instance of {@link Bottom}. + * @see #output(Fields) + */ + public Bottom output(String... fieldNames) { + return output(Fields.fields(fieldNames)); + } + + /** + * Define expressions building the value included in the output for each element. + * + * @param out must not be {@literal null}. + * @return new instance of {@link Bottom}. + * @see #output(Fields) + */ + public Bottom output(AggregationExpression... out) { + return new Bottom(append("output", Arrays.asList(out))); + } + } + + /** + * {@link AbstractAggregationExpression} to return the top element according to the specified {@link #sortBy(Sort) + * order}. + */ + public static class Top extends AbstractAggregationExpression { + + private Top(Object value) { + super(value); + } + + /** + * In case a limit value ({@literal n}) is present {@literal $topN} is used instead of {@literal $top}. 
+ * + * @return + */ + @Override + protected String getMongoMethod() { + return get("n") == null ? "$top" : "$topN"; + } + + /** + * @return new instance of {@link Top}. + */ + public static Top top() { + return new Top(Collections.emptyMap()); + } + + /** + * @param numberOfResults Limits the number of returned elements to the given value. + * @return new instance of {@link Top}. + */ + public static Top top(int numberOfResults) { + return top().limit(numberOfResults); + } + + /** + * Limits the number of returned elements to the given value. + * + * @param numberOfResults + * @return new instance of {@link Top}. + */ + public Top limit(int numberOfResults) { + return limit((Object) numberOfResults); + } + + /** + * Limits the number of returned elements to the value defined by the given {@link AggregationExpression + * expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Top}. + */ + public Top limit(AggregationExpression expression) { + return limit((Object) expression); + } + + private Top limit(Object value) { + return new Top(append("n", value)); + } + + /** + * Define result ordering. + * + * @param sort must not be {@literal null}. + * @return new instance of {@link Top}. + */ + public Top sortBy(Sort sort) { + return new Top(append("sortBy", sort)); + } + + /** + * Define result ordering. + * + * @param out must not be {@literal null}. + * @return new instance of {@link Top}. + */ + public Top output(Fields out) { + return new Top(append("output", out)); + } + + /** + * Define fields included in the output for each element. + * + * @param fieldNames must not be {@literal null}. + * @return new instance of {@link Top}. + * @see #output(Fields) + */ + public Top output(String... fieldNames) { + return output(Fields.fields(fieldNames)); + } + + /** + * Define expressions building the value included in the output for each element. + * + * @param out must not be {@literal null}. 
+ * @return new instance of {@link Top}. + * @see #output(Fields) + */ + public Top output(AggregationExpression... out) { + return new Top(append("output", Arrays.asList(out))); + } + } + + /** + * {@link AbstractAggregationExpression} to return the {@literal $firstN} elements. + */ + public static class First extends AbstractAggregationExpression { + + protected First(Object value) { + super(value); + } + + /** + * @return new instance of {@link First}. + */ + public static First first() { + return new First(Collections.emptyMap()); + } + + /** + * @return new instance of {@link First}. + */ + public static First first(int numberOfResults) { + return first().limit(numberOfResults); + } + + /** + * Limits the number of returned elements to the given value. + * + * @param numberOfResults + * @return new instance of {@link First}. + */ + public First limit(int numberOfResults) { + return limit((Object) numberOfResults); + } + + /** + * Limits the number of returned elements to the value defined by the given {@link AggregationExpression + * expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link First}. + */ + public First limit(AggregationExpression expression) { + return limit((Object) expression); + } + + private First limit(Object value) { + return new First(append("n", value)); + } + + /** + * Define the field to serve as source. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link First}. + */ + public First of(String fieldName) { + return input(fieldName); + } + + /** + * Define the expression building the value to serve as source. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link First}. + */ + public First of(AggregationExpression expression) { + return input(expression); + } + + /** + * Define the field to serve as source. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link First}. 
+ */ + public First input(String fieldName) { + return new First(append("input", Fields.field(fieldName))); + } + + /** + * Define the expression building the value to serve as source. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link First}. + */ + public First input(AggregationExpression expression) { + return new First(append("input", expression)); + } + + @Override + protected String getMongoMethod() { + return "$firstN"; + } + } + + /** + * {@link AbstractAggregationExpression} to return the {@literal $lastN} elements. + */ + public static class Last extends AbstractAggregationExpression { + + protected Last(Object value) { + super(value); + } + + /** + * @return new instance of {@link Last}. + */ + public static Last last() { + return new Last(Collections.emptyMap()); + } + + /** + * @return new instance of {@link Last}. + */ + public static Last last(int numberOfResults) { + return last().limit(numberOfResults); + } + + /** + * Limits the number of returned elements to the given value. + * + * @param numberOfResults + * @return new instance of {@link Last}. + */ + public Last limit(int numberOfResults) { + return limit((Object) numberOfResults); + } + + /** + * Limits the number of returned elements to the value defined by the given {@link AggregationExpression + * expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public Last limit(AggregationExpression expression) { + return limit((Object) expression); + } + + private Last limit(Object value) { + return new Last(append("n", value)); + } + + /** + * Define the field to serve as source. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public Last of(String fieldName) { + return input(fieldName); + } + + /** + * Define the expression building the value to serve as source. + * + * @param expression must not be {@literal null}. 
+ * @return new instance of {@link Last}. + */ + public Last of(AggregationExpression expression) { + return input(expression); + } + + /** + * Define the field to serve as source. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public Last input(String fieldName) { + return new Last(append("input", Fields.field(fieldName))); + } + + /** + * Define the expression building the value to serve as source. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public Last input(AggregationExpression expression) { + return new Last(append("input", expression)); + } + + @Override + protected String getMongoMethod() { + return "$lastN"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java new file mode 100644 index 0000000000..b188b16b5f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java @@ -0,0 +1,194 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.springframework.data.mongodb.core.aggregation.SetOperation.FieldAppender.ValueAppender; +import org.springframework.lang.Nullable; + +/** + * Adds new fields to documents. {@code $set} outputs documents that contain all existing fields from the input + * documents and newly added fields. + * + *
          + * SetOperation.set("totalHomework").toValue("A+").and().set("totalQuiz").toValue("B-")
          + * 
          + * + * @author Christoph Strobl + * @since 3.0 + * @see MongoDB Aggregation Framework: + * $set + */ +public class SetOperation extends DocumentEnhancingOperation { + + /** + * Create new instance of {@link SetOperation} adding map keys as exposed fields. + * + * @param source must not be {@literal null}. + */ + private SetOperation(Map source) { + super(source); + } + + /** + * Create new instance of {@link SetOperation} + * + * @param field must not be {@literal null}. + * @param value can be {@literal null}. + */ + public SetOperation(Object field, @Nullable Object value) { + this(Collections.singletonMap(field, value)); + } + + /** + * Define the {@link SetOperation} via {@link FieldAppender}. + * + * @return new instance of {@link FieldAppender}. + */ + public static FieldAppender builder() { + return new FieldAppender(); + } + + /** + * Concatenate another field to set. + * + * @param field must not be {@literal null}. + * @return new instance of {@link ValueAppender}. + */ + public static ValueAppender set(String field) { + return new FieldAppender().set(field); + } + + /** + * Append the value for a specific field to the operation. + * + * @param field the target field to set. + * @param value the value to assign. + * @return new instance of {@link SetOperation}. + */ + public SetOperation set(Object field, Object value) { + + LinkedHashMap target = new LinkedHashMap<>(getValueMap()); + target.put(field, value); + + return new SetOperation(target); + } + + /** + * Concatenate additional fields to set. + * + * @return new instance of {@link FieldAppender}. 
+ */ + public FieldAppender and() { + return new FieldAppender(getValueMap()); + } + + @Override + protected String mongoOperator() { + return "$set"; + } + + /** + * @author Christoph Strobl + * @since 3.0 + */ + public static class FieldAppender { + + private final Map valueMap; + + private FieldAppender() { + this.valueMap = new LinkedHashMap<>(); + } + + private FieldAppender(Map source) { + this.valueMap = new LinkedHashMap<>(source); + } + + /** + * Define the field to set. + * + * @param field must not be {@literal null}. + * @return new instance of {@link ValueAppender}. + */ + public ValueAppender set(String field) { + + return new ValueAppender() { + + @Override + public SetOperation toValue(Object value) { + + valueMap.put(field, value); + return FieldAppender.this.build(); + } + + @Override + public SetOperation toValueOf(Object value) { + + valueMap.put(field, value instanceof String stringValue ? Fields.field(stringValue) : value); + return FieldAppender.this.build(); + } + + @Override + public SetOperation withValueOfExpression(String operation, Object... values) { + + valueMap.put(field, new ExpressionProjection(operation, values)); + return FieldAppender.this.build(); + } + }; + } + + private SetOperation build() { + return new SetOperation(valueMap); + } + + /** + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ + public interface ValueAppender { + + /** + * Define the value to assign as is. + * + * @param value can be {@literal null}. + * @return new instance of {@link SetOperation}. + */ + SetOperation toValue(@Nullable Object value); + + /** + * Define the value to assign. Plain {@link String} values are treated as {@link Field field references}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link SetOperation}. + */ + SetOperation toValueOf(Object value); + + /** + * Adds a generic projection for the current field. + * + * @param operation the operation key, e.g. {@code $add}. 
+ * @param values the values to be set for the projection operation. + * @return new instance of {@link SetOperation}. + */ + SetOperation withValueOfExpression(String operation, Object... values); + } + + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperators.java index f14ebd4315..094ef7365b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -34,7 +34,7 @@ public class SetOperators { * Take the array referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link SetOperatorFactory}. */ public static SetOperatorFactory arrayAsSet(String fieldReference) { return new SetOperatorFactory(fieldReference); @@ -44,7 +44,7 @@ public static SetOperatorFactory arrayAsSet(String fieldReference) { * Take the array resulting from the given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link SetOperatorFactory}. 
*/ public static SetOperatorFactory arrayAsSet(AggregationExpression expression) { return new SetOperatorFactory(expression); @@ -65,7 +65,7 @@ public static class SetOperatorFactory { */ public SetOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; } @@ -77,7 +77,7 @@ public SetOperatorFactory(String fieldReference) { */ public SetOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; } @@ -87,7 +87,7 @@ public SetOperatorFactory(AggregationExpression expression) { * returns {@literal true} if they have the same distinct elements and {@literal false} otherwise. * * @param arrayReferences must not be {@literal null}. - * @return + * @return new instance of {@link SetEquals}. */ public SetEquals isEqualTo(String... arrayReferences) { return createSetEquals().isEqualTo(arrayReferences); @@ -98,7 +98,7 @@ public SetEquals isEqualTo(String... arrayReferences) { * returns {@literal true} if they have the same distinct elements and {@literal false} otherwise. * * @param expressions must not be {@literal null}. - * @return + * @return new instance of {@link SetEquals}. */ public SetEquals isEqualTo(AggregationExpression... expressions) { return createSetEquals().isEqualTo(expressions); @@ -113,7 +113,7 @@ private SetEquals createSetEquals() { * arrays and returns an array that contains the elements that appear in every of those. * * @param arrayReferences must not be {@literal null}. - * @return + * @return new instance of {@link SetIntersection}. */ public SetIntersection intersects(String... 
arrayReferences) { return createSetIntersection().intersects(arrayReferences); @@ -124,7 +124,7 @@ public SetIntersection intersects(String... arrayReferences) { * arrays and returns an array that contains the elements that appear in every of those. * * @param expressions must not be {@literal null}. - * @return + * @return new instance of {@link SetIntersection}. */ public SetIntersection intersects(AggregationExpression... expressions) { return createSetIntersection().intersects(expressions); @@ -139,7 +139,7 @@ private SetIntersection createSetIntersection() { * arrays and returns an array that contains the elements that appear in any of those. * * @param arrayReferences must not be {@literal null}. - * @return + * @return new instance of {@link SetUnion}. */ public SetUnion union(String... arrayReferences) { return createSetUnion().union(arrayReferences); @@ -150,7 +150,7 @@ public SetUnion union(String... arrayReferences) { * arrays and returns an array that contains the elements that appear in any of those. * * @param expressions must not be {@literal null}. - * @return + * @return new instance of {@link SetUnion}. */ public SetUnion union(AggregationExpression... expressions) { return createSetUnion().union(expressions); @@ -165,7 +165,7 @@ private SetUnion createSetUnion() { * containing the elements that do not exist in the given {@literal arrayReference}. * * @param arrayReference must not be {@literal null}. - * @return + * @return new instance of {@link SetDifference}. */ public SetDifference differenceTo(String arrayReference) { return createSetDifference().differenceTo(arrayReference); @@ -176,7 +176,7 @@ public SetDifference differenceTo(String arrayReference) { * containing the elements that do not exist in the given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link SetDifference}. 
*/ public SetDifference differenceTo(AggregationExpression expression) { return createSetDifference().differenceTo(expression); @@ -191,7 +191,7 @@ private SetDifference createSetDifference() { * {@literal true} if it is a subset of the given {@literal arrayReference}. * * @param arrayReference must not be {@literal null}. - * @return + * @return new instance of {@link SetIsSubset}. */ public SetIsSubset isSubsetOf(String arrayReference) { return createSetIsSubset().isSubsetOf(arrayReference); @@ -202,7 +202,7 @@ public SetIsSubset isSubsetOf(String arrayReference) { * {@literal true} if it is a subset of the given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link SetIsSubset}. */ public SetIsSubset isSubsetOf(AggregationExpression expression) { return createSetIsSubset().isSubsetOf(expression); @@ -216,7 +216,7 @@ private SetIsSubset createSetIsSubset() { * Creates new {@link AggregationExpression} that takes array of the previously mentioned field and returns * {@literal true} if any of the elements are {@literal true} and {@literal false} otherwise. * - * @return + * @return new instance of {@link AnyElementTrue}. */ public AnyElementTrue anyElementTrue() { return usesFieldRef() ? AnyElementTrue.arrayAsSet(fieldReference) : AnyElementTrue.arrayAsSet(expression); @@ -226,7 +226,7 @@ public AnyElementTrue anyElementTrue() { * Creates new {@link AggregationExpression} that tkes array of the previously mentioned field and returns * {@literal true} if no elements is {@literal false}. * - * @return + * @return new instance of {@link AllElementsTrue}. */ public AllElementsTrue allElementsTrue() { return usesFieldRef() ? AllElementsTrue.arrayAsSet(fieldReference) : AllElementsTrue.arrayAsSet(expression); @@ -257,11 +257,11 @@ protected String getMongoMethod() { * Create new {@link SetEquals}. * * @param arrayReference must not be {@literal null}. 
- * @return + * @return new instance of {@link SetEquals}. */ public static SetEquals arrayAsSet(String arrayReference) { - Assert.notNull(arrayReference, "ArrayReference must not be null!"); + Assert.notNull(arrayReference, "ArrayReference must not be null"); return new SetEquals(asFields(arrayReference)); } @@ -269,11 +269,11 @@ public static SetEquals arrayAsSet(String arrayReference) { * Create new {@link SetEquals}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link SetEquals}. */ public static SetEquals arrayAsSet(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new SetEquals(Collections.singletonList(expression)); } @@ -281,11 +281,11 @@ public static SetEquals arrayAsSet(AggregationExpression expression) { * Creates new {@link java.util.Set} with all previously added arguments appending the given one. * * @param arrayReferences must not be {@literal null}. - * @return + * @return new instance of {@link SetEquals}. */ public SetEquals isEqualTo(String... arrayReferences) { - Assert.notNull(arrayReferences, "ArrayReferences must not be null!"); + Assert.notNull(arrayReferences, "ArrayReferences must not be null"); return new SetEquals(append(Fields.fields(arrayReferences).asList())); } @@ -293,11 +293,11 @@ public SetEquals isEqualTo(String... arrayReferences) { * Creates new {@link Sum} with all previously added arguments appending the given one. * * @param expressions must not be {@literal null}. - * @return + * @return new instance of {@link SetEquals}. */ public SetEquals isEqualTo(AggregationExpression... expressions) { - Assert.notNull(expressions, "Expressions must not be null!"); + Assert.notNull(expressions, "Expressions must not be null"); return new SetEquals(append(Arrays.asList(expressions))); } @@ -305,11 +305,11 @@ public SetEquals isEqualTo(AggregationExpression... 
expressions) { * Creates new {@link Sum} with all previously added arguments appending the given one. * * @param array must not be {@literal null}. - * @return + * @return new instance of {@link SetEquals}. */ public SetEquals isEqualTo(Object[] array) { - Assert.notNull(array, "Array must not be null!"); + Assert.notNull(array, "Array must not be null"); return new SetEquals(append(array)); } } @@ -334,11 +334,11 @@ protected String getMongoMethod() { * Creates new {@link SetIntersection} * * @param arrayReference must not be {@literal null}. - * @return + * @return new instance of {@link SetIntersection}. */ public static SetIntersection arrayAsSet(String arrayReference) { - Assert.notNull(arrayReference, "ArrayReference must not be null!"); + Assert.notNull(arrayReference, "ArrayReference must not be null"); return new SetIntersection(asFields(arrayReference)); } @@ -346,11 +346,11 @@ public static SetIntersection arrayAsSet(String arrayReference) { * Creates new {@link SetIntersection}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link SetIntersection}. */ public static SetIntersection arrayAsSet(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new SetIntersection(Collections.singletonList(expression)); } @@ -358,11 +358,11 @@ public static SetIntersection arrayAsSet(AggregationExpression expression) { * Creates new {@link SetIntersection} with all previously added arguments appending the given one. * * @param arrayReferences must not be {@literal null}. - * @return + * @return new instance of {@link SetIntersection}. */ public SetIntersection intersects(String... 
arrayReferences) { - Assert.notNull(arrayReferences, "ArrayReferences must not be null!"); + Assert.notNull(arrayReferences, "ArrayReferences must not be null"); return new SetIntersection(append(asFields(arrayReferences))); } @@ -370,11 +370,11 @@ public SetIntersection intersects(String... arrayReferences) { * Creates new {@link SetIntersection} with all previously added arguments appending the given one. * * @param expressions must not be {@literal null}. - * @return + * @return new instance of {@link SetIntersection}. */ public SetIntersection intersects(AggregationExpression... expressions) { - Assert.notNull(expressions, "Expressions must not be null!"); + Assert.notNull(expressions, "Expressions must not be null"); return new SetIntersection(append(Arrays.asList(expressions))); } } @@ -399,11 +399,11 @@ protected String getMongoMethod() { * Creates new {@link SetUnion}. * * @param arrayReference must not be {@literal null}. - * @return + * @return new instance of {@link SetUnion}. */ public static SetUnion arrayAsSet(String arrayReference) { - Assert.notNull(arrayReference, "ArrayReference must not be null!"); + Assert.notNull(arrayReference, "ArrayReference must not be null"); return new SetUnion(asFields(arrayReference)); } @@ -411,11 +411,11 @@ public static SetUnion arrayAsSet(String arrayReference) { * Creates new {@link SetUnion}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link SetUnion}. */ public static SetUnion arrayAsSet(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new SetUnion(Collections.singletonList(expression)); } @@ -423,11 +423,11 @@ public static SetUnion arrayAsSet(AggregationExpression expression) { * Creates new {@link SetUnion} with all previously added arguments appending the given one. * * @param arrayReferences must not be {@literal null}. 
- * @return + * @return new instance of {@link SetUnion}. */ public SetUnion union(String... arrayReferences) { - Assert.notNull(arrayReferences, "ArrayReferences must not be null!"); + Assert.notNull(arrayReferences, "ArrayReferences must not be null"); return new SetUnion(append(asFields(arrayReferences))); } @@ -435,11 +435,11 @@ public SetUnion union(String... arrayReferences) { * Creates new {@link SetUnion} with all previously added arguments appending the given one. * * @param expressions must not be {@literal null}. - * @return + * @return new instance of {@link SetUnion}. */ public SetUnion union(AggregationExpression... expressions) { - Assert.notNull(expressions, "Expressions must not be null!"); + Assert.notNull(expressions, "Expressions must not be null"); return new SetUnion(append(Arrays.asList(expressions))); } } @@ -464,11 +464,11 @@ protected String getMongoMethod() { * Creates new {@link SetDifference}. * * @param arrayReference must not be {@literal null}. - * @return + * @return new instance of {@link SetDifference}. */ public static SetDifference arrayAsSet(String arrayReference) { - Assert.notNull(arrayReference, "ArrayReference must not be null!"); + Assert.notNull(arrayReference, "ArrayReference must not be null"); return new SetDifference(asFields(arrayReference)); } @@ -476,11 +476,11 @@ public static SetDifference arrayAsSet(String arrayReference) { * Creates new {@link SetDifference}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link SetDifference}. 
*/ public static SetDifference arrayAsSet(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new SetDifference(Collections.singletonList(expression)); } @@ -488,11 +488,11 @@ public static SetDifference arrayAsSet(AggregationExpression expression) { * Creates new {@link SetDifference} with all previously added arguments appending the given one. * * @param arrayReference must not be {@literal null}. - * @return + * @return new instance of {@link SetDifference}. */ public SetDifference differenceTo(String arrayReference) { - Assert.notNull(arrayReference, "ArrayReference must not be null!"); + Assert.notNull(arrayReference, "ArrayReference must not be null"); return new SetDifference(append(Fields.field(arrayReference))); } @@ -500,11 +500,11 @@ public SetDifference differenceTo(String arrayReference) { * Creates new {@link SetDifference} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link SetDifference}. */ public SetDifference differenceTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new SetDifference(append(expression)); } } @@ -529,11 +529,11 @@ protected String getMongoMethod() { * Creates new {@link SetIsSubset}. * * @param arrayReference must not be {@literal null}. - * @return + * @return new instance of {@link SetIsSubset}. */ public static SetIsSubset arrayAsSet(String arrayReference) { - Assert.notNull(arrayReference, "ArrayReference must not be null!"); + Assert.notNull(arrayReference, "ArrayReference must not be null"); return new SetIsSubset(asFields(arrayReference)); } @@ -541,11 +541,11 @@ public static SetIsSubset arrayAsSet(String arrayReference) { * Creates new {@link SetIsSubset}. 
* * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link SetIsSubset}. */ public static SetIsSubset arrayAsSet(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new SetIsSubset(Collections.singletonList(expression)); } @@ -553,11 +553,11 @@ public static SetIsSubset arrayAsSet(AggregationExpression expression) { * Creates new {@link SetIsSubset} with all previously added arguments appending the given one. * * @param arrayReference must not be {@literal null}. - * @return + * @return new instance of {@link SetIsSubset}. */ public SetIsSubset isSubsetOf(String arrayReference) { - Assert.notNull(arrayReference, "ArrayReference must not be null!"); + Assert.notNull(arrayReference, "ArrayReference must not be null"); return new SetIsSubset(append(Fields.field(arrayReference))); } @@ -565,11 +565,11 @@ public SetIsSubset isSubsetOf(String arrayReference) { * Creates new {@link SetIsSubset} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link SetIsSubset}. */ public SetIsSubset isSubsetOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new SetIsSubset(append(expression)); } } @@ -594,11 +594,11 @@ protected String getMongoMethod() { * Creates new {@link AnyElementTrue}. * * @param arrayReference must not be {@literal null}. - * @return + * @return new instance of {@link AnyElementTrue}. 
*/ public static AnyElementTrue arrayAsSet(String arrayReference) { - Assert.notNull(arrayReference, "ArrayReference must not be null!"); + Assert.notNull(arrayReference, "ArrayReference must not be null"); return new AnyElementTrue(asFields(arrayReference)); } @@ -606,11 +606,11 @@ public static AnyElementTrue arrayAsSet(String arrayReference) { * Creates new {@link AnyElementTrue}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link AnyElementTrue}. */ public static AnyElementTrue arrayAsSet(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new AnyElementTrue(Collections.singletonList(expression)); } @@ -639,11 +639,11 @@ protected String getMongoMethod() { * Creates new {@link AllElementsTrue}. * * @param arrayReference must not be {@literal null}. - * @return + * @return new instance of {@link AllElementsTrue}. */ public static AllElementsTrue arrayAsSet(String arrayReference) { - Assert.notNull(arrayReference, "ArrayReference must not be null!"); + Assert.notNull(arrayReference, "ArrayReference must not be null"); return new AllElementsTrue(asFields(arrayReference)); } @@ -651,11 +651,11 @@ public static AllElementsTrue arrayAsSet(String arrayReference) { * Creates new {@link AllElementsTrue}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link AllElementsTrue}. 
*/ public static AllElementsTrue arrayAsSet(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new AllElementsTrue(Collections.singletonList(expression)); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java new file mode 100644 index 0000000000..2b8df539e1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java @@ -0,0 +1,857 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import org.bson.Document; +import org.springframework.data.domain.Sort; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Encapsulates the {@code setWindowFields}-operation. 
+ * + * @author Christoph Strobl + * @since 3.3 + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/setWindowFields/ + */ +public class SetWindowFieldsOperation + implements AggregationOperation, FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation { + + private static final String CURRENT = "current"; + private static final String UNBOUNDED = "unbounded"; + + private final @Nullable Object partitionBy; + private final @Nullable AggregationOperation sortBy; + private final WindowOutput output; + + /** + * Create a new {@link SetWindowFieldsOperation} with given args. + * + * @param partitionBy The field or {@link AggregationExpression} to group by. + * @param sortBy the {@link SortOperation operation} to sort the documents by in the partition. + * @param output the {@link WindowOutput} containing the fields to add and the rules to calculate their respective + * values. + */ + protected SetWindowFieldsOperation(@Nullable Object partitionBy, @Nullable AggregationOperation sortBy, + WindowOutput output) { + + this.partitionBy = partitionBy; + this.sortBy = sortBy; + this.output = output; + } + + /** + * Obtain a {@link SetWindowFieldsOperationBuilder builder} to create a {@link SetWindowFieldsOperation}. + * + * @return new instance of {@link SetWindowFieldsOperationBuilder}. 
+ */ + public static SetWindowFieldsOperationBuilder builder() { + return new SetWindowFieldsOperationBuilder(); + } + + @Override + public ExposedFields getFields() { + return ExposedFields.synthetic(Fields.from(output.fields.toArray(new Field[0]))); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document $setWindowFields = new Document(); + if (partitionBy != null) { + if (partitionBy instanceof AggregationExpression aggregationExpression) { + $setWindowFields.append("partitionBy", aggregationExpression.toDocument(context)); + } else if (partitionBy instanceof Field field) { + $setWindowFields.append("partitionBy", context.getReference(field).toString()); + } else { + $setWindowFields.append("partitionBy", partitionBy); + } + } + + if (sortBy != null) { + $setWindowFields.append("sortBy", sortBy.toDocument(context).get(sortBy.getOperator())); + } + + Document output = new Document(); + for (ComputedField field : this.output.fields) { + + Document fieldOperation = field.getWindowOperator().toDocument(context); + if (field.window != null) { + fieldOperation.put("window", field.window.toDocument(context)); + } + output.append(field.getName(), fieldOperation); + } + $setWindowFields.append("output", output); + + return new Document(getOperator(), $setWindowFields); + } + + @Override + public String getOperator() { + return "$setWindowFields"; + } + + /** + * {@link WindowOutput} defines output of {@literal $setWindowFields} stage by defining the {@link ComputedField + * field(s)} to append to the documents in the output. + */ + public static class WindowOutput { + + private final List fields; + + /** + * Create a new output containing the single given {@link ComputedField field}. + * + * @param outputField must not be {@literal null}. 
+ */ + public WindowOutput(ComputedField outputField) { + + Assert.notNull(outputField, "OutputField must not be null"); + + this.fields = new ArrayList<>(); + this.fields.add(outputField); + } + + /** + * Append the given {@link ComputedField field} to the outptut. + * + * @param field must not be {@literal null}. + * @return this. + */ + public WindowOutput append(ComputedField field) { + + Assert.notNull(field, "Field must not be null"); + + fields.add(field); + return this; + } + + /** + * Append the given {@link AggregationExpression} as a {@link ComputedField field} in a fluent way. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ComputedFieldAppender}. + * @see #append(ComputedField) + */ + public ComputedFieldAppender append(AggregationExpression expression) { + + return new ComputedFieldAppender() { + + @Nullable private Window window; + + @Override + public WindowOutput as(String fieldname) { + + return WindowOutput.this.append(new ComputedField(fieldname, expression, window)); + } + + @Override + public ComputedFieldAppender within(Window window) { + this.window = window; + return this; + } + }; + } + + /** + * Tiny little helper to allow fluent API usage for {@link #append(ComputedField)}. + */ + public interface ComputedFieldAppender { + + /** + * Specify the target field name. + * + * @param fieldname the name of field to add to the target document. + * @return the {@link WindowOutput} that started the append operation. + */ + WindowOutput as(String fieldname); + + /** + * Specify the window boundaries. + * + * @param window must not be {@literal null}. + * @return this. + */ + ComputedFieldAppender within(Window window); + } + } + + /** + * A {@link Field} that the result of a computation done via an {@link AggregationExpression}. 
+ * + * @author Christoph Strobl + */ + public static class ComputedField implements Field { + + private final String name; + private final AggregationExpression windowOperator; + private final @Nullable Window window; + + /** + * Create a new {@link ComputedField}. + * + * @param name the target field name. + * @param windowOperator the expression to calculate the field value. + */ + public ComputedField(String name, AggregationExpression windowOperator) { + this(name, windowOperator, null); + } + + /** + * Create a new {@link ComputedField}. + * + * @param name the target field name. + * @param windowOperator the expression to calculate the field value. + * @param window the boundaries to operate within. Can be {@literal null}. + */ + public ComputedField(String name, AggregationExpression windowOperator, @Nullable Window window) { + + this.name = name; + this.windowOperator = windowOperator; + this.window = window; + } + + @Override + public String getName() { + return name; + } + + @Override + public String getTarget() { + return getName(); + } + + @Override + public boolean isAliased() { + return false; + } + + public AggregationExpression getWindowOperator() { + return windowOperator; + } + + @Nullable + public Window getWindow() { + return window; + } + } + + /** + * Quick access to {@link DocumentWindow documents} and {@literal RangeWindow range} {@link Window windows}. + * + * @author Christoph Strobl + */ + public interface Windows { + + /** + * Create a document window relative to the position of the current document. + * + * @param lower an integer for a position relative to the current document, {@literal current} or + * {@literal unbounded}. + * @param upper an integer for a position relative to the current document, {@literal current} or + * {@literal unbounded}. + * @return new instance of {@link DocumentWindow}. 
+ */ + static DocumentWindow documents(Object lower, Object upper) { + return new DocumentWindow(lower, upper); + } + + /** + * Create a range window defined based on sort expression. + * + * @param lower a numeric value to add the sort by field value of the current document, {@literal current} or + * {@literal unbounded}. + * @param upper a numeric value to add the sort by field value of the current document, {@literal current} or + * {@literal unbounded}. + * @return new instance of {@link RangeWindow}. + */ + static RangeWindow range(Object lower, Object upper, @Nullable WindowUnit unit) { + return new RangeWindow(lower, upper, unit == null ? WindowUnits.DEFAULT : unit); + } + + /** + * Create a range window based on the {@link Sort sort value} of the current document via a fluent API. + * + * @return new instance of {@link RangeWindowBuilder}. + */ + static RangeWindowBuilder range() { + return new RangeWindowBuilder(); + } + + /** + * Create a document window relative to the position of the current document via a fluent API. + * + * @return new instance of {@link DocumentWindowBuilder}. + */ + static DocumentWindowBuilder documents() { + return new DocumentWindowBuilder(); + } + } + + /** + * A {@link Window} to be used for {@link ComputedField#getWindow() ComputedField}. + */ + public interface Window { + + /** + * The lower (inclusive) boundary. + * + * @return + */ + Object getLower(); + + /** + * The upper (inclusive) boundary. + * + * @return + */ + Object getUpper(); + + /** + * Obtain the document representation of the window in a default {@link AggregationOperationContext context}. + * + * @return never {@literal null}. + */ + default Document toDocument() { + return toDocument(Aggregation.DEFAULT_CONTEXT); + } + + /** + * Obtain the document representation of the window in the given {@link AggregationOperationContext context}. + * + * @return never {@literal null}. 
+ */ + Document toDocument(AggregationOperationContext ctx); + } + + /** + * Builder API for a {@link RangeWindow}. + * + * @author Christoph Strobl + */ + public static class RangeWindowBuilder { + + private @Nullable Object lower; + private @Nullable Object upper; + private @Nullable WindowUnit unit; + + /** + * The lower (inclusive) range limit based on the sortBy field. + * + * @param lower eg. {@literal current} or {@literal unbounded}. + * @return this. + */ + public RangeWindowBuilder from(String lower) { + + this.lower = lower; + return this; + } + + /** + * The upper (inclusive) range limit based on the sortBy field. + * + * @param upper eg. {@literal current} or {@literal unbounded}. + * @return this. + */ + public RangeWindowBuilder to(String upper) { + + this.upper = upper; + return this; + } + + /** + * The lower (inclusive) range limit value to add to the value based on the sortBy field. Use a negative integer for + * a position before the current document. Use a positive integer for a position after the current document. + * {@code 0} is the current document position. + * + * @param lower + * @return this. + */ + public RangeWindowBuilder from(Number lower) { + + this.lower = lower; + return this; + } + + /** + * The upper (inclusive) range limit value to add to the value based on the sortBy field. Use a negative integer for + * a position before the current document. Use a positive integer for a position after the current document. + * {@code 0} is the current document position. + * + * @param upper + * @return this. + */ + public RangeWindowBuilder to(Number upper) { + + this.upper = upper; + return this; + } + + /** + * Use {@literal current} as {@link #from(String) lower} limit. + * + * @return this. + */ + public RangeWindowBuilder fromCurrent() { + return from(CURRENT); + } + + /** + * Use {@literal unbounded} as {@link #from(String) lower} limit. + * + * @return this. 
+ */ + public RangeWindowBuilder fromUnbounded() { + return from(UNBOUNDED); + } + + /** + * Use {@literal current} as {@link #to(String) upper} limit. + * + * @return this. + */ + public RangeWindowBuilder toCurrent() { + return to(CURRENT); + } + + /** + * Use {@literal unbounded} as {@link #to(String) upper} limit. + * + * @return this. + */ + public RangeWindowBuilder toUnbounded() { + return to(UNBOUNDED); + } + + /** + * Set the {@link WindowUnit unit} or measure for the given {@link Window}. + * + * @param windowUnit must not be {@literal null}. Can be on of {@link Windows}. + * @return this. + */ + public RangeWindowBuilder unit(WindowUnit windowUnit) { + + Assert.notNull(windowUnit, "WindowUnit must not be null"); + this.unit = windowUnit; + return this; + } + + /** + * Build the {@link RangeWindow}. + * + * @return new instance of {@link RangeWindow}. + */ + public RangeWindow build() { + + Assert.notNull(lower, "Lower bound must not be null"); + Assert.notNull(upper, "Upper bound must not be null"); + Assert.notNull(unit, "WindowUnit bound must not be null"); + + return new RangeWindow(lower, upper, unit); + } + } + + /** + * Builder API for a {@link RangeWindow}. + * + * @author Christoph Strobl + */ + public static class DocumentWindowBuilder { + + private @Nullable Object lower; + private @Nullable Object upper; + + /** + * The lower (inclusive) range limit based on current document. Use a negative integer for a position before the + * current document. Use a positive integer for a position after the current document. {@code 0} is the current + * document position. + * + * @param lower + * @return this. 
+ */ + public DocumentWindowBuilder from(Number lower) { + + this.lower = lower; + return this; + } + + public DocumentWindowBuilder fromCurrent() { + return from(CURRENT); + } + + public DocumentWindowBuilder fromUnbounded() { + return from(UNBOUNDED); + } + + public DocumentWindowBuilder to(String upper) { + + this.upper = upper; + return this; + } + + /** + * The lower (inclusive) range limit based on current document. + * + * @param lower eg. {@literal current} or {@literal unbounded}. + * @return this. + */ + public DocumentWindowBuilder from(String lower) { + + this.lower = lower; + return this; + } + + /** + * The upper (inclusive) range limit based on current document. Use a negative integer for a position before the + * current document. Use a positive integer for a position after the current document. {@code 0} is the current + * document position. + * + * @param upper + * @return this. + */ + public DocumentWindowBuilder to(Number upper) { + + this.upper = upper; + return this; + } + + public DocumentWindowBuilder toCurrent() { + return to(CURRENT); + } + + public DocumentWindowBuilder toUnbounded() { + return to(UNBOUNDED); + } + + public DocumentWindow build() { + + Assert.notNull(lower, "Lower bound must not be null"); + Assert.notNull(upper, "Upper bound must not be null"); + + return new DocumentWindow(lower, upper); + } + } + + /** + * Common base class for {@link Window} implementation. + * + * @author Christoph Strobl + */ + static abstract class WindowImpl implements Window { + + private final Object lower; + private final Object upper; + + protected WindowImpl(Object lower, Object upper) { + this.lower = lower; + this.upper = upper; + } + + @Override + public Object getLower() { + return lower; + } + + @Override + public Object getUpper() { + return upper; + } + } + + /** + * {@link Window} implementation based on the current document. 
+ * + * @author Christoph Strobl + */ + public static class DocumentWindow extends WindowImpl { + + DocumentWindow(Object lower, Object upper) { + super(lower, upper); + } + + @Override + public Document toDocument(AggregationOperationContext ctx) { + return new Document("documents", Arrays.asList(getLower(), getUpper())); + } + } + + /** + * {@link Window} implementation based on the sort fields. + * + * @author Christoph Strobl + */ + public static class RangeWindow extends WindowImpl { + + private final WindowUnit unit; + + protected RangeWindow(Object lower, Object upper, WindowUnit unit) { + + super(lower, upper); + this.unit = unit; + } + + @Override + public Document toDocument(AggregationOperationContext ctx) { + + Document range = new Document("range", new Object[] { getLower(), getUpper() }); + if (unit != null && !WindowUnits.DEFAULT.equals(unit)) { + range.append("unit", unit.name().toLowerCase()); + } + return range; + } + } + + /** + * The actual time unit to apply to a {@link Window}. + */ + public interface WindowUnit { + + String name(); + + /** + * Converts the given time unit into a {@link WindowUnit}. Supported units are: days, hours, minutes, seconds, and + * milliseconds. + * + * @param timeUnit the time unit to convert, must not be {@literal null}. + * @return + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. + */ + static WindowUnit from(TimeUnit timeUnit) { + + Assert.notNull(timeUnit, "TimeUnit must not be null"); + + return switch (timeUnit) { + case DAYS -> WindowUnits.DAY; + case HOURS -> WindowUnits.HOUR; + case MINUTES -> WindowUnits.MINUTE; + case SECONDS -> WindowUnits.SECOND; + case MILLISECONDS -> WindowUnits.MILLISECOND; + default -> throw new IllegalArgumentException(String.format("Cannot create WindowUnit from %s", timeUnit)); + }; + + } + + /** + * Converts the given chrono unit into a {@link WindowUnit}. 
Supported units are: years, weeks, months, days, hours, + * minutes, seconds, and millis. + * + * @param chronoUnit the chrono unit to convert, must not be {@literal null}. + * @return + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. + */ + static WindowUnit from(ChronoUnit chronoUnit) { + + return switch (chronoUnit) { + case YEARS -> WindowUnits.YEAR; + case WEEKS -> WindowUnits.WEEK; + case MONTHS -> WindowUnits.MONTH; + case DAYS -> WindowUnits.DAY; + case HOURS -> WindowUnits.HOUR; + case MINUTES -> WindowUnits.MINUTE; + case SECONDS -> WindowUnits.SECOND; + case MILLIS -> WindowUnits.MILLISECOND; + default -> throw new IllegalArgumentException(String.format("Cannot create WindowUnit from %s", chronoUnit)); + }; + + } + } + + /** + * Quick access to available {@link WindowUnit units}. + */ + public enum WindowUnits implements WindowUnit { + DEFAULT, YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, MILLISECOND + } + + /** + * A fluent builder to create a {@link SetWindowFieldsOperation}. + * + * @author Christoph Strobl + */ + public static class SetWindowFieldsOperationBuilder { + + private Object partitionBy; + private SortOperation sortOperation; + private WindowOutput output; + + /** + * Specify the field to group by. + * + * @param fieldName must not be {@literal null} or null. + * @return this. + */ + public SetWindowFieldsOperationBuilder partitionByField(String fieldName) { + + Assert.hasText(fieldName, "Field name must not be empty or null"); + return partitionBy(Fields.field("$" + fieldName, fieldName)); + } + + /** + * Specify the {@link AggregationExpression expression} to group by. + * + * @param expression must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder partitionByExpression(AggregationExpression expression) { + return partitionBy(expression); + } + + /** + * Sort {@link Sort.Direction#ASC ascending} by the given fields. 
+ * + * @param fields must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder sortBy(String... fields) { + return sortBy(Sort.by(fields)); + } + + /** + * Set the sort order. + * + * @param sort must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder sortBy(Sort sort) { + return sortBy(new SortOperation(sort)); + } + + /** + * Set the {@link SortOperation} to use. + * + * @param sort must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder sortBy(SortOperation sort) { + + Assert.notNull(sort, "SortOperation must not be null"); + + this.sortOperation = sort; + return this; + } + + /** + * Define the actual output computation. + * + * @param output must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder output(WindowOutput output) { + + Assert.notNull(output, "WindowOutput must not be null"); + + this.output = output; + return this; + } + + /** + * Add a field capturing the result of the given {@link AggregationExpression expression} to the output. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link WindowChoice}. 
+ */ + public WindowChoice output(AggregationExpression expression) { + + return new WindowChoice() { + + @Nullable private Window window; + + @Override + public As within(Window window) { + + Assert.notNull(window, "Window must not be null"); + + this.window = window; + return this; + } + + @Override + public SetWindowFieldsOperationBuilder as(String targetFieldName) { + + Assert.hasText(targetFieldName, "Target field name must not be empty or null"); + + ComputedField computedField = new ComputedField(targetFieldName, expression, window); + + if (SetWindowFieldsOperationBuilder.this.output == null) { + SetWindowFieldsOperationBuilder.this.output = new WindowOutput(computedField); + } else { + SetWindowFieldsOperationBuilder.this.output.append(computedField); + } + + return SetWindowFieldsOperationBuilder.this; + } + }; + } + + /** + * Interface to capture field name used to capture the computation result. + */ + public interface As { + + /** + * Define the target name field name to hold the computation result. + * + * @param targetFieldName must not be {@literal null} or empty. + * @return the starting point {@link SetWindowFieldsOperationBuilder builder} instance. + */ + SetWindowFieldsOperationBuilder as(String targetFieldName); + } + + /** + * Interface to capture an optional {@link Window} applicable to the field computation. + */ + public interface WindowChoice extends As { + + /** + * Specify calculation boundaries. + * + * @param window must not be {@literal null}. + * @return never {@literal null}. + */ + As within(Window window); + + } + + /** + * Partition by a value that translates to a valid mongodb expression. + * + * @param value must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder partitionBy(Object value) { + + Assert.notNull(value, "Partition By must not be null"); + + partitionBy = value; + return this; + } + + /** + * Obtain a new instance of {@link SetWindowFieldsOperation} with previously set arguments. 
+ * + * @return new instance of {@link SetWindowFieldsOperation}. + */ + public SetWindowFieldsOperation build() { + return new SetWindowFieldsOperation(partitionBy, sortOperation, output); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SkipOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SkipOperation.java index f08542caa7..4d5de23087 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SkipOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SkipOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,14 +21,15 @@ /** * Encapsulates the aggregation framework {@code $skip}-operation. *

          - * We recommend to use the static factory method {@link Aggregation#skip(int)} instead of creating instances of this + * We recommend to use the static factory method {@link Aggregation#skip(long)} instead of creating instances of this * class directly. * * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl * @since 1.3 - * @see MongoDB Aggregation Framework: $skip + * @see MongoDB Aggregation Framework: + * $skip */ public class SkipOperation implements AggregationOperation { @@ -41,16 +42,17 @@ public class SkipOperation implements AggregationOperation { */ public SkipOperation(long skipCount) { - Assert.isTrue(skipCount >= 0, "Skip count must not be negative!"); + Assert.isTrue(skipCount >= 0, "Skip count must not be negative"); this.skipCount = skipCount; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { - return new Document("$skip", skipCount); + return new Document(getOperator(), skipCount); + } + + @Override + public String getOperator() { + return "$skip"; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperation.java index 945b000156..ffc0aa0654 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,17 +21,16 @@ /** * Encapsulates the aggregation framework {@code $sortByCount}-operation. - *

          + *
          * {@code $sortByCount} stage is typically used with {@link Aggregation} and {@code $facet}. Groups incoming documents * based on the value of a specified expression and computes the count of documents in each distinct group. * {@link SortByCountOperation} is equivalent to {@code { $group: { _id: , count: { $sum: 1 } } }, { $sort: * { count: -1 } }}. - *

          + *
          * We recommend to use the static factory method {@link Aggregation#sortByCount(String)} instead of creating instances * of this class directly. * - * @see https://docs.mongodb.com/manual/reference/operator/aggregation/sortByCount/ + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/sortByCount/ * @author Jérôme Guyon * @author Mark Paluch * @since 2.1 @@ -48,7 +47,7 @@ public class SortByCountOperation implements AggregationOperation { */ public SortByCountOperation(Field groupByField) { - Assert.notNull(groupByField, "Group by field must not be null!"); + Assert.notNull(groupByField, "Group by field must not be null"); this.groupByField = groupByField; this.groupByExpression = null; @@ -61,20 +60,21 @@ public SortByCountOperation(Field groupByField) { */ public SortByCountOperation(AggregationExpression groupByExpression) { - Assert.notNull(groupByExpression, "Group by expression must not be null!"); + Assert.notNull(groupByExpression, "Group by expression must not be null"); this.groupByExpression = groupByExpression; this.groupByField = null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { - return new Document("$sortByCount", groupByExpression == null ? context.getReference(groupByField).toString() + return new Document(getOperator(), groupByExpression == null ? 
context.getReference(groupByField).toString() : groupByExpression.toDocument(context)); } + + @Override + public String getOperator() { + return "$sortByCount"; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortOperation.java index 2c4e5d1a62..b8c6096f1e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SortOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -33,7 +33,8 @@ * @author Christoph Strobl * @author Mark Paluch * @since 1.3 - * @see MongoDB Aggregation Framework: $sort + * @see MongoDB Aggregation Framework: + * $sort */ public class SortOperation implements AggregationOperation { @@ -46,7 +47,7 @@ public class SortOperation implements AggregationOperation { */ public SortOperation(Sort sort) { - Assert.notNull(sort, "Sort must not be null!"); + Assert.notNull(sort, "Sort must not be null"); this.sort = sort; } @@ -58,10 +59,6 @@ public SortOperation and(Sort sort) { return new SortOperation(this.sort.and(sort)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document 
toDocument(AggregationOperationContext context) { @@ -74,6 +71,11 @@ public Document toDocument(AggregationOperationContext context) { object.put(reference.getRaw(), order.isAscending() ? 1 : -1); } - return new Document("$sort", object); + return new Document(getOperator(), object); + } + + @Override + public String getOperator() { + return "$sort"; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java index 6dd94f0e90..3119e2729c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,7 +17,6 @@ import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.List; import org.bson.Document; @@ -66,19 +65,11 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer { * Creates a new {@link SpelExpressionTransformer}. 
*/ SpelExpressionTransformer() { - - List> conversions = new ArrayList>(); - conversions.add(new OperatorNodeConversion(this)); - conversions.add(new LiteralNodeConversion(this)); - conversions.add(new IndexerNodeConversion(this)); - conversions.add(new InlineListNodeConversion(this)); - conversions.add(new PropertyOrFieldReferenceNodeConversion(this)); - conversions.add(new CompoundExpressionNodeConversion(this)); - conversions.add(new MethodReferenceNodeConversion(this)); - conversions.add(new NotOperatorNodeConversion(this)); - conversions.add(new ValueRetrievingNodeConversion(this)); - - this.conversions = Collections.unmodifiableList(conversions); + this.conversions = List.of(new OperatorNodeConversion(this), new LiteralNodeConversion(this), + new IndexerNodeConversion(this), new InlineListNodeConversion(this), + new PropertyOrFieldReferenceNodeConversion(this), new CompoundExpressionNodeConversion(this), + new MethodReferenceNodeConversion(this), new NotOperatorNodeConversion(this), + new ValueRetrievingNodeConversion(this)); } /** @@ -94,21 +85,17 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer { */ public Object transform(String expression, AggregationOperationContext context, Object... 
params) { - Assert.notNull(expression, "Expression must not be null!"); - Assert.notNull(context, "AggregationOperationContext must not be null!"); - Assert.notNull(params, "Parameters must not be null!"); + Assert.notNull(expression, "Expression must not be null"); + Assert.notNull(context, "AggregationOperationContext must not be null"); + Assert.notNull(params, "Parameters must not be null"); SpelExpression spelExpression = (SpelExpression) PARSER.parseExpression(expression); ExpressionState state = new ExpressionState(new StandardEvaluationContext(params), CONFIG); ExpressionNode node = ExpressionNode.from(spelExpression.getAST(), state); - return transform(new AggregationExpressionTransformationContext(node, null, null, context)); + return transform(new AggregationExpressionTransformationContext<>(node, null, null, context)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.spel.ExpressionTransformer#transform(org.springframework.data.mongodb.core.spel.ExpressionTransformationContextSupport) - */ public Object transform(AggregationExpressionTransformationContext context) { return lookupConversionFor(context.getCurrentNode()).convert(context); } @@ -130,7 +117,7 @@ private ExpressionNodeConversion lookupConversionFor(ExpressionN } throw new IllegalArgumentException("Unsupported Element: " + node + " Type: " + node.getClass() - + " You probably have a syntax error in your SpEL expression!"); + + " You probably have a syntax error in your SpEL expression"); } /** @@ -153,7 +140,7 @@ private static abstract class ExpressionNodeConversion @SuppressWarnings("unchecked") public ExpressionNodeConversion(AggregationExpressionTransformer transformer) { - Assert.notNull(transformer, "Transformer must not be null!"); + Assert.notNull(transformer, "Transformer must not be null"); this.nodeType = (Class) GenericTypeResolver.resolveTypeArgument(this.getClass(), ExpressionNodeConversion.class); @@ -180,8 +167,8 @@ protected boolean 
supports(ExpressionNode node) { */ protected Object transform(ExpressionNode node, AggregationExpressionTransformationContext context) { - Assert.notNull(node, "ExpressionNode must not be null!"); - Assert.notNull(context, "AggregationExpressionTransformationContext must not be null!"); + Assert.notNull(node, "ExpressionNode must not be null"); + Assert.notNull(context, "AggregationExpressionTransformationContext must not be null"); return transform(node, context.getParentNode(), null, context); } @@ -199,17 +186,13 @@ protected Object transform(ExpressionNode node, AggregationExpressionTransformat protected Object transform(ExpressionNode node, @Nullable ExpressionNode parent, @Nullable Document operation, AggregationExpressionTransformationContext context) { - Assert.notNull(node, "ExpressionNode must not be null!"); - Assert.notNull(context, "AggregationExpressionTransformationContext must not be null!"); + Assert.notNull(node, "ExpressionNode must not be null"); + Assert.notNull(context, "AggregationExpressionTransformationContext must not be null"); return transform(new AggregationExpressionTransformationContext(node, parent, operation, context.getAggregationContext())); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#transform(org.springframework.data.mongodb.core.aggregation.AggregationExpressionTransformer.AggregationExpressionTransformationContext) - */ @Override public Object transform(AggregationExpressionTransformationContext context) { return transformer.transform(context); @@ -235,10 +218,6 @@ public OperatorNodeConversion(AggregationExpressionTransformer transformer) { super(transformer); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext) - */ @Override 
protected Object convert(AggregationExpressionTransformationContext context) { @@ -271,7 +250,7 @@ protected Object convert(AggregationExpressionTransformationContext context, OperatorNode currentNode) { - Document nextDocument = new Document(currentNode.getMongoOperator(), new ArrayList()); + Document nextDocument = new Document(currentNode.getMongoOperator(), new ArrayList<>()); if (!context.hasPreviousOperation()) { return nextDocument; @@ -294,7 +273,7 @@ private Object convertUnaryMinusOp(ExpressionTransformationContextSupport asList(Integer.valueOf(-1), leftResult)); + : new Document("$multiply", Arrays.asList(-1, leftResult)); if (leftResult != null && context.hasPreviousOperation()) { context.addToPreviousOperation(result); @@ -303,10 +282,6 @@ private Object convertUnaryMinusOp(ExpressionTransformationContextSupport context) { return context.addToPreviousOrReturn(context.getCurrentNode().getValue()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.NodeConversion#supports(org.springframework.data.mongodb.core.spel.ExpressionNode) - */ @Override protected boolean supports(ExpressionNode node) { return node.isOfType(Indexer.class); @@ -355,10 +322,6 @@ private static class InlineListNodeConversion extends ExpressionNodeConversion context) { @@ -373,10 +336,6 @@ protected Object convert(AggregationExpressionTransformationContext context) { @@ -406,10 +361,6 @@ protected Object convert(AggregationExpressionTransformationContext context) { @@ -453,10 +400,6 @@ protected Object convert(AggregationExpressionTransformationContext return value; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#supports(org.springframework.expression.spel.SpelNode) - */ @Override protected boolean supports(ExpressionNode node) { return node.isLiteral(); @@ -475,17 +418,13 @@ private static class MethodReferenceNodeConversion extends 
ExpressionNodeConvers super(transformer); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext) - */ @Override protected Object convert(AggregationExpressionTransformationContext context) { MethodReferenceNode node = context.getCurrentNode(); AggregationMethodReference methodReference = node.getMethodReference(); - Assert.state(methodReference != null, "Cannot resolve current node to AggregationMethodReference!"); + Assert.state(methodReference != null, "Cannot resolve current node to AggregationMethodReference"); Object args = null; @@ -500,9 +439,11 @@ protected Object convert(AggregationExpressionTransformationContext argList = new ArrayList(); + List argList = new ArrayList<>(); for (ExpressionNode childNode : node) { argList.add(transform(childNode, context)); @@ -527,10 +468,6 @@ private static class CompoundExpressionNodeConversion extends ExpressionNodeConv super(transformer); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.SpelNodeWrapper#convertSpelNodeToMongoObjectExpression(org.springframework.data.mongodb.core.aggregation.SpelExpressionTransformer.ExpressionConversionContext) - */ @Override protected Object convert(AggregationExpressionTransformationContext context) { @@ -544,10 +481,6 @@ protected Object convert(AggregationExpressionTransformationContext context) { NotOperatorNode node = context.getCurrentNode(); - List args = new ArrayList(); + List args = new ArrayList<>(); for (ExpressionNode childNode : node) { args.add(transform(childNode, context)); @@ -586,10 +515,6 @@ protected Object convert(AggregationExpressionTransformationContext context) { @@ -622,10 +543,6 @@ protected Object convert(AggregationExpressionTransformationContext + * NOTE: Requires MongoDB 4.0 or 
later. + * + * @return new instance of {@link Trim}. + * @since 2.1 + */ + public Trim trim() { + return createTrim(); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims the given + * character sequence from the beginning and end.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param chars must not be {@literal null}. + * @return new instance of {@link Trim}. + * @since 2.1 + */ + public Trim trim(String chars) { + return trim().chars(chars); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims the character + * sequence resulting from the given {@link AggregationExpression} from the beginning and end.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Trim}. + * @since 2.1 + */ + public Trim trim(AggregationExpression expression) { + return trim().charsOf(expression); + } + + private Trim createTrim() { + return usesFieldRef() ? Trim.valueOf(fieldReference) : Trim.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims whitespaces + * from the beginning.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link LTrim}. + * @since 2.1 + */ + public LTrim ltrim() { + return createLTrim(); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims the given + * character sequence from the beginning.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param chars must not be {@literal null}. + * @return new instance of {@link LTrim}. + * @since 2.1 + */ + public LTrim ltrim(String chars) { + return ltrim().chars(chars); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims the character + * sequence resulting from the given {@link AggregationExpression} from the beginning.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link LTrim}. + * @since 2.1 + */ + public LTrim ltrim(AggregationExpression expression) { + return ltrim().charsOf(expression); + } + + private LTrim createLTrim() { + return usesFieldRef() ? LTrim.valueOf(fieldReference) : LTrim.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims whitespaces + * from the end.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RTrim}. + * @since 2.1 + */ + public RTrim rtrim() { + return createRTrim(); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims the given + * character sequence from the end.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param chars must not be {@literal null}. + * @return new instance of {@link RTrim}. + * @since 2.1 + */ + public RTrim rtrim(String chars) { + return rtrim().chars(chars); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and trims the character + * sequence resulting from the given {@link AggregationExpression} from the end.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RTrim}. + * @since 2.1 + */ + public RTrim rtrim(AggregationExpression expression) { + return rtrim().charsOf(expression); + } + + private RTrim createRTrim() { + return usesFieldRef() ? RTrim.valueOf(fieldReference) : RTrim.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * regular expression to find the document with the first match.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexFind}. + * @since 3.3 + */ + public RegexFind regexFind(String regex) { + return createRegexFind().regex(regex); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression resulting from the given {@link AggregationExpression} to find the document with the first + * match.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFind}. + * @since 3.3 + */ + public RegexFind regexFind(AggregationExpression expression) { + return createRegexFind().regexOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the {@link Pattern} and applies the regular expression with + * the options specified in the argument to find the document with the first match. + * + * @param pattern the pattern object to apply. + * @return new instance of {@link RegexFind}. + * @since 3.3 + */ + public RegexFind regexFind(Pattern pattern) { + return createRegexFind().pattern(pattern); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression with the options specified in the argument to find the document with the first match. + * + * @param regex the regular expression to apply. + * @param options the options to use. + * @return new instance of {@link RegexFind}. + * @since 3.3 + */ + public RegexFind regexFind(String regex, String options) { + return createRegexFind().regex(regex).options(options); + } + + private RegexFind createRegexFind() { + return usesFieldRef() ? RegexFind.valueOf(fieldReference) : RegexFind.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * regular expression to find all the documents with the match.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + * @since 3.3 + */ + public RegexFindAll regexFindAll(String regex) { + return createRegexFindAll().regex(regex); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression resulting from the given {@link AggregationExpression} to find all the documents with the + * match..
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + * @since 3.3 + */ + public RegexFindAll regexFindAll(AggregationExpression expression) { + return createRegexFindAll().regexOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes a {@link Pattern} and applies the regular expression with + * the options specified in the argument to find all the documents with the match. + * + * @param pattern the pattern object to apply. + * @return new instance of {@link RegexFindAll}. + * @since 3.3 + */ + public RegexFindAll regexFindAll(Pattern pattern) { + return createRegexFindAll().pattern(pattern); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression with the options specified in the argument to find all the documents with the match. + * + * @param regex the regular expression to apply. + * @param options the options to use. + * @return new instance of {@link RegexFindAll}. + * @since 3.3 + */ + public RegexFindAll regexFindAll(String regex, String options) { + return createRegexFindAll().regex(regex).options(options); + } + + private RegexFindAll createRegexFindAll() { + return usesFieldRef() ? RegexFindAll.valueOf(fieldReference) : RegexFindAll.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * regular expression to find if a match is found or not.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + * @since 3.3 + */ + public RegexMatch regexMatch(String regex) { + return createRegexMatch().regex(regex); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression resulting from the given {@link AggregationExpression} to find if a match is found or not.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + * @since 3.3 + */ + public RegexMatch regexMatch(AggregationExpression expression) { + return createRegexMatch().regexOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes a {@link Pattern} and applies the regular expression with + * the options specified in the argument to find if a match is found or not. + * + * @param pattern the pattern object to apply. + * @return new instance of {@link RegexMatch}. + * @since 3.3 + */ + public RegexMatch regexMatch(Pattern pattern) { + return createRegexMatch().pattern(pattern); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression with the options specified in the argument to find if a match is found or not. + * + * @param regex the regular expression to apply. + * @param options the options to use. + * @return new instance of {@link RegexMatch}. + * @since 3.3 + */ + public RegexMatch regexMatch(String regex, String options) { + return createRegexMatch().regex(regex).options(options); + } + + private RegexMatch createRegexMatch() { + return usesFieldRef() ? RegexMatch.valueOf(fieldReference) : RegexMatch.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and replaces the first + * occurrence of the search string with the given replacement. + * + * @param search + * @param replacement + * @return new instance of {@link ReplaceOne}. 
+ * @since 3.4 + */ + public ReplaceOne replaceOne(String search, String replacement) { + return createReplaceOne().find(search).replacement(replacement); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and replaces the first + * occurrence of the search string computed by the given {@link AggregationExpression} with the given replacement. + * + * @param search + * @param replacement + * @return new instance of {@link ReplaceOne}. + * @since 3.4 + */ + public ReplaceOne replaceOne(AggregationExpression search, String replacement) { + return createReplaceOne().findValueOf(search).replacement(replacement); + } + + private ReplaceOne createReplaceOne() { + return usesFieldRef() ? ReplaceOne.valueOf(fieldReference) : ReplaceOne.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and replaces all + * occurrences of the search string with the given replacement. + * + * @param search + * @param replacement + * @return new instance of {@link ReplaceOne}. + * @since 3.4 + */ + public ReplaceAll replaceAll(String search, String replacement) { + return createReplaceAll().find(search).replacement(replacement); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and replaces all + * occurrences of the search string computed by the given {@link AggregationExpression} with the given replacement. + * + * @param search + * @param replacement + * @return new instance of {@link ReplaceOne}. + * @since 3.4 + */ + public ReplaceAll replaceAll(AggregationExpression search, String replacement) { + return createReplaceAll().findValueOf(search).replacement(replacement); + } + + private ReplaceAll createReplaceAll() { + return usesFieldRef() ? 
ReplaceAll.valueOf(fieldReference) : ReplaceAll.valueOf(expression); + } + private boolean usesFieldRef() { return fieldReference != null; } @@ -416,11 +772,11 @@ protected String getMongoMethod() { * Creates new {@link Concat}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Concat}. */ public static Concat valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Concat(asFields(fieldReference)); } @@ -428,11 +784,11 @@ public static Concat valueOf(String fieldReference) { * Creates new {@link Concat}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Concat}. */ public static Concat valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Concat(Collections.singletonList(expression)); } @@ -440,26 +796,44 @@ public static Concat valueOf(AggregationExpression expression) { * Creates new {@link Concat}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Concat}. */ public static Concat stringValue(String value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Concat(Collections.singletonList(value)); } + /** + * Concat the value of the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Concat}. + */ public Concat concatValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Concat(append(Fields.field(fieldReference))); } + /** + * Concat the value resulting from the given {@link AggregationExpression}. 
+ * + * @param expression must not be {@literal null}. + * @return new instance of {@link Concat}. + */ public Concat concatValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Concat(append(expression)); } + /** + * Concat the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Concat}. + */ public Concat concat(String value) { return new Concat(append(value)); } @@ -485,11 +859,11 @@ protected String getMongoMethod() { * Creates new {@link Substr}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Substr}. */ public static Substr valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Substr(asFields(fieldReference)); } @@ -497,18 +871,27 @@ public static Substr valueOf(String fieldReference) { * Creates new {@link Substr}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Substr}. */ public static Substr valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Substr(Collections.singletonList(expression)); } + /** + * @param start start index (including) + * @return new instance of {@link Substr}. + */ public Substr substring(int start) { return substring(start, -1); } + /** + * @param start start index (including) + * @param nrOfChars + * @return new instance of {@link Substr}. 
+ */ public Substr substring(int start, int nrOfChars) { return new Substr(append(Arrays.asList(start, nrOfChars))); } @@ -538,7 +921,7 @@ protected String getMongoMethod() { */ public static ToLower lowerValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new ToLower(Fields.field(fieldReference)); } @@ -550,7 +933,7 @@ public static ToLower lowerValueOf(String fieldReference) { */ public static ToLower lowerValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ToLower(Collections.singletonList(expression)); } @@ -562,7 +945,7 @@ public static ToLower lowerValueOf(AggregationExpression expression) { */ public static ToLower lower(String value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new ToLower(value); } } @@ -591,7 +974,7 @@ protected String getMongoMethod() { */ public static ToUpper upperValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new ToUpper(Fields.field(fieldReference)); } @@ -603,7 +986,7 @@ public static ToUpper upperValueOf(String fieldReference) { */ public static ToUpper upperValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ToUpper(Collections.singletonList(expression)); } @@ -615,7 +998,7 @@ public static ToUpper upperValueOf(AggregationExpression expression) { */ public static ToUpper upper(String value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new ToUpper(value); } } @@ -644,7 +1027,7 @@ protected String 
getMongoMethod() { */ public static StrCaseCmp valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new StrCaseCmp(asFields(fieldReference)); } @@ -652,11 +1035,11 @@ public static StrCaseCmp valueOf(String fieldReference) { * Creates new {@link StrCaseCmp}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link StrCaseCmp}. */ public static StrCaseCmp valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StrCaseCmp(Collections.singletonList(expression)); } @@ -664,11 +1047,11 @@ public static StrCaseCmp valueOf(AggregationExpression expression) { * Creates new {@link StrCaseCmp}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link StrCaseCmp}. */ public static StrCaseCmp stringValue(String value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new StrCaseCmp(Collections.singletonList(value)); } @@ -678,13 +1061,13 @@ public StrCaseCmp strcasecmp(String value) { public StrCaseCmp strcasecmpValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new StrCaseCmp(append(Fields.field(fieldReference))); } public StrCaseCmp strcasecmpValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StrCaseCmp(append(expression)); } } @@ -709,11 +1092,11 @@ protected String getMongoMethod() { * Start creating a new {@link IndexOfBytes}. * * @param fieldReference must not be {@literal null}. 
- * @return + * @return new instance of {@link SubstringBuilder}. */ public static SubstringBuilder valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new SubstringBuilder(Fields.field(fieldReference)); } @@ -721,11 +1104,11 @@ public static SubstringBuilder valueOf(String fieldReference) { * Start creating a new {@link IndexOfBytes}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link SubstringBuilder}. */ public static SubstringBuilder valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new SubstringBuilder(expression); } @@ -733,7 +1116,7 @@ public static SubstringBuilder valueOf(AggregationExpression expression) { * Optionally define the substring search start and end position. * * @param range must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfBytes}. */ public IndexOfBytes within(Range range) { return new IndexOfBytes(append(AggregationUtils.toRangeValues(range))); @@ -751,7 +1134,7 @@ private SubstringBuilder(Object stringExpression) { * Creates a new {@link IndexOfBytes} given {@literal substring}. * * @param substring must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfBytes}. */ public IndexOfBytes indexOf(String substring) { return new IndexOfBytes(Arrays.asList(stringExpression, substring)); @@ -761,7 +1144,7 @@ public IndexOfBytes indexOf(String substring) { * Creates a new {@link IndexOfBytes} given {@link AggregationExpression} that resolves to the substring. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfBytes}. 
*/ public IndexOfBytes indexOf(AggregationExpression expression) { return new IndexOfBytes(Arrays.asList(stringExpression, expression)); @@ -771,7 +1154,7 @@ public IndexOfBytes indexOf(AggregationExpression expression) { * Creates a new {@link IndexOfBytes} given {@link Field} that resolves to the substring. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfBytes}. */ public IndexOfBytes indexOf(Field fieldReference) { return new IndexOfBytes(Arrays.asList(stringExpression, fieldReference)); @@ -799,11 +1182,11 @@ protected String getMongoMethod() { * Start creating a new {@link IndexOfCP}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfCP}. */ public static SubstringBuilder valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new SubstringBuilder(Fields.field(fieldReference)); } @@ -811,11 +1194,11 @@ public static SubstringBuilder valueOf(String fieldReference) { * Start creating a new {@link IndexOfCP}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfCP}. */ public static SubstringBuilder valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new SubstringBuilder(expression); } @@ -823,7 +1206,7 @@ public static SubstringBuilder valueOf(AggregationExpression expression) { * Optionally define the substring search start and end position. * * @param range must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfCP}. 
*/ public IndexOfCP within(Range range) { return new IndexOfCP(append(AggregationUtils.toRangeValues(range))); @@ -841,7 +1224,7 @@ private SubstringBuilder(Object stringExpression) { * Creates a new {@link IndexOfCP} given {@literal substring}. * * @param substring must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfCP}. */ public IndexOfCP indexOf(String substring) { return new IndexOfCP(Arrays.asList(stringExpression, substring)); @@ -851,7 +1234,7 @@ public IndexOfCP indexOf(String substring) { * Creates a new {@link IndexOfCP} given {@link AggregationExpression} that resolves to the substring. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfCP}. */ public IndexOfCP indexOf(AggregationExpression expression) { return new IndexOfCP(Arrays.asList(stringExpression, expression)); @@ -889,11 +1272,11 @@ protected String getMongoMethod() { * Start creating a new {@link Split}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Split}. */ public static Split valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Split(asFields(fieldReference)); } @@ -901,11 +1284,11 @@ public static Split valueOf(String fieldReference) { * Start creating a new {@link Split}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Split}. */ public static Split valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Split(Collections.singletonList(expression)); } @@ -913,11 +1296,11 @@ public static Split valueOf(AggregationExpression expression) { * Use given {@link String} as delimiter. * * @param delimiter must not be {@literal null}. 
- * @return + * @return new instance of {@link Split}. */ public Split split(String delimiter) { - Assert.notNull(delimiter, "Delimiter must not be null!"); + Assert.notNull(delimiter, "Delimiter must not be null"); return new Split(append(delimiter)); } @@ -925,11 +1308,11 @@ public Split split(String delimiter) { * Use value of referenced field as delimiter. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Split}. */ public Split split(Field fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Split(append(fieldReference)); } @@ -937,11 +1320,11 @@ public Split split(Field fieldReference) { * Use value resulting from {@link AggregationExpression} as delimiter. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Split}. */ public Split split(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Split(append(expression)); } } @@ -966,7 +1349,7 @@ protected String getMongoMethod() { * Creates new {@link StrLenBytes}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link StrLenBytes}. */ public static StrLenBytes stringLengthOf(String fieldReference) { return new StrLenBytes(Fields.field(fieldReference)); @@ -976,11 +1359,11 @@ public static StrLenBytes stringLengthOf(String fieldReference) { * Creates new {@link StrLenBytes}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link StrLenBytes}. 
*/ public static StrLenBytes stringLengthOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StrLenBytes(expression); } } @@ -1005,7 +1388,7 @@ protected String getMongoMethod() { * Creates new {@link StrLenCP}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link StrLenCP}. */ public static StrLenCP stringLengthOfCP(String fieldReference) { return new StrLenCP(Fields.field(fieldReference)); @@ -1015,11 +1398,11 @@ public static StrLenCP stringLengthOfCP(String fieldReference) { * Creates new {@link StrLenCP}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link StrLenCP}. */ public static StrLenCP stringLengthOfCP(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StrLenCP(expression); } } @@ -1044,11 +1427,11 @@ protected String getMongoMethod() { * Creates new {@link SubstrCP}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link SubstrCP}. */ public static SubstrCP valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new SubstrCP(asFields(fieldReference)); } @@ -1056,11 +1439,11 @@ public static SubstrCP valueOf(String fieldReference) { * Creates new {@link SubstrCP}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link SubstrCP}. 
*/ public static SubstrCP valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new SubstrCP(Collections.singletonList(expression)); } @@ -1072,4 +1455,973 @@ public SubstrCP substringCP(int start, int nrOfChars) { return new SubstrCP(append(Arrays.asList(start, nrOfChars))); } } + + /** + * {@link AggregationExpression} for {@code $trim} which removes whitespace or the specified characters from the + * beginning and end of a string.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class Trim extends AbstractAggregationExpression { + + private Trim(Object value) { + super(value); + } + + /** + * Creates new {@link Trim} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link LTrim}. + */ + public static Trim valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Trim(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Trim} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Trim}. + */ + public static Trim valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Trim(Collections.singletonMap("input", expression)); + } + + /** + * Optional specify the character(s) to trim from the beginning. + * + * @param chars must not be {@literal null}. + * @return new instance of {@link Trim}. + */ + public Trim chars(String chars) { + + Assert.notNull(chars, "Chars must not be null"); + return new Trim(append("chars", chars)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the character values to trim from the + * beginning. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Trim}. + */ + public Trim charsOf(String fieldReference) { + return new Trim(append("chars", Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the character sequence to trim from the + * beginning. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Trim}. 
+ */ + public Trim charsOf(AggregationExpression expression) { + return new Trim(append("chars", expression)); + } + + /** + * Remove whitespace or the specified characters from the beginning of a string.
          + * + * @return new instance of {@link LTrim}. + */ + public LTrim left() { + return new LTrim(argumentMap()); + } + + /** + * Remove whitespace or the specified characters from the end of a string.
          + * + * @return new instance of {@link RTrim}. + */ + public RTrim right() { + return new RTrim(argumentMap()); + } + + @Override + protected String getMongoMethod() { + return "$trim"; + } + } + + /** + * {@link AggregationExpression} for {@code $ltrim} which removes whitespace or the specified characters from the + * beginning of a string.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class LTrim extends AbstractAggregationExpression { + + private LTrim(Object value) { + super(value); + } + + /** + * Creates new {@link LTrim} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link LTrim}. + */ + public static LTrim valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new LTrim(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link LTrim} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link LTrim}. + */ + public static LTrim valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new LTrim(Collections.singletonMap("input", expression)); + } + + /** + * Optional specify the character(s) to trim from the beginning. + * + * @param chars must not be {@literal null}. + * @return new instance of {@link LTrim}. + */ + public LTrim chars(String chars) { + + Assert.notNull(chars, "Chars must not be null"); + return new LTrim(append("chars", chars)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the character values to trim from the + * beginning. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link LTrim}. + */ + public LTrim charsOf(String fieldReference) { + return new LTrim(append("chars", Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the character sequence to trim from the + * beginning. + * + * @param expression must not be {@literal null}. 
+ * @return new instance of {@link LTrim}. + */ + public LTrim charsOf(AggregationExpression expression) { + return new LTrim(append("chars", expression)); + } + + @Override + protected String getMongoMethod() { + return "$ltrim"; + } + } + + /** + * {@link AggregationExpression} for {@code $rtrim} which removes whitespace or the specified characters from the end + * of a string.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @since 2.1 + */ + public static class RTrim extends AbstractAggregationExpression { + + private RTrim(Object value) { + super(value); + } + + /** + * Creates new {@link RTrim} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RTrim}. + */ + public static RTrim valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new RTrim(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RTrim} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RTrim}. + */ + public static RTrim valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new RTrim(Collections.singletonMap("input", expression)); + } + + /** + * Optional specify the character(s) to trim from the end. + * + * @param chars must not be {@literal null}. + * @return new instance of {@link RTrim}. + */ + public RTrim chars(String chars) { + + Assert.notNull(chars, "Chars must not be null"); + return new RTrim(append("chars", chars)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the character values to trim from the end. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RTrim}. + */ + public RTrim charsOf(String fieldReference) { + return new RTrim(append("chars", Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the character sequence to trim from the end. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RTrim}. 
+ */ + public RTrim charsOf(AggregationExpression expression) { + return new RTrim(append("chars", expression)); + } + + @Override + protected String getMongoMethod() { + return "$rtrim"; + } + } + + /** + * {@link AggregationExpression} for {@code $regexFind} which applies a regular expression (regex) to a string and + * returns information on the first matched substring.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class RegexFind extends AbstractAggregationExpression { + + protected RegexFind(Object value) { + super(value); + } + + /** + * Creates new {@link RegexFind} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public static RegexFind valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new RegexFind(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RegexFind} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public static RegexFind valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexFind(Collections.singletonMap("input", expression)); + } + + /** + * Optional specify the options to use with the regular expression. + * + * @param options must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind options(String options) { + + Assert.notNull(options, "Options must not be null"); + + return new RegexFind(append("options", options)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular + * expression. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFind}. 
+ */ + public RegexFind optionsOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new RegexFind(append("options", Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular + * expression. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind optionsOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexFind(append("options", expression)); + } + + /** + * Specify the regular expression to apply. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind regex(String regex) { + + Assert.notNull(regex, "Regex must not be null"); + + return new RegexFind(append("regex", regex)); + } + + /** + * Apply a {@link Pattern} into {@code regex} and {@code options} fields. + * + * @param pattern must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind pattern(Pattern pattern) { + + Assert.notNull(pattern, "Pattern must not be null"); + + Map regex = append("regex", pattern.pattern()); + regex.put("options", RegexFlags.toRegexOptions(pattern.flags())); + + return new RegexFind(regex); + } + + /** + * Specify the reference to the {@link Field field} holding the regular expression to apply. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind regexOf(String fieldReference) { + + Assert.notNull(fieldReference, "fieldReference must not be null"); + + return new RegexFind(append("regex", Fields.field(fieldReference))); + } + + /** + * Specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * + * @param expression must not be {@literal null}. 
+ * @return new instance of {@link RegexFind}. + */ + public RegexFind regexOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexFind(append("regex", expression)); + } + + @Override + protected String getMongoMethod() { + return "$regexFind"; + } + } + + /** + * {@link AggregationExpression} for {@code $regexFindAll} which applies a regular expression (regex) to a string and + * returns information on all the matched substrings.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class RegexFindAll extends AbstractAggregationExpression { + + protected RegexFindAll(Object value) { + super(value); + } + + /** + * Creates new {@link RegexFindAll} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public static RegexFindAll valueOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new RegexFindAll(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RegexFindAll} using the result of the provided {@link AggregationExpression} as + * {@literal input} value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public static RegexFindAll valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexFindAll(Collections.singletonMap("input", expression)); + } + + /** + * Optional specify the options to use with the regular expression. + * + * @param options must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll options(String options) { + + Assert.notNull(options, "Options must not be null"); + + return new RegexFindAll(append("options", options)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular + * expression. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. 
+ */ + public RegexFindAll optionsOf(String fieldReference) { + + Assert.notNull(fieldReference, "fieldReference must not be null"); + + return new RegexFindAll(append("options", Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular + * expression. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll optionsOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexFindAll(append("options", expression)); + } + + /** + * Apply a {@link Pattern} into {@code regex} and {@code options} fields. + * + * @param pattern must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll pattern(Pattern pattern) { + + Assert.notNull(pattern, "Pattern must not be null"); + + Map regex = append("regex", pattern.pattern()); + regex.put("options", RegexFlags.toRegexOptions(pattern.flags())); + + return new RegexFindAll(regex); + } + + /** + * Specify the regular expression to apply. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regex(String regex) { + + Assert.notNull(regex, "Regex must not be null"); + + return new RegexFindAll(append("regex", regex)); + } + + /** + * Specify the reference to the {@link Field field} holding the regular expression to apply. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regexOf(String fieldReference) { + + Assert.notNull(fieldReference, "fieldReference must not be null"); + + return new RegexFindAll(append("regex", Fields.field(fieldReference))); + } + + /** + * Specify the {@link AggregationExpression} evaluating to the regular expression to apply. 
+ * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regexOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexFindAll(append("regex", expression)); + } + + @Override + protected String getMongoMethod() { + return "$regexFindAll"; + } + } + + /** + * {@link AggregationExpression} for {@code $regexMatch} which applies a regular expression (regex) to a string and + * returns a boolean that indicates if a match is found or not.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class RegexMatch extends AbstractAggregationExpression { + + protected RegexMatch(Object value) { + super(value); + } + + /** + * Creates new {@link RegexMatch} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public static RegexMatch valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new RegexMatch(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RegexMatch} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public static RegexMatch valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexMatch(Collections.singletonMap("input", expression)); + } + + /** + * Optional specify the options to use with the regular expression. + * + * @param options must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch options(String options) { + + Assert.notNull(options, "Options must not be null"); + + return new RegexMatch(append("options", options)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular + * expression. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexMatch}. 
+ */ + public RegexMatch optionsOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new RegexMatch(append("options", Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular + * expression. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch optionsOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexMatch(append("options", expression)); + } + + /** + * Apply a {@link Pattern} into {@code regex} and {@code options} fields. + * + * @param pattern must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch pattern(Pattern pattern) { + + Assert.notNull(pattern, "Pattern must not be null"); + + Map regex = append("regex", pattern.pattern()); + regex.put("options", RegexFlags.toRegexOptions(pattern.flags())); + + return new RegexMatch(regex); + } + + /** + * Specify the regular expression to apply. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regex(String regex) { + + Assert.notNull(regex, "Regex must not be null"); + + return new RegexMatch(append("regex", regex)); + } + + /** + * Specify the reference to the {@link Field field} holding the regular expression to apply. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regexOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new RegexMatch(append("regex", Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * + * @param expression must not be {@literal null}. 
+ * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regexOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new RegexMatch(append("regex", expression)); + } + + @Override + protected String getMongoMethod() { + return "$regexMatch"; + } + } + + /** + * {@link AggregationExpression} for {@code $replaceOne} which replaces the first instance of a search string in an + * input string with a replacement string.
          + * NOTE: Requires MongoDB 4.4 or later. + * + * @author Divya Srivastava + * @author Christoph Strobl + * @since 3.4 + */ + public static class ReplaceOne extends AbstractAggregationExpression { + + protected ReplaceOne(Object value) { + super(value); + } + + /** + * Creates new {@link ReplaceOne} using the given as {@literal input}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public static ReplaceOne value(String value) { + + Assert.notNull(value, "Value must not be null"); + + return new ReplaceOne(Collections.singletonMap("input", value)); + } + + /** + * Creates new {@link ReplaceOne} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public static ReplaceOne valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new ReplaceOne(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link ReplaceOne} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public static ReplaceOne valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new ReplaceOne(Collections.singletonMap("input", expression)); + } + + /** + * The string to use to replace the first matched instance of {@code find} in input. + * + * @param replacement must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. 
+ */ + public ReplaceOne replacement(String replacement) { + + Assert.notNull(replacement, "Replacement must not be null"); + + return new ReplaceOne(append("replacement", replacement)); + } + + /** + * Specifies the reference to the {@link Field field} holding the string to use to replace the first matched + * instance of {@code find} in input. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public ReplaceOne replacementOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new ReplaceOne(append("replacement", Fields.field(fieldReference))); + } + + /** + * Specifies the {@link AggregationExpression} evaluating to the string to use to replace the first matched instance + * of {@code find} in {@code input}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public ReplaceOne replacementOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new ReplaceOne(append("replacement", expression)); + } + + /** + * The string to search for within the given input field. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public ReplaceOne find(String value) { + + Assert.notNull(value, "Search string must not be null"); + + return new ReplaceOne(append("find", value)); + } + + /** + * Specify the reference to the {@link Field field} holding the string to search for within the given input field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. 
+ */ + public ReplaceOne findValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "fieldReference must not be null"); + + return new ReplaceOne(append("find", fieldReference)); + } + + /** + * Specify the {@link AggregationExpression} evaluating to the string to search for within the given input + * field. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ReplaceOne}. + */ + public ReplaceOne findValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new ReplaceOne(append("find", expression)); + } + + @Override + protected String getMongoMethod() { + return "$replaceOne"; + } + } + + /** + * {@link AggregationExpression} for {@code $replaceAll} which replaces all instances of a search string in an input + * string with a replacement string.
          + * NOTE: Requires MongoDB 4.4 or later. + * + * @author Divya Srivastava + * @author Christoph Strobl + * @since 3.4 + */ + public static class ReplaceAll extends AbstractAggregationExpression { + + protected ReplaceAll(Object value) { + super(value); + } + + /** + * Creates new {@link ReplaceAll} using the given value as {@literal input}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public static ReplaceAll value(String value) { + + Assert.notNull(value, "Value must not be null"); + + return new ReplaceAll(Collections.singletonMap("input", value)); + } + + /** + * Creates new {@link ReplaceAll} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public static ReplaceAll valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new ReplaceAll(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link ReplaceAll} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public static ReplaceAll valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new ReplaceAll(Collections.singletonMap("input", expression)); + } + + /** + * The string to use to replace all matched instances of {@code find} in input. + * + * @param replacement must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. 
+ */ + public ReplaceAll replacement(String replacement) { + + Assert.notNull(replacement, "Replacement must not be null"); + + return new ReplaceAll(append("replacement", replacement)); + } + + /** + * Specifies the reference to the {@link Field field} holding the string to use to replace all matched + * instances of {@code find} in input. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public ReplaceAll replacementValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + return new ReplaceAll(append("replacement", Fields.field(fieldReference))); + } + + /** + * Specifies the {@link AggregationExpression} evaluating to the string to use to replace all matched instances + * of {@code find} in input. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public ReplaceAll replacementValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new ReplaceAll(append("replacement", expression)); + } + + /** + * The string to search for within the given input field. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public ReplaceAll find(String value) { + + Assert.notNull(value, "Search string must not be null"); + + return new ReplaceAll(append("find", value)); + } + + /** + * Specify the reference to the {@link Field field} holding the string to search for within the given input field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. 
+ */ + public ReplaceAll findValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "fieldReference must not be null"); + + return new ReplaceAll(append("find", fieldReference)); + } + + /** + * Specify the {@link AggregationExpression} evaluating to the string to search for within the given input field. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ReplaceAll}. + */ + public ReplaceAll findValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + return new ReplaceAll(append("find", expression)); + } + + @Override + protected String getMongoMethod() { + return "$replaceAll"; + } + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SystemVariable.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SystemVariable.java new file mode 100644 index 0000000000..1fcf87d2a0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SystemVariable.java @@ -0,0 +1,129 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.springframework.lang.Nullable; + +/** + * Describes the system variables available in MongoDB aggregation framework pipeline expressions. 
+ * + * @author Thomas Darimont + * @author Christoph Strobl + * @see Aggregation Variables. + */ +public enum SystemVariable implements AggregationVariable { + + /** + * Variable for the current datetime. + * + * @since 4.0 + */ + NOW, + + /** + * Variable for the current timestamp. + * + * @since 4.0 + */ + CLUSTER_TIME, + + /** + * Variable that references the root document. + */ + ROOT, + + /** + * Variable that references the start of the field path being processed. + */ + CURRENT, + + /** + * Variable that evaluates to a missing value. + */ + REMOVE, + + /** + * One of the allowed results of a {@literal $redact} expression + * + * @since 4.0 + */ + DESCEND, + + /** + * One of the allowed results of a {@literal $redact} expression + * + * @since 4.0 + */ + PRUNE, + /** + * One of the allowed results of a {@literal $redact} expression + * + * @since 4.0 + */ + KEEP, + + /** + * A variable that stores the metadata results of an Atlas Search query. + * + * @since 4.0 + */ + SEARCH_META; + + /** + * Return {@literal true} if the given {@code fieldRef} denotes a well-known system variable, {@literal false} + * otherwise. + * + * @param fieldRef may be {@literal null}. + * @return {@literal true} if the given field refers to a {@link SystemVariable}. + */ + public static boolean isReferingToSystemVariable(@Nullable String fieldRef) { + + String candidate = variableNameFrom(fieldRef); + if (candidate == null) { + return false; + } + + candidate = candidate.startsWith(PREFIX) ? 
candidate.substring(2) : candidate; + for (SystemVariable value : values()) { + if (value.name().equals(candidate)) { + return true; + } + } + + return false; + } + + @Override + public String toString() { + return PREFIX.concat(name()); + } + + @Override + public String getTarget() { + return toString(); + } + + @Nullable + static String variableNameFrom(@Nullable String fieldRef) { + + if (fieldRef == null || !fieldRef.startsWith(PREFIX) || fieldRef.length() <= 2) { + return null; + } + + int indexOfFirstDot = fieldRef.indexOf('.'); + return indexOfFirstDot == -1 ? fieldRef : fieldRef.substring(2, indexOfFirstDot); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContext.java index be4d32bfe1..f30ebf394b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContext.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,16 +17,23 @@ import static org.springframework.data.mongodb.core.aggregation.Fields.*; +import java.util.ArrayList; +import java.util.List; + import org.bson.Document; -import org.springframework.data.mapping.PropertyPath; +import org.bson.codecs.configuration.CodecRegistry; + +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.PersistentPropertyPath; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.context.PersistentPropertyPath; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; -import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** @@ -43,6 +50,8 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio private final Class type; private final MappingContext, MongoPersistentProperty> mappingContext; private final QueryMapper mapper; + private final Lazy> entity; + private final FieldLookupPolicy lookupPolicy; /** * Creates a new {@link TypeBasedAggregationOperationContext} for the given type, {@link MappingContext} and @@ -54,52 +63,139 @@ public class 
TypeBasedAggregationOperationContext implements AggregationOperatio */ public TypeBasedAggregationOperationContext(Class type, MappingContext, MongoPersistentProperty> mappingContext, QueryMapper mapper) { + this(type, mappingContext, mapper, FieldLookupPolicy.strict()); + } + + /** + * Creates a new {@link TypeBasedAggregationOperationContext} for the given type, {@link MappingContext} and + * {@link QueryMapper}. + * + * @param type must not be {@literal null}. + * @param mappingContext must not be {@literal null}. + * @param mapper must not be {@literal null}. + * @param lookupPolicy must not be {@literal null}. + * @since 4.3.1 + */ + public TypeBasedAggregationOperationContext(Class type, + MappingContext, MongoPersistentProperty> mappingContext, QueryMapper mapper, + FieldLookupPolicy lookupPolicy) { - Assert.notNull(type, "Type must not be null!"); - Assert.notNull(mappingContext, "MappingContext must not be null!"); - Assert.notNull(mapper, "QueryMapper must not be null!"); + Assert.notNull(type, "Type must not be null"); + Assert.notNull(mappingContext, "MappingContext must not be null"); + Assert.notNull(mapper, "QueryMapper must not be null"); + Assert.notNull(lookupPolicy, "FieldLookupPolicy must not be null"); this.type = type; this.mappingContext = mappingContext; this.mapper = mapper; + this.entity = Lazy.of(() -> mappingContext.getPersistentEntity(type)); + this.lookupPolicy = lookupPolicy; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document) - */ @Override public Document getMappedObject(Document document) { - return mapper.getMappedObject(document, mappingContext.getPersistentEntity(type)); + return getMappedObject(document, type); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.Field) - */ @Override - public FieldReference 
getReference(Field field) { + public Document getMappedObject(Document document, @Nullable Class type) { + return mapper.getMappedObject(document, type != null ? mappingContext.getPersistentEntity(type) : null); + } - PropertyPath.from(field.getTarget(), type); + @Override + public FieldReference getReference(Field field) { return getReferenceFor(field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String) - */ @Override public FieldReference getReference(String name) { return getReferenceFor(field(name)); } - private FieldReference getReferenceFor(Field field) { + @Override + public Fields getFields(Class type) { + + Assert.notNull(type, "Type must not be null"); + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(type); + + if (entity == null) { + return AggregationOperationContext.super.getFields(type); + } + + List fields = new ArrayList<>(); + + for (MongoPersistentProperty property : entity) { + fields.add(Fields.field(property.getName(), property.getFieldName())); + } + + return Fields.from(fields.toArray(new Field[0])); + } + + @Override + @Deprecated(since = "4.3.1", forRemoval = true) + public AggregationOperationContext continueOnMissingFieldReference() { + return continueOnMissingFieldReference(type); + } + + /** + * This toggle allows the {@link AggregationOperationContext context} to use any given field name without checking for + * its existence. Typically, the {@link AggregationOperationContext} fails when referencing unknown fields, those that + * are not present in one of the previous stages or the input source, throughout the pipeline. + * + * @param type The domain type to map fields to. + * @return a more relaxed {@link AggregationOperationContext}. 
+ * @since 3.1 + * @see RelaxedTypeBasedAggregationOperationContext + */ + public AggregationOperationContext continueOnMissingFieldReference(Class type) { + return new TypeBasedAggregationOperationContext(type, mappingContext, mapper, FieldLookupPolicy.relaxed()); + } + + @Override + public AggregationOperationContext expose(ExposedFields fields) { + return new ExposedFieldsAggregationOperationContext(fields, this, lookupPolicy); + } + + @Override + public AggregationOperationContext inheritAndExpose(ExposedFields fields) { + return new InheritingExposedFieldsAggregationOperationContext(fields, this, lookupPolicy); + } + + protected FieldReference getReferenceFor(Field field) { + + try { + return doGetFieldReference(field); + } catch (MappingException e) { + + if (lookupPolicy.isStrict()) { + throw e; + } + + return new DirectFieldReference(new ExposedField(field, true)); + } + } - PersistentPropertyPath propertyPath = mappingContext.getPersistentPropertyPath( - field.getTarget(), type); + private DirectFieldReference doGetFieldReference(Field field) { + + if (entity.getNullable() == null || AggregationVariable.isVariable(field)) { + return new DirectFieldReference(new ExposedField(field, true)); + } + + PersistentPropertyPath propertyPath = mappingContext + .getPersistentPropertyPath(field.getTarget(), type); Field mappedField = field(field.getName(), propertyPath.toDotPath(MongoPersistentProperty.PropertyToFieldNameConverter.INSTANCE)); return new DirectFieldReference(new ExposedField(mappedField, true)); } + + public Class getType() { + return type; + } + + @Override + public CodecRegistry getCodecRegistry() { + return this.mapper.getConverter().getCodecRegistry(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypedAggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypedAggregation.java index 73ea879de0..432a0c6c6b 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypedAggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/TypedAggregation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -61,7 +61,7 @@ public TypedAggregation(Class inputType, List operation super(operations, options); - Assert.notNull(inputType, "Input type must not be null!"); + Assert.notNull(inputType, "Input type must not be null"); this.inputType = inputType; } @@ -74,13 +74,9 @@ public Class getInputType() { return inputType; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Aggregation#withOptions(org.springframework.data.mongodb.core.aggregation.AggregationOptions) - */ public TypedAggregation withOptions(AggregationOptions options) { - Assert.notNull(options, "AggregationOptions must not be null."); - return new TypedAggregation(inputType, operations, options); + Assert.notNull(options, "AggregationOptions must not be null"); + return new TypedAggregation(inputType, pipeline.getOperations(), options); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnionWithOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnionWithOperation.java new file mode 100644 index 0000000000..057ada12d5 --- /dev/null +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnionWithOperation.java @@ -0,0 +1,156 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.List; + +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * The $unionWith aggregation + * stage (available since MongoDB 4.4) performs a union of two collections by combining pipeline results, potentially + * containing duplicates, into a single result set that is handed over to the next stage.
          + * In order to remove duplicates it is possible to append a {@link GroupOperation} right after + * {@link UnionWithOperation}. + *
          + * If the {@link UnionWithOperation} uses a + * pipeline + * to process documents, field names within the pipeline will be treated as is. In order to map domain type property + * names to actual field names (considering potential {@link org.springframework.data.mongodb.core.mapping.Field} + * annotations) make sure the enclosing aggregation is a {@link TypedAggregation} and provide the target type for the + * {@code $unionWith} stage via {@link #mapFieldsTo(Class)}. + * + * @author Christoph Strobl + * @see Aggregation Pipeline Stage: + * $unionWith + * @since 3.1 + */ +public class UnionWithOperation implements AggregationOperation { + + private final String collection; + + private final @Nullable AggregationPipeline pipeline; + + private final @Nullable Class domainType; + + public UnionWithOperation(String collection, @Nullable AggregationPipeline pipeline, @Nullable Class domainType) { + + Assert.notNull(collection, "Collection must not be null"); + + this.collection = collection; + this.pipeline = pipeline; + this.domainType = domainType; + } + + /** + * Set the name of the collection from which pipeline results should be included in the result set.
          + * The collection name is used to set the {@code coll} parameter of {@code $unionWith}. + * + * @param collection the MongoDB collection name. Must not be {@literal null}. + * @return new instance of {@link UnionWithOperation}. + * @throws IllegalArgumentException if the required argument is {@literal null}. + */ + public static UnionWithOperation unionWith(String collection) { + return new UnionWithOperation(collection, null, null); + } + + /** + * Set the {@link AggregationPipeline} to apply to the specified collection. The pipeline corresponds to the optional + * {@code pipeline} field of the {@code $unionWith} aggregation stage and is used to compute the documents going into + * the result set. + * + * @param pipeline the {@link AggregationPipeline} that computes the documents. Must not be {@literal null}. + * @return new instance of {@link UnionWithOperation}. + * @throws IllegalArgumentException if the required argument is {@literal null}. + */ + public UnionWithOperation pipeline(AggregationPipeline pipeline) { + return new UnionWithOperation(collection, pipeline, domainType); + } + + /** + * Set the aggregation pipeline stages to apply to the specified collection. The pipeline corresponds to the optional + * {@code pipeline} field of the {@code $unionWith} aggregation stage and is used to compute the documents going into + * the result set. + * + * @param aggregationStages the aggregation pipeline stages that compute the documents. Must not be {@literal null}. + * @return new instance of {@link UnionWithOperation}. + * @throws IllegalArgumentException if the required argument is {@literal null}. + */ + public UnionWithOperation pipeline(List aggregationStages) { + return new UnionWithOperation(collection, new AggregationPipeline(aggregationStages), domainType); + } + + /** + * Set the aggregation pipeline stages to apply to the specified collection. 
The pipeline corresponds to the optional + * {@code pipeline} field of the {@code $unionWith} aggregation stage and is used to compute the documents going into + * the result set. + * + * @param aggregationStages the aggregation pipeline stages that compute the documents. Must not be {@literal null}. + * @return new instance of {@link UnionWithOperation}. + * @throws IllegalArgumentException if the required argument is {@literal null}. + */ + public UnionWithOperation pipeline(AggregationOperation... aggregationStages) { + return new UnionWithOperation(collection, new AggregationPipeline(Arrays.asList(aggregationStages)), domainType); + } + + /** + * Set domain type used for field name mapping of property references used by the {@link AggregationPipeline}. + * Remember to also use a {@link TypedAggregation} in the outer pipeline.
          + * If not set, field names used within {@link AggregationOperation pipeline operations} are taken as is. + * + * @param domainType the domain type to map field names used in pipeline operations to. Must not be {@literal null}. + * @return new instance of {@link UnionWithOperation}. + * @throws IllegalArgumentException if the required argument is {@literal null}. + */ + public UnionWithOperation mapFieldsTo(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + return new UnionWithOperation(collection, pipeline, domainType); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document $unionWith = new Document("coll", collection); + if (pipeline == null || pipeline.isEmpty()) { + return new Document(getOperator(), $unionWith); + } + + $unionWith.append("pipeline", pipeline.toDocuments(computeContext(context))); + return new Document(getOperator(), $unionWith); + } + + private AggregationOperationContext computeContext(AggregationOperationContext source) { + + if (source instanceof TypeBasedAggregationOperationContext aggregationOperationContext) { + return aggregationOperationContext.continueOnMissingFieldReference(domainType != null ? 
domainType : Object.class); + } + + if (source instanceof ExposedFieldsAggregationOperationContext aggregationOperationContext) { + return computeContext(aggregationOperationContext.getRootContext()); + } + + return source; + } + + @Override + public String getOperator() { + return "$unionWith"; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnsetOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnsetOperation.java new file mode 100644 index 0000000000..ff765c37f7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnsetOperation.java @@ -0,0 +1,141 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; + +/** + * Removes fields from documents. 
+ * + * @author Christoph Strobl + * @since 3.0 + * @see MongoDB Aggregation Framework: + * $unset + */ +public class UnsetOperation implements InheritsFieldsAggregationOperation { + + private final Collection fields; + + /** + * Create new instance of {@link UnsetOperation}. + * + * @param fields must not be {@literal null}. + */ + public UnsetOperation(Collection fields) { + + Assert.notNull(fields, "Fields must not be null"); + Assert.noNullElements(fields, "Fields must not contain null values"); + + this.fields = fields; + } + + /** + * Create new instance of {@link UnsetOperation}. + * + * @param fields must not be {@literal null}. + * @return new instance of {@link UnsetOperation}. + */ + public static UnsetOperation unset(String... fields) { + return new UnsetOperation(Arrays.asList(fields)); + } + + /** + * Also unset the given fields. + * + * @param fields must not be {@literal null}. + * @return new instance of {@link UnsetOperation}. + */ + public UnsetOperation and(String... fields) { + + List target = new ArrayList<>(this.fields); + CollectionUtils.mergeArrayIntoCollection(fields, target); + return new UnsetOperation(target); + } + + /** + * Also unset the given fields. + * + * @param fields must not be {@literal null}. + * @return new instance of {@link UnsetOperation}. + */ + public UnsetOperation and(Field... 
fields) { + + List target = new ArrayList<>(this.fields); + CollectionUtils.mergeArrayIntoCollection(fields, target); + return new UnsetOperation(target); + } + + @Override + public ExposedFields getFields() { + return ExposedFields.from(); + } + + Collection removedFieldNames() { + + List fieldNames = new ArrayList<>(fields.size()); + for (Object it : fields) { + if (it instanceof Field field) { + fieldNames.add(field.getName()); + } else { + fieldNames.add(it.toString()); + } + } + return fieldNames; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + if (fields.size() == 1) { + return new Document(getOperator(), computeFieldName(fields.iterator().next(), context)); + } + + return new Document(getOperator(), + fields.stream().map(it -> computeFieldName(it, context)).collect(Collectors.toList())); + } + + @Override + public String getOperator() { + return "$unset"; + } + + private Object computeFieldName(Object field, AggregationOperationContext context) { + + if (field instanceof Field fieldObject) { + return context.getReference(fieldObject).getRaw(); + } + + if (field instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } + + if (field instanceof String stringValue) { + return context.getReference(stringValue).getRaw(); + } + + return field; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnwindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnwindOperation.java index 4de6441ed1..d59ae01b12 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnwindOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/UnwindOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -31,7 +31,8 @@ * @author Mark Paluch * @author Christoph Strobl * @since 1.3 - * @see MongoDB Aggregation Framework: $unwind + * @see MongoDB Aggregation Framework: + * $unwind */ public class UnwindOperation implements AggregationOperation, FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation { @@ -58,7 +59,7 @@ public UnwindOperation(Field field) { * @since 1.10 */ public UnwindOperation(Field field, boolean preserveNullAndEmptyArrays) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); this.field = new ExposedField(field, true); this.arrayIndex = null; @@ -76,25 +77,21 @@ public UnwindOperation(Field field, boolean preserveNullAndEmptyArrays) { */ public UnwindOperation(Field field, Field arrayIndex, boolean preserveNullAndEmptyArrays) { - Assert.notNull(field, "Field must not be null!"); - Assert.notNull(arrayIndex, "ArrayIndex must not be null!"); + Assert.notNull(field, "Field must not be null"); + Assert.notNull(arrayIndex, "ArrayIndex must not be null"); this.field = new ExposedField(field, true); this.arrayIndex = new ExposedField(arrayIndex, true); this.preserveNullAndEmptyArrays = preserveNullAndEmptyArrays; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { String path = context.getReference(field).toString(); if (!preserveNullAndEmptyArrays && arrayIndex 
== null) { - return new Document("$unwind", path); + return new Document(getOperator(), path); } Document unwindArgs = new Document(); @@ -104,13 +101,14 @@ public Document toDocument(AggregationOperationContext context) { } unwindArgs.put("preserveNullAndEmptyArrays", preserveNullAndEmptyArrays); - return new Document("$unwind", unwindArgs); + return new Document(getOperator(), unwindArgs); + } + + @Override + public String getOperator() { + return "$unwind"; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() - */ @Override public ExposedFields getFields() { return arrayIndex != null ? ExposedFields.from(arrayIndex) : ExposedFields.from(); @@ -130,7 +128,7 @@ public static PathBuilder newUnwind() { * @author Mark Paluch * @since 1.10 */ - public static interface PathBuilder { + public interface PathBuilder { /** * @param path the path to unwind, must not be {@literal null} or empty. @@ -143,7 +141,7 @@ public static interface PathBuilder { * @author Mark Paluch * @since 1.10 */ - public static interface IndexBuilder { + public interface IndexBuilder { /** * Exposes the array index as {@code field}. @@ -161,7 +159,7 @@ public static interface IndexBuilder { EmptyArraysBuilder noArrayIndex(); } - public static interface EmptyArraysBuilder { + public interface EmptyArraysBuilder { /** * Output documents if the array is null or empty. 
@@ -200,10 +198,6 @@ public static PathBuilder newBuilder() { return new UnwindOperationBuilder(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.UnwindOperation.EmptyArraysBuilder#preserveNullAndEmptyArrays() - */ @Override public UnwindOperation preserveNullAndEmptyArrays() { @@ -214,10 +208,6 @@ public UnwindOperation preserveNullAndEmptyArrays() { return new UnwindOperation(field, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.UnwindOperation.EmptyArraysBuilder#skipNullAndEmptyArrays() - */ @Override public UnwindOperation skipNullAndEmptyArrays() { @@ -228,22 +218,14 @@ public UnwindOperation skipNullAndEmptyArrays() { return new UnwindOperation(field, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.UnwindOperation.IndexBuilder#arrayIndex(java.lang.String) - */ @Override public EmptyArraysBuilder arrayIndex(String field) { - Assert.hasText(field, "'ArrayIndex' must not be null or empty!"); + Assert.hasText(field, "'ArrayIndex' must not be null or empty"); arrayIndex = Fields.field(field); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.UnwindOperation.IndexBuilder#noArrayIndex() - */ @Override public EmptyArraysBuilder noArrayIndex() { @@ -251,14 +233,10 @@ public EmptyArraysBuilder noArrayIndex() { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.UnwindOperation.PathBuilder#path(java.lang.String) - */ @Override public UnwindOperationBuilder path(String path) { - Assert.hasText(path, "'Path' must not be null or empty!"); + Assert.hasText(path, "'Path' must not be null or empty"); field = Fields.field(path); return this; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/VariableOperators.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/VariableOperators.java index 14bd9f213c..8e676c72bc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/VariableOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/VariableOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,9 +16,10 @@ package org.springframework.data.mongodb.core.aggregation; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; +import java.util.stream.Collectors; import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable; @@ -89,9 +90,9 @@ public static class Map implements AggregationExpression { private Map(Object sourceArray, String itemVariableName, AggregationExpression functionToApply) { - Assert.notNull(sourceArray, "SourceArray must not be null!"); - Assert.notNull(itemVariableName, "ItemVariableName must not be null!"); - Assert.notNull(functionToApply, "FunctionToApply must not be null!"); + Assert.notNull(sourceArray, "SourceArray must not be null"); + Assert.notNull(itemVariableName, "ItemVariableName must not be null"); + Assert.notNull(functionToApply, "FunctionToApply must not be null"); this.sourceArray = sourceArray; this.itemVariableName = itemVariableName; @@ -107,21 +108,21 @@ private Map(Object 
sourceArray, String itemVariableName, AggregationExpression f */ public static AsBuilder itemsOf(final String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new AsBuilder() { @Override public FunctionBuilder as(final String variableName) { - Assert.notNull(variableName, "VariableName must not be null!"); + Assert.notNull(variableName, "VariableName must not be null"); return new FunctionBuilder() { @Override public Map andApply(final AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression must not be null!"); + Assert.notNull(expression, "AggregationExpression must not be null"); return new Map(Fields.field(fieldReference), variableName, expression); } }; @@ -139,21 +140,21 @@ public Map andApply(final AggregationExpression expression) { */ public static AsBuilder itemsOf(final AggregationExpression source) { - Assert.notNull(source, "AggregationExpression must not be null!"); + Assert.notNull(source, "AggregationExpression must not be null"); return new AsBuilder() { @Override public FunctionBuilder as(final String variableName) { - Assert.notNull(variableName, "VariableName must not be null!"); + Assert.notNull(variableName, "VariableName must not be null"); return new FunctionBuilder() { @Override public Map andApply(final AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression must not be null!"); + Assert.notNull(expression, "AggregationExpression must not be null"); return new Map(source, variableName, expression); } }; @@ -161,9 +162,6 @@ public Map andApply(final AggregationExpression expression) { }; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(final AggregationOperationContext context) { 
return toMap(ExposedFields.synthetic(Fields.fields(itemVariableName)), context); @@ -172,12 +170,11 @@ public Document toDocument(final AggregationOperationContext context) { private Document toMap(ExposedFields exposedFields, AggregationOperationContext context) { Document map = new Document(); - InheritingExposedFieldsAggregationOperationContext operationContext = new InheritingExposedFieldsAggregationOperationContext( - exposedFields, context); + AggregationOperationContext operationContext = context.inheritAndExpose(exposedFields); Document input; - if (sourceArray instanceof Field) { - input = new Document("input", context.getReference((Field) sourceArray).toString()); + if (sourceArray instanceof Field field) { + input = new Document("input", context.getReference(field).toString()); } else { input = new Document("input", ((AggregationExpression) sourceArray).toDocument(context)); } @@ -185,7 +182,8 @@ private Document toMap(ExposedFields exposedFields, AggregationOperationContext map.putAll(context.getMappedObject(input)); map.put("as", itemVariableName); map.put("in", - functionToApply.toDocument(new NestedDelegatingExpressionAggregationOperationContext(operationContext))); + functionToApply.toDocument(new NestedDelegatingExpressionAggregationOperationContext(operationContext, + Collections.singleton(Fields.field(itemVariableName))))); return new Document("$map", map); } @@ -224,30 +222,43 @@ public interface FunctionBuilder { public static class Let implements AggregationExpression { private final List vars; + + @Nullable // private final AggregationExpression expression; - private Let(List vars, AggregationExpression expression) { + private Let(List vars, @Nullable AggregationExpression expression) { this.vars = vars; this.expression = expression; } + /** + * Create a new {@link Let} holding just the given {@literal variables}. + * + * @param variables must not be {@literal null}. + * @return new instance of {@link Let}. 
+ * @since 4.1 + */ + public static Let just(ExpressionVariable... variables) { + return new Let(List.of(variables), null); + } + /** * Start creating new {@link Let} by defining the variables for {@code $vars}. * * @param variables must not be {@literal null}. * @return */ - public static LetBuilder define(final Collection variables) { + public static LetBuilder define(Collection variables) { - Assert.notNull(variables, "Variables must not be null!"); + Assert.notNull(variables, "Variables must not be null"); return new LetBuilder() { @Override - public Let andApply(final AggregationExpression expression) { + public Let andApply(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Let(new ArrayList(variables), expression); } }; @@ -259,19 +270,10 @@ public Let andApply(final AggregationExpression expression) { * @param variables must not be {@literal null}. * @return */ - public static LetBuilder define(final ExpressionVariable... variables) { - - Assert.notNull(variables, "Variables must not be null!"); - - return new LetBuilder() { - - @Override - public Let andApply(final AggregationExpression expression) { + public static LetBuilder define(ExpressionVariable... 
variables) { - Assert.notNull(expression, "Expression must not be null!"); - return new Let(Arrays.asList(variables), expression); - } - }; + Assert.notNull(variables, "Variables must not be null"); + return define(List.of(variables)); } public interface LetBuilder { @@ -283,17 +285,15 @@ public interface LetBuilder { * @return */ Let andApply(AggregationExpression expression); + } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - public Document toDocument(final AggregationOperationContext context) { + public Document toDocument(AggregationOperationContext context) { return toLet(ExposedFields.synthetic(Fields.fields(getVariableNames())), context); } - private String[] getVariableNames() { + String[] getVariableNames() { String[] varNames = new String[this.vars.size()]; for (int i = 0; i < this.vars.size(); i++) { @@ -307,27 +307,35 @@ private Document toLet(ExposedFields exposedFields, AggregationOperationContext Document letExpression = new Document(); Document mappedVars = new Document(); - InheritingExposedFieldsAggregationOperationContext operationContext = new InheritingExposedFieldsAggregationOperationContext( - exposedFields, context); for (ExpressionVariable var : this.vars) { mappedVars.putAll(getMappedVariable(var, context)); } letExpression.put("vars", mappedVars); - letExpression.put("in", getMappedIn(operationContext)); + if (expression != null) { + + AggregationOperationContext operationContext = context.inheritAndExpose(exposedFields); + letExpression.put("in", getMappedIn(operationContext)); + } return new Document("$let", letExpression); } private Document getMappedVariable(ExpressionVariable var, AggregationOperationContext context) { - return new Document(var.variableName, var.expression instanceof AggregationExpression - ? 
((AggregationExpression) var.expression).toDocument(context) : var.expression); + if (var.expression instanceof AggregationExpression expression) { + return new Document(var.variableName, expression.toDocument(context)); + } + if (var.expression instanceof Field field) { + return new Document(var.variableName, context.getReference(field).toString()); + } + return new Document(var.variableName, var.expression); } private Object getMappedIn(AggregationOperationContext context) { - return expression.toDocument(new NestedDelegatingExpressionAggregationOperationContext(context)); + return expression.toDocument(new NestedDelegatingExpressionAggregationOperationContext(context, + this.vars.stream().map(var -> Fields.field(var.variableName)).collect(Collectors.toList()))); } /** @@ -358,7 +366,7 @@ private ExpressionVariable(@Nullable String variableName, @Nullable Object expre */ public static ExpressionVariable newVariable(String variableName) { - Assert.notNull(variableName, "VariableName must not be null!"); + Assert.notNull(variableName, "VariableName must not be null"); return new ExpressionVariable(variableName, null); } @@ -370,10 +378,14 @@ public static ExpressionVariable newVariable(String variableName) { */ public ExpressionVariable forExpression(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ExpressionVariable(variableName, expression); } + public ExpressionVariable forField(String fieldRef) { + return new ExpressionVariable(variableName, Fields.field(fieldRef)); + } + /** * Create a new {@link ExpressionVariable} with current name and given {@literal expressionObject}. 
* @@ -382,7 +394,7 @@ public ExpressionVariable forExpression(AggregationExpression expression) { */ public ExpressionVariable forExpression(Document expressionObject) { - Assert.notNull(expressionObject, "Expression must not be null!"); + Assert.notNull(expressionObject, "Expression must not be null"); return new ExpressionVariable(variableName, expressionObject); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/VectorSearchOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/VectorSearchOperation.java new file mode 100644 index 0000000000..dd14ef20c9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/VectorSearchOperation.java @@ -0,0 +1,522 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Arrays; +import java.util.List; +import java.util.Set; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import org.bson.BinaryVector; +import org.bson.Document; + +import org.springframework.data.domain.Limit; +import org.springframework.data.domain.Vector; +import org.springframework.data.mongodb.core.mapping.MongoVector; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.lang.Contract; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Performs a semantic search on data in your Atlas cluster. This stage is only available for Atlas Vector Search. + * Vector data must be less than or equal to 4096 dimensions in width. + *

          Limitations

          You cannot use this stage together with: + *
            + *
          • {@link org.springframework.data.mongodb.core.aggregation.LookupOperation Lookup} stages
          • + *
          • {@link org.springframework.data.mongodb.core.aggregation.FacetOperation Facet} stage
          • + *
          + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.5 + */ +public class VectorSearchOperation implements AggregationOperation { + + private final SearchType searchType; + private final @Nullable CriteriaDefinition filter; + private final String indexName; + private final Limit limit; + private final @Nullable Integer numCandidates; + private final QueryPaths path; + private final Vector vector; + private final String score; + private final Consumer scoreCriteria; + + private VectorSearchOperation(SearchType searchType, @Nullable CriteriaDefinition filter, String indexName, + Limit limit, @Nullable Integer numCandidates, QueryPaths path, Vector vector, @Nullable String searchScore, + Consumer scoreCriteria) { + + this.searchType = searchType; + this.filter = filter; + this.indexName = indexName; + this.limit = limit; + this.numCandidates = numCandidates; + this.path = path; + this.vector = vector; + this.score = searchScore; + this.scoreCriteria = scoreCriteria; + } + + VectorSearchOperation(String indexName, QueryPaths path, Limit limit, Vector vector) { + this(SearchType.DEFAULT, null, indexName, limit, null, path, vector, null, null); + } + + /** + * Entrypoint to build a {@link VectorSearchOperation} starting from the {@code index} name to search. Atlas Vector + * Search doesn't return results if you misspell the index name or if the specified index doesn't already exist on the + * cluster. + * + * @param index must not be {@literal null} or empty. + * @return new instance of {@link VectorSearchOperation.PathContributor}. + */ + public static PathContributor search(String index) { + return new VectorSearchBuilder().index(index); + } + + /** + * Configure the search type to use. {@link SearchType#ENN} leads to an exact search while {@link SearchType#ANN} uses + * {@code exact=false}. + * + * @param searchType must not be null. + * @return a new {@link VectorSearchOperation} with {@link SearchType} applied. 
+ */ + @Contract("_ -> new") + public VectorSearchOperation searchType(SearchType searchType) { + return new VectorSearchOperation(searchType, filter, indexName, limit, numCandidates, path, vector, score, + scoreCriteria); + } + + /** + * Criteria expression that compares an indexed field with a boolean, date, objectId, number (not decimals), string, + * or UUID to use as a pre-filter. + *
+ * <p>
+ * Atlas Vector Search supports only the filters for the following MQL match expressions:
+ * <ul>
+ * <li>$gt</li>
+ * <li>$lt</li>
+ * <li>$gte</li>
+ * <li>$lte</li>
+ * <li>$eq</li>
+ * <li>$ne</li>
+ * <li>$in</li>
+ * <li>$nin</li>
+ * <li>$nor</li>
+ * <li>$not</li>
+ * <li>$and</li>
+ * <li>$or</li>
+ * </ul>
          + * + * @param filter must not be null. + * @return a new {@link VectorSearchOperation} with {@link CriteriaDefinition} applied. + */ + @Contract("_ -> new") + public VectorSearchOperation filter(CriteriaDefinition filter) { + return new VectorSearchOperation(searchType, filter, indexName, limit, numCandidates, path, vector, score, + scoreCriteria); + } + + /** + * Criteria expression that compares an indexed field with a boolean, date, objectId, number (not decimals), string, + * or UUID to use as a pre-filter. + *
+ * <p>
+ * Atlas Vector Search supports only the filters for the following MQL match expressions:
+ * <ul>
+ * <li>$gt</li>
+ * <li>$lt</li>
+ * <li>$gte</li>
+ * <li>$lte</li>
+ * <li>$eq</li>
+ * <li>$ne</li>
+ * <li>$in</li>
+ * <li>$nin</li>
+ * <li>$nor</li>
+ * <li>$not</li>
+ * <li>$and</li>
+ * <li>$or</li>
+ * </ul>
          + * + * @param filter must not be null. + * @return a new {@link VectorSearchOperation} with {@link CriteriaDefinition} applied. + */ + @Contract("_ -> new") + public VectorSearchOperation filter(Document filter) { + + return filter(new CriteriaDefinition() { + @Override + public Document getCriteriaObject() { + return filter; + } + + @Nullable + @Override + public String getKey() { + return null; + } + }); + } + + /** + * Number of nearest neighbors to use during the search. Value must be less than or equal to (<=) {@code 10000}. You + * can't specify a number less than the number of documents to return (limit). This field is required if + * {@link #searchType(SearchType)} is {@link SearchType#ANN} or {@link SearchType#DEFAULT}. + * + * @param numCandidates number of nearest neighbors to use during the search + * @return a new {@link VectorSearchOperation} with {@code numCandidates} applied. + */ + @Contract("_ -> new") + public VectorSearchOperation numCandidates(int numCandidates) { + return new VectorSearchOperation(searchType, filter, indexName, limit, numCandidates, path, vector, score, + scoreCriteria); + } + + /** + * Add a {@link AddFieldsOperation} stage including the search score using {@code score} as field name. + * + * @return a new {@link VectorSearchOperation} with search score applied. + * @see #withSearchScore(String) + */ + @Contract("-> new") + public VectorSearchOperation withSearchScore() { + return withSearchScore("score"); + } + + /** + * Add a {@link AddFieldsOperation} stage including the search score using {@code scoreFieldName} as field name. + * + * @param scoreFieldName name of the score field. + * @return a new {@link VectorSearchOperation} with {@code scoreFieldName} applied. 
+ * @see #withSearchScore() + */ + @Contract("_ -> new") + public VectorSearchOperation withSearchScore(String scoreFieldName) { + return new VectorSearchOperation(searchType, filter, indexName, limit, numCandidates, path, vector, scoreFieldName, + scoreCriteria); + } + + /** + * Add a {@link MatchOperation} stage targeting the score field name. Implies that the score field is present by + * either reusing a previous {@link AddFieldsOperation} from {@link #withSearchScore()} or + * {@link #withSearchScore(String)} or by adding a new {@link AddFieldsOperation} stage. + * + * @return a new {@link VectorSearchOperation} with search score filter applied. + */ + @Contract("_ -> new") + public VectorSearchOperation withFilterBySore(Consumer score) { + return new VectorSearchOperation(searchType, filter, indexName, limit, numCandidates, path, vector, + StringUtils.hasText(this.score) ? this.score : "score", score); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document $vectorSearch = new Document(); + + if (searchType != null && !searchType.equals(SearchType.DEFAULT)) { + $vectorSearch.append("exact", searchType.equals(SearchType.ENN)); + } + + if (filter != null) { + $vectorSearch.append("filter", context.getMappedObject(filter.getCriteriaObject())); + } + + $vectorSearch.append("index", indexName); + + if(limit.isLimited()) { + $vectorSearch.append("limit", limit.max()); + } + + if (numCandidates != null) { + $vectorSearch.append("numCandidates", numCandidates); + } + + Object path = this.path.getPathObject(); + + if (path instanceof String pathFieldName) { + Document mappedObject = context.getMappedObject(new Document(pathFieldName, 1)); + path = mappedObject.keySet().iterator().next(); + } + + Object source = vector.getSource(); + + if (source instanceof float[]) { + source = vector.toDoubleArray(); + } + + if (source instanceof double[] ds) { + source = Arrays.stream(ds).boxed().collect(Collectors.toList()); + } + + 
$vectorSearch.append("path", path); + $vectorSearch.append("queryVector", source); + + return new Document(getOperator(), $vectorSearch); + } + + @Override + public List toPipelineStages(AggregationOperationContext context) { + + if (!StringUtils.hasText(score)) { + return List.of(toDocument(context)); + } + + AddFieldsOperation $vectorSearchScore = Aggregation.addFields().addField(score) + .withValueOfExpression("{\"$meta\":\"vectorSearchScore\"}").build(); + + if (scoreCriteria == null) { + return List.of(toDocument(context), $vectorSearchScore.toDocument(context)); + } + + Criteria criteria = Criteria.where(score); + scoreCriteria.accept(criteria); + MatchOperation $filterByScore = Aggregation.match(criteria); + + return List.of(toDocument(context), $vectorSearchScore.toDocument(context), $filterByScore.toDocument(context)); + } + + @Override + public String getOperator() { + return "$vectorSearch"; + } + + /** + * Builder helper to create a {@link VectorSearchOperation}. + */ + private static class VectorSearchBuilder implements PathContributor, VectorContributor, LimitContributor { + + String index; + QueryPath paths; + Vector vector; + + PathContributor index(String index) { + this.index = index; + return this; + } + + @Override + public VectorContributor path(String path) { + + this.paths = QueryPath.path(path); + return this; + } + + @Override + public VectorSearchOperation limit(Limit limit) { + return new VectorSearchOperation(index, QueryPaths.of(paths), limit, vector); + } + + @Override + public LimitContributor vector(Vector vector) { + this.vector = vector; + return this; + } + } + + /** + * Search type, ANN as approximation or ENN for exact search. + */ + public enum SearchType { + + /** MongoDB Server default (value will be omitted) */ + DEFAULT, + /** Approximate Nearest Neighbour */ + ANN, + /** Exact Nearest Neighbour */ + ENN + } + + /** + * Value object capturing query paths. 
+ */ + public static class QueryPaths { + + private final Set> paths; + + private QueryPaths(Set> paths) { + this.paths = paths; + } + + /** + * Factory method to create {@link QueryPaths} from a single {@link QueryPath}. + * + * @param path + * @return a new {@link QueryPaths} instance. + */ + public static QueryPaths of(QueryPath path) { + return new QueryPaths(Set.of(path)); + } + + Object getPathObject() { + + if (paths.size() == 1) { + return paths.iterator().next().value(); + } + return paths.stream().map(QueryPath::value).collect(Collectors.toList()); + } + } + + /** + * Interface describing a query path contract. Query paths might be simple field names, wildcard paths, or + * multi-paths. paths. + * + * @param + */ + public interface QueryPath { + + T value(); + + static QueryPath path(String field) { + return new SimplePath(field); + } + } + + public static class SimplePath implements QueryPath { + + String name; + + public SimplePath(String name) { + this.name = name; + } + + @Override + public String value() { + return name; + } + } + + /** + * Fluent API to configure a path on the VectorSearchOperation builder. + */ + public interface PathContributor { + + /** + * Indexed vector type field to search. + * + * @param path name of the search path. + * @return + */ + @Contract("_ -> this") + VectorContributor path(String path); + } + + /** + * Fluent API to configure a vector on the VectorSearchOperation builder. + */ + public interface VectorContributor { + + /** + * Array of float numbers that represent the query vector. The number type must match the indexed field value type. + * Otherwise, Atlas Vector Search doesn't return any results or errors. + * + * @param vector the query vector. + * @return + */ + @Contract("_ -> this") + default LimitContributor vector(float... vector) { + return vector(Vector.of(vector)); + } + + /** + * Array of byte numbers that represent the query vector. The number type must match the indexed field value type. 
+ * Otherwise, Atlas Vector Search doesn't return any results or errors. + * + * @param vector the query vector. + * @return + */ + @Contract("_ -> this") + default LimitContributor vector(byte[] vector) { + return vector(BinaryVector.int8Vector(vector)); + } + + /** + * Array of double numbers that represent the query vector. The number type must match the indexed field value type. + * Otherwise, Atlas Vector Search doesn't return any results or errors. + * + * @param vector the query vector. + * @return + */ + @Contract("_ -> this") + default LimitContributor vector(double... vector) { + return vector(Vector.of(vector)); + } + + /** + * Array of numbers that represent the query vector. The number type must match the indexed field value type. + * Otherwise, Atlas Vector Search doesn't return any results or errors. + * + * @param vector the query vector. + * @return + */ + @Contract("_ -> this") + default LimitContributor vector(List vector) { + return vector(Vector.of(vector)); + } + + /** + * Binary vector (BSON BinData vector subtype float32, or BSON BinData vector subtype int1 or int8 type) that + * represent the query vector. The number type must match the indexed field value type. Otherwise, Atlas Vector + * Search doesn't return any results or errors. + * + * @param vector the query vector. + * @return + */ + @Contract("_ -> this") + default LimitContributor vector(BinaryVector vector) { + return vector(MongoVector.of(vector)); + } + + /** + * The query vector. The number type must match the indexed field value type. Otherwise, Atlas Vector Search doesn't + * return any results or errors. + * + * @param vector the query vector. + * @return + */ + @Contract("_ -> this") + LimitContributor vector(Vector vector); + } + + /** + * Fluent API to configure a limit on the VectorSearchOperation builder. + */ + public interface LimitContributor { + + /** + * Number (of type int only) of documents to return in the results. 
This value can't exceed the value of + * numCandidates if you specify numCandidates. + * + * @param limit + * @return + */ + @Contract("_ -> this") + default VectorSearchOperation limit(int limit) { + return limit(Limit.of(limit)); + } + + /** + * Number (of type int only) of documents to return in the results. This value can't exceed the value of + * numCandidates if you specify numCandidates. + * + * @param limit + * @return + */ + @Contract("_ -> this") + VectorSearchOperation limit(Limit limit); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/annotation/Collation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/annotation/Collation.java new file mode 100644 index 0000000000..2ce2d7ed46 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/annotation/Collation.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.annotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * {@link Collation} allows to define the rules used for language-specific string comparison. 
+ * + * @see https://www.mongodb.com/docs/manual/reference/collation/ + * @author Christoph Strobl + * @since 4.0 + */ +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.METHOD }) +public @interface Collation { + + /** + * The actual collation definition in JSON format or a + * {@link org.springframework.expression.spel.standard.SpelExpression template expression} resolving to either a JSON + * String or a {@link org.bson.Document}. The keys of the JSON document are configuration options for the collation. + * + * @return an empty {@link String} by default. + */ + String value() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/annotation/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/annotation/package-info.java new file mode 100644 index 0000000000..3e08dc1014 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/annotation/package-info.java @@ -0,0 +1,6 @@ +/** + * Core Spring Data MongoDB annotations not limited to a special use case (like Query,...). + */ +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.core.annotation; + diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverter.java index e20afbb5db..7a01677939 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverter.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,14 +16,17 @@ package org.springframework.data.mongodb.core.convert; import java.math.BigInteger; +import java.util.Date; +import org.bson.types.Code; import org.bson.types.ObjectId; import org.springframework.beans.factory.InitializingBean; import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.support.DefaultConversionService; import org.springframework.core.convert.support.GenericConversionService; +import org.springframework.data.convert.ConverterBuilder; import org.springframework.data.convert.CustomConversions; -import org.springframework.data.convert.EntityInstantiators; +import org.springframework.data.mapping.model.EntityInstantiators; import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToObjectIdConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToBigIntegerConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToStringConverter; @@ -62,14 +65,14 @@ public AbstractMongoConverter(@Nullable GenericConversionService conversionServi */ public void setCustomConversions(CustomConversions conversions) { - Assert.notNull(conversions, "Conversions must not be null!"); + Assert.notNull(conversions, "Conversions must not be null"); this.conversions = conversions; } /** * Registers {@link EntityInstantiators} to customize entity instantiation. * - * @param instantiators + * @param instantiators can be {@literal null}. Uses default {@link EntityInstantiators} if so. 
*/ public void setInstantiators(@Nullable EntityInstantiators instantiators) { this.instantiators = instantiators == null ? new EntityInstantiators() : instantiators; @@ -93,21 +96,31 @@ private void initializeConverters() { conversionService.addConverter(BigIntegerToObjectIdConverter.INSTANCE); } + if (!conversionService.canConvert(Date.class, Long.class)) { + conversionService + .addConverter(ConverterBuilder.writing(Date.class, Long.class, Date::getTime).getWritingConverter()); + } + + if (!conversionService.canConvert(Long.class, Date.class)) { + conversionService.addConverter(ConverterBuilder.reading(Long.class, Date.class, Date::new).getReadingConverter()); + } + + if (!conversionService.canConvert(ObjectId.class, Date.class)) { + + conversionService.addConverter(ConverterBuilder + .reading(ObjectId.class, Date.class, objectId -> new Date(objectId.getTimestamp())).getReadingConverter()); + } + + conversionService + .addConverter(ConverterBuilder.reading(Code.class, String.class, Code::getCode).getReadingConverter()); conversions.registerConvertersIn(conversionService); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.convert.MongoConverter#getConversionService() - */ @Override public ConversionService getConversionService() { return conversionService; } - /* (non-Javadoc) - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ public void afterPropertiesSet() { initializeConverters(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/CustomConversions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/CustomConversions.java deleted file mode 100644 index 66007aebfe..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/CustomConversions.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.convert; - -import java.util.ArrayList; -import java.util.List; - -import org.springframework.data.mapping.model.SimpleTypeHolder; - -/** - * Value object to capture custom conversion. That is essentially a {@link List} of converters and some additional logic - * around them. The converters are pretty much builds up two sets of types which Mongo basic types {@see #MONGO_TYPES} - * can be converted into and from. These types will be considered simple ones (which means they neither need deeper - * inspection nor nested conversion. Thus the {@link CustomConversions} also act as factory for {@link SimpleTypeHolder} - * . - * - * @author Oliver Gierke - * @author Thomas Darimont - * @author Christoph Strobl - * @author Mark Paluch - * @deprecated since 2.0, use {@link MongoCustomConversions}. - */ -@Deprecated -public class CustomConversions extends MongoCustomConversions { - - /** - * Creates an empty {@link CustomConversions} object. - */ - CustomConversions() { - this(new ArrayList<>()); - } - - /** - * Creates a new {@link CustomConversions} instance registering the given converters. 
- * - * @param converters - */ - public CustomConversions(List converters) { - super(converters); - } - -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefProxyHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefProxyHandler.java index 96b33da9f0..40afbb8c10 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefProxyHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefProxyHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolver.java index bf65557e4d..0235694030 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,6 +22,7 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; import com.mongodb.DBRef; @@ -34,7 +35,7 @@ * @author Mark Paluch * @since 1.4 */ -public interface DbRefResolver { +public interface DbRefResolver extends ReferenceResolver { /** * Resolves the given {@link DBRef} into an object of the given {@link MongoPersistentProperty}'s type. The method @@ -44,10 +45,10 @@ public interface DbRefResolver { * @param property will never be {@literal null}. * @param dbref the {@link DBRef} to resolve. * @param callback will never be {@literal null}. - * @return + * @return can be {@literal null}. */ @Nullable - Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback, + Object resolveDbRef(MongoPersistentProperty property, @Nullable DBRef dbref, DbRefResolverCallback callback, DbRefProxyHandler proxyHandler); /** @@ -57,16 +58,23 @@ Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolver * @param annotation will never be {@literal null}. * @param entity will never be {@literal null}. * @param id will never be {@literal null}. - * @return + * @return new instance of {@link DBRef}. 
*/ - DBRef createDbRef(org.springframework.data.mongodb.core.mapping.DBRef annotation, MongoPersistentEntity entity, - Object id); + default DBRef createDbRef(@Nullable org.springframework.data.mongodb.core.mapping.DBRef annotation, + MongoPersistentEntity entity, Object id) { + + if (annotation != null && StringUtils.hasText(annotation.db())) { + return new DBRef(annotation.db(), entity.getCollection(), id); + } + + return new DBRef(entity.getCollection(), id); + } /** * Actually loads the {@link DBRef} from the datasource. * * @param dbRef must not be {@literal null}. - * @return + * @return can be {@literal null}. * @since 1.7 */ @Nullable diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolverCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolverCallback.java index 58c2e3b638..bf6b882375 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolverCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolverCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefProxyHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefProxyHandler.java index 2452c4c4c6..22b1ce7981 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefProxyHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefProxyHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,13 @@ */ package org.springframework.data.mongodb.core.convert; +import java.util.function.Function; + import org.bson.Document; + import org.springframework.data.mapping.PersistentPropertyAccessor; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.model.DefaultSpELExpressionEvaluator; -import org.springframework.data.mapping.model.SpELContext; -import org.springframework.data.mapping.model.SpELExpressionEvaluator; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.lang.Nullable; @@ -34,28 +35,22 @@ */ class DefaultDbRefProxyHandler implements DbRefProxyHandler { - private final SpELContext spELContext; private final MappingContext, MongoPersistentProperty> mappingContext; private final ValueResolver resolver; + private final Function evaluatorFactory; /** - * @param spELContext must not be {@literal null}. * @param mappingContext must not be {@literal null}. * @param resolver must not be {@literal null}. 
*/ - public DefaultDbRefProxyHandler(SpELContext spELContext, - MappingContext, MongoPersistentProperty> mappingContext, - ValueResolver resolver) { + public DefaultDbRefProxyHandler(MappingContext, MongoPersistentProperty> mappingContext, + ValueResolver resolver, Function evaluatorFactory) { - this.spELContext = spELContext; this.mappingContext = mappingContext; this.resolver = resolver; + this.evaluatorFactory = evaluatorFactory; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.DbRefProxyHandler#populateId(com.mongodb.DBRef, java.lang.Object) - */ @Override public Object populateId(MongoPersistentProperty property, @Nullable DBRef source, Object proxy) { @@ -70,7 +65,7 @@ public Object populateId(MongoPersistentProperty property, @Nullable DBRef sourc return proxy; } - SpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(proxy, spELContext); + ValueExpressionEvaluator evaluator = evaluatorFactory.apply(proxy); PersistentPropertyAccessor accessor = entity.getPropertyAccessor(proxy); Document object = new Document(idProperty.getFieldName(), source.getId()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java index e9b1865706..de66c3ea94 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,6 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.springframework.util.ReflectionUtils.*; - -import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.Serializable; -import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -29,29 +22,22 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import org.aopalliance.intercept.MethodInterceptor; -import org.aopalliance.intercept.MethodInvocation; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.bson.Document; -import org.springframework.aop.framework.ProxyFactory; -import org.springframework.cglib.proxy.Callback; -import org.springframework.cglib.proxy.Enhancer; -import org.springframework.cglib.proxy.Factory; -import org.springframework.cglib.proxy.MethodProxy; -import org.springframework.dao.DataAccessException; import org.springframework.dao.InvalidDataAccessApiUsageException; -import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.mongodb.LazyLoadingException; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoDatabaseUtils; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.FieldName; import 
org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.lang.Nullable; -import org.springframework.objenesis.ObjenesisStd; import org.springframework.util.Assert; -import org.springframework.util.ReflectionUtils; import org.springframework.util.StringUtils; import com.mongodb.DBRef; -import com.mongodb.client.MongoDatabase; +import com.mongodb.client.MongoCollection; import com.mongodb.client.model.Filters; /** @@ -64,37 +50,33 @@ * @author Mark Paluch * @since 1.4 */ -public class DefaultDbRefResolver implements DbRefResolver { +public class DefaultDbRefResolver extends DefaultReferenceResolver implements DbRefResolver, ReferenceResolver { + + private static final Log LOGGER = LogFactory.getLog(DefaultDbRefResolver.class); - private final MongoDbFactory mongoDbFactory; - private final PersistenceExceptionTranslator exceptionTranslator; - private final ObjenesisStd objenesis; + private final MongoDatabaseFactory mongoDbFactory; /** - * Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDbFactory}. + * Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDatabaseFactory}. * * @param mongoDbFactory must not be {@literal null}. 
*/ - public DefaultDbRefResolver(MongoDbFactory mongoDbFactory) { + public DefaultDbRefResolver(MongoDatabaseFactory mongoDbFactory) { + + super(new MongoDatabaseFactoryReferenceLoader(mongoDbFactory), mongoDbFactory.getExceptionTranslator()); - Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!"); + Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null"); this.mongoDbFactory = mongoDbFactory; - this.exceptionTranslator = mongoDbFactory.getExceptionTranslator(); - this.objenesis = new ObjenesisStd(true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.DbRefResolver#resolveDbRef(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, org.springframework.data.mongodb.core.convert.DbRefResolverCallback) - */ @Override - public Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback, + public Object resolveDbRef(MongoPersistentProperty property, @Nullable DBRef dbref, DbRefResolverCallback callback, DbRefProxyHandler handler) { - Assert.notNull(property, "Property must not be null!"); - Assert.notNull(callback, "Callback must not be null!"); - Assert.notNull(handler, "Handler must not be null!"); + Assert.notNull(property, "Property must not be null"); + Assert.notNull(callback, "Callback must not be null"); + Assert.notNull(handler, "Handler must not be null"); if (isLazyDbRef(property)) { return createLazyLoadingProxy(property, dbref, callback, handler); @@ -103,43 +85,18 @@ public Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefR return callback.resolve(property); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.DbRefResolver#created(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, org.springframework.data.mongodb.core.mapping.MongoPersistentEntity, java.lang.Object) - */ - @Override - public DBRef 
createDbRef(org.springframework.data.mongodb.core.mapping.DBRef annotation, - MongoPersistentEntity entity, Object id) { - - if (annotation != null && StringUtils.hasText(annotation.db())) { - return new DBRef(annotation.db(), entity.getCollection(), id); - } - - return new DBRef(entity.getCollection(), id); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.DbRefResolver#fetch(com.mongodb.DBRef) - */ @Override public Document fetch(DBRef dbRef) { - - StringUtils.hasText(dbRef.getDatabaseName()); - return (StringUtils.hasText(dbRef.getDatabaseName()) ? mongoDbFactory.getDb(dbRef.getDatabaseName()) - : mongoDbFactory.getDb()).getCollection(dbRef.getCollectionName(), Document.class) - .find(Filters.eq("_id", dbRef.getId())).first(); + return getReferenceLoader().fetchOne( + DocumentReferenceQuery.forSingleDocument(Filters.eq(FieldName.ID.name(), dbRef.getId())), + ReferenceCollection.fromDBRef(dbRef)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.DbRefResolver#bulkFetch(java.util.List) - */ @Override public List bulkFetch(List refs) { - Assert.notNull(mongoDbFactory, "Factory must not be null!"); - Assert.notNull(refs, "DBRef to fetch must not be null!"); + Assert.notNull(mongoDbFactory, "Factory must not be null"); + Assert.notNull(refs, "DBRef to fetch must not be null"); if (refs.isEmpty()) { return Collections.emptyList(); @@ -152,17 +109,25 @@ public List bulkFetch(List refs) { if (!collection.equals(ref.getCollectionName())) { throw new InvalidDataAccessApiUsageException( - "DBRefs must all target the same collection for bulk fetch operation."); + "DBRefs must all target the same collection for bulk fetch operation"); } ids.add(ref.getId()); } - MongoDatabase db = mongoDbFactory.getDb(); + DBRef databaseSource = refs.iterator().next(); + MongoCollection mongoCollection = getCollection(databaseSource); + + if (LOGGER.isTraceEnabled()) { + LOGGER.trace(String.format("Bulk fetching DBRefs %s from 
%s.%s", ids, + StringUtils.hasText(databaseSource.getDatabaseName()) ? databaseSource.getDatabaseName() + : mongoCollection.getNamespace().getDatabaseName(), + databaseSource.getCollectionName())); + } - List result = db.getCollection(collection) // - .find(new Document("_id", new Document("$in", ids))) // - .into(new ArrayList<>()); + List result = mongoCollection // + .find(new Document(BasicMongoPersistentProperty.ID_FIELD_NAME, new Document("$in", ids))) // + .into(new ArrayList<>(ids.size())); return ids.stream() // .flatMap(id -> documentWithId(id, result)) // @@ -181,44 +146,9 @@ public List bulkFetch(List refs) { private Object createLazyLoadingProxy(MongoPersistentProperty property, @Nullable DBRef dbref, DbRefResolverCallback callback, DbRefProxyHandler handler) { - Class propertyType = property.getType(); - LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, dbref, exceptionTranslator, callback); - - if (!propertyType.isInterface()) { - - Factory factory = (Factory) objenesis.newInstance(getEnhancedTypeFor(propertyType)); - factory.setCallbacks(new Callback[] { interceptor }); - - return handler.populateId(property, dbref, factory); - } - - ProxyFactory proxyFactory = new ProxyFactory(); + Object lazyLoadingProxy = getProxyFactory().createLazyLoadingProxy(property, callback, dbref); - for (Class type : propertyType.getInterfaces()) { - proxyFactory.addInterface(type); - } - - proxyFactory.addInterface(LazyLoadingProxy.class); - proxyFactory.addInterface(propertyType); - proxyFactory.addAdvice(interceptor); - - return handler.populateId(property, dbref, proxyFactory.getProxy()); - } - - /** - * Returns the CGLib enhanced type for the given source type. 
- * - * @param type - * @return - */ - private Class getEnhancedTypeFor(Class type) { - - Enhancer enhancer = new Enhancer(); - enhancer.setSuperclass(type); - enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class); - enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class }); - - return enhancer.createClass(); + return handler.populateId(property, dbref, lazyLoadingProxy); } /** @@ -241,237 +171,26 @@ private boolean isLazyDbRef(MongoPersistentProperty property) { private static Stream documentWithId(Object identifier, Collection documents) { return documents.stream() // - .filter(it -> it.get("_id").equals(identifier)) // + .filter(it -> it.get(BasicMongoPersistentProperty.ID_FIELD_NAME).equals(identifier)) // .limit(1); } /** - * A {@link MethodInterceptor} that is used within a lazy loading proxy. The property resolving is delegated to a - * {@link DbRefResolverCallback}. The resolving process is triggered by a method invocation on the proxy and is - * guaranteed to be performed only once. + * Customization hook for obtaining the {@link MongoCollection} for a given {@link DBRef}. * - * @author Thomas Darimont - * @author Oliver Gierke - * @author Christoph Strobl + * @param dbref must not be {@literal null}. + * @return the {@link MongoCollection} the given {@link DBRef} points to. 
+ * @since 2.1 */ - static class LazyLoadingInterceptor - implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable { - - private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD; - - private final DbRefResolverCallback callback; - private final MongoPersistentProperty property; - private final PersistenceExceptionTranslator exceptionTranslator; - - private volatile boolean resolved; - private final @Nullable DBRef dbref; - private @Nullable Object result; - - static { - try { - INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget"); - TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef"); - FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize"); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - /** - * Creates a new {@link LazyLoadingInterceptor} for the given {@link MongoPersistentProperty}, - * {@link PersistenceExceptionTranslator} and {@link DbRefResolverCallback}. - * - * @param property must not be {@literal null}. - * @param dbref can be {@literal null}. - * @param callback must not be {@literal null}. 
- */ - public LazyLoadingInterceptor(MongoPersistentProperty property, @Nullable DBRef dbref, - PersistenceExceptionTranslator exceptionTranslator, DbRefResolverCallback callback) { - - Assert.notNull(property, "Property must not be null!"); - Assert.notNull(exceptionTranslator, "Exception translator must not be null!"); - Assert.notNull(callback, "Callback must not be null!"); - - this.dbref = dbref; - this.callback = callback; - this.exceptionTranslator = exceptionTranslator; - this.property = property; - } - - /* - * (non-Javadoc) - * @see org.aopalliance.intercept.MethodInterceptor#invoke(org.aopalliance.intercept.MethodInvocation) - */ - @Override - public Object invoke(@Nullable MethodInvocation invocation) throws Throwable { - return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null); - } - - /* - * (non-Javadoc) - * @see org.springframework.cglib.proxy.MethodInterceptor#intercept(java.lang.Object, java.lang.reflect.Method, java.lang.Object[], org.springframework.cglib.proxy.MethodProxy) - */ - @Nullable - @Override - public Object intercept(Object obj, Method method, Object[] args, @Nullable MethodProxy proxy) throws Throwable { - - if (INITIALIZE_METHOD.equals(method)) { - return ensureResolved(); - } - - if (TO_DBREF_METHOD.equals(method)) { - return this.dbref; - } - - if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) { - - if (ReflectionUtils.isToStringMethod(method)) { - return proxyToString(proxy); - } - - if (ReflectionUtils.isEqualsMethod(method)) { - return proxyEquals(proxy, args[0]); - } - - if (ReflectionUtils.isHashCodeMethod(method)) { - return proxyHashCode(proxy); - } - - // DATAMONGO-1076 - finalize methods should not trigger proxy initialization - if (FINALIZE_METHOD.equals(method)) { - return null; - } - } - - Object target = ensureResolved(); - - if (target == null) { - return null; - } - - ReflectionUtils.makeAccessible(method); - - return method.invoke(target, args); - 
} - - /** - * Returns a to string representation for the given {@code proxy}. - * - * @param proxy - * @return - */ - private String proxyToString(@Nullable Object proxy) { - - StringBuilder description = new StringBuilder(); - if (dbref != null) { - description.append(dbref.getCollectionName()); - description.append(":"); - description.append(dbref.getId()); - } else { - description.append(System.identityHashCode(proxy)); - } - description.append("$").append(LazyLoadingProxy.class.getSimpleName()); - - return description.toString(); - } - - /** - * Returns the hashcode for the given {@code proxy}. - * - * @param proxy - * @return - */ - private int proxyHashCode(@Nullable Object proxy) { - return proxyToString(proxy).hashCode(); - } - - /** - * Performs an equality check for the given {@code proxy}. - * - * @param proxy - * @param that - * @return - */ - private boolean proxyEquals(@Nullable Object proxy, Object that) { - - if (!(that instanceof LazyLoadingProxy)) { - return false; - } + protected MongoCollection getCollection(DBRef dbref) { - if (that == proxy) { - return true; - } - - return proxyToString(proxy).equals(that.toString()); - } - - /** - * Will trigger the resolution if the proxy is not resolved already or return a previously resolved result. - * - * @return - */ - @Nullable - private Object ensureResolved() { - - if (!resolved) { - this.result = resolve(); - this.resolved = true; - } - - return this.result; - } - - /** - * Callback method for serialization. - * - * @param out - * @throws IOException - */ - private void writeObject(ObjectOutputStream out) throws IOException { - - ensureResolved(); - out.writeObject(this.result); - } - - /** - * Callback method for deserialization. 
- * - * @param in - * @throws IOException - */ - private void readObject(ObjectInputStream in) throws IOException { - - try { - this.resolved = true; - this.result = in.readObject(); - } catch (ClassNotFoundException e) { - throw new LazyLoadingException("Could not deserialize result", e); - } - } - - /** - * Resolves the proxy into its backing object. - * - * @return - */ - @Nullable - private synchronized Object resolve() { - - if (!resolved) { - - try { - - return callback.resolve(property); + return MongoDatabaseUtils.getDatabase(dbref.getDatabaseName(), mongoDbFactory) + .getCollection(dbref.getCollectionName(), Document.class); + } - } catch (RuntimeException ex) { + protected MongoCollection getCollection(ReferenceCollection context) { - DataAccessException translatedException = this.exceptionTranslator.translateExceptionIfPossible(ex); - throw new LazyLoadingException("Unable to lazily resolve DBRef!", - translatedException != null ? translatedException : ex); - } - } - - return result; - } + return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(), + Document.class); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverCallback.java index 59feae80c9..82e5c9d0eb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,7 +17,7 @@ import org.bson.Document; import org.bson.conversions.Bson; -import org.springframework.data.mapping.model.SpELExpressionEvaluator; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; /** @@ -32,18 +32,18 @@ class DefaultDbRefResolverCallback implements DbRefResolverCallback { private final Bson surroundingObject; private final ObjectPath path; private final ValueResolver resolver; - private final SpELExpressionEvaluator evaluator; + private final ValueExpressionEvaluator evaluator; /** * Creates a new {@link DefaultDbRefResolverCallback} using the given {@link Document}, {@link ObjectPath}, - * {@link ValueResolver} and {@link SpELExpressionEvaluator}. + * {@link ValueResolver} and {@link ValueExpressionEvaluator}. * * @param surroundingObject must not be {@literal null}. * @param path must not be {@literal null}. * @param evaluator must not be {@literal null}. * @param resolver must not be {@literal null}. 
*/ - public DefaultDbRefResolverCallback(Bson surroundingObject, ObjectPath path, SpELExpressionEvaluator evaluator, + DefaultDbRefResolverCallback(Bson surroundingObject, ObjectPath path, ValueExpressionEvaluator evaluator, ValueResolver resolver) { this.surroundingObject = surroundingObject; @@ -52,10 +52,6 @@ public DefaultDbRefResolverCallback(Bson surroundingObject, ObjectPath path, SpE this.evaluator = evaluator; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.DbRefResolverCallback#resolve(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty) - */ @Override public Object resolve(MongoPersistentProperty property) { return resolver.getValueInternal(property, surroundingObject, evaluator, path); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapper.java index 40d0688d37..2c2b52afd5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,15 @@ */ package org.springframework.data.mongodb.core.convert; -import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.UnaryOperator; import org.bson.Document; import org.bson.conversions.Bson; +import org.springframework.data.convert.CustomConversions; import org.springframework.data.convert.DefaultTypeMapper; import org.springframework.data.convert.SimpleTypeInformationMapper; import org.springframework.data.convert.TypeAliasAccessor; @@ -29,7 +31,6 @@ import org.springframework.data.mapping.Alias; import org.springframework.data.mapping.PersistentEntity; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.util.ClassTypeInformation; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; import org.springframework.util.ObjectUtils; @@ -39,9 +40,7 @@ /** * Default implementation of {@link MongoTypeMapper} allowing configuration of the key to lookup and store type - * information in {@link Document}. The key defaults to {@link #DEFAULT_TYPE_KEY}. Actual type-to-{@link String} - * conversion and back is done in {@link #getTypeString(TypeInformation)} or {@link #getTypeInformation(String)} - * respectively. + * information in {@link Document}. The key defaults to {@link #DEFAULT_TYPE_KEY}. 
* * @author Oliver Gierke * @author Thomas Darimont @@ -52,27 +51,64 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper implements M public static final String DEFAULT_TYPE_KEY = "_class"; @SuppressWarnings("rawtypes") // - private static final TypeInformation LIST_TYPE_INFO = ClassTypeInformation.from(List.class); + private static final TypeInformation LIST_TYPE_INFO = TypeInformation.of(List.class); @SuppressWarnings("rawtypes") // - private static final TypeInformation MAP_TYPE_INFO = ClassTypeInformation.from(Map.class); + private static final TypeInformation MAP_TYPE_INFO = TypeInformation.MAP; private final TypeAliasAccessor accessor; private final @Nullable String typeKey; + private UnaryOperator> writeTarget = UnaryOperator.identity(); + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code _class}. + */ public DefaultMongoTypeMapper() { this(DEFAULT_TYPE_KEY); } + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. + * + * @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints. + */ public DefaultMongoTypeMapper(@Nullable String typeKey) { - this(typeKey, Arrays.asList(new SimpleTypeInformationMapper())); + this(typeKey, Collections.singletonList(new SimpleTypeInformationMapper())); } + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. + * + * @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints. + * @param mappingContext the mapping context. 
+ */ public DefaultMongoTypeMapper(@Nullable String typeKey, MappingContext, ?> mappingContext) { this(typeKey, new DocumentTypeAliasAccessor(typeKey), mappingContext, - Arrays.asList(new SimpleTypeInformationMapper())); + Collections.singletonList(new SimpleTypeInformationMapper())); + } + + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. Uses + * {@link UnaryOperator} to apply {@link CustomConversions}. + * + * @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints. + * @param mappingContext the mapping context to look up types using type hints. + * @see MappingMongoConverter#getWriteTarget(Class) + */ + public DefaultMongoTypeMapper(@Nullable String typeKey, + MappingContext, ?> mappingContext, UnaryOperator> writeTarget) { + this(typeKey, new DocumentTypeAliasAccessor(typeKey), mappingContext, + Collections.singletonList(new SimpleTypeInformationMapper())); + this.writeTarget = writeTarget; } + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. Uses + * {@link TypeInformationMapper} to map type hints. + * + * @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints. + * @param mappers must not be {@literal null}. + */ public DefaultMongoTypeMapper(@Nullable String typeKey, List mappers) { this(typeKey, new DocumentTypeAliasAccessor(typeKey), null, mappers); } @@ -87,18 +123,10 @@ private DefaultMongoTypeMapper(@Nullable String typeKey, TypeAliasAccessor this.accessor = accessor; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.MongoTypeMapper#isTypeKey(java.lang.String) - */ public boolean isTypeKey(String key) { - return typeKey == null ? 
false : typeKey.equals(key); + return typeKey != null && typeKey.equals(key); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.MongoTypeMapper#writeTypeRestrictions(java.util.Set) - */ @Override public void writeTypeRestrictions(Document result, @Nullable Set> restrictedTypes) { @@ -110,7 +138,7 @@ public void writeTypeRestrictions(Document result, @Nullable Set> restr for (Class restrictedType : restrictedTypes) { - Alias typeAlias = getAliasFor(ClassTypeInformation.from(restrictedType)); + Alias typeAlias = getAliasFor(TypeInformation.of(restrictedType)); if (!ObjectUtils.nullSafeEquals(Alias.NONE, typeAlias) && typeAlias.isPresent()) { restrictedMappedTypes.add(typeAlias.getValue()); @@ -120,10 +148,11 @@ public void writeTypeRestrictions(Document result, @Nullable Set> restr accessor.writeTypeTo(result, new Document("$in", restrictedMappedTypes)); } - /* - * (non-Javadoc) - * @see org.springframework.data.convert.DefaultTypeMapper#getFallbackTypeFor(java.lang.Object) - */ + @Override + public Class getWriteTargetTypeFor(Class source) { + return writeTarget.apply(source); + } + @Override protected TypeInformation getFallbackTypeFor(Bson source) { return source instanceof BasicDBList ? 
LIST_TYPE_INFO : MAP_TYPE_INFO; @@ -142,37 +171,30 @@ public DocumentTypeAliasAccessor(@Nullable String typeKey) { this.typeKey = typeKey; } - /* - * (non-Javadoc) - * @see org.springframework.data.convert.TypeAliasAccessor#readAliasFrom(java.lang.Object) - */ + @Override public Alias readAliasFrom(Bson source) { if (source instanceof List) { return Alias.NONE; } - if (source instanceof Document) { - return Alias.ofNullable(((Document) source).get(typeKey)); - } else if (source instanceof DBObject) { - return Alias.ofNullable(((DBObject) source).get(typeKey)); + if (source instanceof Document document) { + return Alias.ofNullable(document.get(typeKey)); + } else if (source instanceof DBObject dbObject) { + return Alias.ofNullable(dbObject.get(typeKey)); } throw new IllegalArgumentException("Cannot read alias from " + source.getClass()); } - /* - * (non-Javadoc) - * @see org.springframework.data.convert.TypeAliasAccessor#writeTypeTo(java.lang.Object, java.lang.Object) - */ public void writeTypeTo(Bson sink, Object alias) { if (typeKey != null) { - if (sink instanceof Document) { - ((Document) sink).put(typeKey, alias); - } else if (sink instanceof DBObject) { - ((DBObject) sink).put(typeKey, alias); + if (sink instanceof Document document) { + document.put(typeKey, alias); + } else if (sink instanceof DBObject dbObject) { + dbObject.put(typeKey, alias); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java new file mode 100644 index 0000000000..a7b3d6f21f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java @@ -0,0 +1,115 @@ +/* + * Copyright 2021-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import static org.springframework.data.mongodb.core.convert.ReferenceLookupDelegate.*; + +import java.util.Collections; + +import org.bson.Document; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.DocumentReference; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.util.Assert; + +/** + * {@link ReferenceResolver} implementation that uses a given {@link ReferenceLookupDelegate} to load and convert entity + * associations expressed via a {@link MongoPersistentProperty persitent property}. Creates {@link LazyLoadingProxy + * proxies} for associations that should be lazily loaded. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Anton Buzdalkin + * @since 3.3 + */ +public class DefaultReferenceResolver implements ReferenceResolver { + + private final ReferenceLoader referenceLoader; + private final LazyLoadingProxyFactory proxyFactory; + + private final LookupFunction collectionLookupFunction = (filter, ctx) -> getReferenceLoader().fetchMany(filter, ctx); + private final LookupFunction singleValueLookupFunction = (filter, ctx) -> { + Document target = getReferenceLoader().fetchOne(filter, ctx); + return target == null ? 
Collections.emptyList() : Collections.singleton(target); + }; + + /** + * Create a new instance of {@link DefaultReferenceResolver}. + * + * @param referenceLoader must not be {@literal null}. + * @param exceptionTranslator must not be {@literal null}. + */ + public DefaultReferenceResolver(ReferenceLoader referenceLoader, PersistenceExceptionTranslator exceptionTranslator) { + + Assert.notNull(referenceLoader, "ReferenceLoader must not be null"); + Assert.notNull(exceptionTranslator, "ExceptionTranslator must not be null"); + + this.referenceLoader = referenceLoader; + this.proxyFactory = new LazyLoadingProxyFactory(exceptionTranslator); + } + + @Override + public Object resolveReference(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) { + + LookupFunction lookupFunction = (property.isCollectionLike() || property.isMap()) ? collectionLookupFunction + : singleValueLookupFunction; + + if (isLazyReference(property)) { + return createLazyLoadingProxy(property, source, referenceLookupDelegate, lookupFunction, entityReader); + } + + return referenceLookupDelegate.readReference(property, source, lookupFunction, entityReader); + } + + /** + * Check if the association expressed by the given {@link MongoPersistentProperty property} should be resolved lazily. + * + * @param property + * @return return {@literal true} if the defined association is lazy. + * @see DBRef#lazy() + * @see DocumentReference#lazy() + */ + protected boolean isLazyReference(MongoPersistentProperty property) { + + if (property.isDocumentReference()) { + return property.getDocumentReference().lazy(); + } + + return property.getDBRef() != null && property.getDBRef().lazy(); + } + + /** + * The {@link ReferenceLoader} executing the lookup. + * + * @return never {@literal null}. 
+ */ + protected ReferenceLoader getReferenceLoader() { + return referenceLoader; + } + + LazyLoadingProxyFactory getProxyFactory() { + return proxyFactory; + } + + private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction, MongoEntityReader entityReader) { + return proxyFactory.createLazyLoadingProxy(property, + it -> referenceLookupDelegate.readReference(it, source, lookupFunction, entityReader), + source instanceof DocumentReferenceSource documentSource ? documentSource.getTargetSource() : source); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java index 5db38cfc96..c795add9c8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,12 +21,13 @@ import org.bson.Document; import org.bson.conversions.Bson; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.BasicDBObject; import com.mongodb.DBObject; /** @@ -49,34 +50,53 @@ class DocumentAccessor { */ public DocumentAccessor(Bson document) { - Assert.notNull(document, "Document must not be null!"); + Assert.notNull(document, "Document must not be null"); if (!(document instanceof Document) && !(document instanceof DBObject)) { - Assert.isInstanceOf(Document.class, document, "Given Bson must be a Document or DBObject!"); + Assert.isInstanceOf(Document.class, document, "Given Bson must be a Document or DBObject"); } this.document = document; } + /** + * @return the underlying {@link Bson document}. + * @since 2.1 + */ + Bson getDocument() { + return this.document; + } + + /** + * Copies all of the mappings from the given {@link Document} to the underlying target {@link Document}. These + * mappings will replace any mappings that the target document had for any of the keys currently in the specified map. + * + * @param source + */ + public void putAll(Document source) { + + Map target = BsonUtils.asMap(document); + + target.putAll(source); + } + /** * Puts the given value into the backing {@link Document} based on the coordinates defined through the given * {@link MongoPersistentProperty}. By default this will be the plain field name. 
But field names might also consist - * of path traversals so we might need to create intermediate {@link BasicDocument}s. + * of path traversals so we might need to create intermediate {@link Document}s. * * @param prop must not be {@literal null}. - * @param value + * @param value can be {@literal null}. */ public void put(MongoPersistentProperty prop, @Nullable Object value) { - Assert.notNull(prop, "MongoPersistentProperty must not be null!"); - String fieldName = prop.getFieldName(); + Assert.notNull(prop, "MongoPersistentProperty must not be null"); - if (!fieldName.contains(".")) { - BsonUtils.addToMap(document, fieldName, value); + if (value == null && !prop.writeNullValues()) { return; } - Iterator parts = Arrays.asList(fieldName.split("\\.")).iterator(); + Iterator parts = Arrays.asList(prop.getMongoField().getName().parts()).iterator(); Bson document = this.document; while (parts.hasNext()) { @@ -97,104 +117,42 @@ public void put(MongoPersistentProperty prop, @Nullable Object value) { * a path expression in the field name metadata. * * @param property must not be {@literal null}. - * @return + * @return can be {@literal null}. */ @Nullable public Object get(MongoPersistentProperty property) { - - String fieldName = property.getFieldName(); - - if (!fieldName.contains(".")) { - return BsonUtils.asMap(this.document).get(fieldName); - } - - Iterator parts = Arrays.asList(fieldName.split("\\.")).iterator(); - Map source = BsonUtils.asMap(this.document); - Object result = null; - - while (source != null && parts.hasNext()) { - - result = source.get(parts.next()); - - if (parts.hasNext()) { - source = getAsMap(result); - } - } - - return result; + return BsonUtils.resolveValue(document, getFieldName(property)); } /** - * Returns whether the underlying {@link Document} has a value ({@literal null} or non-{@literal null}) for the given - * {@link MongoPersistentProperty}. 
+ * Returns the raw identifier for the given {@link MongoPersistentEntity} or the value of the default identifier + * field. * - * @param property must not be {@literal null}. + * @param entity must not be {@literal null}. * @return */ - public boolean hasValue(MongoPersistentProperty property) { - - Assert.notNull(property, "Property must not be null!"); - - String fieldName = property.getFieldName(); - - if (!fieldName.contains(".")) { - - if (this.document instanceof Document) { - return ((Document) this.document).containsKey(fieldName); - } - - if (this.document instanceof DBObject) { - return ((DBObject) this.document).containsField(fieldName); - } - } - - String[] parts = fieldName.split("\\."); - Map source; - - if (this.document instanceof Document) { - source = ((Document) this.document); - } else { - source = ((DBObject) this.document).toMap(); - } - - Object result = null; - - for (int i = 1; i < parts.length; i++) { - - result = source.get(parts[i - 1]); - source = getAsMap(result); - - if (source == null) { - return false; - } - } - - return source.containsKey(parts[parts.length - 1]); + @Nullable + public Object getRawId(MongoPersistentEntity entity) { + return entity.hasIdProperty() ? get(entity.getRequiredIdProperty()) : BsonUtils.get(document, FieldName.ID.name()); } /** - * Returns the given source object as map, i.e. {@link Document}s and maps as is or {@literal null} otherwise. + * Returns whether the underlying {@link Document} has a value ({@literal null} or non-{@literal null}) for the given + * {@link MongoPersistentProperty}. * - * @param source can be {@literal null}. - * @return + * @param property must not be {@literal null}. + * @return {@literal true} if no non {@literal null} value present. 
*/ - @Nullable @SuppressWarnings("unchecked") - private static Map getAsMap(Object source) { - - if (source instanceof Document) { - return (Document) source; - } + public boolean hasValue(MongoPersistentProperty property) { - if (source instanceof BasicDBObject) { - return (BasicDBObject) source; - } + Assert.notNull(property, "Property must not be null"); - if (source instanceof Map) { - return (Map) source; - } + return BsonUtils.hasValue(document, getFieldName(property)); + } - return null; + FieldName getFieldName(MongoPersistentProperty prop) { + return prop.getMongoField().getName(); } /** @@ -209,8 +167,8 @@ private static Document getOrCreateNestedDocument(String key, Bson source) { Object existing = BsonUtils.asMap(source).get(key); - if (existing instanceof Document) { - return (Document) existing; + if (existing instanceof Document document) { + return document; } Document nested = new Document(); @@ -218,4 +176,5 @@ private static Document getOrCreateNestedDocument(String key, Bson source) { return nested; } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java new file mode 100644 index 0000000000..8429584a6f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java @@ -0,0 +1,263 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.WeakHashMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.springframework.core.convert.ConversionService; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.annotation.Reference; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PersistentPropertyPathAccessor; +import org.springframework.data.mapping.PropertyPath; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.BeanWrapperPropertyAccessorFactory; +import org.springframework.data.mongodb.core.mapping.DocumentPointer; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; + +/** + * Internal API to construct {@link DocumentPointer} for a given property. Considers {@link LazyLoadingProxy}, + * registered {@link Object} to {@link DocumentPointer} {@link org.springframework.core.convert.converter.Converter}, + * simple {@literal _id} lookups and cases where the {@link DocumentPointer} needs to be computed via a lookup query. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ +class DocumentPointerFactory { + + private final ConversionService conversionService; + private final MappingContext, MongoPersistentProperty> mappingContext; + private final Map cache; + + /** + * A {@link Pattern} matching quoted and unquoted variants (with/out whitespaces) of + * {'_id' : ?#{#target} }. + */ + private static final Pattern DEFAULT_LOOKUP_PATTERN = Pattern.compile("\\{\\s?" + // document start (whitespace opt) + "['\"]?_id['\"]?" + // followed by an optionally quoted _id. Like: _id, '_id' or "_id" + "?\\s?:\\s?" + // then a colon optionally wrapped inside whitespaces + "['\"]?\\?#\\{#target\\}['\"]?" + // leading to the potentially quoted ?#{#target} expression + "\\s*}"); // some optional whitespaces and document close + + DocumentPointerFactory(ConversionService conversionService, + MappingContext, MongoPersistentProperty> mappingContext) { + + this.conversionService = conversionService; + this.mappingContext = mappingContext; + this.cache = new WeakHashMap<>(); + } + + DocumentPointer computePointer( + MappingContext, MongoPersistentProperty> mappingContext, + MongoPersistentProperty property, Object value, Class typeHint) { + + if (value instanceof LazyLoadingProxy proxy) { + return proxy::getSource; + } + + if (conversionService.canConvert(typeHint, DocumentPointer.class)) { + return conversionService.convert(value, DocumentPointer.class); + } + + MongoPersistentEntity persistentEntity = mappingContext + .getRequiredPersistentEntity(property.getAssociationTargetType()); + + if (usesDefaultLookup(property)) { + + MongoPersistentProperty idProperty = persistentEntity.getIdProperty(); + Object idValue = persistentEntity.getIdentifierAccessor(value).getIdentifier(); + + if (idProperty.hasExplicitWriteTarget() + && conversionService.canConvert(idValue.getClass(), idProperty.getFieldType())) { + return () -> conversionService.convert(idValue, idProperty.getFieldType()); + } + + if (idValue 
instanceof String stringValue && ObjectId.isValid((String) idValue)) { + return () -> new ObjectId(stringValue); + } + + return () -> idValue; + } + + MongoPersistentEntity valueEntity = mappingContext.getPersistentEntity(value.getClass()); + PersistentPropertyAccessor propertyAccessor; + if (valueEntity == null) { + propertyAccessor = BeanWrapperPropertyAccessorFactory.INSTANCE.getPropertyAccessor(property.getOwner(), value); + } else { + propertyAccessor = valueEntity.getPropertyPathAccessor(value); + } + + return cache.computeIfAbsent(property.getDocumentReference().lookup(), LinkageDocument::from) + .getDocumentPointer(mappingContext, persistentEntity, propertyAccessor); + } + + private boolean usesDefaultLookup(MongoPersistentProperty property) { + + if (property.isDocumentReference()) { + return DEFAULT_LOOKUP_PATTERN.matcher(property.getDocumentReference().lookup()).matches(); + } + + Reference atReference = property.findAnnotation(Reference.class); + if (atReference != null) { + return true; + } + + throw new IllegalStateException(String.format("%s does not seem to be define Reference", property)); + } + + /** + * Value object that computes a document pointer from a given lookup query by identifying SpEL expressions and + * inverting it. + * + *
          +	 * // source
          +	 * { 'firstname' : ?#{fn}, 'lastname' : '?#{ln} }
          +	 *
          +	 * // target
          +	 * { 'fn' : ..., 'ln' : ... }
          +	 * 
          + * + * The actual pointer is the computed via + * {@link #getDocumentPointer(MappingContext, MongoPersistentEntity, PersistentPropertyAccessor)} applying values from + * the provided {@link PersistentPropertyAccessor} to the target document by looking at the keys of the expressions + * from the source. + */ + static class LinkageDocument { + + static final Pattern EXPRESSION_PATTERN = Pattern.compile("\\?#\\{#?(?[\\w\\d\\.\\-)]*)\\}"); + static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("###_(?\\d*)_###"); + + private final String lookup; + private final org.bson.Document documentPointer; + private final Map placeholderMap; + private final boolean isSimpleTargetPointer; + + static LinkageDocument from(String lookup) { + return new LinkageDocument(lookup); + } + + private LinkageDocument(String lookup) { + + this.lookup = lookup; + this.placeholderMap = new LinkedHashMap<>(); + + int index = 0; + Matcher matcher = EXPRESSION_PATTERN.matcher(lookup); + String targetLookup = lookup; + + while (matcher.find()) { + + String expression = matcher.group(); + String fieldName = matcher.group("fieldName").replace("target.", ""); + + String placeholder = placeholder(index); + placeholderMap.put(placeholder, fieldName); + targetLookup = targetLookup.replace(expression, "'" + placeholder + "'"); + index++; + } + + this.documentPointer = org.bson.Document.parse(targetLookup); + this.isSimpleTargetPointer = placeholderMap.size() == 1 && placeholderMap.containsValue("target") + && lookup.contains("#target"); + } + + private String placeholder(int index) { + return "###_" + index + "_###"; + } + + private boolean isPlaceholder(String key) { + return PLACEHOLDER_PATTERN.matcher(key).matches(); + } + + DocumentPointer getDocumentPointer( + MappingContext, MongoPersistentProperty> mappingContext, + MongoPersistentEntity persistentEntity, PersistentPropertyAccessor propertyAccessor) { + return () -> updatePlaceholders(documentPointer, new Document(), mappingContext, 
persistentEntity, + propertyAccessor); + } + + Object updatePlaceholders(org.bson.Document source, org.bson.Document target, + MappingContext, MongoPersistentProperty> mappingContext, + MongoPersistentEntity persistentEntity, PersistentPropertyAccessor propertyAccessor) { + + for (Entry entry : source.entrySet()) { + + if (entry.getKey().startsWith("$")) { + throw new InvalidDataAccessApiUsageException(String.format( + "Cannot derive document pointer from lookup '%s' using query operator (%s); Please consider registering a custom converter", + lookup, entry.getKey())); + } + + if (entry.getValue() instanceof Document document) { + + MongoPersistentProperty persistentProperty = persistentEntity.getPersistentProperty(entry.getKey()); + if (persistentProperty != null && persistentProperty.isEntity()) { + + MongoPersistentEntity nestedEntity = mappingContext.getPersistentEntity(persistentProperty.getType()); + target.put(entry.getKey(), updatePlaceholders(document, new Document(), mappingContext, + nestedEntity, nestedEntity.getPropertyAccessor(propertyAccessor.getProperty(persistentProperty)))); + } else { + target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext, + persistentEntity, propertyAccessor)); + } + continue; + } + + if (placeholderMap.containsKey(entry.getValue())) { + + String attribute = placeholderMap.get(entry.getValue()); + if (attribute.contains(".")) { + attribute = attribute.substring(attribute.lastIndexOf('.') + 1); + } + + String fieldName = entry.getKey().equals(FieldName.ID.name()) ? 
"id" : entry.getKey(); + if (!fieldName.contains(".")) { + + Object targetValue = propertyAccessor.getProperty(persistentEntity.getPersistentProperty(fieldName)); + target.put(attribute, targetValue); + continue; + } + + PersistentPropertyPathAccessor propertyPathAccessor = persistentEntity + .getPropertyPathAccessor(propertyAccessor.getBean()); + PersistentPropertyPath path = mappingContext + .getPersistentPropertyPath(PropertyPath.from(fieldName, persistentEntity.getTypeInformation())); + Object targetValue = propertyPathAccessor.getProperty(path); + target.put(attribute, targetValue); + continue; + } + + target.put(entry.getKey(), entry.getValue()); + } + + if (target.size() == 1 && isSimpleTargetPointer) { + return target.values().iterator().next(); + } + + return target; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPropertyAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPropertyAccessor.java index 2aca63081c..ea5ce01b44 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPropertyAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPropertyAccessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -34,28 +34,16 @@ class DocumentPropertyAccessor extends MapAccessor { static final MapAccessor INSTANCE = new DocumentPropertyAccessor(); - /* - * (non-Javadoc) - * @see org.springframework.context.expression.MapAccessor#getSpecificTargetClasses() - */ @Override public Class[] getSpecificTargetClasses() { return new Class[] { Document.class }; } - /* - * (non-Javadoc) - * @see org.springframework.context.expression.MapAccessor#canRead(org.springframework.expression.EvaluationContext, java.lang.Object, java.lang.String) - */ @Override public boolean canRead(EvaluationContext context, @Nullable Object target, String name) { return true; } - /* - * (non-Javadoc) - * @see org.springframework.context.expression.MapAccessor#read(org.springframework.expression.EvaluationContext, java.lang.Object, java.lang.String) - */ @Override @SuppressWarnings("unchecked") public TypedValue read(EvaluationContext context, @Nullable Object target, String name) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java new file mode 100644 index 0000000000..bf21781058 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java @@ -0,0 +1,84 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.springframework.lang.Nullable; + +/** + * The source object to resolve document references upon. Encapsulates the actual source and the reference specific + * values. + * + * @author Christoph Strobl + * @since 3.3 + */ +public class DocumentReferenceSource { + + private final Object self; + + private final @Nullable Object targetSource; + + /** + * Create a new instance of {@link DocumentReferenceSource}. + * + * @param self the entire wrapper object holding references. Must not be {@literal null}. + * @param targetSource the reference value source. + */ + DocumentReferenceSource(Object self, @Nullable Object targetSource) { + + this.self = self; + this.targetSource = targetSource; + } + + /** + * Get the outer document. + * + * @return never {@literal null}. + */ + public Object getSelf() { + return self; + } + + /** + * Get the actual (property specific) reference value. + * + * @return can be {@literal null}. + */ + @Nullable + public Object getTargetSource() { + return targetSource; + } + + /** + * Dereference a {@code targetSource} if it is a {@link DocumentReferenceSource} or return {@code source} otherwise. + * + * @param source + * @return + */ + @Nullable + static Object getTargetSource(Object source) { + return source instanceof DocumentReferenceSource referenceSource ? referenceSource.getTargetSource() : source; + } + + /** + * Dereference a {@code self} object if it is a {@link DocumentReferenceSource} or return {@code self} otherwise. 
+ * + * @param self + * @return + */ + static Object getSelf(Object self) { + return self instanceof DocumentReferenceSource referenceSource ? referenceSource.getSelf() : self; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/GeoConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/GeoConverters.java index e75082053d..2bca260b79 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/GeoConverters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/GeoConverters.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,16 @@ */ package org.springframework.data.mongodb.core.convert; +import java.text.Collator; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; +import java.util.Map; +import java.util.TreeMap; import org.bson.Document; + import org.springframework.core.convert.converter.Converter; import org.springframework.data.convert.ReadingConverter; import org.springframework.data.convert.WritingConverter; @@ -45,7 +49,7 @@ import org.springframework.util.NumberUtils; import org.springframework.util.ObjectUtils; -import com.mongodb.BasicDBList; +import com.mongodb.Function; /** * Wrapper class to contain useful geo structure converters for the usage with Mongo. 
@@ -56,8 +60,28 @@ * @author Thiago Diniz da Silveira * @since 1.5 */ +@SuppressWarnings("ConstantConditions") abstract class GeoConverters { + private final static Map>> converters; + + static { + + Collator caseInsensitive = Collator.getInstance(); + caseInsensitive.setStrength(Collator.PRIMARY); + + Map>> geoConverters = new TreeMap<>(caseInsensitive); + geoConverters.put("point", DocumentToGeoJsonPointConverter.INSTANCE::convert); + geoConverters.put("multipoint", DocumentToGeoJsonMultiPointConverter.INSTANCE::convert); + geoConverters.put("linestring", DocumentToGeoJsonLineStringConverter.INSTANCE::convert); + geoConverters.put("multilinestring", DocumentToGeoJsonMultiLineStringConverter.INSTANCE::convert); + geoConverters.put("polygon", DocumentToGeoJsonPolygonConverter.INSTANCE::convert); + geoConverters.put("multipolygon", DocumentToGeoJsonMultiPolygonConverter.INSTANCE::convert); + geoConverters.put("geometrycollection", DocumentToGeoJsonGeometryCollectionConverter.INSTANCE::convert); + + converters = geoConverters; + } + /** * Private constructor to prevent instantiation. */ @@ -66,9 +90,8 @@ private GeoConverters() {} /** * Returns the geo converters to be registered. * - * @return + * @return never {@literal null}. 
*/ - @SuppressWarnings("unchecked") public static Collection getConvertersToRegister() { return Arrays.asList( // BoxToDocumentConverter.INSTANCE // @@ -91,7 +114,8 @@ public static Collection getConvertersToRegister() { , DocumentToGeoJsonMultiLineStringConverter.INSTANCE // , DocumentToGeoJsonMultiPointConverter.INSTANCE // , DocumentToGeoJsonMultiPolygonConverter.INSTANCE // - , DocumentToGeoJsonGeometryCollectionConverter.INSTANCE); + , DocumentToGeoJsonGeometryCollectionConverter.INSTANCE // + , DocumentToGeoJsonConverter.INSTANCE); } /** @@ -101,14 +125,10 @@ public static Collection getConvertersToRegister() { * @since 1.5 */ @ReadingConverter - static enum DocumentToPointConverter implements Converter { + enum DocumentToPointConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Point convert(Document source) { @@ -132,14 +152,10 @@ public Point convert(Document source) { * @author Thomas Darimont * @since 1.5 */ - static enum PointToDocumentConverter implements Converter { + enum PointToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Document convert(Point source) { return source == null ? null : new Document("x", source.getX()).append("y", source.getY()); @@ -147,20 +163,16 @@ public Document convert(Point source) { } /** - * Converts a {@link Box} into a {@link BasicDBList}. + * Converts a {@link Box} into a {@link Document}. 
* * @author Thomas Darimont * @since 1.5 */ @WritingConverter - static enum BoxToDocumentConverter implements Converter { + enum BoxToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Document convert(Box source) { @@ -176,20 +188,16 @@ public Document convert(Box source) { } /** - * Converts a {@link BasicDBList} into a {@link Box}. + * Converts a {@link Document} into a {@link Box}. * * @author Thomas Darimont * @since 1.5 */ @ReadingConverter - static enum DocumentToBoxConverter implements Converter { + enum DocumentToBoxConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Box convert(Document source) { @@ -205,19 +213,15 @@ public Box convert(Document source) { } /** - * Converts a {@link Circle} into a {@link BasicDBList}. + * Converts a {@link Circle} into a {@link Document}. 
* * @author Thomas Darimont * @since 1.5 */ - static enum CircleToDocumentConverter implements Converter { + enum CircleToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Document convert(Circle source) { @@ -240,14 +244,10 @@ public Document convert(Circle source) { * @since 1.5 */ @ReadingConverter - static enum DocumentToCircleConverter implements Converter { + enum DocumentToCircleConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Circle convert(Document source) { @@ -258,15 +258,15 @@ public Circle convert(Document source) { Document center = (Document) source.get("center"); Number radius = (Number) source.get("radius"); - Assert.notNull(center, "Center must not be null!"); - Assert.notNull(radius, "Radius must not be null!"); + Assert.notNull(center, "Center must not be null"); + Assert.notNull(radius, "Radius must not be null"); Distance distance = new Distance(toPrimitiveDoubleValue(radius)); if (source.containsKey("metric")) { String metricString = (String) source.get("metric"); - Assert.notNull(metricString, "Metric must not be null!"); + Assert.notNull(metricString, "Metric must not be null"); distance = distance.in(Metrics.valueOf(metricString)); } @@ -276,19 +276,15 @@ public Circle convert(Document source) { } /** - * Converts a {@link Sphere} into a {@link BasicDBList}. + * Converts a {@link Sphere} into a {@link Document}. 
* * @author Thomas Darimont * @since 1.5 */ - static enum SphereToDocumentConverter implements Converter { + enum SphereToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Document convert(Sphere source) { @@ -305,20 +301,16 @@ public Document convert(Sphere source) { } /** - * Converts a {@link BasicDBList} into a {@link Sphere}. + * Converts a {@link Document} into a {@link Sphere}. * * @author Thomas Darimont * @since 1.5 */ @ReadingConverter - static enum DocumentToSphereConverter implements Converter { + enum DocumentToSphereConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Sphere convert(Document source) { @@ -329,15 +321,15 @@ public Sphere convert(Document source) { Document center = (Document) source.get("center"); Number radius = (Number) source.get("radius"); - Assert.notNull(center, "Center must not be null!"); - Assert.notNull(radius, "Radius must not be null!"); + Assert.notNull(center, "Center must not be null"); + Assert.notNull(radius, "Radius must not be null"); Distance distance = new Distance(toPrimitiveDoubleValue(radius)); if (source.containsKey("metric")) { String metricString = (String) source.get("metric"); - Assert.notNull(metricString, "Metric must not be null!"); + Assert.notNull(metricString, "Metric must not be null"); distance = distance.in(Metrics.valueOf(metricString)); } @@ -347,19 +339,15 @@ public Sphere convert(Document source) { } /** - * Converts a {@link Polygon} into a {@link BasicDBList}. + * Converts a {@link Polygon} into a {@link Document}. 
* * @author Thomas Darimont * @since 1.5 */ - static enum PolygonToDocumentConverter implements Converter { + enum PolygonToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Document convert(Polygon source) { @@ -368,7 +356,7 @@ public Document convert(Polygon source) { } List points = source.getPoints(); - List pointTuples = new ArrayList(points.size()); + List pointTuples = new ArrayList<>(points.size()); for (Point point : points) { pointTuples.add(PointToDocumentConverter.INSTANCE.convert(point)); @@ -381,20 +369,16 @@ public Document convert(Polygon source) { } /** - * Converts a {@link BasicDBList} into a {@link Polygon}. + * Converts a {@link Document} into a {@link Polygon}. * * @author Thomas Darimont * @since 1.5 */ @ReadingConverter - static enum DocumentToPolygonConverter implements Converter { + enum DocumentToPolygonConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override @SuppressWarnings({ "unchecked" }) public Polygon convert(Document source) { @@ -404,11 +388,11 @@ public Polygon convert(Document source) { } List points = (List) source.get("points"); - List newPoints = new ArrayList(points.size()); + List newPoints = new ArrayList<>(points.size()); for (Document element : points) { - Assert.notNull(element, "Point elements of polygon must not be null!"); + Assert.notNull(element, "Point elements of polygon must not be null"); newPoints.add(DocumentToPointConverter.INSTANCE.convert(element)); } @@ -417,19 +401,15 @@ public Polygon convert(Document source) { } /** - * Converts a {@link Sphere} into a {@link BasicDBList}. + * Converts a {@link Sphere} into a {@link Document}. 
* * @author Thomas Darimont * @since 1.5 */ - static enum GeoCommandToDocumentConverter implements Converter { + enum GeoCommandToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override @SuppressWarnings("rawtypes") public Document convert(GeoCommand source) { @@ -438,39 +418,36 @@ public Document convert(GeoCommand source) { return null; } - List argument = new ArrayList(); + List argument = new ArrayList<>(2); Shape shape = source.getShape(); - if (shape instanceof GeoJson) { - return GeoJsonToDocumentConverter.INSTANCE.convert((GeoJson) shape); + if (shape instanceof GeoJson geoJson) { + return GeoJsonToDocumentConverter.INSTANCE.convert(geoJson); } - if (shape instanceof Box) { - - argument.add(toList(((Box) shape).getFirst())); - argument.add(toList(((Box) shape).getSecond())); + if (shape instanceof Box box) { - } else if (shape instanceof Circle) { + argument.add(toList(box.getFirst())); + argument.add(toList(box.getSecond())); - argument.add(toList(((Circle) shape).getCenter())); - argument.add(((Circle) shape).getRadius().getNormalizedValue()); + } else if (shape instanceof Circle circle) { - } else if (shape instanceof Circle) { + argument.add(toList(circle.getCenter())); + argument.add(circle.getRadius().getNormalizedValue()); - argument.add(toList(((Circle) shape).getCenter())); - argument.add(((Circle) shape).getRadius()); + } else if (shape instanceof Polygon polygon) { - } else if (shape instanceof Polygon) { - - for (Point point : ((Polygon) shape).getPoints()) { + List points = polygon.getPoints(); + argument = new ArrayList<>(points.size()); + for (Point point : points) { argument.add(toList(point)); } - } else if (shape instanceof Sphere) { + } else if (shape instanceof Sphere sphere) { - argument.add(toList(((Sphere) shape).getCenter())); - argument.add(((Sphere) shape).getRadius().getNormalizedValue()); + 
argument.add(toList(sphere.getCenter())); + argument.add(sphere.getRadius().getNormalizedValue()); } return new Document(source.getCommand(), argument); @@ -481,17 +458,12 @@ public Document convert(GeoCommand source) { * @author Christoph Strobl * @since 1.7 */ - @SuppressWarnings("rawtypes") - static enum GeoJsonToDocumentConverter implements Converter { + enum GeoJsonToDocumentConverter implements Converter, Document> { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override - public Document convert(GeoJson source) { + public Document convert(GeoJson source) { if (source == null) { return null; @@ -499,42 +471,42 @@ public Document convert(GeoJson source) { Document dbo = new Document("type", source.getType()); - if (source instanceof GeoJsonGeometryCollection) { + if (source instanceof GeoJsonGeometryCollection collection) { - List dbl = new ArrayList(); + List dbl = new ArrayList<>(); - for (GeoJson geometry : ((GeoJsonGeometryCollection) source).getCoordinates()) { + for (GeoJson geometry : collection.getCoordinates()) { dbl.add(convert(geometry)); } dbo.put("geometries", dbl); } else { - dbo.put("coordinates", convertIfNecessarry(source.getCoordinates())); + dbo.put("coordinates", convertIfNecessary(source.getCoordinates())); } return dbo; } - private Object convertIfNecessarry(Object candidate) { + private Object convertIfNecessary(Object candidate) { - if (candidate instanceof GeoJson) { - return convertIfNecessarry(((GeoJson) candidate).getCoordinates()); + if (candidate instanceof GeoJson geoJson) { + return convertIfNecessary(geoJson.getCoordinates()); } - if (candidate instanceof Iterable) { + if (candidate instanceof Iterable iterable) { - List dbl = new ArrayList(); + List dbl = new ArrayList<>(); - for (Object element : (Iterable) candidate) { - dbl.add(convertIfNecessarry(element)); + for (Object element : iterable) { + dbl.add(convertIfNecessary(element)); } return 
dbl; } - if (candidate instanceof Point) { - return toList((Point) candidate); + if (candidate instanceof Point point) { + return toList(point); } return candidate; @@ -545,14 +517,10 @@ private Object convertIfNecessarry(Object candidate) { * @author Christoph Strobl * @since 1.7 */ - static enum GeoJsonPointToDocumentConverter implements Converter { + enum GeoJsonPointToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Document convert(GeoJsonPoint source) { return GeoJsonToDocumentConverter.INSTANCE.convert(source); @@ -563,14 +531,10 @@ public Document convert(GeoJsonPoint source) { * @author Christoph Strobl * @since 1.7 */ - static enum GeoJsonPolygonToDocumentConverter implements Converter { + enum GeoJsonPolygonToDocumentConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Document convert(GeoJsonPolygon source) { return GeoJsonToDocumentConverter.INSTANCE.convert(source); @@ -581,14 +545,10 @@ public Document convert(GeoJsonPolygon source) { * @author Christoph Strobl * @since 1.7 */ - static enum DocumentToGeoJsonPointConverter implements Converter { + enum DocumentToGeoJsonPointConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override @SuppressWarnings("unchecked") public GeoJsonPoint convert(Document source) { @@ -598,7 +558,7 @@ public GeoJsonPoint convert(Document source) { } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "Point"), - String.format("Cannot convert type '%s' to Point.", source.get("type"))); + String.format("Cannot convert type '%s' to Point", source.get("type"))); List dbl = (List) source.get("coordinates"); return new 
GeoJsonPoint(toPrimitiveDoubleValue(dbl.get(0)), toPrimitiveDoubleValue(dbl.get(1))); @@ -609,14 +569,10 @@ public GeoJsonPoint convert(Document source) { * @author Christoph Strobl * @since 1.7 */ - static enum DocumentToGeoJsonPolygonConverter implements Converter { + enum DocumentToGeoJsonPolygonConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public GeoJsonPolygon convert(Document source) { @@ -625,9 +581,9 @@ public GeoJsonPolygon convert(Document source) { } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "Polygon"), - String.format("Cannot convert type '%s' to Polygon.", source.get("type"))); + String.format("Cannot convert type '%s' to Polygon", source.get("type"))); - return toGeoJsonPolygon((List) source.get("coordinates")); + return toGeoJsonPolygon((List) source.get("coordinates")); } } @@ -635,14 +591,10 @@ public GeoJsonPolygon convert(Document source) { * @author Christoph Strobl * @since 1.7 */ - static enum DocumentToGeoJsonMultiPolygonConverter implements Converter { + enum DocumentToGeoJsonMultiPolygonConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public GeoJsonMultiPolygon convert(Document source) { @@ -651,13 +603,13 @@ public GeoJsonMultiPolygon convert(Document source) { } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "MultiPolygon"), - String.format("Cannot convert type '%s' to MultiPolygon.", source.get("type"))); + String.format("Cannot convert type '%s' to MultiPolygon", source.get("type"))); - List dbl = (List) source.get("coordinates"); - List polygones = new ArrayList(); + List dbl = (List) source.get("coordinates"); + List polygones = new ArrayList<>(); for (Object polygon : dbl) { - polygones.add(toGeoJsonPolygon((List) polygon)); + 
polygones.add(toGeoJsonPolygon((List) polygon)); } return new GeoJsonMultiPolygon(polygones); @@ -668,14 +620,10 @@ public GeoJsonMultiPolygon convert(Document source) { * @author Christoph Strobl * @since 1.7 */ - static enum DocumentToGeoJsonLineStringConverter implements Converter { + enum DocumentToGeoJsonLineStringConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public GeoJsonLineString convert(Document source) { @@ -684,9 +632,9 @@ public GeoJsonLineString convert(Document source) { } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "LineString"), - String.format("Cannot convert type '%s' to LineString.", source.get("type"))); + String.format("Cannot convert type '%s' to LineString", source.get("type"))); - List cords = (List) source.get("coordinates"); + List cords = (List) source.get("coordinates"); return new GeoJsonLineString(toListOfPoint(cords)); } @@ -696,14 +644,10 @@ public GeoJsonLineString convert(Document source) { * @author Christoph Strobl * @since 1.7 */ - static enum DocumentToGeoJsonMultiPointConverter implements Converter { + enum DocumentToGeoJsonMultiPointConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public GeoJsonMultiPoint convert(Document source) { @@ -712,9 +656,9 @@ public GeoJsonMultiPoint convert(Document source) { } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "MultiPoint"), - String.format("Cannot convert type '%s' to MultiPoint.", source.get("type"))); + String.format("Cannot convert type '%s' to MultiPoint", source.get("type"))); - List cords = (List) source.get("coordinates"); + List cords = (List) source.get("coordinates"); return new GeoJsonMultiPoint(toListOfPoint(cords)); } @@ -724,14 +668,10 @@ public GeoJsonMultiPoint convert(Document source) { * @author 
Christoph Strobl * @since 1.7 */ - static enum DocumentToGeoJsonMultiLineStringConverter implements Converter { + enum DocumentToGeoJsonMultiLineStringConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public GeoJsonMultiLineString convert(Document source) { @@ -740,13 +680,13 @@ public GeoJsonMultiLineString convert(Document source) { } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "MultiLineString"), - String.format("Cannot convert type '%s' to MultiLineString.", source.get("type"))); + String.format("Cannot convert type '%s' to MultiLineString", source.get("type"))); - List lines = new ArrayList(); - List cords = (List) source.get("coordinates"); + List lines = new ArrayList<>(); + List cords = (List) source.get("coordinates"); for (Object line : cords) { - lines.add(new GeoJsonLineString(toListOfPoint((List) line))); + lines.add(new GeoJsonLineString(toListOfPoint((List) line))); } return new GeoJsonMultiLineString(lines); } @@ -756,14 +696,10 @@ public GeoJsonMultiLineString convert(Document source) { * @author Christoph Strobl * @since 1.7 */ - static enum DocumentToGeoJsonGeometryCollectionConverter implements Converter { + enum DocumentToGeoJsonGeometryCollectionConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @SuppressWarnings("rawtypes") @Override public GeoJsonGeometryCollection convert(Document source) { @@ -773,43 +709,14 @@ public GeoJsonGeometryCollection convert(Document source) { } Assert.isTrue(ObjectUtils.nullSafeEquals(source.get("type"), "GeometryCollection"), - String.format("Cannot convert type '%s' to GeometryCollection.", source.get("type"))); + String.format("Cannot convert type '%s' to GeometryCollection", source.get("type"))); - List> geometries = new ArrayList>(); + List> geometries = new 
ArrayList<>(); for (Object o : (List) source.get("geometries")) { - geometries.add(convertGeometries((Document) o)); - } - return new GeoJsonGeometryCollection(geometries); - - } - - private static GeoJson convertGeometries(Document source) { - - Object type = source.get("type"); - if (ObjectUtils.nullSafeEquals(type, "Point")) { - return DocumentToGeoJsonPointConverter.INSTANCE.convert(source); - } - - if (ObjectUtils.nullSafeEquals(type, "MultiPoint")) { - return DocumentToGeoJsonMultiPointConverter.INSTANCE.convert(source); - } - - if (ObjectUtils.nullSafeEquals(type, "LineString")) { - return DocumentToGeoJsonLineStringConverter.INSTANCE.convert(source); - } - - if (ObjectUtils.nullSafeEquals(type, "MultiLineString")) { - return DocumentToGeoJsonMultiLineStringConverter.INSTANCE.convert(source); - } - - if (ObjectUtils.nullSafeEquals(type, "Polygon")) { - return DocumentToGeoJsonPolygonConverter.INSTANCE.convert(source); - } - if (ObjectUtils.nullSafeEquals(type, "MultiPolygon")) { - return DocumentToGeoJsonMultiPolygonConverter.INSTANCE.convert(source); + geometries.add(toGenericGeoJson((Document) o)); } - throw new IllegalArgumentException(String.format("Cannot convert unknown GeoJson type %s", type)); + return new GeoJsonGeometryCollection(geometries); } } @@ -818,16 +725,16 @@ static List toList(Point point) { } /** - * Converts a coordinate pairs nested in in {@link BasicDBList} into {@link GeoJsonPoint}s. + * Converts a coordinate pairs nested in {@link List} into {@link GeoJsonPoint}s. * - * @param listOfCoordinatePairs - * @return + * @param listOfCoordinatePairs must not be {@literal null}. + * @return never {@literal null}. 
* @since 1.7 */ @SuppressWarnings("unchecked") - static List toListOfPoint(List listOfCoordinatePairs) { + static List toListOfPoint(List listOfCoordinatePairs) { - List points = new ArrayList(); + List points = new ArrayList<>(listOfCoordinatePairs.size()); for (Object point : listOfCoordinatePairs) { @@ -842,19 +749,54 @@ static List toListOfPoint(List listOfCoordinatePairs) { } /** - * Converts a coordinate pairs nested in in {@link BasicDBList} into {@link GeoJsonPolygon}. + * Converts a coordinate pairs nested in {@link List} into {@link GeoJsonPolygon}. * - * @param dbList - * @return + * @param dbList must not be {@literal null}. + * @return never {@literal null}. * @since 1.7 */ - static GeoJsonPolygon toGeoJsonPolygon(List dbList) { - return new GeoJsonPolygon(toListOfPoint((List) dbList.get(0))); + static GeoJsonPolygon toGeoJsonPolygon(List dbList) { + + GeoJsonPolygon polygon = new GeoJsonPolygon(toListOfPoint((List) dbList.get(0))); + return dbList.size() > 1 ? polygon.withInnerRing(toListOfPoint((List) dbList.get(1))) : polygon; + } + + /** + * Converter implementation transforming a {@link Document} into a concrete {@link GeoJson} based on the embedded + * {@literal type} information. 
+ * + * @since 2.1 + * @author Christoph Strobl + */ + @ReadingConverter + enum DocumentToGeoJsonConverter implements Converter> { + INSTANCE; + + @Override + public GeoJson convert(Document source) { + return toGenericGeoJson(source); + } + } + + private static GeoJson toGenericGeoJson(Document source) { + + String type = source.get("type", String.class); + + if (type != null) { + + Function> converter = converters.get(type); + + if (converter != null) { + return converter.apply(source); + } + } + + throw new IllegalArgumentException(String.format("No converter found capable of converting GeoJson type %s", type)); } private static double toPrimitiveDoubleValue(Object value) { - Assert.isInstanceOf(Number.class, value, "Argument must be a Number."); - return NumberUtils.convertNumberToTargetClass((Number) value, Double.class).doubleValue(); + Assert.isInstanceOf(Number.class, value, "Argument must be a Number"); + return NumberUtils.convertNumberToTargetClass((Number) value, Double.class); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/JsonSchemaMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/JsonSchemaMapper.java index 1530221d3c..0afba67025 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/JsonSchemaMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/JsonSchemaMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java index 455a02801d..77aac55813 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,18 @@ */ package org.springframework.data.mongodb.core.convert; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor; import org.springframework.lang.Nullable; import com.mongodb.DBRef; /** - * Allows direct interaction with the underlying {@link LazyLoadingInterceptor}. + * Allows direct interaction with the underlying {@code LazyLoadingInterceptor}. 
* * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch * @since 1.5 + * @see LazyLoadingProxyFactory */ public interface LazyLoadingProxy { @@ -46,4 +46,15 @@ public interface LazyLoadingProxy { */ @Nullable DBRef toDBRef(); + + /** + * Returns the raw {@literal source} object that defines the reference. + * + * @return can be {@literal null}. + * @since 3.3 + */ + @Nullable + default Object getSource() { + return toDBRef(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java new file mode 100644 index 0000000000..76539ea431 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java @@ -0,0 +1,386 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import static org.springframework.util.ReflectionUtils.*; + +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.lang.reflect.Method; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.function.Supplier; + +import org.aopalliance.intercept.MethodInterceptor; +import org.aopalliance.intercept.MethodInvocation; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.cglib.core.SpringNamingPolicy; +import org.springframework.cglib.proxy.Callback; +import org.springframework.cglib.proxy.Enhancer; +import org.springframework.cglib.proxy.Factory; +import org.springframework.cglib.proxy.MethodProxy; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.ClientSessionException; +import org.springframework.data.mongodb.LazyLoadingException; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.Lock; +import org.springframework.data.util.Lock.AcquiredLock; +import org.springframework.lang.Nullable; +import org.springframework.objenesis.SpringObjenesis; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.DBRef; + +/** + * {@link ProxyFactory} to create a proxy for {@link MongoPersistentProperty#getType()} to resolve a reference lazily. + * NOTE: This class is intended for internal usage only. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +public final class LazyLoadingProxyFactory { + + private static final Log LOGGER = LogFactory.getLog(LazyLoadingProxyFactory.class); + + private final SpringObjenesis objenesis; + + private final PersistenceExceptionTranslator exceptionTranslator; + + private LazyLoadingProxyFactory() { + this(ex -> null); + } + + public LazyLoadingProxyFactory(PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator; + this.objenesis = new SpringObjenesis(null); + } + + /** + * Predict the proxy target type. This will advice the infrastructure to resolve as many pieces as possible in a + * potential AOT scenario without necessarily resolving the entire object. + * + * @param propertyType the type to proxy + * @param interceptor the interceptor to be added. + * @return the proxy type. + * @since 4.0 + */ + public static Class resolveProxyType(Class propertyType, Supplier interceptor) { + + LazyLoadingProxyFactory factory = new LazyLoadingProxyFactory(); + + if (!propertyType.isInterface()) { + return factory.getEnhancedTypeFor(propertyType); + } + + return factory.prepareProxyFactory(propertyType, interceptor) + .getProxyClass(LazyLoadingProxy.class.getClassLoader()); + } + + /** + * Create the {@link ProxyFactory} for the given type, already adding required additional interfaces. + * + * @param targetType the type to proxy. + * @return the prepared {@link ProxyFactory}. 
+ * @since 4.0.5 + */ + public static ProxyFactory prepareFactory(Class targetType) { + + ProxyFactory proxyFactory = new ProxyFactory(); + + for (Class type : targetType.getInterfaces()) { + proxyFactory.addInterface(type); + } + + proxyFactory.addInterface(LazyLoadingProxy.class); + proxyFactory.addInterface(targetType); + + return proxyFactory; + } + + private ProxyFactory prepareProxyFactory(Class propertyType, Supplier interceptor) { + + ProxyFactory proxyFactory = prepareFactory(propertyType); + proxyFactory.addAdvice(interceptor.get()); + + return proxyFactory; + } + + public Object createLazyLoadingProxy(MongoPersistentProperty property, DbRefResolverCallback callback, + Object source) { + + Class propertyType = property.getType(); + LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, callback, source, exceptionTranslator); + + if (!propertyType.isInterface()) { + + Factory factory = (Factory) objenesis.newInstance(getEnhancedTypeFor(propertyType)); + factory.setCallbacks(new Callback[] { interceptor }); + + return factory; + } + + return prepareProxyFactory(propertyType, + () -> new LazyLoadingInterceptor(property, callback, source, exceptionTranslator)) + .getProxy(LazyLoadingProxy.class.getClassLoader()); + } + + /** + * Returns the CGLib enhanced type for the given source type. 
+ * + * @param type + * @return + */ + private Class getEnhancedTypeFor(Class type) { + + Enhancer enhancer = new Enhancer(); + enhancer.setSuperclass(type); + enhancer.setCallbackType(LazyLoadingInterceptor.class); + enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class }); + enhancer.setNamingPolicy(SpringNamingPolicy.INSTANCE); + enhancer.setAttemptLoad(true); + + return enhancer.createClass(); + } + + public static class LazyLoadingInterceptor + implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable { + + private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD, GET_SOURCE_METHOD; + + static { + try { + INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget"); + TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef"); + FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize"); + GET_SOURCE_METHOD = LazyLoadingProxy.class.getMethod("getSource"); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private final ReadWriteLock rwLock = new ReentrantReadWriteLock(); + private final Lock readLock = Lock.of(rwLock.readLock()); + private final Lock writeLock = Lock.of(rwLock.writeLock()); + + private final MongoPersistentProperty property; + private final DbRefResolverCallback callback; + private final Object source; + private final PersistenceExceptionTranslator exceptionTranslator; + private volatile boolean resolved; + private @Nullable Object result; + + /** + * @return a {@link LazyLoadingInterceptor} that just continues with the invocation. 
+ * @since 4.0 + */ + public static LazyLoadingInterceptor none() { + + return new LazyLoadingInterceptor(null, null, null, null) { + @Nullable + @Override + public Object invoke(MethodInvocation invocation) throws Throwable { + return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null); + } + + @Nullable + @Override + public Object intercept(Object o, Method method, Object[] args, MethodProxy proxy) throws Throwable { + + ReflectionUtils.makeAccessible(method); + return method.invoke(o, args); + } + }; + } + + public LazyLoadingInterceptor(MongoPersistentProperty property, DbRefResolverCallback callback, Object source, + PersistenceExceptionTranslator exceptionTranslator) { + + this.property = property; + this.callback = callback; + this.source = source; + this.exceptionTranslator = exceptionTranslator; + } + + @Nullable + @Override + public Object invoke(MethodInvocation invocation) throws Throwable { + return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null); + } + + @Nullable + @Override + public Object intercept(Object o, Method method, Object[] args, MethodProxy proxy) throws Throwable { + + if (INITIALIZE_METHOD.equals(method)) { + return ensureResolved(); + } + + if (TO_DBREF_METHOD.equals(method)) { + return source instanceof DBRef ? 
source : null; + } + + if (GET_SOURCE_METHOD.equals(method)) { + return source; + } + + if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) { + + if (ReflectionUtils.isToStringMethod(method)) { + return proxyToString(source); + } + + if (ReflectionUtils.isEqualsMethod(method)) { + return proxyEquals(o, args[0]); + } + + if (ReflectionUtils.isHashCodeMethod(method)) { + return proxyHashCode(); + } + + // DATAMONGO-1076 - finalize methods should not trigger proxy initialization + if (FINALIZE_METHOD.equals(method)) { + return null; + } + } + + Object target = ensureResolved(); + + if (target == null) { + return null; + } + + ReflectionUtils.makeAccessible(method); + + return method.invoke(target, args); + } + + @Nullable + private Object ensureResolved() { + + if (!resolved) { + this.result = resolve(); + this.resolved = true; + } + + return this.result; + } + + private String proxyToString(@Nullable Object source) { + + StringBuilder description = new StringBuilder(); + if (source != null) { + if (source instanceof DBRef dbRef) { + description.append(dbRef.getCollectionName()); + description.append(":"); + description.append(dbRef.getId()); + } else { + description.append(source); + } + } else { + description.append(0); + } + description.append("$").append(LazyLoadingProxy.class.getSimpleName()); + + return description.toString(); + } + + private boolean proxyEquals(@Nullable Object proxy, Object that) { + + if (!(that instanceof LazyLoadingProxy)) { + return false; + } + + if (that == proxy) { + return true; + } + + return proxyToString(proxy).equals(that.toString()); + } + + private int proxyHashCode() { + return proxyToString(source).hashCode(); + } + + /** + * Callback method for serialization. + * + * @param out + * @throws IOException + */ + private void writeObject(ObjectOutputStream out) throws IOException { + + ensureResolved(); + out.writeObject(this.result); + } + + /** + * Callback method for deserialization. 
+ * + * @param in + * @throws IOException + */ + private void readObject(ObjectInputStream in) throws IOException { + + try { + this.resolved = true; + this.result = in.readObject(); + } catch (ClassNotFoundException e) { + throw new LazyLoadingException("Could not deserialize result", e); + } + } + + @Nullable + private Object resolve() { + + try (AcquiredLock l = readLock.lock()) { + if (resolved) { + + if (LOGGER.isTraceEnabled()) { + LOGGER.trace(String.format("Accessing already resolved lazy loading property %s.%s", + property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName())); + } + return result; + } + } + + if (LOGGER.isTraceEnabled()) { + LOGGER.trace(String.format("Resolving lazy loading property %s.%s", + property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName())); + } + + try { + return writeLock.execute(() -> callback.resolve(property)); + } catch (RuntimeException ex) { + + DataAccessException translatedException = exceptionTranslator.translateExceptionIfPossible(ex); + + if (translatedException instanceof ClientSessionException) { + throw new LazyLoadingException("Unable to lazily resolve DBRef; Invalid session state", ex); + } + + throw new LazyLoadingException("Unable to lazily resolve DBRef", + translatedException != null ? translatedException : ex); + } + } + + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index d51a65075a..864cc1c3e3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,8 @@ */ package org.springframework.data.mongodb.core.convert; +import java.lang.reflect.Constructor; +import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -23,48 +25,82 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.Optional; import java.util.Set; +import java.util.function.BiPredicate; +import java.util.stream.Collectors; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.bson.Document; +import org.bson.codecs.Codec; +import org.bson.codecs.DecoderContext; +import org.bson.codecs.configuration.CodecRegistry; import org.bson.conversions.Bson; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.bson.json.JsonReader; +import org.bson.types.ObjectId; + import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanClassLoaderAware; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; import org.springframework.core.CollectionFactory; import org.springframework.core.convert.ConversionService; +import org.springframework.core.convert.TypeDescriptor; import org.springframework.core.convert.support.DefaultConversionService; -import org.springframework.data.convert.EntityInstantiator; +import org.springframework.core.env.Environment; +import org.springframework.core.env.EnvironmentCapable; +import org.springframework.core.env.StandardEnvironment; +import 
org.springframework.data.annotation.Reference; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.convert.PropertyValueConverter; import org.springframework.data.convert.TypeMapper; +import org.springframework.data.convert.ValueConversionContext; import org.springframework.data.mapping.Association; +import org.springframework.data.mapping.InstanceCreatorMetadata; import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.Parameter; +import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mapping.PersistentPropertyAccessor; -import org.springframework.data.mapping.PreferredConstructor.Parameter; +import org.springframework.data.mapping.callback.EntityCallbacks; import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.CachingValueExpressionEvaluatorFactory; import org.springframework.data.mapping.model.ConvertingPropertyAccessor; -import org.springframework.data.mapping.model.DefaultSpELExpressionEvaluator; +import org.springframework.data.mapping.model.EntityInstantiator; import org.springframework.data.mapping.model.ParameterValueProvider; import org.springframework.data.mapping.model.PersistentEntityParameterValueProvider; import org.springframework.data.mapping.model.PropertyValueProvider; import org.springframework.data.mapping.model.SpELContext; -import org.springframework.data.mapping.model.SpELExpressionEvaluator; import org.springframework.data.mapping.model.SpELExpressionParameterValueProvider; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.mapping.model.ValueExpressionParameterValueProvider; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import 
org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.DocumentPointer; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.PersistentPropertyTranslator; +import org.springframework.data.mongodb.core.mapping.Unwrapped; +import org.springframework.data.mongodb.core.mapping.Unwrapped.OnEmpty; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent; import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; -import org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.util.Predicates; import org.springframework.data.util.TypeInformation; +import org.springframework.expression.spel.standard.SpelExpressionParser; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; import com.mongodb.BasicDBList; import com.mongodb.BasicDBObject; @@ -82,24 +118,59 @@ * @author Christoph Strobl * @author Jordi Llach * @author Mark Paluch + * @author Roman Puchkovskiy + * @author Heesu Jung + * @author Divya Srivastava + * @author Julia Lee */ -public class MappingMongoConverter extends AbstractMongoConverter 
implements ApplicationContextAware, ValueResolver { +public class MappingMongoConverter extends AbstractMongoConverter + implements ApplicationContextAware, EnvironmentCapable { + + private static final String INCOMPATIBLE_TYPES = "Cannot convert %1$s of type %2$s into an instance of %3$s; Implement a custom Converter<%2$s, %3$s> and register it with the CustomConversions; Parent object was: %4$s"; + private static final String INVALID_TYPE_TO_READ = "Expected to read Document %s into type %s but didn't find a PersistentEntity for the latter"; + + private static final BiPredicate, MongoPersistentProperty> PROPERTY_FILTER = (e, + property) -> { + + if (e.isIdProperty(property)) { + return false; + } + + if (e.isCreatorArgument(property)) { + return false; + } + + if (!property.isReadable()) { + return false; + } + return true; + }; - private static final String INCOMPATIBLE_TYPES = "Cannot convert %1$s of type %2$s into an instance of %3$s! Implement a custom Converter<%2$s, %3$s> and register it with the CustomConversions. 
Parent object was: %4$s"; - private static final String INVALID_TYPE_TO_READ = "Expected to read Document %s into type %s but didn't find a PersistentEntity for the latter!"; + public static final TypeInformation BSON = TypeInformation.of(Bson.class); - protected static final Logger LOGGER = LoggerFactory.getLogger(MappingMongoConverter.class); + protected static final Log LOGGER = LogFactory.getLog(MappingMongoConverter.class); protected final MappingContext, MongoPersistentProperty> mappingContext; protected final QueryMapper idMapper; protected final DbRefResolver dbRefResolver; protected final DefaultDbRefProxyHandler dbRefProxyHandler; + protected final ReferenceLookupDelegate referenceLookupDelegate; protected @Nullable ApplicationContext applicationContext; + protected @Nullable Environment environment; protected MongoTypeMapper typeMapper; protected @Nullable String mapKeyDotReplacement = null; + protected @Nullable CodecRegistryProvider codecRegistryProvider; + private MongoTypeMapper defaultTypeMapper; private SpELContext spELContext; + private @Nullable EntityCallbacks entityCallbacks; + private final SpelExpressionParser expressionParser = new SpelExpressionParser(); + private final DocumentPointerFactory documentPointerFactory; + private final SpelAwareProxyProjectionFactory projectionFactory = new SpelAwareProxyProjectionFactory( + expressionParser); + private final CachingValueExpressionEvaluatorFactory expressionEvaluatorFactory = new CachingValueExpressionEvaluatorFactory( + expressionParser, this, o -> spELContext.getEvaluationContext(o)); /** * Creates a new {@link MappingMongoConverter} given the new {@link DbRefResolver} and {@link MappingContext}. 
@@ -112,29 +183,53 @@ public MappingMongoConverter(DbRefResolver dbRefResolver, super(new DefaultConversionService()); - Assert.notNull(dbRefResolver, "DbRefResolver must not be null!"); - Assert.notNull(mappingContext, "MappingContext must not be null!"); + Assert.notNull(dbRefResolver, "DbRefResolver must not be null"); + Assert.notNull(mappingContext, "MappingContext must not be null"); this.dbRefResolver = dbRefResolver; + this.mappingContext = mappingContext; - this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext); + this.defaultTypeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext, + this::getWriteTarget); this.idMapper = new QueryMapper(this); this.spELContext = new SpELContext(DocumentPropertyAccessor.INSTANCE); - this.dbRefProxyHandler = new DefaultDbRefProxyHandler(spELContext, mappingContext, MappingMongoConverter.this); + this.dbRefProxyHandler = new DefaultDbRefProxyHandler(mappingContext, (prop, bson, evaluator, path) -> { + + ConversionContext context = getConversionContext(path); + return MappingMongoConverter.this.getValueInternal(context, prop, bson, evaluator); + }, expressionEvaluatorFactory::create); + + this.referenceLookupDelegate = new ReferenceLookupDelegate(mappingContext, spELContext); + this.documentPointerFactory = new DocumentPointerFactory(conversionService, mappingContext); + } + + /** + * Creates a new {@link ConversionContext} given {@link ObjectPath}. + * + * @param path the current {@link ObjectPath}, must not be {@literal null}. + * @return the {@link ConversionContext}. 
+ */ + protected ConversionContext getConversionContext(ObjectPath path) { + + Assert.notNull(path, "ObjectPath must not be null"); + + return new DefaultConversionContext(this, conversions, path, this::readDocument, this::readCollectionOrArray, + this::readMap, this::readDBRef, this::getPotentiallyConvertedSimpleRead); } /** - * Creates a new {@link MappingMongoConverter} given the new {@link MongoDbFactory} and {@link MappingContext}. + * Creates a new {@link MappingMongoConverter} given the new {@link MongoDatabaseFactory} and {@link MappingContext}. * * @deprecated use the constructor taking a {@link DbRefResolver} instead. * @param mongoDbFactory must not be {@literal null}. * @param mappingContext must not be {@literal null}. */ @Deprecated - public MappingMongoConverter(MongoDbFactory mongoDbFactory, + public MappingMongoConverter(MongoDatabaseFactory mongoDbFactory, MappingContext, MongoPersistentProperty> mappingContext) { this(new DefaultDbRefResolver(mongoDbFactory), mappingContext); + setCodecRegistryProvider(mongoDbFactory); } /** @@ -146,25 +241,32 @@ public MappingMongoConverter(MongoDbFactory mongoDbFactory, * @param typeMapper the typeMapper to set. Can be {@literal null}. */ public void setTypeMapper(@Nullable MongoTypeMapper typeMapper) { - this.typeMapper = typeMapper == null - ? new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext) - : typeMapper; + this.typeMapper = typeMapper; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.MongoConverter#getTypeMapper() - */ @Override public MongoTypeMapper getTypeMapper() { - return this.typeMapper; + return this.typeMapper == null ? 
this.defaultTypeMapper : this.typeMapper; + } + + @Override + public ProjectionFactory getProjectionFactory() { + return projectionFactory; + } + + @Override + public CustomConversions getCustomConversions() { + return conversions; } /** - * Configure the characters dots potentially contained in a {@link Map} shall be replaced with. By default we don't do - * any translation but rather reject a {@link Map} with keys containing dots causing the conversion for the entire + * Configure the characters dots potentially contained in a {@link Map} shall be replaced with. By default, we don't + * do any translation but rather reject a {@link Map} with keys containing dots causing the conversion for the entire * object to fail. If further customization of the translation is needed, have a look at * {@link #potentiallyEscapeMapKey(String)} as well as {@link #potentiallyUnescapeMapKey(String)}. + *

          + * {@code mapKeyDotReplacement} is used as-is during replacement operations without further processing (i.e. regex or + * normalization). * * @param mapKeyDotReplacement the mapKeyDotReplacement to set. Can be {@literal null}. */ @@ -172,164 +274,403 @@ public void setMapKeyDotReplacement(@Nullable String mapKeyDotReplacement) { this.mapKeyDotReplacement = mapKeyDotReplacement; } - /* - * (non-Javadoc) - * @see org.springframework.data.convert.EntityConverter#getMappingContext() + /** + * If {@link #preserveMapKeys(boolean) preserve} is set to {@literal true} the conversion will treat map keys + * containing dot ({@literal .}) characters as is. + * + * @since 4.2 + * @see #setMapKeyDotReplacement(String) + */ + public void preserveMapKeys(boolean preserve) { + setMapKeyDotReplacement(preserve ? "." : null); + } + + /** + * Configure a {@link CodecRegistryProvider} that provides native MongoDB {@link org.bson.codecs.Codec codecs} for + * reading values. + * + * @param codecRegistryProvider can be {@literal null}. 
+ * @since 2.2 */ + public void setCodecRegistryProvider(@Nullable CodecRegistryProvider codecRegistryProvider) { + this.codecRegistryProvider = codecRegistryProvider; + } + + @Override public MappingContext, MongoPersistentProperty> getMappingContext() { return mappingContext; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) - */ + @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { this.applicationContext = applicationContext; + this.environment = applicationContext.getEnvironment(); this.spELContext = new SpELContext(this.spELContext, applicationContext); + this.projectionFactory.setBeanFactory(applicationContext); + this.projectionFactory.setBeanClassLoader(applicationContext.getClassLoader()); + + if (entityCallbacks == null) { + setEntityCallbacks(EntityCallbacks.create(applicationContext)); + } + + ClassLoader classLoader = applicationContext.getClassLoader(); + if (this.defaultTypeMapper instanceof BeanClassLoaderAware beanClassLoaderAware && classLoader != null) { + beanClassLoaderAware.setBeanClassLoader(classLoader); + } + } + + @Override + public Environment getEnvironment() { + + if (environment == null) { + environment = new StandardEnvironment(); + } + return environment; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoReader#read(java.lang.Class, com.mongodb.Document) + /** + * Set the {@link EntityCallbacks} instance to use when invoking + * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link AfterConvertCallback}. + *
          + * Overrides potentially existing {@link EntityCallbacks}. + * + * @param entityCallbacks must not be {@literal null}. + * @throws IllegalArgumentException if the given instance is {@literal null}. + * @since 3.0 */ - public S read(Class clazz, final Bson bson) { - return read(ClassTypeInformation.from(clazz), bson); + public void setEntityCallbacks(EntityCallbacks entityCallbacks) { + + Assert.notNull(entityCallbacks, "EntityCallbacks must not be null"); + this.entityCallbacks = entityCallbacks; } - protected S read(TypeInformation type, Bson bson) { - return read(type, bson, ObjectPath.ROOT); + @Override + public R project(EntityProjection projection, Bson bson) { + + if (!projection.isProjection()) { // backed by real object + + TypeInformation typeToRead = projection.getMappedType().getType().isInterface() ? projection.getDomainType() + : projection.getMappedType(); + return (R) read(typeToRead, bson); + } + + ProjectingConversionContext context = new ProjectingConversionContext(this, conversions, ObjectPath.ROOT, + this::readCollectionOrArray, this::readMap, this::readDBRef, this::getPotentiallyConvertedSimpleRead, + projection); + + return doReadProjection(context, bson, projection); } - @Nullable @SuppressWarnings("unchecked") - private S read(TypeInformation type, @Nullable Bson bson, ObjectPath path) { + private R doReadProjection(ConversionContext context, Bson bson, EntityProjection projection) { - if (null == bson) { - return null; + MongoPersistentEntity entity = getMappingContext().getRequiredPersistentEntity(projection.getActualDomainType()); + TypeInformation mappedType = projection.getActualMappedType(); + MongoPersistentEntity mappedEntity = (MongoPersistentEntity) getMappingContext() + .getPersistentEntity(mappedType); + ValueExpressionEvaluator evaluator = expressionEvaluatorFactory.create(bson); + + boolean isInterfaceProjection = mappedType.getType().isInterface(); + if (isInterfaceProjection) { + + PersistentPropertyTranslator 
propertyTranslator = PersistentPropertyTranslator.create(mappedEntity); + DocumentAccessor documentAccessor = new DocumentAccessor(bson); + PersistentPropertyAccessor accessor = new MapPersistentPropertyAccessor(); + + PersistentPropertyAccessor convertingAccessor = PropertyTranslatingPropertyAccessor + .create(new ConvertingPropertyAccessor<>(accessor, conversionService), propertyTranslator); + MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(context, documentAccessor, + evaluator, spELContext); + + readProperties(context, entity, convertingAccessor, documentAccessor, valueProvider, evaluator, + (mongoPersistentProperties, mongoPersistentProperty) -> true); + return (R) projectionFactory.createProjection(mappedType.getType(), accessor.getBean()); } - TypeInformation typeToUse = typeMapper.readType(bson, type); - Class rawType = typeToUse.getType(); + // DTO projection + if (mappedEntity == null) { + throw new MappingException(String.format("No mapping metadata found for %s", mappedType.getType().getName())); + } - if (conversions.hasCustomReadTarget(bson.getClass(), rawType)) { - return conversionService.convert(bson, rawType); + // create target instance, merge metadata from underlying DTO type + PersistentPropertyTranslator propertyTranslator = PersistentPropertyTranslator.create(entity, + Predicates.negate(MongoPersistentProperty::hasExplicitFieldName)); + DocumentAccessor documentAccessor = new DocumentAccessor(bson) { + @Override + FieldName getFieldName(MongoPersistentProperty prop) { + return propertyTranslator.translate(prop).getMongoField().getName(); + } + }; + + InstanceCreatorMetadata instanceCreatorMetadata = mappedEntity + .getInstanceCreatorMetadata(); + ParameterValueProvider provider = instanceCreatorMetadata != null + && instanceCreatorMetadata.hasParameters() + ? 
getParameterProvider(context, mappedEntity, documentAccessor, evaluator) + : NoOpParameterValueProvider.INSTANCE; + + EntityInstantiator instantiator = instantiators.getInstantiatorFor(mappedEntity); + R instance = instantiator.createInstance(mappedEntity, provider); + PersistentPropertyAccessor accessor = mappedEntity.getPropertyAccessor(instance); + + populateProperties(context, mappedEntity, documentAccessor, evaluator, instance); + + return accessor.getBean(); + } + + private Object doReadOrProject(ConversionContext context, Bson source, TypeInformation typeHint, + EntityProjection typeDescriptor) { + + if (typeDescriptor.isProjection()) { + return doReadProjection(context, BsonUtils.asDocument(source), typeDescriptor); } - if (DBObject.class.isAssignableFrom(rawType)) { - return (S) bson; + return readDocument(context, source, typeHint); + } + + static class MapPersistentPropertyAccessor implements PersistentPropertyAccessor> { + + Map map = new LinkedHashMap<>(); + + @Override + public void setProperty(PersistentProperty persistentProperty, Object o) { + map.put(persistentProperty.getName(), o); } - if (Document.class.isAssignableFrom(rawType)) { - return (S) bson; + @Override + public Object getProperty(PersistentProperty persistentProperty) { + return map.get(persistentProperty.getName()); } - if (typeToUse.isCollectionLike() && bson instanceof List) { - return (S) readCollectionOrArray(typeToUse, (List) bson, path); + @Override + public Map getBean() { + return map; } + } + + @Override + public S read(Class clazz, Bson bson) { + return read(TypeInformation.of(clazz), bson); + } + + protected S read(TypeInformation type, Bson bson) { + return readDocument(getConversionContext(ObjectPath.ROOT), bson, type); + } + + /** + * Conversion method to materialize an object from a {@link Bson document}. Can be overridden by subclasses. 
+ * + * @param context must not be {@literal null} + * @param bson must not be {@literal null} + * @param typeHint the {@link TypeInformation} to be used to unmarshall this {@link Document}. + * @return the converted object, will never be {@literal null}. + * @since 3.2 + */ + @SuppressWarnings("unchecked") + protected S readDocument(ConversionContext context, Bson bson, + TypeInformation typeHint) { - if (typeToUse.isMap()) { - return (S) readMap(typeToUse, bson, path); + Document document = bson instanceof BasicDBObject dbObject ? new Document(dbObject) : (Document) bson; + TypeInformation typeToRead = getTypeMapper().readType(document, typeHint); + Class rawType = typeToRead.getType(); + + if (conversions.hasCustomReadTarget(bson.getClass(), rawType)) { + return doConvert(bson, rawType, typeHint.getType()); } - if (bson instanceof Collection) { - throw new MappingException(String.format(INCOMPATIBLE_TYPES, bson, BasicDBList.class, typeToUse.getType(), path)); + if (Document.class.isAssignableFrom(rawType)) { + return (S) bson; } - if (typeToUse.equals(ClassTypeInformation.OBJECT)) { + if (DBObject.class.isAssignableFrom(rawType)) { + + if (bson instanceof DBObject) { + return (S) bson; + } + + if (bson instanceof Document doc) { + return (S) new BasicDBObject(doc); + } + return (S) bson; } - // Retrieve persistent entity info - Document target = bson instanceof BasicDBObject ? 
new Document((BasicDBObject) bson) : (Document) bson; + if (typeToRead.isMap()) { + return context.convert(bson, typeToRead); + } + + if (BSON.isAssignableFrom(typeHint)) { + return (S) bson; + } - MongoPersistentEntity entity = mappingContext.getPersistentEntity(typeToUse); + MongoPersistentEntity entity = mappingContext.getPersistentEntity(typeToRead); if (entity == null) { - throw new MappingException(String.format(INVALID_TYPE_TO_READ, target, typeToUse.getType())); + + if (codecRegistryProvider != null) { + + Optional> codec = codecRegistryProvider.getCodecFor(rawType); + if (codec.isPresent()) { + return codec.get().decode(new JsonReader(document.toJson()), DecoderContext.builder().build()); + } + } + + throw new MappingException(String.format(INVALID_TYPE_TO_READ, document, rawType)); } - return read((MongoPersistentEntity) mappingContext.getRequiredPersistentEntity(typeToUse), target, path); + return read(context, (MongoPersistentEntity) entity, document); } - private ParameterValueProvider getParameterProvider(MongoPersistentEntity entity, - Bson source, DefaultSpELExpressionEvaluator evaluator, ObjectPath path) { + private ParameterValueProvider getParameterProvider(ConversionContext context, + MongoPersistentEntity entity, DocumentAccessor source, ValueExpressionEvaluator evaluator) { - MongoDbPropertyValueProvider provider = new MongoDbPropertyValueProvider(source, evaluator, path); + AssociationAwareMongoDbPropertyValueProvider provider = new AssociationAwareMongoDbPropertyValueProvider(context, + source, evaluator); PersistentEntityParameterValueProvider parameterProvider = new PersistentEntityParameterValueProvider<>( - entity, provider, path.getCurrentObject()); + entity, provider, context.getPath().getCurrentObject()); - return new ConverterAwareSpELExpressionParameterValueProvider(evaluator, conversionService, parameterProvider, - path); + return new ConverterAwareValueExpressionParameterValueProvider(context, evaluator, conversionService, + 
parameterProvider); } - @Nullable - private S read(final MongoPersistentEntity entity, final Document bson, final ObjectPath path) { + class EvaluatingDocumentAccessor extends DocumentAccessor implements ValueExpressionEvaluator { + + /** + * Creates a new {@link DocumentAccessor} for the given {@link Document}. + * + * @param document must be a {@link Document} effectively, must not be {@literal null}. + */ + public EvaluatingDocumentAccessor(Bson document) { + super(document); + } + + @Override + public T evaluate(String expression) { + return expressionEvaluatorFactory.create(getDocument()).evaluate(expression); + } + } + + private S read(ConversionContext context, MongoPersistentEntity entity, Document bson) { + + S existing = context.findContextualEntity(entity, bson); + if (existing != null) { + return existing; + } - DefaultSpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(bson, spELContext); + EvaluatingDocumentAccessor documentAccessor = new EvaluatingDocumentAccessor(bson); + InstanceCreatorMetadata instanceCreatorMetadata = entity.getInstanceCreatorMetadata(); + + ParameterValueProvider provider = instanceCreatorMetadata != null + && instanceCreatorMetadata.hasParameters() + ? 
getParameterProvider(context, entity, documentAccessor, documentAccessor) + : NoOpParameterValueProvider.INSTANCE; - ParameterValueProvider provider = getParameterProvider(entity, bson, evaluator, path); EntityInstantiator instantiator = instantiators.getInstantiatorFor(entity); S instance = instantiator.createInstance(entity, provider); - PersistentPropertyAccessor accessor = new ConvertingPropertyAccessor(entity.getPropertyAccessor(instance), + return populateProperties(context, entity, documentAccessor, documentAccessor, instance); + } + + private S populateProperties(ConversionContext context, MongoPersistentEntity entity, + DocumentAccessor documentAccessor, ValueExpressionEvaluator evaluator, S instance) { + + if (!entity.requiresPropertyPopulation()) { + return instance; + } + + PersistentPropertyAccessor accessor = new ConvertingPropertyAccessor<>(entity.getPropertyAccessor(instance), conversionService); - MongoPersistentProperty idProperty = entity.getIdProperty(); - DocumentAccessor documentAccessor = new DocumentAccessor(bson); + // Make sure id property is set before all other properties + + Object rawId = readAndPopulateIdentifier(context, accessor, documentAccessor, entity, evaluator); + ObjectPath currentPath = context.getPath().push(accessor.getBean(), entity, rawId); + ConversionContext contextToUse = context.withPath(currentPath); + + MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(contextToUse, documentAccessor, + evaluator, spELContext); + + readProperties(contextToUse, entity, accessor, documentAccessor, valueProvider, evaluator, PROPERTY_FILTER); + + return accessor.getBean(); + } - // make sure id property is set before all other properties - Object idValue = null; + /** + * Reads the identifier from either the bean backing the {@link PersistentPropertyAccessor} or the source document in + * case the identifier has not be populated yet. In this case the identifier is set on the bean for further reference. 
+ */ + @Nullable + private Object readAndPopulateIdentifier(ConversionContext context, PersistentPropertyAccessor accessor, + DocumentAccessor document, MongoPersistentEntity entity, ValueExpressionEvaluator evaluator) { - if (idProperty != null && documentAccessor.hasValue(idProperty)) { + Object rawId = document.getRawId(entity); - idValue = readIdValue(path, evaluator, idProperty, documentAccessor); - accessor.setProperty(idProperty, idValue); + if (!entity.hasIdProperty() || rawId == null) { + return rawId; } - ObjectPath currentPath = path.push(instance, entity, idValue != null ? bson.get(idProperty.getFieldName()) : null); + MongoPersistentProperty idProperty = entity.getRequiredIdProperty(); - MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(documentAccessor, evaluator, - currentPath); + if (idProperty.isImmutable() && entity.isCreatorArgument(idProperty)) { + return rawId; + } - DbRefResolverCallback callback = new DefaultDbRefResolverCallback(bson, currentPath, evaluator, - MappingMongoConverter.this); - readProperties(entity, accessor, idProperty, documentAccessor, valueProvider, callback); + accessor.setProperty(idProperty, readIdValue(context, evaluator, idProperty, rawId)); - return instance; + return rawId; } - private Object readIdValue(ObjectPath path, DefaultSpELExpressionEvaluator evaluator, - MongoPersistentProperty idProperty, DocumentAccessor documentAccessor) { + @Nullable + private Object readIdValue(ConversionContext context, ValueExpressionEvaluator evaluator, + MongoPersistentProperty idProperty, Object rawId) { String expression = idProperty.getSpelExpression(); - Object resolvedValue = expression != null ? evaluator.evaluate(expression) : documentAccessor.get(idProperty); + Object resolvedValue = expression != null ? evaluator.evaluate(expression) : rawId; - return resolvedValue != null ? readValue(resolvedValue, idProperty.getTypeInformation(), path) : null; + return resolvedValue != null + ? 
readValue(context.forProperty(idProperty), resolvedValue, idProperty.getTypeInformation()) + : null; } - private void readProperties(MongoPersistentEntity entity, PersistentPropertyAccessor accessor, - @Nullable MongoPersistentProperty idProperty, DocumentAccessor documentAccessor, - MongoDbPropertyValueProvider valueProvider, DbRefResolverCallback callback) { + private void readProperties(ConversionContext context, MongoPersistentEntity entity, + PersistentPropertyAccessor accessor, DocumentAccessor documentAccessor, + MongoDbPropertyValueProvider valueProvider, ValueExpressionEvaluator evaluator, + BiPredicate, MongoPersistentProperty> propertyFilter) { + + DbRefResolverCallback callback = null; for (MongoPersistentProperty prop : entity) { - if (prop.isAssociation() && !entity.isConstructorArgument(prop)) { - readAssociation(prop.getAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback); + if (!propertyFilter.test(entity, prop)) { continue; } - // we skip the id property since it was already set - if (idProperty != null && idProperty.equals(prop)) { + + ConversionContext propertyContext = context.forProperty(prop); + + if (prop.isAssociation()) { + + if (callback == null) { + callback = getDbRefResolverCallback(propertyContext, documentAccessor, evaluator); + } + + Object value = readAssociation(prop.getRequiredAssociation(), documentAccessor, dbRefProxyHandler, callback, + propertyContext); + + if (value != null) { + accessor.setProperty(prop, value); + } continue; } - if (entity.isConstructorArgument(prop) || !documentAccessor.hasValue(prop)) { + if (prop.isUnwrapped()) { + + accessor.setProperty(prop, + readUnwrapped(propertyContext, documentAccessor, prop, mappingContext.getRequiredPersistentEntity(prop))); continue; } - if (prop.isAssociation()) { - readAssociation(prop.getAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback); + if (!documentAccessor.hasValue(prop)) { continue; } @@ -337,77 +678,194 @@ private void 
readProperties(MongoPersistentEntity entity, PersistentPropertyA } } - private void readAssociation(Association association, PersistentPropertyAccessor accessor, - DocumentAccessor documentAccessor, DbRefProxyHandler handler, DbRefResolverCallback callback) { + private DbRefResolverCallback getDbRefResolverCallback(ConversionContext context, DocumentAccessor documentAccessor, + ValueExpressionEvaluator evaluator) { + + return new DefaultDbRefResolverCallback(documentAccessor.getDocument(), context.getPath(), evaluator, + (prop, bson, e, path) -> MappingMongoConverter.this.getValueInternal(context, prop, bson, e)); + } + + @Nullable + private Object readAssociation(Association association, DocumentAccessor documentAccessor, + DbRefProxyHandler handler, DbRefResolverCallback callback, ConversionContext context) { MongoPersistentProperty property = association.getInverse(); Object value = documentAccessor.get(property); + if (property.isDocumentReference() + || !property.isDbReference() && property.findAnnotation(Reference.class) != null) { + + // quite unusual but sounds like worth having? + + if (conversionService.canConvert(DocumentPointer.class, property.getActualType())) { + + if (value == null) { + return null; + } + + DocumentPointer pointer = () -> value; + + // collection like special treatment + return conversionService.convert(pointer, property.getActualType()); + } else { + + return dbRefResolver.resolveReference(property, + new DocumentReferenceSource(documentAccessor.getDocument(), documentAccessor.get(property)), + referenceLookupDelegate, context.forProperty(property)::convert); + } + } + if (value == null) { - return; + return null; + } + + if (value instanceof DBRef dbref) { + return dbRefResolver.resolveDbRef(property, dbref, callback, handler); } - DBRef dbref = value instanceof DBRef ? 
(DBRef) value : null; - accessor.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, callback, handler)); + /* + * The value might be a pre resolved full document (eg. resulting from an aggregation $lookup). + * In this case we try to map that object to the target type without an additional step ($dbref resolution server roundtrip) + * in between. + */ + if (value instanceof Document document) { + if (property.isMap()) { + if (document.isEmpty() || peek(document.values()) instanceof DBRef) { + return dbRefResolver.resolveDbRef(property, null, callback, handler); + } else { + return readMap(context, document, property.getTypeInformation()); + } + } else { + return read(property.getActualType(), document); + } + } else if (value instanceof Collection collection && !collection.isEmpty() + && peek(collection) instanceof Document) { + return readCollectionOrArray(context, collection, property.getTypeInformation()); + } else { + return dbRefResolver.resolveDbRef(property, null, callback, handler); + } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.MongoWriter#toDBRef(java.lang.Object, org.springframework.data.mongodb.core.mapping.MongoPersistentProperty) - */ + @Nullable + private Object readUnwrapped(ConversionContext context, DocumentAccessor documentAccessor, + MongoPersistentProperty prop, MongoPersistentEntity unwrappedEntity) { + + if (prop.findAnnotation(Unwrapped.class).onEmpty().equals(OnEmpty.USE_EMPTY)) { + return read(context, unwrappedEntity, (Document) documentAccessor.getDocument()); + } + + for (MongoPersistentProperty persistentProperty : unwrappedEntity) { + if (documentAccessor.hasValue(persistentProperty)) { + return read(context, unwrappedEntity, (Document) documentAccessor.getDocument()); + } + } + return null; + } + + @Override public DBRef toDBRef(Object object, @Nullable MongoPersistentProperty referringProperty) { - org.springframework.data.mongodb.core.mapping.DBRef annotation = null; + 
org.springframework.data.mongodb.core.mapping.DBRef annotation; if (referringProperty != null) { annotation = referringProperty.getDBRef(); - Assert.isTrue(annotation != null, "The referenced property has to be mapped with @DBRef!"); + Assert.isTrue(annotation != null, "The referenced property has to be mapped with @DBRef"); } // DATAMONGO-913 - if (object instanceof LazyLoadingProxy) { - return ((LazyLoadingProxy) object).toDBRef(); + if (object instanceof LazyLoadingProxy proxy) { + return proxy.toDBRef(); } return createDBRef(object, referringProperty); } + @Override + public DocumentPointer toDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) { + + if (source instanceof LazyLoadingProxy proxy) { + return proxy::getSource; + } + + Assert.notNull(referringProperty, "Cannot create DocumentReference; The referringProperty must not be null"); + + if (referringProperty.isDbReference()) { + return () -> toDBRef(source, referringProperty); + } + + if (referringProperty.isDocumentReference() || referringProperty.findAnnotation(Reference.class) != null) { + return createDocumentPointer(source, referringProperty); + } + + throw new IllegalArgumentException("The referringProperty is neither a DBRef nor a document reference"); + } + + DocumentPointer createDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) { + + if (referringProperty == null) { + return () -> source; + } + + if (source instanceof DocumentPointer pointer) { + return pointer; + } + + if (ClassUtils.isAssignableValue(referringProperty.getType(), source) + && conversionService.canConvert(referringProperty.getType(), DocumentPointer.class)) { + return conversionService.convert(source, DocumentPointer.class); + } + + if (ClassUtils.isAssignableValue(referringProperty.getAssociationTargetType(), source)) { + return documentPointerFactory.computePointer(mappingContext, referringProperty, source, + referringProperty.getActualType()); + } + + return () 
-> source; + } + /** * Root entry method into write conversion. Adds a type discriminator to the {@link Document}. Shouldn't be called for * nested conversions. * - * @see org.springframework.data.mongodb.core.convert.MongoWriter#write(java.lang.Object, com.mongodb.Document) + * @see org.springframework.data.mongodb.core.convert.MongoWriter#write(java.lang.Object, java.lang.Object) */ - public void write(final Object obj, final Bson bson) { + @Override + public void write(Object obj, Bson bson) { if (null == obj) { return; } Class entityType = ClassUtils.getUserClass(obj.getClass()); - TypeInformation type = ClassTypeInformation.from(entityType); + TypeInformation type = TypeInformation.of(entityType); - Object target = obj instanceof LazyLoadingProxy ? ((LazyLoadingProxy) obj).getTarget() : obj; + Object target = obj instanceof LazyLoadingProxy proxy ? proxy.getTarget() : obj; writeInternal(target, bson, type); - if (asMap(bson).containsKey("_id") && asMap(bson).get("_id") == null) { - removeFromMap(bson, "_id"); - } + BsonUtils.removeNullId(bson); - boolean handledByCustomConverter = conversions.hasCustomWriteTarget(entityType, Document.class); - if (!handledByCustomConverter && !(bson instanceof Collection)) { - typeMapper.writeType(type, bson); + if (requiresTypeHint(entityType)) { + getTypeMapper().writeType(type, bson); } } /** - * Internal write conversion method which should be used for nested invocations. + * Check if a given type requires a type hint (aka {@literal _class} attribute) when writing to the document. * - * @param obj - * @param bson + * @param type must not be {@literal null}. + * @return {@literal true} if not a simple type, {@link Collection} or type with custom write target. 
+ */ + private boolean requiresTypeHint(Class type) { + + return !conversions.isSimpleType(type) && !ClassUtils.isAssignable(Collection.class, type) + && !conversions.hasCustomWriteTarget(type, Document.class); + } + + /** + * Internal write conversion method which should be used for nested invocations. */ @SuppressWarnings("unchecked") - protected void writeInternal(@Nullable Object obj, final Bson bson, final TypeInformation typeHint) { + protected void writeInternal(@Nullable Object obj, Bson bson, @Nullable TypeInformation typeHint) { if (null == obj) { return; @@ -417,18 +875,18 @@ protected void writeInternal(@Nullable Object obj, final Bson bson, final TypeIn Optional> customTarget = conversions.getCustomWriteTarget(entityType, Document.class); if (customTarget.isPresent()) { - Document result = conversionService.convert(obj, Document.class); - addAllToMap(bson, result); + Document result = doConvert(obj, Document.class); + BsonUtils.addAllToMap(bson, result); return; } if (Map.class.isAssignableFrom(entityType)) { - writeMapInternal((Map) obj, bson, ClassTypeInformation.MAP); + writeMapInternal((Map) obj, bson, TypeInformation.MAP); return; } if (Collection.class.isAssignableFrom(entityType)) { - writeCollectionInternal((Collection) obj, ClassTypeInformation.LIST, (BasicDBList) bson); + writeCollectionInternal((Collection) obj, TypeInformation.LIST, (Collection) bson); return; } @@ -437,7 +895,7 @@ protected void writeInternal(@Nullable Object obj, final Bson bson, final TypeIn addCustomTypeKeyIfNecessary(typeHint, obj, bson); } - protected void writeInternal(@Nullable Object obj, final Bson bson, MongoPersistentEntity entity) { + protected void writeInternal(@Nullable Object obj, Bson bson, @Nullable MongoPersistentEntity entity) { if (obj == null) { return; @@ -447,23 +905,24 @@ protected void writeInternal(@Nullable Object obj, final Bson bson, MongoPersist throw new MappingException("No mapping metadata found for entity of type " + 
obj.getClass().getName()); } - PersistentPropertyAccessor accessor = entity.getPropertyAccessor(obj); + PersistentPropertyAccessor accessor = entity.getPropertyAccessor(obj); DocumentAccessor dbObjectAccessor = new DocumentAccessor(bson); - MongoPersistentProperty idProperty = entity.getIdProperty(); + if (idProperty != null && !dbObjectAccessor.hasValue(idProperty)) { - Object value = idMapper.convertId(accessor.getProperty(idProperty)); + Object value = idMapper.convertId(accessor.getProperty(idProperty), idProperty.getFieldType()); if (value != null) { dbObjectAccessor.put(idProperty, value); } } + writeProperties(bson, entity, accessor, dbObjectAccessor, idProperty); } - private void writeProperties(Bson bson, MongoPersistentEntity entity, PersistentPropertyAccessor accessor, - DocumentAccessor dbObjectAccessor, MongoPersistentProperty idProperty) { + private void writeProperties(Bson bson, MongoPersistentEntity entity, PersistentPropertyAccessor accessor, + DocumentAccessor dbObjectAccessor, @Nullable MongoPersistentProperty idProperty) { // Write the properties for (MongoPersistentProperty prop : entity) { @@ -472,49 +931,77 @@ private void writeProperties(Bson bson, MongoPersistentEntity entity, Persist continue; } if (prop.isAssociation()) { - writeAssociation(prop.getAssociation(), accessor, dbObjectAccessor); + + writeAssociation(prop.getRequiredAssociation(), accessor, dbObjectAccessor); continue; } Object value = accessor.getProperty(prop); if (value == null) { - continue; - } - if (!conversions.isSimpleType(value.getClass())) { - writePropertyInternal(value, dbObjectAccessor, prop); + if (conversions.hasValueConverter(prop)) { + dbObjectAccessor.put(prop, applyPropertyConversion(null, prop, accessor)); + } else { + dbObjectAccessor.put(prop, null); + } + } else if (!conversions.isSimpleType(value.getClass())) { + writePropertyInternal(value, dbObjectAccessor, prop, accessor); } else { - writeSimpleInternal(value, bson, prop); + 
writeSimpleInternal(value, bson, prop, accessor); } } } - private void writeAssociation(Association association, PersistentPropertyAccessor accessor, - DocumentAccessor dbObjectAccessor) { + private void writeAssociation(Association association, + PersistentPropertyAccessor accessor, DocumentAccessor dbObjectAccessor) { MongoPersistentProperty inverseProp = association.getInverse(); - writePropertyInternal(accessor.getProperty(inverseProp), dbObjectAccessor, inverseProp); + Object value = accessor.getProperty(inverseProp); + + if (value == null && !inverseProp.isUnwrapped() && inverseProp.writeNullValues()) { + dbObjectAccessor.put(inverseProp, null); + return; + } + + writePropertyInternal(value, dbObjectAccessor, inverseProp, accessor); } @SuppressWarnings({ "unchecked" }) - protected void writePropertyInternal(Object obj, DocumentAccessor accessor, MongoPersistentProperty prop) { + void writePropertyInternal(@Nullable Object obj, DocumentAccessor accessor, MongoPersistentProperty prop, + PersistentPropertyAccessor persistentPropertyAccessor) { if (obj == null) { return; } - TypeInformation valueType = ClassTypeInformation.from(obj.getClass()); + TypeInformation valueType = TypeInformation.of(obj.getClass()); TypeInformation type = prop.getTypeInformation(); + if (conversions.hasValueConverter(prop)) { + accessor.put(prop, applyPropertyConversion(obj, prop, persistentPropertyAccessor)); + return; + } + + if (prop.isUnwrapped()) { + + Document target = new Document(); + writeInternal(obj, target, mappingContext.getPersistentEntity(prop)); + + accessor.putAll(target); + return; + } + if (valueType.isCollectionLike()) { - List collectionInternal = createCollection(asCollection(obj), prop); + + List collectionInternal = createCollection(BsonUtils.asCollection(obj), prop); accessor.put(prop, collectionInternal); return; } if (valueType.isMap()) { + Bson mapDbObj = createMap((Map) obj, prop); accessor.put(prop, mapDbObj); return; @@ -528,23 +1015,33 @@ protected void 
writePropertyInternal(Object obj, DocumentAccessor accessor, Mong * If we already have a LazyLoadingProxy, we use it's cached DBRef value instead of * unnecessarily initializing it only to convert it to a DBRef a few instructions later. */ - if (obj instanceof LazyLoadingProxy) { - dbRefObj = ((LazyLoadingProxy) obj).toDBRef(); + if (obj instanceof LazyLoadingProxy proxy) { + dbRefObj = proxy.toDBRef(); } - dbRefObj = dbRefObj != null ? dbRefObj : createDBRef(obj, prop); - - if (null != dbRefObj) { - accessor.put(prop, dbRefObj); + if (obj != null && conversions.hasCustomWriteTarget(obj.getClass())) { + accessor.put(prop, doConvert(obj, conversions.getCustomWriteTarget(obj.getClass()).get())); return; } + + dbRefObj = dbRefObj != null ? dbRefObj : createDBRef(obj, prop); + + accessor.put(prop, dbRefObj); + return; + } + + if (prop.isAssociation() && prop.isAnnotationPresent(Reference.class)) { + + accessor.put(prop, new DocumentPointerFactory(conversionService, mappingContext) + .computePointer(mappingContext, prop, obj, valueType.getType()).getPointer()); + return; } /* * If we have a LazyLoadingProxy we make sure it is initialized first. */ - if (obj instanceof LazyLoadingProxy) { - obj = ((LazyLoadingProxy) obj).getTarget(); + if (obj instanceof LazyLoadingProxy proxy) { + obj = proxy.getTarget(); } // Lookup potential custom target type @@ -552,54 +1049,49 @@ protected void writePropertyInternal(Object obj, DocumentAccessor accessor, Mong if (basicTargetType.isPresent()) { - accessor.put(prop, conversionService.convert(obj, basicTargetType.get())); + accessor.put(prop, doConvert(obj, basicTargetType.get())); return; } - MongoPersistentEntity entity = isSubtype(prop.getType(), obj.getClass()) + MongoPersistentEntity entity = valueType.isSubTypeOf(prop.getType()) ? 
mappingContext.getRequiredPersistentEntity(obj.getClass()) : mappingContext.getRequiredPersistentEntity(type); Object existingValue = accessor.get(prop); - Document document = existingValue instanceof Document ? (Document) existingValue : new Document(); + Document document = existingValue instanceof Document existingDocument ? existingDocument : new Document(); writeInternal(obj, document, entity); - addCustomTypeKeyIfNecessary(ClassTypeInformation.from(prop.getRawType()), obj, document); + addCustomTypeKeyIfNecessary(TypeInformation.of(prop.getRawType()), obj, document); accessor.put(prop, document); } - private boolean isSubtype(Class left, Class right) { - return left.isAssignableFrom(right) && !left.equals(right); - } - - /** - * Returns given object as {@link Collection}. Will return the {@link Collection} as is if the source is a - * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element - * collection for everything else. - * - * @param source - * @return - */ - private static Collection asCollection(Object source) { - - if (source instanceof Collection) { - return (Collection) source; - } - - return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source); - } - /** * Writes the given {@link Collection} using the given {@link MongoPersistentProperty} information. * * @param collection must not be {@literal null}. * @param property must not be {@literal null}. 
- * @return */ protected List createCollection(Collection collection, MongoPersistentProperty property) { if (!property.isDbReference()) { - return writeCollectionInternal(collection, property.getTypeInformation(), new BasicDBList()); + + if (property.isAssociation()) { + + List targetCollection = collection.stream().map(it -> { + return documentPointerFactory.computePointer(mappingContext, property, it, property.getActualType()) + .getPointer(); + }).collect(Collectors.toList()); + + return writeCollectionInternal(targetCollection, TypeInformation.of(DocumentPointer.class), + new ArrayList<>(targetCollection.size())); + } + + if (property.hasExplicitWriteTarget()) { + return writeCollectionInternal(collection, new FieldTypeInformation<>(property), + new ArrayList<>(collection.size())); + } + + return writeCollectionInternal(collection, property.getTypeInformation(), new ArrayList<>(collection.size())); } List dbList = new ArrayList<>(collection.size()); @@ -622,14 +1114,13 @@ protected List createCollection(Collection collection, MongoPersisten * * @param map must not {@literal null}. * @param property must not be {@literal null}. - * @return */ protected Bson createMap(Map map, MongoPersistentProperty property) { - Assert.notNull(map, "Given map must not be null!"); - Assert.notNull(property, "PersistentProperty must not be null!"); + Assert.notNull(map, "Given map must not be null"); + Assert.notNull(property, "PersistentProperty must not be null"); - if (!property.isDbReference()) { + if (!property.isAssociation()) { return writeMapInternal(map, new Document(), property.getTypeInformation()); } @@ -643,10 +1134,15 @@ protected Bson createMap(Map map, MongoPersistentProperty proper if (conversions.isSimpleType(key.getClass())) { String simpleKey = prepareMapKey(key.toString()); - document.put(simpleKey, value != null ? createDBRef(value, property) : null); + if (property.isDbReference()) { + document.put(simpleKey, value != null ? 
createDBRef(value, property) : null); + } else { + document.put(simpleKey, documentPointerFactory + .computePointer(mappingContext, property, value, property.getActualType()).getPointer()); + } } else { - throw new MappingException("Cannot use a complex object as a key value."); + throw new MappingException("Cannot use a complex object as a key value"); } } @@ -654,17 +1150,20 @@ protected Bson createMap(Map map, MongoPersistentProperty proper } /** - * Populates the given {@link BasicDBList} with values from the given {@link Collection}. + * Populates the given {@link Collection sink} with converted values from the given {@link Collection source}. * - * @param source the collection to create a {@link BasicDBList} for, must not be {@literal null}. + * @param source the collection to create a {@link Collection} for, must not be {@literal null}. * @param type the {@link TypeInformation} to consider or {@literal null} if unknown. - * @param sink the {@link BasicDBList} to write to. - * @return + * @param sink the {@link Collection} to write to. */ - private BasicDBList writeCollectionInternal(Collection source, TypeInformation type, BasicDBList sink) { + @SuppressWarnings("unchecked") + private List writeCollectionInternal(Collection source, @Nullable TypeInformation type, + Collection sink) { TypeInformation componentType = null; + List collection = sink instanceof List ? (List) sink : new ArrayList<>(sink); + if (type != null) { componentType = type.getComponentType(); } @@ -674,17 +1173,20 @@ private BasicDBList writeCollectionInternal(Collection source, TypeInformatio Class elementType = element == null ? null : element.getClass(); if (elementType == null || conversions.isSimpleType(elementType)) { - sink.add(getPotentiallyConvertedSimpleWrite(element)); + collection.add(getPotentiallyConvertedSimpleWrite(element, + componentType != null ? 
componentType.getType() : Object.class)); } else if (element instanceof Collection || elementType.isArray()) { - sink.add(writeCollectionInternal(asCollection(element), componentType, new BasicDBList())); + + Collection objects = BsonUtils.asCollection(element); + collection.add(writeCollectionInternal(objects, componentType, new ArrayList<>(objects.size()))); } else { Document document = new Document(); writeInternal(element, document, componentType); - sink.add(document); + collection.add(document); } } - return sink; + return collection; } /** @@ -693,7 +1195,6 @@ private BasicDBList writeCollectionInternal(Collection source, TypeInformatio * @param obj must not be {@literal null}. * @param bson must not be {@literal null}. * @param propertyType must not be {@literal null}. - * @return */ protected Bson writeMapInternal(Map obj, Bson bson, TypeInformation propertyType) { @@ -708,17 +1209,17 @@ protected Bson writeMapInternal(Map obj, Bson bson, TypeInformat if (val == null || conversions.isSimpleType(val.getClass())) { writeSimpleInternal(val, bson, simpleKey); } else if (val instanceof Collection || val.getClass().isArray()) { - addToMap(bson, simpleKey, - writeCollectionInternal(asCollection(val), propertyType.getMapValueType(), new BasicDBList())); + BsonUtils.addToMap(bson, simpleKey, + writeCollectionInternal(BsonUtils.asCollection(val), propertyType.getMapValueType(), new ArrayList<>())); } else { Document document = new Document(); TypeInformation valueTypeInfo = propertyType.isMap() ? 
propertyType.getMapValueType() - : ClassTypeInformation.OBJECT; + : TypeInformation.OBJECT; writeInternal(val, document, valueTypeInfo); - addToMap(bson, simpleKey, document); + BsonUtils.addToMap(bson, simpleKey, document); } } else { - throw new MappingException("Cannot use a complex object as a key value."); + throw new MappingException("Cannot use a complex object as a key value"); } } @@ -730,11 +1231,10 @@ protected Bson writeMapInternal(Map obj, Bson bson, TypeInformat * conversions and escape dots from the result as they're not supported as {@link Map} key in MongoDB. * * @param key must not be {@literal null}. - * @return */ private String prepareMapKey(Object key) { - Assert.notNull(key, "Map key must not be null!"); + Assert.notNull(key, "Map key must not be null"); String convertedKey = potentiallyConvertMapKey(key); return potentiallyEscapeMapKey(convertedKey); @@ -745,8 +1245,7 @@ private String prepareMapKey(Object key) { * conversion if none is configured. * * @see #setMapKeyDotReplacement(String) - * @param source - * @return + * @param source must not be {@literal null}. */ protected String potentiallyEscapeMapKey(String source) { @@ -756,28 +1255,27 @@ protected String potentiallyEscapeMapKey(String source) { if (mapKeyDotReplacement == null) { throw new MappingException(String.format( - "Map key %s contains dots but no replacement was configured! 
Make " - + "sure map keys don't contain dots in the first place or configure an appropriate replacement!", + "Map key %s contains dots but no replacement was configured; Make" + + " sure map keys don't contain dots in the first place or configure an appropriate replacement", source)); } - return source.replaceAll("\\.", mapKeyDotReplacement); + return StringUtils.replace(source, ".", mapKeyDotReplacement); } /** * Returns a {@link String} representation of the given {@link Map} key * * @param key - * @return */ private String potentiallyConvertMapKey(Object key) { - if (key instanceof String) { - return (String) key; + if (key instanceof String stringValue) { + return stringValue; } return conversions.hasCustomWriteTarget(key.getClass(), String.class) - ? (String) getPotentiallyConvertedSimpleWrite(key) + ? (String) getPotentiallyConvertedSimpleWrite(key, Object.class) : key.toString(); } @@ -785,66 +1283,95 @@ private String potentiallyConvertMapKey(Object key) { * Translates the map key replacements in the given key just read with a dot in case a map key replacement has been * configured. * - * @param source - * @return + * @param source must not be {@literal null}. */ protected String potentiallyUnescapeMapKey(String source) { - return mapKeyDotReplacement == null ? source : source.replaceAll(mapKeyDotReplacement, "\\."); + return mapKeyDotReplacement == null ? source : StringUtils.replace(source, mapKeyDotReplacement, "."); } /** * Adds custom type information to the given {@link Document} if necessary. That is if the value is not the same as * the one given. This is usually the case if you store a subtype of the actual declared type of the property. * - * @param type + * @param type can be {@literal null}. * @param value must not be {@literal null}. * @param bson must not be {@literal null}. */ protected void addCustomTypeKeyIfNecessary(@Nullable TypeInformation type, Object value, Bson bson) { - Class reference = type != null ? 
type.getActualType().getType() : Object.class; + Class reference = type != null ? type.getRequiredActualType().getType() : Object.class; Class valueType = ClassUtils.getUserClass(value.getClass()); boolean notTheSameClass = !valueType.equals(reference); if (notTheSameClass) { - typeMapper.writeType(valueType, bson); + getTypeMapper().writeType(valueType, bson); } } /** * Writes the given simple value to the given {@link Document}. Will store enum names for enum values. * - * @param value + * @param value can be {@literal null}. * @param bson must not be {@literal null}. * @param key must not be {@literal null}. */ - private void writeSimpleInternal(Object value, Bson bson, String key) { - addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value)); + private void writeSimpleInternal(@Nullable Object value, Bson bson, String key) { + BsonUtils.addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value, Object.class)); } - private void writeSimpleInternal(Object value, Bson bson, MongoPersistentProperty property) { + private void writeSimpleInternal(@Nullable Object value, Bson bson, MongoPersistentProperty property, + PersistentPropertyAccessor persistentPropertyAccessor) { + DocumentAccessor accessor = new DocumentAccessor(bson); - accessor.put(property, getPotentiallyConvertedSimpleWrite(value)); + + if (conversions.hasValueConverter(property)) { + accessor.put(property, applyPropertyConversion(value, property, persistentPropertyAccessor)); + return; + } + + accessor.put(property, getPotentiallyConvertedSimpleWrite(value, + property.hasExplicitWriteTarget() ? 
property.getFieldType() : Object.class)); + } + + @Nullable + @SuppressWarnings("unchecked") + private Object applyPropertyConversion(@Nullable Object value, MongoPersistentProperty property, + PersistentPropertyAccessor persistentPropertyAccessor) { + MongoConversionContext context = new MongoConversionContext(new PropertyValueProvider<>() { + + @Nullable + @Override + public T getPropertyValue(MongoPersistentProperty property) { + return (T) persistentPropertyAccessor.getProperty(property); + } + }, property, this, spELContext); + PropertyValueConverter> valueConverter = conversions + .getPropertyValueConversions().getValueConverter(property); + return value != null ? valueConverter.write(value, context) : valueConverter.writeNull(context); } /** * Checks whether we have a custom conversion registered for the given value into an arbitrary simple Mongo type. * Returns the converted value if so. If not, we perform special enum handling or simply return the value as is. - * - * @param value - * @return */ @Nullable - private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value) { + private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nullable Class typeHint) { if (value == null) { return null; } + if (typeHint != null && Object.class != typeHint) { + + if (conversionService.canConvert(value.getClass(), typeHint)) { + value = doConvert(value, typeHint); + } + } + Optional> customTarget = conversions.getCustomWriteTarget(value.getClass()); if (customTarget.isPresent()) { - return conversionService.convert(value, customTarget.get()); + return doConvert(value, customTarget.get()); } if (ObjectUtils.isArray(value)) { @@ -852,7 +1379,7 @@ private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value) { if (value instanceof byte[]) { return value; } - return asCollection(value); + return BsonUtils.asCollection(value); } return Enum.class.isAssignableFrom(value.getClass()) ? 
((Enum) value).name() : value; @@ -860,37 +1387,46 @@ private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value) { /** * Checks whether we have a custom conversion for the given simple object. Converts the given value if so, applies - * {@link Enum} handling or returns the value as is. + * {@link Enum} handling or returns the value as is. Can be overridden by subclasses. * - * @param value - * @param target must not be {@literal null}. - * @return + * @since 3.2 + */ + protected Object getPotentiallyConvertedSimpleRead(Object value, TypeInformation target) { + return getPotentiallyConvertedSimpleRead(value, target.getType()); + } + + /** + * Checks whether we have a custom conversion for the given simple object. Converts the given value if so, applies + * {@link Enum} handling or returns the value as is. */ - @Nullable @SuppressWarnings({ "rawtypes", "unchecked" }) - private Object getPotentiallyConvertedSimpleRead(@Nullable Object value, Class target) { + private Object getPotentiallyConvertedSimpleRead(Object value, @Nullable Class target) { - if (value == null || target == null || target.isAssignableFrom(value.getClass())) { + if (target == null) { return value; } if (conversions.hasCustomReadTarget(value.getClass(), target)) { - return conversionService.convert(value, target); + return doConvert(value, target); + } + + if (ClassUtils.isAssignableValue(target, value)) { + return value; } if (Enum.class.isAssignableFrom(target)) { return Enum.valueOf((Class) target, value.toString()); } - return conversionService.convert(value, target); + return doConvert(value, target); } - protected DBRef createDBRef(Object target, MongoPersistentProperty property) { + protected DBRef createDBRef(Object target, @Nullable MongoPersistentProperty property) { - Assert.notNull(target, "Target object must not be null!"); + Assert.notNull(target, "Target object must not be null"); - if (target instanceof DBRef) { - return (DBRef) target; + if (target instanceof DBRef 
dbRef) { + return dbRef; } MongoPersistentEntity targetEntity = mappingContext.getPersistentEntity(target.getClass()); @@ -910,221 +1446,122 @@ protected DBRef createDBRef(Object target, MongoPersistentProperty property) { : entity.getPropertyAccessor(target).getProperty(idProperty); if (null == id) { - throw new MappingException("Cannot create a reference to an object with a NULL id."); + throw new MappingException("Cannot create a reference to an object with a NULL id"); } - return dbRefResolver.createDbRef(property == null ? null : property.getDBRef(), entity, idMapper.convertId(id)); + return dbRefResolver.createDbRef(property == null ? null : property.getDBRef(), entity, + idMapper.convertId(id, idProperty != null ? idProperty.getFieldType() : ObjectId.class)); } throw new MappingException("No id property found on class " + entity.getType()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.ValueResolver#getValueInternal(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, com.mongodb.Document, org.springframework.data.mapping.model.SpELExpressionEvaluator, java.lang.Object) - */ - @Override - public Object getValueInternal(MongoPersistentProperty prop, Bson bson, SpELExpressionEvaluator evaluator, - ObjectPath path) { - return new MongoDbPropertyValueProvider(bson, evaluator, path).getPropertyValue(prop); + @Nullable + private Object getValueInternal(ConversionContext context, MongoPersistentProperty prop, Bson bson, + ValueExpressionEvaluator evaluator) { + return new MongoDbPropertyValueProvider(context, bson, evaluator).getPropertyValue(prop); } /** - * Reads the given {@link BasicDBList} into a collection of the given {@link TypeInformation}. + * Reads the given {@link Collection} into a collection of the given {@link TypeInformation}. Can be overridden by + * subclasses. * - * @param targetType must not be {@literal null}. - * @param sourceValue must not be {@literal null}. 
- * @param path must not be {@literal null}. + * @param context must not be {@literal null} + * @param source must not be {@literal null} + * @param targetType the {@link Map} {@link TypeInformation} to be used to unmarshall this {@link Document}. + * @since 3.2 * @return the converted {@link Collection} or array, will never be {@literal null}. */ - @SuppressWarnings({ "rawtypes", "unchecked" }) - private Object readCollectionOrArray(TypeInformation targetType, List sourceValue, ObjectPath path) { + @SuppressWarnings("unchecked") + protected Object readCollectionOrArray(ConversionContext context, Collection source, + TypeInformation targetType) { - Assert.notNull(targetType, "Target type must not be null!"); - Assert.notNull(path, "Object path must not be null!"); + Assert.notNull(targetType, "Target type must not be null"); - Class collectionType = targetType.getType(); + Class collectionType = targetType.isSubTypeOf(Collection.class) // + ? targetType.getType() // + : List.class; - TypeInformation componentType = targetType.getComponentType() != null ? targetType.getComponentType() - : ClassTypeInformation.OBJECT; + TypeInformation componentType = targetType.getComponentType() != null // + ? targetType.getComponentType() // + : TypeInformation.OBJECT; Class rawComponentType = componentType.getType(); - collectionType = Collection.class.isAssignableFrom(collectionType) ? collectionType : List.class; - Collection items = targetType.getType().isArray() ? new ArrayList<>(sourceValue.size()) - : CollectionFactory.createCollection(collectionType, rawComponentType, sourceValue.size()); + Collection items = targetType.getType().isArray() // + ? 
new ArrayList<>(source.size()) // + : CollectionFactory.createCollection(collectionType, rawComponentType, source.size()); - if (sourceValue.isEmpty()) { + if (source.isEmpty()) { return getPotentiallyConvertedSimpleRead(items, targetType.getType()); } - if (!DBRef.class.equals(rawComponentType) && isCollectionOfDbRefWhereBulkFetchIsPossible(sourceValue)) { + if (!DBRef.class.equals(rawComponentType) && isCollectionOfDbRefWhereBulkFetchIsPossible(source)) { - List objects = bulkReadAndConvertDBRefs((List) sourceValue, componentType, path, rawComponentType); + List objects = bulkReadAndConvertDBRefs(context, (List) source, componentType); return getPotentiallyConvertedSimpleRead(objects, targetType.getType()); } - for (Object dbObjItem : sourceValue) { - - if (dbObjItem instanceof DBRef) { - items.add(DBRef.class.equals(rawComponentType) ? dbObjItem - : readAndConvertDBRef((DBRef) dbObjItem, componentType, path, rawComponentType)); - } else if (dbObjItem instanceof Document) { - items.add(read(componentType, (Document) dbObjItem, path)); - } else if (dbObjItem instanceof BasicDBObject) { - items.add(read(componentType, (BasicDBObject) dbObjItem, path)); - } else { - - if (dbObjItem instanceof Collection) { - if (!rawComponentType.isArray() && !ClassUtils.isAssignable(Iterable.class, rawComponentType)) { - throw new MappingException( - String.format(INCOMPATIBLE_TYPES, dbObjItem, dbObjItem.getClass(), rawComponentType, path)); - } - } - - if (dbObjItem instanceof List) { - items.add(readCollectionOrArray(ClassTypeInformation.OBJECT, (List) dbObjItem, path)); - } else { - items.add(getPotentiallyConvertedSimpleRead(dbObjItem, rawComponentType)); - } - } + for (Object element : source) { + items.add(element != null ? context.convert(element, componentType) : element); } return getPotentiallyConvertedSimpleRead(items, targetType.getType()); } /** - * Reads the given {@link Document} into a {@link Map}. will recursively resolve nested {@link Map}s as well. 
+ * Reads the given {@link Document} into a {@link Map}. will recursively resolve nested {@link Map}s as well. Can be + * overridden by subclasses. * - * @param type the {@link Map} {@link TypeInformation} to be used to unmarshall this {@link Document}. + * @param context must not be {@literal null} * @param bson must not be {@literal null} - * @param path must not be {@literal null} - * @return + * @param targetType the {@link Map} {@link TypeInformation} to be used to unmarshall this {@link Document}. + * @return the converted {@link Map}, will never be {@literal null}. + * @since 3.2 */ - @SuppressWarnings("unchecked") - protected Map readMap(TypeInformation type, Bson bson, ObjectPath path) { + protected Map readMap(ConversionContext context, Bson bson, TypeInformation targetType) { - Assert.notNull(bson, "Document must not be null!"); - Assert.notNull(path, "Object path must not be null!"); + Assert.notNull(bson, "Document must not be null"); + Assert.notNull(targetType, "TypeInformation must not be null"); - Class mapType = typeMapper.readType(bson, type).getType(); + Class mapType = getTypeMapper().readType(bson, targetType).getType(); - TypeInformation keyType = type.getComponentType(); - TypeInformation valueType = type.getMapValueType(); + TypeInformation keyType = targetType.getComponentType(); + TypeInformation valueType = targetType.getMapValueType() == null ? TypeInformation.OBJECT + : targetType.getRequiredMapValueType(); - Class rawKeyType = keyType != null ? keyType.getType() : null; - Class rawValueType = valueType != null ? valueType.getType() : null; + Class rawKeyType = keyType != null ? 
keyType.getType() : Object.class; + Class rawValueType = valueType.getType(); - Map sourceMap = asMap(bson); + Map sourceMap = BsonUtils.asMap(bson); Map map = CollectionFactory.createMap(mapType, rawKeyType, sourceMap.keySet().size()); if (!DBRef.class.equals(rawValueType) && isCollectionOfDbRefWhereBulkFetchIsPossible(sourceMap.values())) { - bulkReadAndConvertDBRefMapIntoTarget(valueType, rawValueType, sourceMap, map); + bulkReadAndConvertDBRefMapIntoTarget(context, valueType, sourceMap, map); return map; } - for (Entry entry : sourceMap.entrySet()) { + sourceMap.forEach((k, v) -> { - if (typeMapper.isTypeKey(entry.getKey())) { - continue; + if (getTypeMapper().isTypeKey(k)) { + return; } - Object key = potentiallyUnescapeMapKey(entry.getKey()); + Object key = potentiallyUnescapeMapKey(k); - if (rawKeyType != null && !rawKeyType.isAssignableFrom(key.getClass())) { - key = conversionService.convert(key, rawKeyType); + if (!rawKeyType.isAssignableFrom(key.getClass())) { + key = doConvert(key, rawKeyType); } - Object value = entry.getValue(); - TypeInformation defaultedValueType = valueType != null ? valueType : ClassTypeInformation.OBJECT; - - if (value instanceof Document) { - map.put(key, read(defaultedValueType, (Document) value, path)); - } else if (value instanceof BasicDBObject) { - map.put(key, read(defaultedValueType, (BasicDBObject) value, path)); - } else if (value instanceof DBRef) { - map.put(key, DBRef.class.equals(rawValueType) ? value - : readAndConvertDBRef((DBRef) value, defaultedValueType, ObjectPath.ROOT, rawValueType)); - } else if (value instanceof List) { - map.put(key, - readCollectionOrArray(valueType != null ? valueType : ClassTypeInformation.LIST, (List) value, path)); - } else { - map.put(key, getPotentiallyConvertedSimpleRead(value, rawValueType)); - } - } + map.put(key, v == null ? 
v : context.convert(v, valueType)); + }); return map; } - @SuppressWarnings("unchecked") - private static Map asMap(Bson bson) { - - if (bson instanceof Document) { - return (Document) bson; - } - - if (bson instanceof DBObject) { - return ((DBObject) bson).toMap(); - } - - throw new IllegalArgumentException( - String.format("Cannot read %s. as map. Given Bson must be a Document or DBObject!", bson.getClass())); - } - - private static void addToMap(Bson bson, String key, @Nullable Object value) { - - if (bson instanceof Document) { - ((Document) bson).put(key, value); - return; - } - if (bson instanceof DBObject) { - ((DBObject) bson).put(key, value); - return; - } - throw new IllegalArgumentException(String.format( - "Cannot add key/value pair to %s. as map. Given Bson must be a Document or DBObject!", bson.getClass())); - } - - @SuppressWarnings("unchecked") - private static void addAllToMap(Bson bson, Map value) { - - if (bson instanceof Document) { - ((Document) bson).putAll(value); - return; - } - - if (bson instanceof DBObject) { - ((DBObject) bson).putAll(value); - return; - } - - throw new IllegalArgumentException( - String.format("Cannot add all to %s. Given Bson must be a Document or DBObject.", bson.getClass())); - } - - private static void removeFromMap(Bson bson, String key) { - - if (bson instanceof Document) { - ((Document) bson).remove(key); - return; - } - - if (bson instanceof DBObject) { - ((DBObject) bson).removeField(key); - return; - } - - throw new IllegalArgumentException( - String.format("Cannot remove from %s. 
Given Bson must be a Document or DBObject.", bson.getClass())); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.MongoWriter#convertToMongoType(java.lang.Object, org.springframework.data.util.TypeInformation) - */ @Nullable @SuppressWarnings("unchecked") @Override - public Object convertToMongoType(@Nullable Object obj, TypeInformation typeInformation) { + public Object convertToMongoType(@Nullable Object obj, @Nullable TypeInformation typeInformation) { if (obj == null) { return null; @@ -1132,37 +1569,43 @@ public Object convertToMongoType(@Nullable Object obj, TypeInformation typeIn Optional> target = conversions.getCustomWriteTarget(obj.getClass()); if (target.isPresent()) { - return conversionService.convert(obj, target.get()); + return doConvert(obj, target.get()); } if (conversions.isSimpleType(obj.getClass())) { - // Doesn't need conversion - return getPotentiallyConvertedSimpleWrite(obj); - } - TypeInformation typeHint = typeInformation; + Class conversionTargetType; + + if (typeInformation != null && conversions.isSimpleType(typeInformation.getType())) { + conversionTargetType = typeInformation.getType(); + } else { + conversionTargetType = Object.class; + } + + return getPotentiallyConvertedSimpleWrite(obj, conversionTargetType); + } - if (obj instanceof List) { - return maybeConvertList((List) obj, typeHint); + if (obj instanceof List list) { + return maybeConvertList(list, typeInformation); } - if (obj instanceof Document) { + if (obj instanceof Document document) { Document newValueDocument = new Document(); - for (String vk : ((Document) obj).keySet()) { - Object o = ((Document) obj).get(vk); - newValueDocument.put(vk, convertToMongoType(o, typeHint)); + for (String vk : document.keySet()) { + Object o = document.get(vk); + newValueDocument.put(vk, convertToMongoType(o, typeInformation)); } return newValueDocument; } - if (obj instanceof DBObject) { + if (obj instanceof DBObject dbObject) { Document newValueDbo = 
new Document(); - for (String vk : ((DBObject) obj).keySet()) { + for (String vk : dbObject.keySet()) { - Object o = ((DBObject) obj).get(vk); - newValueDbo.put(vk, convertToMongoType(o, typeHint)); + Object o = dbObject.get(vk); + newValueDbo.put(vk, convertToMongoType(o, typeInformation)); } return newValueDbo; @@ -1173,18 +1616,18 @@ public Object convertToMongoType(@Nullable Object obj, TypeInformation typeIn Document result = new Document(); for (Map.Entry entry : ((Map) obj).entrySet()) { - result.put(entry.getKey().toString(), convertToMongoType(entry.getValue(), typeHint)); + result.put(entry.getKey().toString(), convertToMongoType(entry.getValue(), typeInformation)); } return result; } if (obj.getClass().isArray()) { - return maybeConvertList(Arrays.asList((Object[]) obj), typeHint); + return maybeConvertList(Arrays.asList((Object[]) obj), typeInformation); } - if (obj instanceof Collection) { - return maybeConvertList((Collection) obj, typeHint); + if (obj instanceof Collection collection) { + return maybeConvertList(collection, typeInformation); } Document newDocument = new Document(); @@ -1201,7 +1644,15 @@ public Object convertToMongoType(@Nullable Object obj, TypeInformation typeIn return !obj.getClass().equals(typeInformation.getType()) ? 
newDocument : removeTypeInfo(newDocument, true); } - public List maybeConvertList(Iterable source, TypeInformation typeInformation) { + @Override + public Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity entity) { + Document newDocument = new Document(); + writeInternal(obj, newDocument, entity); + return newDocument; + } + + // TODO: hide in 5.0 + public List maybeConvertList(Iterable source, @Nullable TypeInformation typeInformation) { List newDbl = new ArrayList<>(); @@ -1219,13 +1670,13 @@ public List maybeConvertList(Iterable source, TypeInformation type * @param recursively whether to apply the removal recursively * @return */ + @SuppressWarnings("unchecked") private Object removeTypeInfo(Object object, boolean recursively) { - if (!(object instanceof Document)) { + if (!(object instanceof Document document)) { return object; } - Document document = (Document) object; String keyToRemove = null; for (String key : document.keySet()) { @@ -1239,7 +1690,7 @@ private Object removeTypeInfo(Object object, boolean recursively) { removeTypeInfo(element, recursively); } } else if (value instanceof List) { - for (Object element : (List) value) { + for (Object element : (List) value) { removeTypeInfo(element, recursively); } } else { @@ -1247,7 +1698,7 @@ private Object removeTypeInfo(Object object, boolean recursively) { } } - if (typeMapper.isTypeKey(key)) { + if (getTypeMapper().isTypeKey(key)) { keyToRemove = key; @@ -1264,159 +1715,52 @@ private Object removeTypeInfo(Object object, boolean recursively) { return document; } - /** - * {@link PropertyValueProvider} to evaluate a SpEL expression if present on the property or simply accesses the field - * of the configured source {@link Document}. 
- * - * @author Oliver Gierke - */ - class MongoDbPropertyValueProvider implements PropertyValueProvider { - - private final DocumentAccessor source; - private final SpELExpressionEvaluator evaluator; - private final ObjectPath path; - - /** - * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and - * {@link ObjectPath}. - * - * @param source must not be {@literal null}. - * @param evaluator must not be {@literal null}. - * @param path must not be {@literal null}. - */ - public MongoDbPropertyValueProvider(Bson source, SpELExpressionEvaluator evaluator, ObjectPath path) { - - Assert.notNull(source, "Source document must no be null!"); - Assert.notNull(evaluator, "SpELExpressionEvaluator must not be null!"); - Assert.notNull(path, "ObjectPath must not be null!"); - - this.source = new DocumentAccessor(source); - this.evaluator = evaluator; - this.path = path; - } - - /** - * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and - * {@link ObjectPath}. - * - * @param accessor must not be {@literal null}. - * @param evaluator must not be {@literal null}. - * @param path must not be {@literal null}. - */ - public MongoDbPropertyValueProvider(DocumentAccessor accessor, SpELExpressionEvaluator evaluator, ObjectPath path) { - - Assert.notNull(accessor, "DocumentAccessor must no be null!"); - Assert.notNull(evaluator, "SpELExpressionEvaluator must not be null!"); - Assert.notNull(path, "ObjectPath must not be null!"); - - this.source = accessor; - this.evaluator = evaluator; - this.path = path; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.convert.PropertyValueProvider#getPropertyValue(org.springframework.data.mapping.PersistentProperty) - */ - @Nullable - public T getPropertyValue(MongoPersistentProperty property) { - - String expression = property.getSpelExpression(); - Object value = expression != null ? 
evaluator.evaluate(expression) : source.get(property); - - if (value == null) { - return null; - } - - return readValue(value, property.getTypeInformation(), path); - } - } - - /** - * Extension of {@link SpELExpressionParameterValueProvider} to recursively trigger value conversion on the raw - * resolved SpEL value. - * - * @author Oliver Gierke - */ - private class ConverterAwareSpELExpressionParameterValueProvider - extends SpELExpressionParameterValueProvider { - - private final ObjectPath path; - - /** - * Creates a new {@link ConverterAwareSpELExpressionParameterValueProvider}. - * - * @param evaluator must not be {@literal null}. - * @param conversionService must not be {@literal null}. - * @param delegate must not be {@literal null}. - */ - public ConverterAwareSpELExpressionParameterValueProvider(SpELExpressionEvaluator evaluator, - ConversionService conversionService, ParameterValueProvider delegate, - ObjectPath path) { - - super(evaluator, conversionService, delegate); - this.path = path; - } + @Nullable + @SuppressWarnings("unchecked") + T readValue(ConversionContext context, @Nullable Object value, TypeInformation type) { - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.SpELExpressionParameterValueProvider#potentiallyConvertSpelValue(java.lang.Object, org.springframework.data.mapping.PreferredConstructor.Parameter) - */ - @Override - protected T potentiallyConvertSpelValue(Object object, Parameter parameter) { - return readValue(object, parameter.getType(), path); + if (value == null) { + return null; } - } - @Nullable - @SuppressWarnings("unchecked") - T readValue(Object value, TypeInformation type, ObjectPath path) { + Assert.notNull(type, "TypeInformation must not be null"); Class rawType = type.getType(); if (conversions.hasCustomReadTarget(value.getClass(), rawType)) { - return (T) conversionService.convert(value, rawType); - } else if (value instanceof DBRef) { - return potentiallyReadOrResolveDbRef((DBRef) value, type, 
path, rawType); - } else if (value instanceof List) { - return (T) readCollectionOrArray(type, (List) value, path); - } else if (value instanceof Document) { - return (T) read(type, (Document) value, path); - } else if (value instanceof DBObject) { - return (T) read(type, (BasicDBObject) value, path); - } else { - return (T) getPotentiallyConvertedSimpleRead(value, rawType); + return (T) doConvert(value, rawType); + } else if (value instanceof DBRef dbRef) { + return (T) readDBRef(context, dbRef, type); } + + return (T) context.convert(value, type); } @Nullable - @SuppressWarnings("unchecked") - private T potentiallyReadOrResolveDbRef(@Nullable DBRef dbref, TypeInformation type, ObjectPath path, - Class rawType) { + private Object readDBRef(ConversionContext context, @Nullable DBRef dbref, TypeInformation type) { - if (rawType.equals(DBRef.class)) { - return (T) dbref; + if (type.getType().equals(DBRef.class)) { + return dbref; } - T object = dbref == null ? null : path.getPathItem(dbref.getId(), dbref.getCollectionName(), (Class) rawType); - return object != null ? object : readAndConvertDBRef(dbref, type, path, rawType); - } + ObjectPath path = context.getPath(); - @Nullable - private T readAndConvertDBRef(@Nullable DBRef dbref, TypeInformation type, ObjectPath path, - final Class rawType) { + Object object = dbref == null ? null : path.getPathItem(dbref.getId(), dbref.getCollectionName(), type.getType()); + if (object != null) { + return object; + } - List result = bulkReadAndConvertDBRefs(Collections.singletonList(dbref), type, path, rawType); - return CollectionUtils.isEmpty(result) ? null : result.iterator().next(); + List result = bulkReadAndConvertDBRefs(context, Collections.singletonList(dbref), type); + return CollectionUtils.isEmpty(result) ? 
null : peek(result); } @SuppressWarnings({ "unchecked", "rawtypes" }) - private void bulkReadAndConvertDBRefMapIntoTarget(TypeInformation valueType, Class rawValueType, + private void bulkReadAndConvertDBRefMapIntoTarget(ConversionContext context, TypeInformation valueType, Map sourceMap, Map targetMap) { LinkedHashMap referenceMap = new LinkedHashMap<>(sourceMap); - List convertedObjects = bulkReadAndConvertDBRefs((List) new ArrayList(referenceMap.values()), - valueType, ObjectPath.ROOT, rawValueType); + List convertedObjects = bulkReadAndConvertDBRefs(context.withPath(ObjectPath.ROOT), + (List) new ArrayList(referenceMap.values()), valueType); int index = 0; for (String key : referenceMap.keySet()) { @@ -1426,35 +1770,36 @@ private void bulkReadAndConvertDBRefMapIntoTarget(TypeInformation valueType, } @SuppressWarnings("unchecked") - private List bulkReadAndConvertDBRefs(List dbrefs, TypeInformation type, ObjectPath path, - final Class rawType) { + private List bulkReadAndConvertDBRefs(ConversionContext context, List dbrefs, TypeInformation type) { if (CollectionUtils.isEmpty(dbrefs)) { return Collections.emptyList(); } - List referencedRawDocuments = dbrefs.size() == 1 - ? Collections.singletonList(readRef(dbrefs.iterator().next())) + List referencedRawDocuments = dbrefs.size() == 1 ? 
Collections.singletonList(readRef(peek(dbrefs))) : bulkReadRefs(dbrefs); - String collectionName = dbrefs.iterator().next().getCollectionName(); + String collectionName = peek(dbrefs).getCollectionName(); - List targeList = new ArrayList<>(dbrefs.size()); + List targetList = new ArrayList<>(dbrefs.size()); for (Document document : referencedRawDocuments) { + T target = null; if (document != null) { - maybeEmitEvent(new AfterLoadEvent<>(document, (Class) rawType, collectionName)); - } - final T target = (T) read(type, document, path); - targeList.add(target); + maybeEmitEvent(new AfterLoadEvent<>(document, (Class) type.getType(), collectionName)); + target = (T) readDocument(context, document, type); + } if (target != null) { maybeEmitEvent(new AfterConvertEvent<>(document, target, collectionName)); + target = maybeCallAfterConvert(target, document, collectionName); } + + targetList.add(target); } - return targeList; + return targetList; } private void maybeEmitEvent(MongoMappingEvent event) { @@ -1468,12 +1813,22 @@ private boolean canPublishEvent() { return this.applicationContext != null; } + protected T maybeCallAfterConvert(T object, Document document, String collection) { + + if (null != entityCallbacks) { + return entityCallbacks.callback(AfterConvertCallback.class, object, document, collection); + } + + return object; + } + /** * Performs the fetch operation for the given {@link DBRef}. * * @param ref * @return */ + @Nullable Document readRef(DBRef ref) { return dbRefResolver.fetch(ref); } @@ -1489,25 +1844,76 @@ List bulkReadRefs(List references) { return dbRefResolver.bulkFetch(references); } + /** + * Get the conversion target type if defined or return the {@literal source}. + * + * @param source must not be {@literal null}. 
+ * @return + * @since 2.2 + */ + public Class getWriteTarget(Class source) { + return conversions.getCustomWriteTarget(source).orElse(source); + } + + @Override + public CodecRegistry getCodecRegistry() { + return codecRegistryProvider != null ? codecRegistryProvider.getCodecRegistry() : super.getCodecRegistry(); + } + + /** + * Create a new {@link MappingMongoConverter} using the given {@link MongoDatabaseFactory} when loading {@link DBRef}. + * + * @return new instance of {@link MappingMongoConverter}. Never {@literal null}. + * @since 2.1.6 + */ + public MappingMongoConverter with(MongoDatabaseFactory dbFactory) { + + MappingMongoConverter target = new MappingMongoConverter(new DefaultDbRefResolver(dbFactory), mappingContext); + target.applicationContext = applicationContext; + target.conversions = conversions; + target.spELContext = spELContext; + target.setInstantiators(instantiators); + target.defaultTypeMapper = defaultTypeMapper; + target.typeMapper = typeMapper; + target.setCodecRegistryProvider(dbFactory); + target.afterPropertiesSet(); + + return target; + } + + private T doConvert(Object value, Class target) { + return doConvert(value, target, null); + } + + @SuppressWarnings("ConstantConditions") + private T doConvert(Object value, Class target, + @Nullable Class fallback) { + + if (conversionService.canConvert(value.getClass(), target) || fallback == null) { + return conversionService.convert(value, target); + } + return conversionService.convert(value, fallback); + } + /** * Returns whether the given {@link Iterable} contains {@link DBRef} instances all pointing to the same collection. * * @param source must not be {@literal null}. 
* @return */ - private static boolean isCollectionOfDbRefWhereBulkFetchIsPossible(Iterable source) { + private static boolean isCollectionOfDbRefWhereBulkFetchIsPossible(Iterable source) { - Assert.notNull(source, "Iterable of DBRefs must not be null!"); + Assert.notNull(source, "Iterable of DBRefs must not be null"); Set collectionsFound = new HashSet<>(); for (Object dbObjItem : source) { - if (!(dbObjItem instanceof DBRef)) { + if (!(dbObjItem instanceof DBRef dbRef)) { return false; } - collectionsFound.add(((DBRef) dbObjItem).getCollectionName()); + collectionsFound.add(dbRef.getCollectionName()); if (collectionsFound.size() > 1) { return false; @@ -1517,9 +1923,180 @@ private static boolean isCollectionOfDbRefWhereBulkFetchIsPossible(Iterable T peek(Iterable result) { + return result.iterator().next(); + } + /** - * Marker class used to indicate we have a non root document object here that might be used within an update - so we - * need to preserve type hints for potential nested elements but need to remove it on top level. + * {@link PropertyValueProvider} to evaluate a SpEL expression if present on the property or simply accesses the field + * of the configured source {@link Document}. + * + * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl + */ + static class MongoDbPropertyValueProvider implements PropertyValueProvider { + + final ConversionContext context; + final DocumentAccessor accessor; + final ValueExpressionEvaluator evaluator; + final SpELContext spELContext; + + /** + * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link ValueExpressionEvaluator} and + * {@link ObjectPath}. + * + * @param context must not be {@literal null}. + * @param source must not be {@literal null}. + * @param evaluator must not be {@literal null}. 
+ */ + MongoDbPropertyValueProvider(ConversionContext context, Bson source, ValueExpressionEvaluator evaluator) { + this(context, new DocumentAccessor(source), evaluator, null); + } + + /** + * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link ValueExpressionEvaluator} and + * {@link ObjectPath}. + * + * @param context must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @param evaluator must not be {@literal null}. + */ + MongoDbPropertyValueProvider(ConversionContext context, DocumentAccessor accessor, + ValueExpressionEvaluator evaluator, SpELContext spELContext) { + + this.context = context; + this.accessor = accessor; + this.evaluator = evaluator; + this.spELContext = spELContext; + } + + @Override + @Nullable + @SuppressWarnings("unchecked") + public T getPropertyValue(MongoPersistentProperty property) { + + String expression = property.getSpelExpression(); + Object value = expression != null ? evaluator.evaluate(expression) : accessor.get(property); + + CustomConversions conversions = context.getCustomConversions(); + if (conversions.hasValueConverter(property)) { + MongoConversionContext conversionContext = new MongoConversionContext(this, property, + context.getSourceConverter(), spELContext); + PropertyValueConverter> valueConverter = conversions + .getPropertyValueConversions().getValueConverter(property); + return (T) (value != null ? valueConverter.read(value, conversionContext) + : valueConverter.readNull(conversionContext)); + } + + if (value == null) { + return null; + } + + ConversionContext contextToUse = context.forProperty(property); + + return (T) contextToUse.convert(value, property.getTypeInformation()); + } + + public MongoDbPropertyValueProvider withContext(ConversionContext context) { + + return context == this.context ? 
this + : new MongoDbPropertyValueProvider(context, accessor, evaluator, spELContext); + } + } + + /** + * {@link PropertyValueProvider} that is aware of {@link MongoPersistentProperty#isAssociation()} and that delegates + * resolution to {@link DbRefResolver}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ + class AssociationAwareMongoDbPropertyValueProvider extends MongoDbPropertyValueProvider { + + /** + * Creates a new {@link AssociationAwareMongoDbPropertyValueProvider} for the given source, + * {@link ValueExpressionEvaluator} and {@link ObjectPath}. + * + * @param source must not be {@literal null}. + * @param evaluator must not be {@literal null}. + */ + AssociationAwareMongoDbPropertyValueProvider(ConversionContext context, DocumentAccessor source, + ValueExpressionEvaluator evaluator) { + super(context, source, evaluator, MappingMongoConverter.this.spELContext); + } + + @Override + @Nullable + @SuppressWarnings("unchecked") + public T getPropertyValue(MongoPersistentProperty property) { + + ConversionContext propertyContext = context.forProperty(property); + + if (property.isAssociation()) { + + DbRefResolverCallback callback = new DefaultDbRefResolverCallback(accessor.getDocument(), context.getPath(), + evaluator, (prop, bson, evaluator, path) -> MappingMongoConverter.this.getValueInternal(context, prop, bson, + evaluator)); + + return (T) readAssociation(property.getRequiredAssociation(), accessor, dbRefProxyHandler, callback, + propertyContext); + } + + if (property.isUnwrapped()) { + + return (T) readUnwrapped(propertyContext, accessor, property, + mappingContext.getRequiredPersistentEntity(property)); + } + + if (!accessor.hasValue(property)) { + return null; + } + + return super.getPropertyValue(property); + } + } + + /** + * Extension of {@link SpELExpressionParameterValueProvider} to recursively trigger value conversion on the raw + * resolved SpEL value. 
+ * + * @author Oliver Gierke + */ + private static class ConverterAwareValueExpressionParameterValueProvider + extends ValueExpressionParameterValueProvider { + + private final ConversionContext context; + + /** + * Creates a new {@link ConverterAwareValueExpressionParameterValueProvider}. + * + * @param context must not be {@literal null}. + * @param evaluator must not be {@literal null}. + * @param conversionService must not be {@literal null}. + * @param delegate must not be {@literal null}. + */ + public ConverterAwareValueExpressionParameterValueProvider(ConversionContext context, + ValueExpressionEvaluator evaluator, ConversionService conversionService, + ParameterValueProvider delegate) { + + super(evaluator, conversionService, delegate); + + Assert.notNull(context, "ConversionContext must no be null"); + + this.context = context; + } + + @Override + protected T potentiallyConvertExpressionValue(Object object, Parameter parameter) { + return context.convert(object, parameter.getType()); + } + } + + /** + * Marker class used to indicate we have a non root document object here that might be used within an update - so we + * need to preserve type hints for potential nested elements but need to remove it on top level. * * @author Christoph Strobl * @since 1.8 @@ -1527,4 +2104,447 @@ private static boolean isCollectionOfDbRefWhereBulkFetchIsPossible(Iterable { + + INSTANCE; + + @Override + public T getParameterValue(Parameter parameter) { + return null; + } + } + + /** + * {@link TypeInformation} considering {@link MongoPersistentProperty#getFieldType()} as type source. 
+ * + * @param + */ + private static class FieldTypeInformation implements TypeInformation { + + private final MongoPersistentProperty persistentProperty; + private final TypeInformation delegate; + + @SuppressWarnings("unchecked") + public FieldTypeInformation(MongoPersistentProperty property) { + + this.persistentProperty = property; + this.delegate = (TypeInformation) property.getTypeInformation(); + } + + @Override + public List> getParameterTypes(Constructor constructor) { + return persistentProperty.getTypeInformation().getParameterTypes(constructor); + } + + @Override + public org.springframework.data.util.TypeInformation getProperty(String property) { + return delegate.getProperty(property); + } + + @Override + public boolean isCollectionLike() { + return delegate.isCollectionLike(); + } + + @Override + public org.springframework.data.util.TypeInformation getComponentType() { + return TypeInformation.of(persistentProperty.getFieldType()); + } + + @Override + public boolean isMap() { + return delegate.isMap(); + } + + @Override + public org.springframework.data.util.TypeInformation getMapValueType() { + return TypeInformation.of(persistentProperty.getFieldType()); + } + + @Override + public Class getType() { + return delegate.getType(); + } + + @Override + public TypeInformation getRawTypeInformation() { + return delegate.getRawTypeInformation(); + } + + @Override + public org.springframework.data.util.TypeInformation getActualType() { + return delegate.getActualType(); + } + + @Override + public org.springframework.data.util.TypeInformation getReturnType(Method method) { + return delegate.getReturnType(method); + } + + @Override + public List> getParameterTypes(Method method) { + return delegate.getParameterTypes(method); + } + + @Override + public org.springframework.data.util.TypeInformation getSuperTypeInformation(Class superType) { + return delegate.getSuperTypeInformation(superType); + } + + @Override + public boolean 
isAssignableFrom(org.springframework.data.util.TypeInformation target) { + return delegate.isAssignableFrom(target); + } + + @Override + public List> getTypeArguments() { + return delegate.getTypeArguments(); + } + + @Override + public org.springframework.data.util.TypeInformation specialize(TypeInformation type) { + return delegate.specialize(type); + } + + @Override + public TypeDescriptor toTypeDescriptor() { + return delegate.toTypeDescriptor(); + } + } + + /** + * Conversion context defining an interface for graph-traversal-based conversion of documents. Entrypoint for + * recursive conversion of {@link Document} and other types. + * + * @since 3.4.3 + */ + protected interface ConversionContext { + + /** + * Converts a source object into {@link TypeInformation target}. + * + * @param source must not be {@literal null}. + * @param typeHint must not be {@literal null}. + * @return the converted object. + */ + default S convert(Object source, TypeInformation typeHint) { + return convert(source, typeHint, this); + } + + /** + * Converts a source object into {@link TypeInformation target}. + * + * @param source must not be {@literal null}. + * @param typeHint must not be {@literal null}. + * @param context must not be {@literal null}. + * @return the converted object. + */ + S convert(Object source, TypeInformation typeHint, ConversionContext context); + + /** + * Create a new {@link ConversionContext} with {@link ObjectPath currentPath} applied. + * + * @param currentPath must not be {@literal null}. + * @return a new {@link ConversionContext} with {@link ObjectPath currentPath} applied. + */ + ConversionContext withPath(ObjectPath currentPath); + + /** + * Obtain a {@link ConversionContext} for the given property {@code name}. + * + * @param name must not be {@literal null}. + * @return the {@link ConversionContext} to be used for conversion of the given property. 
+ */ + default ConversionContext forProperty(String name) { + return this; + } + + /** + * Obtain a {@link ConversionContext} for the given {@link MongoPersistentProperty}. + * + * @param property must not be {@literal null}. + * @return the {@link ConversionContext} to be used for conversion of the given property. + */ + default ConversionContext forProperty(MongoPersistentProperty property) { + + return property.isAssociation() ? new AssociationConversionContext(forProperty(property.getName())) + : forProperty(property.getName()); + } + + /** + * Lookup a potentially existing entity instance of the given {@link MongoPersistentEntity} and {@link Document} + * + * @param entity + * @param document + * @return + * @param + */ + @Nullable + default S findContextualEntity(MongoPersistentEntity entity, Document document) { + return null; + } + + ObjectPath getPath(); + + CustomConversions getCustomConversions(); + + MongoConverter getSourceConverter(); + + } + + /** + * @since 3.4.3 + */ + static class AssociationConversionContext implements ConversionContext { + + private final ConversionContext delegate; + + public AssociationConversionContext(ConversionContext delegate) { + this.delegate = delegate; + } + + @Override + public S convert(Object source, TypeInformation typeHint, ConversionContext context) { + return delegate.convert(source, typeHint, context); + } + + @Override + public ConversionContext withPath(ObjectPath currentPath) { + return new AssociationConversionContext(delegate.withPath(currentPath)); + } + + @Override + public S findContextualEntity(MongoPersistentEntity entity, Document document) { + + Object identifier = document.get(BasicMongoPersistentProperty.ID_FIELD_NAME); + + return identifier != null ? 
getPath().getPathItem(identifier, entity.getCollection(), entity.getType()) : null; + } + + @Override + public ObjectPath getPath() { + return delegate.getPath(); + } + + @Override + public CustomConversions getCustomConversions() { + return delegate.getCustomConversions(); + } + + @Override + public MongoConverter getSourceConverter() { + return delegate.getSourceConverter(); + } + + } + + /** + * Conversion context holding references to simple {@link ValueConverter} and {@link ContainerValueConverter}. + * Entrypoint for recursive conversion of {@link Document} and other types. + * + * @since 3.2 + */ + protected static class DefaultConversionContext implements ConversionContext { + + final MongoConverter sourceConverter; + final org.springframework.data.convert.CustomConversions conversions; + final ObjectPath path; + final ContainerValueConverter documentConverter; + final ContainerValueConverter> collectionConverter; + final ContainerValueConverter mapConverter; + final ContainerValueConverter dbRefConverter; + final ValueConverter elementConverter; + + DefaultConversionContext(MongoConverter sourceConverter, + org.springframework.data.convert.CustomConversions customConversions, ObjectPath path, + ContainerValueConverter documentConverter, ContainerValueConverter> collectionConverter, + ContainerValueConverter mapConverter, ContainerValueConverter dbRefConverter, + ValueConverter elementConverter) { + + this.sourceConverter = sourceConverter; + this.conversions = customConversions; + this.path = path; + this.documentConverter = documentConverter; + this.collectionConverter = collectionConverter; + this.mapConverter = mapConverter; + this.dbRefConverter = dbRefConverter; + this.elementConverter = elementConverter; + } + + @SuppressWarnings("unchecked") + @Override + public S convert(Object source, TypeInformation typeHint, + ConversionContext context) { + + if (conversions.hasCustomReadTarget(source.getClass(), typeHint.getType())) { + return (S) 
elementConverter.convert(source, typeHint); + } + + if (source instanceof Collection collection) { + + Class rawType = typeHint.getType(); + if (!Object.class.equals(rawType) && !String.class.equals(rawType)) { + + if (!rawType.isArray() && !ClassUtils.isAssignable(Iterable.class, rawType)) { + + throw new MappingException( + String.format(INCOMPATIBLE_TYPES, source, source.getClass(), rawType, getPath())); + } + } + + if (typeHint.isCollectionLike() || typeHint.getType().isAssignableFrom(Collection.class)) { + return (S) collectionConverter.convert(context, collection, typeHint); + } + } + + if (typeHint.isMap()) { + + if (ClassUtils.isAssignable(Document.class, typeHint.getType())) { + return (S) documentConverter.convert(context, BsonUtils.asBson(source), typeHint); + } + + if (BsonUtils.supportsBson(source)) { + return (S) mapConverter.convert(context, BsonUtils.asBson(source), typeHint); + } + + throw new IllegalArgumentException( + String.format("Expected map like structure but found %s", source.getClass())); + } + + if (source instanceof DBRef dbRef) { + return (S) dbRefConverter.convert(context, dbRef, typeHint); + } + + if (BsonUtils.supportsBson(source)) { + return (S) documentConverter.convert(context, BsonUtils.asBson(source), typeHint); + } + + return (S) elementConverter.convert(source, typeHint); + } + + @Override + public CustomConversions getCustomConversions() { + return conversions; + } + + @Override + public MongoConverter getSourceConverter() { + return sourceConverter; + } + + @Override + public ConversionContext withPath(ObjectPath currentPath) { + + Assert.notNull(currentPath, "ObjectPath must not be null"); + + return new DefaultConversionContext(sourceConverter, conversions, currentPath, documentConverter, + collectionConverter, mapConverter, dbRefConverter, elementConverter); + } + + @Override + public ObjectPath getPath() { + return path; + } + + /** + * Converts a simple {@code source} value into {@link TypeInformation the target type}. 
+ * + * @param + */ + interface ValueConverter { + + Object convert(T source, TypeInformation typeHint); + + } + + /** + * Converts a container {@code source} value into {@link TypeInformation the target type}. Containers may + * recursively apply conversions for entities, collections, maps, etc. + * + * @param + */ + interface ContainerValueConverter { + + Object convert(ConversionContext context, T source, TypeInformation typeHint); + + } + + } + + /** + * @since 3.4.3 + */ + class ProjectingConversionContext extends DefaultConversionContext { + + private final EntityProjection returnedTypeDescriptor; + + ProjectingConversionContext(MongoConverter sourceConverter, CustomConversions customConversions, ObjectPath path, + ContainerValueConverter> collectionConverter, ContainerValueConverter mapConverter, + ContainerValueConverter dbRefConverter, ValueConverter elementConverter, + EntityProjection projection) { + super(sourceConverter, customConversions, path, + (context, source, typeHint) -> doReadOrProject(context, source, typeHint, projection), + + collectionConverter, mapConverter, dbRefConverter, elementConverter); + this.returnedTypeDescriptor = projection; + } + + @Override + public ConversionContext forProperty(String name) { + + EntityProjection property = returnedTypeDescriptor.findProperty(name); + if (property == null) { + return new DefaultConversionContext(sourceConverter, conversions, path, + MappingMongoConverter.this::readDocument, collectionConverter, mapConverter, dbRefConverter, + elementConverter); + } + + return new ProjectingConversionContext(sourceConverter, conversions, path, collectionConverter, mapConverter, + dbRefConverter, elementConverter, property); + } + + @Override + public ConversionContext withPath(ObjectPath currentPath) { + return new ProjectingConversionContext(sourceConverter, conversions, currentPath, collectionConverter, + mapConverter, dbRefConverter, elementConverter, returnedTypeDescriptor); + } + } + + private static 
class PropertyTranslatingPropertyAccessor implements PersistentPropertyAccessor { + + private final PersistentPropertyAccessor delegate; + private final PersistentPropertyTranslator propertyTranslator; + + private PropertyTranslatingPropertyAccessor(PersistentPropertyAccessor delegate, + PersistentPropertyTranslator propertyTranslator) { + this.delegate = delegate; + this.propertyTranslator = propertyTranslator; + } + + static PersistentPropertyAccessor create(PersistentPropertyAccessor delegate, + PersistentPropertyTranslator propertyTranslator) { + return new PropertyTranslatingPropertyAccessor<>(delegate, propertyTranslator); + } + + @Override + public void setProperty(PersistentProperty property, @Nullable Object value) { + delegate.setProperty(translate(property), value); + } + + @Override + public Object getProperty(PersistentProperty property) { + return delegate.getProperty(translate(property)); + } + + @Override + public T getBean() { + return delegate.getBean(); + } + + private MongoPersistentProperty translate(PersistentProperty property) { + return propertyTranslator.translate((MongoPersistentProperty) property); + } + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConversionContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConversionContext.java new file mode 100644 index 0000000000..da106715d4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConversionContext.java @@ -0,0 +1,170 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.bson.conversions.Bson; +import org.springframework.data.convert.ValueConversionContext; +import org.springframework.data.mapping.model.PropertyValueProvider; +import org.springframework.data.mapping.model.SpELContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.CheckReturnValue; +import org.springframework.lang.Nullable; + +/** + * {@link ValueConversionContext} that allows to delegate read/write to an underlying {@link MongoConverter}. 
+ * + * @author Christoph Strobl + * @author Ross Lawley + * @since 3.4 + */ +public class MongoConversionContext implements ValueConversionContext { + + private final PropertyValueProvider accessor; // TODO: generics + private final MongoConverter mongoConverter; + + @Nullable private final MongoPersistentProperty persistentProperty; + @Nullable private final SpELContext spELContext; + @Nullable private final OperatorContext operatorContext; + + public MongoConversionContext(PropertyValueProvider accessor, + @Nullable MongoPersistentProperty persistentProperty, MongoConverter mongoConverter) { + this(accessor, persistentProperty, mongoConverter, null, null); + } + + public MongoConversionContext(PropertyValueProvider accessor, + @Nullable MongoPersistentProperty persistentProperty, MongoConverter mongoConverter, + @Nullable SpELContext spELContext) { + this(accessor, persistentProperty, mongoConverter, spELContext, null); + } + + public MongoConversionContext(PropertyValueProvider accessor, + @Nullable MongoPersistentProperty persistentProperty, MongoConverter mongoConverter, + @Nullable OperatorContext operatorContext) { + this(accessor, persistentProperty, mongoConverter, null, operatorContext); + } + + public MongoConversionContext(PropertyValueProvider accessor, + @Nullable MongoPersistentProperty persistentProperty, MongoConverter mongoConverter, + @Nullable SpELContext spELContext, @Nullable OperatorContext operatorContext) { + + this.accessor = accessor; + this.persistentProperty = persistentProperty; + this.mongoConverter = mongoConverter; + this.spELContext = spELContext; + this.operatorContext = operatorContext; + } + + @Override + public MongoPersistentProperty getProperty() { + + if (persistentProperty == null) { + throw new IllegalStateException("No underlying MongoPersistentProperty available"); + } + + return persistentProperty; + } + + /** + * @param operatorContext + * @return new instance of {@link MongoConversionContext}. 
+ * @since 4.5 + */ + @CheckReturnValue + public MongoConversionContext forOperator(@Nullable OperatorContext operatorContext) { + return new MongoConversionContext(accessor, persistentProperty, mongoConverter, spELContext, operatorContext); + } + + @Nullable + public Object getValue(String propertyPath) { + return accessor.getPropertyValue(getProperty().getOwner().getRequiredPersistentProperty(propertyPath)); + } + + @Override + @SuppressWarnings("unchecked") + public T write(@Nullable Object value, TypeInformation target) { + return (T) mongoConverter.convertToMongoType(value, target); + } + + @Override + public T read(@Nullable Object value, TypeInformation target) { + return value instanceof Bson bson ? mongoConverter.read(target.getType(), bson) + : ValueConversionContext.super.read(value, target); + } + + @Nullable + public SpELContext getSpELContext() { + return spELContext; + } + + @Nullable + public OperatorContext getOperatorContext() { + return operatorContext; + } + + /** + * The {@link OperatorContext} provides access to the actual conversion intent like a write operation or a query + * operator such as {@literal $gte}. + * + * @since 4.5 + */ + public interface OperatorContext { + + /** + * The operator the conversion is used in. + * + * @return {@literal write} for simple write operations during save, or a query operator. + */ + String operator(); + + /** + * The context path the operator is used in. + * + * @return never {@literal null}. + */ + String path(); + + boolean isWriteOperation(); + + } + + record WriteOperatorContext(String path) implements OperatorContext { + + @Override + public String operator() { + return "write"; + } + + @Override + public boolean isWriteOperation() { + return true; + } + } + + record QueryOperatorContext(String operator, String path) implements OperatorContext { + + public QueryOperatorContext(@Nullable String operator, String path) { + this.operator = operator != null ? 
operator : "$eq"; + this.path = path; + } + + @Override + public boolean isWriteOperation() { + return false; + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java index 41d5aa62eb..3676e74c8b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,24 @@ */ package org.springframework.data.mongodb.core.convert; +import com.mongodb.MongoClientSettings; import org.bson.BsonValue; import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; import org.bson.conversions.Bson; +import org.bson.types.ObjectId; +import org.springframework.core.convert.ConversionException; +import org.springframework.data.convert.CustomConversions; import org.springframework.data.convert.EntityConverter; import org.springframework.data.convert.EntityReader; import org.springframework.data.convert.TypeMapper; +import org.springframework.data.mongodb.CodecRegistryProvider; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.util.BsonUtils; +import 
org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; @@ -37,19 +46,49 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Ryan Gibb */ public interface MongoConverter extends EntityConverter, MongoPersistentProperty, Object, Bson>, MongoWriter, - EntityReader { + EntityReader, CodecRegistryProvider { /** - * Returns thw {@link TypeMapper} being used to write type information into {@link Document}s created with that + * Returns the {@link TypeMapper} being used to write type information into {@link Document}s created with that * converter. * * @return will never be {@literal null}. */ MongoTypeMapper getTypeMapper(); + /** + * Returns the {@link ProjectionFactory} for this converter. + * + * @return will never be {@literal null}. + * @since 3.4 + */ + ProjectionFactory getProjectionFactory(); + + /** + * Returns the {@link CustomConversions} for this converter. + * + * @return will never be {@literal null}. + * @since 3.4 + */ + CustomConversions getCustomConversions(); + + /** + * Apply a projection to {@link Bson} and return the projection return type {@code R}. + * {@link EntityProjection#isProjection() Non-projecting} descriptors fall back to {@link #read(Class, Object) regular + * object materialization}. + * + * @param descriptor the projection descriptor, must not be {@literal null}. + * @param bson must not be {@literal null}. + * @param + * @return a new instance of the projection return type {@code R}. + * @since 3.4 + */ + R project(EntityProjection descriptor, Bson bson); + /** * Mapping function capable of converting values into a desired target type by eg. extracting the actual java type * from a given {@link BsonValue}. 
@@ -66,36 +105,42 @@ public interface MongoConverter @Nullable default T mapValueToTargetType(S source, Class targetType, DbRefResolver dbRefResolver) { - Assert.notNull(targetType, "TargetType must not be null!"); - Assert.notNull(dbRefResolver, "DbRefResolver must not be null!"); - + Assert.notNull(targetType, "TargetType must not be null"); + Assert.notNull(dbRefResolver, "DbRefResolver must not be null"); if (targetType != Object.class && ClassUtils.isAssignable(targetType, source.getClass())) { return (T) source; } - if (source instanceof BsonValue) { + if (source instanceof BsonValue bson) { - Object value = BsonUtils.toJavaType((BsonValue) source); + Object value = BsonUtils.toJavaType(bson); - if (value instanceof Document) { + if (value instanceof Document document) { - Document sourceDocument = (Document) value; + if (document.containsKey("$ref") && document.containsKey("$id")) { - if (sourceDocument.containsKey("$ref") && sourceDocument.containsKey("$id")) { + Object id = document.get("$id"); + String collection = document.getString("$ref"); - sourceDocument = dbRefResolver.fetch(new DBRef(sourceDocument.getString("$ref"), sourceDocument.get("$id"))); - if (sourceDocument == null) { + MongoPersistentEntity entity = getMappingContext().getPersistentEntity(targetType); + if (entity != null && entity.hasIdProperty()) { + id = convertId(id, entity.getIdProperty().getFieldType()); + } + + DBRef ref = document.containsKey("$db") ? 
new DBRef(document.getString("$db"), collection, id) + : new DBRef(collection, id); + + document = dbRefResolver.fetch(ref); + if (document == null) { return null; } } - return read(targetType, sourceDocument); + return read(targetType, document); } else { - if (!ClassUtils.isAssignable(targetType, value.getClass())) { - if (getConversionService().canConvert(value.getClass(), targetType)) { - return getConversionService().convert(value, targetType); - } + if (!ClassUtils.isAssignable(targetType, value.getClass()) && getConversionService().canConvert(value.getClass(), targetType)) { + return getConversionService().convert(value, targetType); } } @@ -103,4 +148,47 @@ default T mapValueToTargetType(S source, Class targetType, DbRefResolv } return getConversionService().convert(source, targetType); } + + /** + * Converts the given raw id value into either {@link ObjectId} or {@link String}. + * + * @param id can be {@literal null}. + * @param targetType must not be {@literal null}. + * @return {@literal null} if source {@literal id} is already {@literal null}. + * @since 2.2 + */ + @Nullable + default Object convertId(@Nullable Object id, Class targetType) { + + if (id == null || ClassUtils.isAssignableValue(targetType, id)) { + return id; + } + + if (ClassUtils.isAssignable(ObjectId.class, targetType)) { + + if (id instanceof String) { + + if (ObjectId.isValid(id.toString())) { + return new ObjectId(id.toString()); + } + + // avoid ConversionException as convertToMongoType will return String anyways. + return id; + } + } + + try { + return getConversionService().canConvert(id.getClass(), targetType) + ? 
getConversionService().convert(id, targetType) + : convertToMongoType(id, (TypeInformation) null); + } catch (ConversionException o_O) { + return convertToMongoType(id,(TypeInformation) null); + } + } + + @Override + default CodecRegistry getCodecRegistry() { + return MongoClientSettings.getDefaultCodecRegistry(); + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java index 83d27510a2..f9a67d73a0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,21 +15,39 @@ */ package org.springframework.data.mongodb.core.convert; +import static org.springframework.data.convert.ConverterBuilder.*; + import java.math.BigDecimal; import java.math.BigInteger; import java.net.MalformedURLException; +import java.net.URI; import java.net.URL; +import java.time.Instant; import java.util.ArrayList; import java.util.Collection; import java.util.Currency; import java.util.List; +import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import org.bson.BinaryVector; +import org.bson.BsonArray; +import org.bson.BsonDouble; +import org.bson.BsonReader; +import org.bson.BsonTimestamp; +import org.bson.BsonUndefined; +import org.bson.BsonWriter; import org.bson.Document; +import org.bson.codecs.Codec; +import org.bson.codecs.DecoderContext; +import org.bson.codecs.EncoderContext; +import org.bson.codecs.configuration.CodecRegistries; import org.bson.types.Binary; import org.bson.types.Code; +import org.bson.types.Decimal128; import org.bson.types.ObjectId; + import org.springframework.core.convert.ConversionFailedException; import org.springframework.core.convert.TypeDescriptor; import org.springframework.core.convert.converter.ConditionalConverter; @@ -37,13 +55,17 @@ import org.springframework.core.convert.converter.ConverterFactory; import org.springframework.data.convert.ReadingConverter; import org.springframework.data.convert.WritingConverter; +import org.springframework.data.domain.Vector; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoVector; import org.springframework.data.mongodb.core.query.Term; import 
org.springframework.data.mongodb.core.script.NamedMongoScript; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.NumberUtils; import org.springframework.util.StringUtils; +import com.mongodb.MongoClientSettings; + /** * Wrapper class to contain useful converters for the usage with Mongo. * @@ -69,10 +91,10 @@ static Collection getConvertersToRegister() { List converters = new ArrayList<>(); - converters.add(BigDecimalToStringConverter.INSTANCE); - converters.add(StringToBigDecimalConverter.INSTANCE); - converters.add(BigIntegerToStringConverter.INSTANCE); - converters.add(StringToBigIntegerConverter.INSTANCE); + converters.add(BigDecimalToDecimal128Converter.INSTANCE); + converters.add(Decimal128ToBigDecimalConverter.INSTANCE); + converters.add(BigIntegerToDecimal128Converter.INSTANCE); + converters.add(URLToStringConverter.INSTANCE); converters.add(StringToURLConverter.INSTANCE); converters.add(DocumentToStringConverter.INSTANCE); @@ -86,6 +108,15 @@ static Collection getConvertersToRegister() { converters.add(LongToAtomicLongConverter.INSTANCE); converters.add(IntegerToAtomicIntegerConverter.INSTANCE); converters.add(BinaryToByteArrayConverter.INSTANCE); + converters.add(BsonTimestampToInstantConverter.INSTANCE); + converters.add(NumberToNumberConverterFactory.INSTANCE); + + converters.add(VectorToBsonArrayConverter.INSTANCE); + converters.add(ListToVectorConverter.INSTANCE); + converters.add(BinaryVectorToMongoVectorConverter.INSTANCE); + + converters.add(reading(BsonUndefined.class, Object.class, it -> null)); + converters.add(reading(String.class, URI.class, URI::create).andWriting(URI::toString)); return converters; } @@ -99,7 +130,7 @@ enum ObjectIdToStringConverter implements Converter { INSTANCE; public String convert(ObjectId id) { - return id == null ? 
null : id.toString(); + return id.toString(); } } @@ -125,7 +156,7 @@ enum ObjectIdToBigIntegerConverter implements Converter { INSTANCE; public BigInteger convert(ObjectId source) { - return source == null ? null : new BigInteger(source.toString(), 16); + return new BigInteger(source.toString(), 16); } } @@ -138,7 +169,7 @@ enum BigIntegerToObjectIdConverter implements Converter { INSTANCE; public ObjectId convert(BigInteger source) { - return source == null ? null : new ObjectId(source.toString(16)); + return new ObjectId(source.toString(16)); } } @@ -146,7 +177,29 @@ enum BigDecimalToStringConverter implements Converter { INSTANCE; public String convert(BigDecimal source) { - return source == null ? null : source.toString(); + return source.toString(); + } + } + + /** + * @since 2.2 + */ + enum BigDecimalToDecimal128Converter implements Converter { + INSTANCE; + + public Decimal128 convert(BigDecimal source) { + return new Decimal128(source); + } + } + + /** + * @since 5.0 + */ + enum BigIntegerToDecimal128Converter implements Converter { + INSTANCE; + + public Decimal128 convert(BigInteger source) { + return new Decimal128(new BigDecimal(source)); } } @@ -158,14 +211,27 @@ public BigDecimal convert(String source) { } } + /** + * @since 2.2 + */ + enum Decimal128ToBigDecimalConverter implements Converter { + INSTANCE; + + public BigDecimal convert(Decimal128 source) { + return source.bigDecimalValue(); + } + } + + @WritingConverter enum BigIntegerToStringConverter implements Converter { INSTANCE; public String convert(BigInteger source) { - return source == null ? null : source.toString(); + return source.toString(); } } + @ReadingConverter enum StringToBigIntegerConverter implements Converter { INSTANCE; @@ -178,7 +244,7 @@ enum URLToStringConverter implements Converter { INSTANCE; public String convert(URL source) { - return source == null ? 
null : source.toString(); + return source.toString(); } } @@ -191,7 +257,7 @@ enum StringToURLConverter implements Converter { public URL convert(String source) { try { - return source == null ? null : new URL(source); + return new URL(source); } catch (MalformedURLException e) { throw new ConversionFailedException(SOURCE, TARGET, source, e); } @@ -203,14 +269,27 @@ enum DocumentToStringConverter implements Converter { INSTANCE; - @Override - public String convert(Document source) { + private final Codec codec = CodecRegistries.fromRegistries(CodecRegistries.fromCodecs(new Codec() { - if (source == null) { - return null; + @Override + public void encode(BsonWriter writer, UUID value, EncoderContext encoderContext) { + writer.writeString(value.toString()); + } + + @Override + public Class getEncoderClass() { + return UUID.class; } - return source.toJson(); + @Override + public UUID decode(BsonReader reader, DecoderContext decoderContext) { + throw new IllegalStateException("decode not supported"); + } + }), MongoClientSettings.getDefaultCodecRegistry()).get(Document.class); + + @Override + public String convert(Document source) { + return source.toJson(codec); } } @@ -225,7 +304,7 @@ enum TermToStringConverter implements Converter { @Override public String convert(Term source) { - return source == null ? null : source.getFormatted(); + return source.getFormatted(); } } @@ -240,11 +319,11 @@ enum DocumentToNamedMongoScriptConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public String convert(Currency source) { - return source == null ? 
null : source.getCurrencyCode(); + return source.getCurrencyCode(); } } @@ -307,10 +378,6 @@ enum StringToCurrencyConverter implements Converter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Currency convert(String source) { return StringUtils.hasText(source) ? Currency.getInstance(source) : null; @@ -330,19 +397,11 @@ enum NumberToNumberConverterFactory implements ConverterFactory, INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.ConverterFactory#getConverter(java.lang.Class) - */ @Override public Converter getConverter(Class targetType) { return new NumberToNumberConverter(targetType); } - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.ConditionalConverter#matches(org.springframework.core.convert.TypeDescriptor, org.springframework.core.convert.TypeDescriptor) - */ @Override public boolean matches(TypeDescriptor sourceType, TypeDescriptor targetType) { return !sourceType.equals(targetType); @@ -359,24 +418,20 @@ private final static class NumberToNumberConverter implements */ public NumberToNumberConverter(Class targetType) { - Assert.notNull(targetType, "Target type must not be null!"); + Assert.notNull(targetType, "Target type must not be null"); this.targetType = targetType; } - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public T convert(Number source) { - if (source instanceof AtomicInteger) { - return NumberUtils.convertNumberToTargetClass(((AtomicInteger) source).get(), this.targetType); + if (source instanceof AtomicInteger atomicInteger) { + return NumberUtils.convertNumberToTargetClass(atomicInteger.get(), this.targetType); } - if (source instanceof AtomicLong) { - return NumberUtils.convertNumberToTargetClass(((AtomicLong) source).get(), this.targetType); + if (source instanceof AtomicLong atomicLong) { + return 
NumberUtils.convertNumberToTargetClass(atomicLong.get(), this.targetType); } return NumberUtils.convertNumberToTargetClass(source, this.targetType); @@ -384,6 +439,94 @@ public T convert(Number source) { } } + @WritingConverter + enum VectorToBsonArrayConverter implements Converter { + + INSTANCE; + + @Override + public Object convert(Vector source) { + + if (source instanceof MongoVector mv) { + return mv.getSource(); + } + + double[] doubleArray = source.toDoubleArray(); + + BsonArray array = new BsonArray(doubleArray.length); + + for (double v : doubleArray) { + array.add(new BsonDouble(v)); + } + + return array; + } + } + + @ReadingConverter + enum ListToVectorConverter implements Converter, Vector> { + + INSTANCE; + + @Override + public Vector convert(List source) { + return Vector.of(source); + } + } + + @ReadingConverter + enum BinaryVectorToMongoVectorConverter implements Converter { + + INSTANCE; + + @Override + public Vector convert(BinaryVector source) { + return MongoVector.of(source); + } + } + + @WritingConverter + enum ByteArrayConverterFactory implements ConverterFactory, ConditionalConverter { + + INSTANCE; + + @Override + public Converter getConverter(Class targetType) { + return new ByteArrayConverter<>(targetType); + } + + @Override + public boolean matches(TypeDescriptor sourceType, TypeDescriptor targetType) { + return targetType.getType() != Object.class && !sourceType.equals(targetType); + } + + private final static class ByteArrayConverter implements Converter { + + private final Class targetType; + + /** + * Creates a new {@link ByteArrayConverter} for the given target type. + * + * @param targetType must not be {@literal null}. 
+ */ + public ByteArrayConverter(Class targetType) { + + Assert.notNull(targetType, "Target type must not be null"); + + this.targetType = targetType; + } + + @Override + public T convert(byte[] source) { + + if (this.targetType == BinaryVector.class) { + return (T) BinaryVector.int8Vector(source); + } + return (T) source; + } + } + } + /** * {@link ConverterFactory} implementation converting {@link AtomicLong} into {@link Long}. * @@ -428,7 +571,7 @@ enum LongToAtomicLongConverter implements Converter { @Override public AtomicLong convert(Long source) { - return source != null ? new AtomicLong(source) : null; + return new AtomicLong(source); } } @@ -444,7 +587,7 @@ enum IntegerToAtomicIntegerConverter implements Converter { INSTANCE; - @Nullable @Override public byte[] convert(Binary source) { return source.getData(); } } + + /** + * {@link Converter} implementation converting {@link BsonTimestamp} into {@link Instant}. + * + * @author Christoph Strobl + * @since 2.1.2 + */ + @ReadingConverter + enum BsonTimestampToInstantConverter implements Converter { + + INSTANCE; + + @Override + public Instant convert(BsonTimestamp source) { + return Instant.ofEpochSecond(source.getTime(), 0); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoCustomConversions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoCustomConversions.java index 38cb703330..050c3bd27d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoCustomConversions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoCustomConversions.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,51 @@ */ package org.springframework.data.mongodb.core.convert; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; +import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Set; +import java.util.function.Consumer; import org.springframework.core.convert.TypeDescriptor; +import org.springframework.core.convert.converter.Converter; +import org.springframework.core.convert.converter.ConverterFactory; import org.springframework.core.convert.converter.GenericConverter; -import org.springframework.data.convert.JodaTimeConverters; +import org.springframework.data.convert.ConverterBuilder; +import org.springframework.data.convert.PropertyValueConversions; +import org.springframework.data.convert.PropertyValueConverter; +import org.springframework.data.convert.PropertyValueConverterFactory; +import org.springframework.data.convert.PropertyValueConverterRegistrar; +import org.springframework.data.convert.ReadingConverter; +import org.springframework.data.convert.SimplePropertyValueConversions; import org.springframework.data.convert.WritingConverter; +import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.mongodb.core.convert.MongoConverters.BigDecimalToStringConverter; +import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToStringConverter; +import 
org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter; +import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigIntegerConverter; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; import org.springframework.lang.Nullable; +import org.springframework.util.Assert; /** * Value object to capture custom conversion. {@link MongoCustomConversions} also act as factory for * {@link org.springframework.data.mapping.model.SimpleTypeHolder} * * @author Mark Paluch + * @author Christoph Strobl * @since 2.0 * @see org.springframework.data.convert.CustomConversions * @see org.springframework.data.mapping.model.SimpleTypeHolder @@ -51,7 +76,6 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus converters.add(CustomToStringConverter.INSTANCE); converters.addAll(MongoConverters.getConvertersToRegister()); - converters.addAll(JodaTimeConverters.getConvertersToRegister()); converters.addAll(GeoConverters.getConvertersToRegister()); STORE_CONVERTERS = Collections.unmodifiableList(converters); @@ -71,7 +95,33 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus * @param converters must not be {@literal null}. */ public MongoCustomConversions(List converters) { - super(STORE_CONVERSIONS, converters); + this(MongoConverterConfigurationAdapter.from(converters)); + } + + /** + * Create a new {@link MongoCustomConversions} given {@link MongoConverterConfigurationAdapter}. + * + * @param conversionConfiguration must not be {@literal null}. 
+ * @since 2.3 + */ + protected MongoCustomConversions(MongoConverterConfigurationAdapter conversionConfiguration) { + super(conversionConfiguration.createConverterConfiguration()); + } + + /** + * Functional style {@link org.springframework.data.convert.CustomConversions} creation giving users a convenient way + * of configuring store specific capabilities by providing deferred hooks to what will be configured when creating the + * {@link org.springframework.data.convert.CustomConversions#CustomConversions(ConverterConfiguration) instance}. + * + * @param configurer must not be {@literal null}. + * @since 2.3 + */ + public static MongoCustomConversions create(Consumer configurer) { + + MongoConverterConfigurationAdapter adapter = new MongoConverterConfigurationAdapter(); + configurer.accept(adapter); + + return new MongoCustomConversions(adapter); } @WritingConverter @@ -79,10 +129,6 @@ private enum CustomToStringConverter implements GenericConverter { INSTANCE; - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.GenericConverter#getConvertibleTypes() - */ public Set getConvertibleTypes() { ConvertiblePair localeToString = new ConvertiblePair(Locale.class, String.class); @@ -91,12 +137,325 @@ public Set getConvertibleTypes() { return new HashSet<>(Arrays.asList(localeToString, booleanToString)); } - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.GenericConverter#convert(java.lang.Object, org.springframework.core.convert.TypeDescriptor, org.springframework.core.convert.TypeDescriptor) - */ public Object convert(@Nullable Object source, TypeDescriptor sourceType, TypeDescriptor targetType) { return source != null ? source.toString() : null; } } + + /** + * {@link MongoConverterConfigurationAdapter} encapsulates creation of + * {@link org.springframework.data.convert.CustomConversions.ConverterConfiguration} with MongoDB specifics. 
+ * + * @author Christoph Strobl + * @since 2.3 + */ + public static class MongoConverterConfigurationAdapter { + + /** + * List of {@literal java.time} types having different representation when rendered via the native + * {@link org.bson.codecs.Codec} than the Spring Data {@link Converter}. + */ + private static final Set> JAVA_DRIVER_TIME_SIMPLE_TYPES = Set.of(LocalDate.class, LocalTime.class, LocalDateTime.class); + + private boolean useNativeDriverJavaTimeCodecs = false; + private BigDecimalRepresentation bigDecimals = BigDecimalRepresentation.STRING; + private final List customConverters = new ArrayList<>(); + + private final PropertyValueConversions internalValueConversion = PropertyValueConversions.simple(it -> {}); + private PropertyValueConversions propertyValueConversions = internalValueConversion; + + /** + * Create a {@link MongoConverterConfigurationAdapter} using the provided {@code converters} and our own codecs for + * JSR-310 types. + * + * @param converters must not be {@literal null}. + * @return + */ + public static MongoConverterConfigurationAdapter from(List converters) { + + Assert.notNull(converters, "Converters must not be null"); + + MongoConverterConfigurationAdapter converterConfigurationAdapter = new MongoConverterConfigurationAdapter(); + converterConfigurationAdapter.useSpringDataJavaTimeCodecs(); + converterConfigurationAdapter.registerConverters(converters); + + return converterConfigurationAdapter; + } + + /** + * Add a custom {@link Converter} implementation. + * + * @param converter must not be {@literal null}. + * @return this. + */ + public MongoConverterConfigurationAdapter registerConverter(Converter converter) { + + Assert.notNull(converter, "Converter must not be null"); + customConverters.add(converter); + return this; + } + + /** + * Add {@link Converter converters}, {@link ConverterFactory factories}, {@link ConverterBuilder.ConverterAware + * converter-aware objects}, and {@link GenericConverter generic converters}. 
+ * + * @param converters must not be {@literal null} nor contain {@literal null} values. + * @return this. + */ + public MongoConverterConfigurationAdapter registerConverters(Collection converters) { + + Assert.notNull(converters, "Converters must not be null"); + Assert.noNullElements(converters, "Converters must not be null nor contain null values"); + + customConverters.addAll(converters); + return this; + } + + /** + * Add a custom {@link ConverterFactory} implementation. + * + * @param converterFactory must not be {@literal null}. + * @return this. + */ + public MongoConverterConfigurationAdapter registerConverterFactory(ConverterFactory converterFactory) { + + Assert.notNull(converterFactory, "ConverterFactory must not be null"); + customConverters.add(converterFactory); + return this; + } + + /** + * Add a custom/default {@link PropertyValueConverterFactory} implementation used to serve + * {@link PropertyValueConverter}. + * + * @param converterFactory must not be {@literal null}. + * @return this. + * @since 3.4 + */ + public MongoConverterConfigurationAdapter registerPropertyValueConverterFactory( + PropertyValueConverterFactory converterFactory) { + + Assert.state(valueConversions() instanceof SimplePropertyValueConversions, + "Configured PropertyValueConversions does not allow setting custom ConverterRegistry"); + + ((SimplePropertyValueConversions) valueConversions()).setConverterFactory(converterFactory); + return this; + } + + /** + * Gateway to register property specific converters. + * + * @param configurationAdapter must not be {@literal null}. + * @return this. 
+ * @since 3.4 + */ + public MongoConverterConfigurationAdapter configurePropertyConversions( + Consumer> configurationAdapter) { + + Assert.state(valueConversions() instanceof SimplePropertyValueConversions, + "Configured PropertyValueConversions does not allow setting custom ConverterRegistry"); + + PropertyValueConverterRegistrar propertyValueConverterRegistrar = new PropertyValueConverterRegistrar(); + configurationAdapter.accept(propertyValueConverterRegistrar); + + ((SimplePropertyValueConversions) valueConversions()) + .setValueConverterRegistry(propertyValueConverterRegistrar.buildRegistry()); + return this; + } + + /** + * Set whether to or not to use the native MongoDB Java Driver {@link org.bson.codecs.Codec codes} for + * {@link org.bson.codecs.jsr310.LocalDateCodec LocalDate}, {@link org.bson.codecs.jsr310.LocalTimeCodec LocalTime} + * and {@link org.bson.codecs.jsr310.LocalDateTimeCodec LocalDateTime} using a {@link ZoneOffset#UTC}. + * + * @param useNativeDriverJavaTimeCodecs + * @return this. + */ + public MongoConverterConfigurationAdapter useNativeDriverJavaTimeCodecs(boolean useNativeDriverJavaTimeCodecs) { + + this.useNativeDriverJavaTimeCodecs = useNativeDriverJavaTimeCodecs; + return this; + } + + /** + * Use the native MongoDB Java Driver {@link org.bson.codecs.Codec codes} for + * {@link org.bson.codecs.jsr310.LocalDateCodec LocalDate}, {@link org.bson.codecs.jsr310.LocalTimeCodec LocalTime} + * and {@link org.bson.codecs.jsr310.LocalDateTimeCodec LocalDateTime} using a {@link ZoneOffset#UTC}. + * + * @return this. 
+ * @see #useNativeDriverJavaTimeCodecs(boolean) + */ + public MongoConverterConfigurationAdapter useNativeDriverJavaTimeCodecs() { + return useNativeDriverJavaTimeCodecs(true); + } + + /** + * Use SpringData {@link Converter Jsr310 converters} for + * {@link org.springframework.data.convert.Jsr310Converters.LocalDateToDateConverter LocalDate}, + * {@link org.springframework.data.convert.Jsr310Converters.LocalTimeToDateConverter LocalTime} and + * {@link org.springframework.data.convert.Jsr310Converters.LocalDateTimeToDateConverter LocalDateTime} using the + * {@link ZoneId#systemDefault()}. + * + * @return this. + * @see #useNativeDriverJavaTimeCodecs(boolean) + */ + public MongoConverterConfigurationAdapter useSpringDataJavaTimeCodecs() { + return useNativeDriverJavaTimeCodecs(false); + } + + /** + * Configures the representation to for {@link java.math.BigDecimal} and {@link java.math.BigInteger} values in + * MongoDB. Defaults to {@link BigDecimalRepresentation#STRING}. + * + * @param representation the representation to use. + * @return this. + * @since 4.5 + */ + public MongoConverterConfigurationAdapter bigDecimal(BigDecimalRepresentation representation) { + + Assert.notNull(representation, "BigDecimalDataType must not be null"); + this.bigDecimals = representation; + return this; + } + /** + * Optionally set the {@link PropertyValueConversions} to be applied during mapping. + *

          + * Use this method if {@link #configurePropertyConversions(Consumer)} and + * {@link #registerPropertyValueConverterFactory(PropertyValueConverterFactory)} are not sufficient. + * + * @param valueConversions must not be {@literal null}. + * @return this. + * @since 3.4 + * @deprecated since 4.2. Use {@link #withPropertyValueConversions(PropertyValueConversions)} instead. + */ + @Deprecated(since = "4.2.0") + public MongoConverterConfigurationAdapter setPropertyValueConversions(PropertyValueConversions valueConversions) { + return withPropertyValueConversions(valueConversions); + } + + /** + * Optionally set the {@link PropertyValueConversions} to be applied during mapping. + *

          + * Use this method if {@link #configurePropertyConversions(Consumer)} and + * {@link #registerPropertyValueConverterFactory(PropertyValueConverterFactory)} are not sufficient. + * + * @param valueConversions must not be {@literal null}. + * @return this. + * @since 4.2 + */ + public MongoConverterConfigurationAdapter withPropertyValueConversions(PropertyValueConversions valueConversions) { + + Assert.notNull(valueConversions, "PropertyValueConversions must not be null"); + this.propertyValueConversions = valueConversions; + return this; + } + + PropertyValueConversions valueConversions() { + + if (this.propertyValueConversions == null) { + this.propertyValueConversions = internalValueConversion; + } + + return this.propertyValueConversions; + } + + ConverterConfiguration createConverterConfiguration() { + + if (hasDefaultPropertyValueConversions() + && propertyValueConversions instanceof SimplePropertyValueConversions svc) { + svc.init(); + } + + List converters = new ArrayList<>(STORE_CONVERTERS.size() + 7); + + if (bigDecimals == BigDecimalRepresentation.STRING) { + + converters.add(BigDecimalToStringConverter.INSTANCE); + converters.add(StringToBigDecimalConverter.INSTANCE); + converters.add(BigIntegerToStringConverter.INSTANCE); + converters.add(StringToBigIntegerConverter.INSTANCE); + } + + if (!useNativeDriverJavaTimeCodecs) { + + converters.addAll(customConverters); + return new ConverterConfiguration(STORE_CONVERSIONS, converters, convertiblePair -> true, + this.propertyValueConversions); + } + + /* + * We need to have those converters using UTC as the default ones would go on with the systemDefault. 
+ */ + converters.add(DateToUtcLocalDateConverter.INSTANCE); + converters.add(DateToUtcLocalTimeConverter.INSTANCE); + converters.add(DateToUtcLocalDateTimeConverter.INSTANCE); + converters.addAll(STORE_CONVERTERS); + + StoreConversions storeConversions = StoreConversions + .of(new SimpleTypeHolder(JAVA_DRIVER_TIME_SIMPLE_TYPES, MongoSimpleTypes.HOLDER), converters); + + return new ConverterConfiguration(storeConversions, this.customConverters, convertiblePair -> { + + // Avoid default registrations + + return !JAVA_DRIVER_TIME_SIMPLE_TYPES.contains(convertiblePair.getSourceType()) + || !Date.class.isAssignableFrom(convertiblePair.getTargetType()); + }, this.propertyValueConversions); + } + + @ReadingConverter + private enum DateToUtcLocalDateTimeConverter implements Converter { + + INSTANCE; + + @Override + public LocalDateTime convert(Date source) { + return LocalDateTime.ofInstant(Instant.ofEpochMilli(source.getTime()), ZoneId.of("UTC")); + } + } + + @ReadingConverter + private enum DateToUtcLocalTimeConverter implements Converter { + INSTANCE; + + @Override + public LocalTime convert(Date source) { + return DateToUtcLocalDateTimeConverter.INSTANCE.convert(source).toLocalTime(); + } + } + + @ReadingConverter + private enum DateToUtcLocalDateConverter implements Converter { + INSTANCE; + + @Override + public LocalDate convert(Date source) { + return DateToUtcLocalDateTimeConverter.INSTANCE.convert(source).toLocalDate(); + } + } + + private boolean hasDefaultPropertyValueConversions() { + return propertyValueConversions == internalValueConversion; + } + + } + + /** + * Strategy to represent {@link java.math.BigDecimal} and {@link java.math.BigInteger} values in MongoDB. + * + * @since 4.5 + */ + public enum BigDecimalRepresentation { + + /** + * Store values as {@link Number#toString() String}. Using strings retains precision but does not support range + * queries. + */ + STRING, + + /** + * Store numbers using {@link org.bson.types.Decimal128}. 
Requires MongoDB Server 3.4 or later. + */ + DECIMAL128 + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java new file mode 100644 index 0000000000..05baa88c57 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java @@ -0,0 +1,80 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.Document; + +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoDatabaseUtils; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.client.MongoCollection; + +/** + * {@link ReferenceLoader} implementation using a {@link MongoDatabaseFactory} to obtain raw {@link Document documents} + * for linked entities via a {@link ReferenceLoader.DocumentReferenceQuery}. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ +public class MongoDatabaseFactoryReferenceLoader implements ReferenceLoader { + + private static final Log LOGGER = LogFactory.getLog(MongoDatabaseFactoryReferenceLoader.class); + + private final MongoDatabaseFactory mongoDbFactory; + + /** + * @param mongoDbFactory must not be {@literal null}. + */ + public MongoDatabaseFactoryReferenceLoader(MongoDatabaseFactory mongoDbFactory) { + + Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null"); + + this.mongoDbFactory = mongoDbFactory; + } + + @Override + public Iterable fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context) { + + MongoCollection collection = getCollection(context); + + if (LOGGER.isTraceEnabled()) { + LOGGER.trace(String.format("Bulk fetching %s from %s.%s", referenceQuery, + StringUtils.hasText(context.getDatabase()) ? context.getDatabase() + : collection.getNamespace().getDatabaseName(), + context.getCollection())); + } + + return referenceQuery.apply(collection); + } + + /** + * Obtain the {@link MongoCollection} for a given {@link ReferenceCollection} from the underlying + * {@link MongoDatabaseFactory}. + * + * @param context must not be {@literal null}. + * @return the {@link MongoCollection} targeted by the {@link ReferenceCollection}. 
+ */ + protected MongoCollection getCollection(ReferenceCollection context) { + + return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(), + Document.class); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java index f9343eee69..0316251dc1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -34,12 +34,14 @@ import org.springframework.data.domain.ExampleMatcher.StringMatcher; import org.springframework.data.mapping.PropertyHandler; import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.query.MongoRegexCreator; import org.springframework.data.mongodb.core.query.MongoRegexCreator.MatchMode; import org.springframework.data.mongodb.core.query.SerializationUtils; import org.springframework.data.mongodb.core.query.UntypedExampleMatcher; +import 
org.springframework.data.mongodb.util.DotPath; import org.springframework.data.support.ExampleMatcherAccessor; import org.springframework.data.util.TypeInformation; import org.springframework.util.Assert; @@ -83,7 +85,7 @@ public MongoExampleMapper(MongoConverter converter) { */ public Document getMappedExample(Example example) { - Assert.notNull(example, "Example must not be null!"); + Assert.notNull(example, "Example must not be null"); return getMappedExample(example, mappingContext.getRequiredPersistentEntity(example.getProbeType())); } @@ -98,8 +100,8 @@ public Document getMappedExample(Example example) { */ public Document getMappedExample(Example example, MongoPersistentEntity entity) { - Assert.notNull(example, "Example must not be null!"); - Assert.notNull(entity, "MongoPersistentEntity must not be null!"); + Assert.notNull(example, "Example must not be null"); + Assert.notNull(entity, "MongoPersistentEntity must not be null"); Document reference = (Document) converter.convertToMongoType(example.getProbe()); @@ -134,7 +136,7 @@ private void applyPropertySpecs(String path, Document source, Class probeType while (iter.hasNext()) { Map.Entry entry = iter.next(); - String propertyPath = StringUtils.hasText(path) ? path + "." 
+ entry.getKey() : entry.getKey(); + String propertyPath = DotPath.from(path).append(entry.getKey()).toString(); String mappedPropertyPath = getMappedPropertyPath(propertyPath, probeType); if (isEmptyIdProperty(entry)) { @@ -164,19 +166,20 @@ private void applyPropertySpecs(String path, Document source, Class probeType if (exampleSpecAccessor.hasPropertySpecifier(mappedPropertyPath)) { PropertyValueTransformer valueTransformer = exampleSpecAccessor.getValueTransformerForPath(mappedPropertyPath); - value = valueTransformer.convert(value); - if (value == null) { + Optional converted = valueTransformer.apply(Optional.ofNullable(value)); + + if(!converted.isPresent()) { iter.remove(); continue; } - entry.setValue(value); + entry.setValue(converted.get()); } if (entry.getValue() instanceof String) { applyStringMatcher(entry, stringMatcher, ignoreCase); - } else if (entry.getValue() instanceof Document) { - applyPropertySpecs(propertyPath, (Document) entry.getValue(), probeType, exampleSpecAccessor); + } else if (entry.getValue() instanceof Document document) { + applyPropertySpecs(propertyPath, document, probeType, exampleSpecAccessor); } } } @@ -223,7 +226,7 @@ private String getMappedPropertyPath(String path, Class probeType) { return StringUtils.collectionToDelimitedString(resultParts, "."); } - private Document updateTypeRestrictions(Document query, Example example) { + private Document updateTypeRestrictions(Document query, Example example) { Document result = new Document(); @@ -243,7 +246,7 @@ private Document updateTypeRestrictions(Document query, Example example) { return result; } - private boolean isTypeRestricting(Example example) { + private boolean isTypeRestricting(Example example) { if (example.getMatcher() instanceof UntypedExampleMatcher) { return false; @@ -276,7 +279,8 @@ private Set> getTypesToMatch(Example example) { } private static boolean isEmptyIdProperty(Entry entry) { - return entry.getKey().equals("_id") && entry.getValue() == null || 
entry.getValue().equals(Optional.empty()); + return entry.getKey().equals(FieldName.ID.name()) + && (entry.getValue() == null || entry.getValue().equals(Optional.empty())); } private static void applyStringMatcher(Map.Entry entry, StringMatcher stringMatcher, @@ -322,20 +326,13 @@ private static Document orConcatenate(Document source) { */ private static MatchMode toMatchMode(StringMatcher matcher) { - switch (matcher) { - case CONTAINING: - return MatchMode.CONTAINING; - case STARTING: - return MatchMode.STARTING_WITH; - case ENDING: - return MatchMode.ENDING_WITH; - case EXACT: - return MatchMode.EXACT; - case REGEX: - return MatchMode.REGEX; - case DEFAULT: - default: - return MatchMode.DEFAULT; - } + return switch (matcher) { + case CONTAINING -> MatchMode.CONTAINING; + case STARTING -> MatchMode.STARTING_WITH; + case ENDING -> MatchMode.ENDING_WITH; + case EXACT -> MatchMode.EXACT; + case REGEX -> MatchMode.REGEX; + default -> MatchMode.DEFAULT; + }; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapper.java index 2d290e0676..8d199083e7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
 * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -54,22 +54,18 @@ public class MongoJsonSchemaMapper implements JsonSchemaMapper { */ public MongoJsonSchemaMapper(MongoConverter converter) { - Assert.notNull(converter, "Converter must not be null!"); + Assert.notNull(converter, "Converter must not be null"); this.converter = converter; this.mappingContext = converter.getMappingContext(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.JsonSchemaMapper#mapSchema(org.springframework.data.mongodb.core.schema.MongoJsonSchema, java.lang.Class) - */ public Document mapSchema(Document jsonSchema, Class type) { - Assert.notNull(jsonSchema, "Schema must not be null!"); - Assert.notNull(type, "Type must not be null! Please consider Object.class."); + Assert.notNull(jsonSchema, "Schema must not be null"); + Assert.notNull(type, "Type must not be null; Please consider Object.class"); Assert.isTrue(jsonSchema.containsKey($JSON_SCHEMA), - () -> String.format("Document does not contain $jsonSchema field. Found %s.", jsonSchema)); + () -> String.format("Document does not contain $jsonSchema field; Found: %s", jsonSchema)); if (Object.class.equals(type)) { return new Document(jsonSchema); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoTypeMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoTypeMapper.java index 8edc3c5f31..d9d49fb19a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoTypeMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoTypeMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. 
+ * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -46,4 +46,15 @@ public interface MongoTypeMapper extends TypeMapper { * @param restrictedTypes must not be {@literal null} */ void writeTypeRestrictions(Document result, Set> restrictedTypes); + + /** + * Compute the target type for a given source considering {@link org.springframework.data.convert.CustomConversions}. + * + * @param source the source type. + * @return never {@literal null}. + * @since 2.2 + */ + default Class getWriteTargetTypeFor(Class source) { + return source; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoValueConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoValueConverter.java new file mode 100644 index 0000000000..73c05b949b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoValueConverter.java @@ -0,0 +1,26 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.springframework.data.convert.PropertyValueConverter; + +/** + * MongoDB-specific {@link PropertyValueConverter} extension. + * + * @author Christoph Strobl + * @since 3.4 + */ +public interface MongoValueConverter extends PropertyValueConverter {} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java index b3c482214e..867a6213d2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,6 +17,8 @@ import org.bson.conversions.Bson; import org.springframework.data.convert.EntityWriter; +import org.springframework.data.mongodb.core.mapping.DocumentPointer; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; @@ -43,7 +45,7 @@ public interface MongoWriter extends EntityWriter { */ @Nullable default Object convertToMongoType(@Nullable Object obj) { - return convertToMongoType(obj, null); + return convertToMongoType(obj, (TypeInformation) null); } /** @@ -57,13 +59,30 @@ default Object convertToMongoType(@Nullable Object obj) { @Nullable Object convertToMongoType(@Nullable Object obj, @Nullable TypeInformation typeInformation); + default Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity entity) { + return convertToMongoType(obj, entity.getTypeInformation()); + } + /** * Creates a {@link DBRef} to refer to the given object. * * @param object the object to create a {@link DBRef} to link to. The object's type has to carry an id attribute. - * @param referingProperty the client-side property referring to the object which might carry additional metadata for + * @param referringProperty the client-side property referring to the object which might carry additional metadata for * the {@link DBRef} object to create. Can be {@literal null}. * @return will never be {@literal null}. 
 */ - DBRef toDBRef(Object object, @Nullable MongoPersistentProperty referingProperty); + DBRef toDBRef(Object object, @Nullable MongoPersistentProperty referringProperty); + + /** + * Creates the {@link DocumentPointer} representing the link to another entity. + * + * @param source the object to create a document link to. + * @param referringProperty the client-side property referring to the object which might carry additional metadata for + * the {@link DBRef} object to create. Can be {@literal null}. + * @return will never be {@literal null}. + * @since 3.3 + */ + default DocumentPointer toDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) { + return () -> toDBRef(source, referringProperty); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java new file mode 100644 index 0000000000..265257af5c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java @@ -0,0 +1,68 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import java.util.List; + +import org.bson.Document; + +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.lang.Nullable; + +import com.mongodb.DBRef; + +/** + * No-Operation {@link org.springframework.data.mongodb.core.mapping.DBRef} resolver throwing + * {@link UnsupportedOperationException} when attempting to resolve database references. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ +public enum NoOpDbRefResolver implements DbRefResolver { + + INSTANCE; + + @Override + @Nullable + public Object resolveDbRef(MongoPersistentProperty property, @Nullable DBRef dbref, DbRefResolverCallback callback, + DbRefProxyHandler proxyHandler) { + + return handle(); + } + + @Override + @Nullable + public Document fetch(DBRef dbRef) { + return handle(); + } + + @Override + public List bulkFetch(List dbRefs) { + return handle(); + } + + private T handle() throws UnsupportedOperationException { + throw new UnsupportedOperationException("DBRef resolution is not supported"); + } + + @Nullable + @Override + public Object resolveReference(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) { + return null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ObjectPath.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ObjectPath.java index 32604f8b1b..5fefd472c4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ObjectPath.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ObjectPath.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,11 @@ */ package org.springframework.data.mongodb.core.convert; -import lombok.Value; - import java.util.ArrayList; import java.util.List; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.util.Lazy; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; @@ -41,30 +40,37 @@ * @author Christoph Strobl * @since 1.6 */ -class ObjectPath { +public class ObjectPath { static final ObjectPath ROOT = new ObjectPath(); - private final ObjectPathItem[] items; + private final @Nullable ObjectPath parent; + private final @Nullable Object object; + private final @Nullable Object idValue; + private final Lazy collection; private ObjectPath() { - this.items = new ObjectPathItem[0]; + + this.parent = null; + this.object = null; + this.idValue = null; + this.collection = Lazy.empty(); } /** - * Creates a new {@link ObjectPath} from the given parent {@link ObjectPath} by adding the provided - * {@link ObjectPathItem} to it. + * Creates a new {@link ObjectPath} from the given parent {@link ObjectPath} and adding the provided path values. * * @param parent must not be {@literal null}. 
- * @param item + * @param collection + * @param idValue + * @param collection */ - private ObjectPath(ObjectPath parent, ObjectPath.ObjectPathItem item) { - - ObjectPathItem[] items = new ObjectPathItem[parent.items.length + 1]; - System.arraycopy(parent.items, 0, items, 0, parent.items.length); - items[parent.items.length] = item; + private ObjectPath(ObjectPath parent, Object object, @Nullable Object idValue, Lazy collection) { - this.items = items; + this.parent = parent; + this.object = object; + this.idValue = idValue; + this.collection = collection; } /** @@ -77,43 +83,10 @@ private ObjectPath(ObjectPath parent, ObjectPath.ObjectPathItem item) { */ ObjectPath push(Object object, MongoPersistentEntity entity, @Nullable Object id) { - Assert.notNull(object, "Object must not be null!"); - Assert.notNull(entity, "MongoPersistentEntity must not be null!"); - - ObjectPathItem item = new ObjectPathItem(object, id, entity.getCollection()); - return new ObjectPath(this, item); - } - - /** - * Returns the object with the given id and stored in the given collection if it's contained in the - * {@link ObjectPath}. - * - * @param id must not be {@literal null}. - * @param collection must not be {@literal null} or empty. - * @return - * @deprecated use {@link #getPathItem(Object, String, Class)}. 
- */ - @Nullable - @Deprecated - Object getPathItem(Object id, String collection) { - - Assert.notNull(id, "Id must not be null!"); - Assert.hasText(collection, "Collection name must not be null!"); - - for (ObjectPathItem item : items) { - - Object object = item.getObject(); - - if (object == null || item.getIdValue() == null) { - continue; - } - - if (collection.equals(item.getCollection()) && id.equals(item.getIdValue())) { - return object; - } - } + Assert.notNull(object, "Object must not be null"); + Assert.notNull(entity, "MongoPersistentEntity must not be null"); - return null; + return new ObjectPath(this, object, id, Lazy.of(entity::getCollection)); } /** @@ -129,19 +102,19 @@ Object getPathItem(Object id, String collection) { @Nullable T getPathItem(Object id, String collection, Class type) { - Assert.notNull(id, "Id must not be null!"); - Assert.hasText(collection, "Collection name must not be null!"); - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(id, "Id must not be null"); + Assert.hasText(collection, "Collection name must not be null"); + Assert.notNull(type, "Type must not be null"); - for (ObjectPathItem item : items) { + for (ObjectPath current = this; current != null; current = current.parent) { - Object object = item.getObject(); + Object object = current.getObject(); - if (object == null || item.getIdValue() == null) { + if (object == null || current.getIdValue() == null) { continue; } - if (collection.equals(item.getCollection()) && id.equals(item.getIdValue()) + if (collection.equals(current.getCollection()) && id.equals(current.getIdValue()) && ClassUtils.isAssignable(type, object.getClass())) { return type.cast(object); } @@ -157,41 +130,36 @@ T getPathItem(Object id, String collection, Class type) { */ @Nullable Object getCurrentObject() { - return items.length == 0 ? 
null : items[items.length - 1].getObject(); + return getObject(); + } + + @Nullable + private Object getObject() { + return object; + } + + @Nullable + private Object getIdValue() { + return idValue; + } + + private String getCollection() { + return collection.get(); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { - if (items.length == 0) { + if (parent == null) { return "[empty]"; } - List strings = new ArrayList<>(items.length); + List strings = new ArrayList<>(); - for (ObjectPathItem item : items) { - strings.add(ObjectUtils.nullSafeToString(item.object)); + for (ObjectPath current = this; current != null; current = current.parent) { + strings.add(ObjectUtils.nullSafeToString(current.getObject())); } return StringUtils.collectionToDelimitedString(strings, " -> "); } - - /** - * An item in an {@link ObjectPath}. - * - * @author Thomas Darimont - * @author Oliver Gierke - * @author Mark Paluch - */ - @Value - private static class ObjectPathItem { - - Object object; - @Nullable Object idValue; - String collection; - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 4fc2f7af28..debaf2f127 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,41 +15,62 @@ */ package org.springframework.data.mongodb.core.convert; +import java.util.AbstractMap; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; +import java.util.Collection; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.BsonRegularExpression; import org.bson.BsonValue; import org.bson.Document; import org.bson.conversions.Bson; import org.bson.types.ObjectId; -import org.springframework.core.convert.ConversionException; import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.converter.Converter; +import org.springframework.data.annotation.Reference; +import org.springframework.data.convert.PropertyValueConverter; +import org.springframework.data.convert.ValueConversionContext; import org.springframework.data.domain.Example; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.PersistentPropertyPath; import org.springframework.data.mapping.PropertyPath; import org.springframework.data.mapping.PropertyReferenceException; import org.springframework.data.mapping.context.InvalidPersistentPropertyPath; import 
org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.context.PersistentPropertyPath; +import org.springframework.data.mapping.model.PropertyValueProvider; +import org.springframework.data.mongodb.MongoExpression; +import org.springframework.data.mongodb.core.aggregation.AggregationExpression; +import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.convert.MappingMongoConverter.NestedDocument; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.OperatorContext; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.QueryOperatorContext; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.util.BsonUtils; -import org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.mongodb.util.DotPath; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; import com.mongodb.BasicDBList; import com.mongodb.BasicDBObject; @@ -65,12 +86,18 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author David Julia + * @author Divya Srivastava + * @author Gyungrai Wang + * @author Ross Lawley */ public class QueryMapper { - private static final List DEFAULT_ID_NAMES = Arrays.asList("id", "_id"); + protected static final Log LOGGER = LogFactory.getLog(QueryMapper.class); + + private static final List 
DEFAULT_ID_NAMES = Arrays.asList("id", FieldName.ID.name()); private static final Document META_TEXT_SCORE = new Document("$meta", "textScore"); - static final ClassTypeInformation NESTED_DOCUMENT = ClassTypeInformation.from(NestedDocument.class); + static final TypeInformation NESTED_DOCUMENT = TypeInformation.of(NestedDocument.class); private enum MetaMapping { FORCE, WHEN_PRESENT, IGNORE @@ -89,7 +116,7 @@ private enum MetaMapping { */ public QueryMapper(MongoConverter converter) { - Assert.notNull(converter, "MongoConverter must not be null!"); + Assert.notNull(converter, "MongoConverter must not be null"); this.conversionService = converter.getConversionService(); this.converter = converter; @@ -110,7 +137,6 @@ public Document getMappedObject(Bson query, Optional entity) { if (isNestedKeyword(query)) { @@ -124,10 +150,13 @@ public Document getMappedObject(Bson query, @Nullable MongoPersistentEntity e // TODO: remove one once QueryMapper can work with Query instances directly if (Query.isRestrictedTypeKey(key)) { - @SuppressWarnings("unchecked") Set> restrictedTypes = BsonUtils.get(query, key); this.converter.getTypeMapper().writeTypeRestrictions(result, restrictedTypes); + continue; + } + if (isTypeKey(key)) { + result.put(key, BsonUtils.get(query, key)); continue; } @@ -139,9 +168,31 @@ public Document getMappedObject(Bson query, @Nullable MongoPersistentEntity e try { Field field = createPropertyField(entity, key, mappingContext); - Entry entry = getMappedObjectForField(field, BsonUtils.get(query, key)); - result.put(entry.getKey(), entry.getValue()); + // TODO: move to dedicated method + if (field.getProperty() != null && field.getProperty().isUnwrapped()) { + + Object theNestedObject = BsonUtils.get(query, key); + Document mappedValue = (Document) getMappedValue(field, theNestedObject); + if (!StringUtils.hasText(field.getMappedKey())) { + result.putAll(mappedValue); + } else { + result.put(field.getMappedKey(), mappedValue); + } + } else { + + Entry entry 
= getMappedObjectForField(field, BsonUtils.get(query, key)); + + /* + * Note to future self: + * ---- + * This could be the place to plug in a query rewrite mechanism that allows to transform comparison + * against field that has a dot in its name (like 'a.b') into an $expr so that { "a.b" : "some value" } + * eventually becomes { $expr : { $eq : [ { $getField : "a.b" }, "some value" ] } } + * ---- + */ + result.put(entry.getKey(), entry.getValue()); + } } catch (InvalidPersistentPropertyPath invalidPathException) { // in case the object has not already been mapped @@ -166,15 +217,14 @@ public Document getMappedObject(Bson query, @Nullable MongoPersistentEntity e */ public Document getMappedSort(Document sortObject, @Nullable MongoPersistentEntity entity) { - Assert.notNull(sortObject, "SortObject must not be null!"); + Assert.notNull(sortObject, "SortObject must not be null"); if (sortObject.isEmpty()) { - return new Document(); + return BsonUtils.EMPTY_DOCUMENT; } - Document mappedSort = getMappedObject(sortObject, entity); - mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT); - return mappedSort; + Document mappedSort = mapFieldsToPropertyNames(sortObject, entity); + return mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT); } /** @@ -182,32 +232,110 @@ public Document getMappedSort(Document sortObject, @Nullable MongoPersistentEnti * Also converts and potentially adds missing property {@code $meta} representation. * * @param fieldsObject must not be {@literal null}. - * @param entity can be {@litearl null}. + * @param entity can be {@literal null}. 
* @return * @since 1.6 */ public Document getMappedFields(Document fieldsObject, @Nullable MongoPersistentEntity entity) { - Assert.notNull(fieldsObject, "FieldsObject must not be null!"); + Assert.notNull(fieldsObject, "FieldsObject must not be null"); + + Document mappedFields = mapFieldsToPropertyNames(fieldsObject, entity); + return mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE); + } + + private Document mapFieldsToPropertyNames(Document fields, @Nullable MongoPersistentEntity entity) { + + if (fields.isEmpty()) { + return BsonUtils.EMPTY_DOCUMENT; + } + + Document target = new Document(); + + BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).forEach((k, v) -> { + + Field field = createPropertyField(entity, k, mappingContext); + if (field.getProperty() != null && field.getProperty().isUnwrapped()) { + return; + } + + target.put(field.getMappedKey(), v); + }); - Document mappedFields = fieldsObject.isEmpty() ? new Document() : getMappedObject(fieldsObject, entity); - mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE); - return mappedFields; + return target; } - private void mapMetaAttributes(Document source, @Nullable MongoPersistentEntity entity, MetaMapping metaMapping) { + /** + * Adds missing {@code $meta} representation if required. + * + * @param source must not be {@literal null}. + * @param entity can be {@literal null}. + * @return never {@literal null}. 
+ * @since 3.4 + */ + public Document addMetaAttributes(Document source, @Nullable MongoPersistentEntity entity) { + return mapMetaAttributes(source, entity, MetaMapping.FORCE); + } + + private Document mapMetaAttributes(Document source, @Nullable MongoPersistentEntity entity, + MetaMapping metaMapping) { if (entity == null) { - return; + return source; } if (entity.hasTextScoreProperty() && !MetaMapping.IGNORE.equals(metaMapping)) { + + if (source == BsonUtils.EMPTY_DOCUMENT) { + source = new Document(); + } + MongoPersistentProperty textScoreProperty = entity.getTextScoreProperty(); if (MetaMapping.FORCE.equals(metaMapping) || (MetaMapping.WHEN_PRESENT.equals(metaMapping) && source.containsKey(textScoreProperty.getFieldName()))) { source.putAll(getMappedTextScoreField(textScoreProperty)); } } + + return source; + } + + private Document filterUnwrappedObjects(Document fieldsObject, @Nullable MongoPersistentEntity entity) { + + if (fieldsObject.isEmpty() || entity == null) { + return fieldsObject; + } + + Document target = new Document(); + + for (Entry field : fieldsObject.entrySet()) { + + try { + + PropertyPath path = PropertyPath.from(field.getKey(), entity.getTypeInformation()); + PersistentPropertyPath persistentPropertyPath = mappingContext + .getPersistentPropertyPath(path); + MongoPersistentProperty property = mappingContext.getPersistentPropertyPath(path).getLeafProperty(); + + if (property.isUnwrapped() && property.isEntity()) { + + MongoPersistentEntity unwrappedEntity = mappingContext.getRequiredPersistentEntity(property); + + for (MongoPersistentProperty unwrappedProperty : unwrappedEntity) { + + DotPath dotPath = DotPath.from(persistentPropertyPath.toDotPath()).append(unwrappedProperty.getName()); + target.put(dotPath.toString(), field.getValue()); + } + + } else { + target.put(field.getKey(), field.getValue()); + } + } catch (RuntimeException e) { + target.put(field.getKey(), field.getValue()); + } + + } + return target; } private Document 
getMappedTextScoreField(MongoPersistentProperty property) { @@ -226,6 +354,10 @@ protected Entry getMappedObjectForField(Field field, Object rawV String key = field.getMappedKey(); Object value; + if (rawValue instanceof MongoExpression mongoExpression) { + return createMapEntry(key, getMappedObject(mongoExpression.toDocument(), field.getEntity())); + } + if (isNestedKeyword(rawValue) && !field.isIdField()) { Keyword keyword = new Keyword((Document) rawValue); value = getMappedKeyword(field, keyword); @@ -244,7 +376,16 @@ protected Entry getMappedObjectForField(Field field, Object rawV */ protected Field createPropertyField(@Nullable MongoPersistentEntity entity, String key, MappingContext, MongoPersistentProperty> mappingContext) { - return entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext); + + if (entity == null) { + return new Field(key); + } + + if (FieldName.ID.name().equals(key)) { + return new MetadataBackedField(key, entity, mappingContext, entity.getIdProperty()); + } + + return new MetadataBackedField(key, entity, mappingContext); } /** @@ -260,7 +401,8 @@ protected Document getMappedKeyword(Keyword keyword, @Nullable MongoPersistentEn if (keyword.isOrOrNor() || (keyword.hasIterableValue() && !keyword.isGeometry())) { Iterable conditions = keyword.getValue(); - List newConditions = new ArrayList(); + List newConditions = conditions instanceof Collection collection ? new ArrayList<>(collection.size()) + : new ArrayList<>(); for (Object condition : conditions) { newConditions.add(isDocument(condition) ? 
getMappedObject((Document) condition, entity) @@ -271,11 +413,12 @@ protected Document getMappedKeyword(Keyword keyword, @Nullable MongoPersistentEn } if (keyword.isSample()) { - return exampleMapper.getMappedExample(keyword.> getValue(), entity); + return exampleMapper.getMappedExample(keyword.getValue(), entity); } if (keyword.isJsonSchema()) { - return schemaMapper.mapSchema(new Document(keyword.getKey(), keyword.getValue()), entity.getType()); + return schemaMapper.mapSchema(new Document(keyword.getKey(), keyword.getValue()), + entity != null ? entity.getType() : Object.class); } return new Document(keyword.getKey(), convertSimpleOrDocument(keyword.getValue(), entity)); @@ -290,12 +433,16 @@ protected Document getMappedKeyword(Keyword keyword, @Nullable MongoPersistentEn */ protected Document getMappedKeyword(Field property, Keyword keyword) { - boolean needsAssociationConversion = property.isAssociation() && !keyword.isExists(); + boolean needsAssociationConversion = property.isAssociation() && !keyword.isExists() && keyword.mayHoldDbRef(); Object value = keyword.getValue(); Object convertedValue = needsAssociationConversion ? convertAssociation(value, property) : getMappedValue(property.with(keyword.getKey()), value); + if (keyword.isSample() && convertedValue instanceof Document document) { + return document; + } + return new Document(keyword.key, convertedValue); } @@ -303,57 +450,31 @@ protected Document getMappedKeyword(Field property, Keyword keyword) { * Returns the mapped value for the given source object assuming it's a value for the given * {@link MongoPersistentProperty}. 
* - * @param value the source object to be mapped - * @param property the property the value is a value for - * @param newKey the key the value will be bound to eventually + * @param documentField the key the value will be bound to eventually + * @param sourceValue the source object to be mapped * @return */ @Nullable @SuppressWarnings("unchecked") - protected Object getMappedValue(Field documentField, Object value) { + protected Object getMappedValue(Field documentField, Object sourceValue) { - if (documentField.isIdField()) { + Object value = applyFieldTargetTypeHintToValue(documentField, sourceValue); - if (isDBObject(value)) { - DBObject valueDbo = (DBObject) value; - Document resultDbo = new Document(valueDbo.toMap()); + if (documentField.getProperty() != null + && converter.getCustomConversions().hasValueConverter(documentField.getProperty())) { - if (valueDbo.containsField("$in") || valueDbo.containsField("$nin")) { - String inKey = valueDbo.containsField("$in") ? "$in" : "$nin"; - List ids = new ArrayList(); - for (Object id : (Iterable) valueDbo.get(inKey)) { - ids.add(convertId(id)); - } - resultDbo.put(inKey, ids); - } else if (valueDbo.containsField("$ne")) { - resultDbo.put("$ne", convertId(valueDbo.get("$ne"))); - } else { - return getMappedObject(resultDbo, Optional.empty()); - } - return resultDbo; - } + PropertyValueConverter> valueConverter = converter + .getCustomConversions().getPropertyValueConversions().getValueConverter(documentField.getProperty()); - else if (isDocument(value)) { - Document valueDbo = (Document) value; - Document resultDbo = new Document(valueDbo); + return convertValue(documentField, sourceValue, value, valueConverter); + } - if (valueDbo.containsKey("$in") || valueDbo.containsKey("$nin")) { - String inKey = valueDbo.containsKey("$in") ? 
"$in" : "$nin"; - List ids = new ArrayList(); - for (Object id : (Iterable) valueDbo.get(inKey)) { - ids.add(convertId(id)); - } - resultDbo.put(inKey, ids); - } else if (valueDbo.containsKey("$ne")) { - resultDbo.put("$ne", convertId(valueDbo.get("$ne"))); - } else { - return getMappedObject(resultDbo, Optional.empty()); - } - return resultDbo; + if (documentField.isIdField() && !documentField.isAssociation()) { + return convertIdField(documentField, value); + } - } else { - return convertId(value); - } + if (value == null) { + return null; } if (isNestedKeyword(value)) { @@ -367,6 +488,15 @@ else if (isDocument(value)) { return convertSimpleOrDocument(value, documentField.getPropertyEntity()); } + private boolean isIdField(Field documentField) { + return documentField.getProperty() != null + && documentField.getProperty().getOwner().isIdProperty(documentField.getProperty()); + } + + private Class getIdTypeForField(Field documentField) { + return isIdField(documentField) ? documentField.getProperty().getFieldType() : ObjectId.class; + } + /** * Returns whether the given {@link Field} represents an association reference that together with the given value * requires conversion to a {@link org.springframework.data.mongodb.core.mapping.DBRef} object. 
We check whether the @@ -379,9 +509,9 @@ else if (isDocument(value)) { */ protected boolean isAssociationConversionNecessary(Field documentField, @Nullable Object value) { - Assert.notNull(documentField, "Document field must not be null!"); + Assert.notNull(documentField, "Document field must not be null"); - if (value == null) { + if (value == null || documentField.getProperty() == null) { return false; } @@ -389,13 +519,17 @@ protected boolean isAssociationConversionNecessary(Field documentField, @Nullabl return false; } - Class type = value.getClass(); + Class type = value.getClass(); MongoPersistentProperty property = documentField.getProperty(); if (property.getActualType().isAssignableFrom(type)) { return true; } + if (property.isDocumentReference()) { + return true; + } + MongoPersistentEntity entity = documentField.getPropertyEntity(); return entity.hasIdProperty() && (type.equals(DBRef.class) || entity.getRequiredIdProperty().getActualType().isAssignableFrom(type)); @@ -411,6 +545,19 @@ protected boolean isAssociationConversionNecessary(Field documentField, @Nullabl @Nullable protected Object convertSimpleOrDocument(Object source, @Nullable MongoPersistentEntity entity) { + if (source instanceof Example example) { + return exampleMapper.getMappedExample(example, entity); + } + + if (source instanceof AggregationExpression age) { + return entity == null ? 
age.toDocument() : // + age.toDocument(new RelaxedTypeBasedAggregationOperationContext(entity.getType(), this.mappingContext, this)); + } + + if (source instanceof MongoExpression exr) { + return exr.toDocument(); + } + if (source instanceof List) { return delegateConvertToMongoType(source, entity); } @@ -419,10 +566,6 @@ protected Object convertSimpleOrDocument(Object source, @Nullable MongoPersisten return getMappedObject((Document) source, entity); } - if (source instanceof BasicDBList) { - return delegateConvertToMongoType(source, entity); - } - if (isDBObject(source)) { return getMappedObject((BasicDBObject) source, entity); } @@ -431,6 +574,24 @@ protected Object convertSimpleOrDocument(Object source, @Nullable MongoPersisten return source; } + if (source instanceof Map sourceMap) { + + Map map = new LinkedHashMap<>(sourceMap.size(), 1F); + + for (Entry entry : sourceMap.entrySet()) { + + String key = ObjectUtils.nullSafeToString(converter.convertToMongoType(entry.getKey())); + + if (entry.getValue() instanceof Document document) { + map.put(key, getMappedObject(document, entity)); + } else { + map.put(key, delegateConvertToMongoType(entry.getValue(), entity)); + } + } + + return map; + } + return delegateConvertToMongoType(source, entity); } @@ -444,11 +605,21 @@ protected Object convertSimpleOrDocument(Object source, @Nullable MongoPersisten */ @Nullable protected Object delegateConvertToMongoType(Object source, @Nullable MongoPersistentEntity entity) { + + if (entity != null && entity.isUnwrapped()) { + return converter.convertToMongoType(source, entity); + } + return converter.convertToMongoType(source, entity == null ? 
null : entity.getTypeInformation()); } + @Nullable protected Object convertAssociation(Object source, Field field) { - return convertAssociation(source, field.getProperty()); + Object value = convertAssociation(source, field.getProperty()); + if (value != null && field.isIdField() && field.getFieldType() != value.getClass()) { + return convertId(value, field.getFieldType()); + } + return value; } /** @@ -465,30 +636,130 @@ protected Object convertAssociation(@Nullable Object source, @Nullable MongoPers return source; } - if (source instanceof DBRef) { + if (source instanceof DBRef ref) { + + Object id = convertId(ref.getId(), + property.getOwner().isIdProperty(property) ? property.getFieldType() : ObjectId.class); - DBRef ref = (DBRef) source; - return new DBRef(ref.getCollectionName(), convertId(ref.getId())); + if (StringUtils.hasText(ref.getDatabaseName())) { + return new DBRef(ref.getDatabaseName(), ref.getCollectionName(), id); + } else { + return new DBRef(ref.getCollectionName(), id); + } } - if (source instanceof Iterable) { + if (source instanceof Iterable iterable) { BasicDBList result = new BasicDBList(); - for (Object element : (Iterable) source) { - result.add(createDbRefFor(element, property)); + for (Object element : iterable) { + result.add(createReferenceFor(element, property)); } return result; } - if (property.isMap()) { + if (property.isMap() && source instanceof Document dbObject) { Document result = new Document(); - Document dbObject = (Document) source; for (String key : dbObject.keySet()) { - result.put(key, createDbRefFor(dbObject.get(key), property)); + result.put(key, createReferenceFor(dbObject.get(key), property)); } return result; } - return createDbRefFor(source, property); + return createReferenceFor(source, property); + } + + @Nullable + private Object convertValue(Field documentField, Object sourceValue, Object value, + PropertyValueConverter> valueConverter) { + + MongoPersistentProperty property = documentField.getProperty(); + 
+ OperatorContext criteriaContext = new QueryOperatorContext( + isKeyword(documentField.name) ? documentField.name : "$eq", property.getFieldName()); + MongoConversionContext conversionContext; + if (valueConverter instanceof MongoConversionContext mcc) { + conversionContext = mcc.forOperator(criteriaContext); + } else { + conversionContext = new MongoConversionContext(NoPropertyPropertyValueProvider.INSTANCE, property, converter, + criteriaContext); + } + + return convertValueWithConversionContext(documentField, sourceValue, value, valueConverter, conversionContext); + } + + @Nullable + protected Object convertValueWithConversionContext(Field documentField, Object sourceValue, Object value, + PropertyValueConverter> valueConverter, + MongoConversionContext conversionContext) { + + MongoPersistentProperty property = documentField.getProperty(); + /* might be an $in clause with multiple entries */ + if (property != null && !property.isCollectionLike() && sourceValue instanceof Collection collection) { + + if (collection.isEmpty()) { + return collection; + } + + List converted = new ArrayList<>(collection.size()); + for (Object o : collection) { + converted.add(valueConverter.write(o, conversionContext)); + } + + return converted; + } + + if (property != null && !documentField.getProperty().isMap() && sourceValue instanceof Document document) { + + return BsonUtils.mapValues(document, (key, val) -> { + if (isKeyword(key)) { + return convertValueWithConversionContext(documentField, val, val, valueConverter, conversionContext + .forOperator(new QueryOperatorContext(key, conversionContext.getOperatorContext().path()))); + } + return val; + }); + } + + return valueConverter.write(value, conversionContext); + } + + @Nullable + @SuppressWarnings("unchecked") + private Object convertIdField(Field documentField, Object source) { + + Object value = source; + if (isDBObject(source)) { + DBObject valueDbo = (DBObject) source; + value = new Document(valueDbo.toMap()); + } + + if 
(!isDocument(value)) { + return convertId(value, getIdTypeForField(documentField)); + } + + Document valueDbo = (Document) value; + Document resultDbo = new Document(valueDbo); + + for (Entry entry : valueDbo.entrySet()) { + + String key = entry.getKey(); + if ("$nin".equals(key) || "$in".equals(key) || "$all".equals(key)) { + List ids = new ArrayList<>(); + for (Object id : (Iterable) valueDbo.get(key)) { + ids.add(convertId(id, getIdTypeForField(documentField))); + } + resultDbo.put(key, ids); + } else if (isKeyword(key)) { + resultDbo.put(key, convertIdField(documentField, entry.getValue())); + } else { + if (documentField.getProperty() != null && documentField.getProperty().isEntity()) { + Field propertyField = createPropertyField(documentField.getPropertyEntity(), key, mappingContext); + resultDbo.put(key, getMappedValue(propertyField, entry.getValue())); + } else { + resultDbo.put(key, getMappedValue(documentField, entry.getValue())); + } + } + } + + return resultDbo; } /** @@ -531,14 +802,19 @@ protected final Entry createMapEntry(Field field, @Nullable Obje */ private Entry createMapEntry(String key, @Nullable Object value) { - Assert.hasText(key, "Key must not be null or empty!"); - return Collections.singletonMap(key, value).entrySet().iterator().next(); + Assert.hasText(key, "Key must not be null or empty"); + return new AbstractMap.SimpleEntry<>(key, value); } - private DBRef createDbRefFor(Object source, MongoPersistentProperty property) { + private Object createReferenceFor(Object source, MongoPersistentProperty property) { - if (source instanceof DBRef) { - return (DBRef) source; + if (source instanceof DBRef dbRef) { + return dbRef; + } + + if (property != null && (property.isDocumentReference() + || (!property.isDbReference() && property.findAnnotation(Reference.class) != null))) { + return converter.toDocumentPointer(source, property).getPointer(); } return converter.toDBRef(source, property); @@ -549,24 +825,29 @@ private DBRef 
createDbRefFor(Object source, MongoPersistentProperty property) { * * @param id * @return + * @since 2.2 */ @Nullable public Object convertId(@Nullable Object id) { + return convertId(id, ObjectId.class); + } - if (id == null) { - return null; - } + /** + * Converts the given raw id value into either {@link ObjectId} or {@link Class targetType}. + * + * @param id can be {@literal null}. + * @param targetType + * @return the converted {@literal id} or {@literal null} if the source was already {@literal null}. + * @since 2.2 + */ + @Nullable + public Object convertId(@Nullable Object id, Class targetType) { - if (id instanceof String) { - return ObjectId.isValid(id.toString()) ? conversionService.convert(id, ObjectId.class) : id; + if (Quirks.skipConversion(id)) { + return id; } - try { - return conversionService.canConvert(id.getClass(), ObjectId.class) ? conversionService.convert(id, ObjectId.class) - : delegateConvertToMongoType(id, null); - } catch (ConversionException o_O) { - return delegateConvertToMongoType(id, null); - } + return converter.convertId(id, targetType); } /** @@ -575,24 +856,36 @@ public Object convertId(@Nullable Object id) { * @param candidate * @return */ - protected boolean isNestedKeyword(Object candidate) { + protected boolean isNestedKeyword(@Nullable Object candidate) { if (!(candidate instanceof Document)) { return false; } - Set keys = BsonUtils.asMap((Bson) candidate).keySet(); + Map map = BsonUtils.asMap((Bson) candidate); - if (keys.size() != 1) { + if (map.size() != 1) { return false; } - return isKeyword(keys.iterator().next().toString()); + return isKeyword(map.entrySet().iterator().next().getKey()); + } + + /** + * Returns whether the given {@link String} is the type key. + * + * @param key + * @return + * @see MongoTypeMapper#isTypeKey(String) + * @since 2.2 + */ + protected boolean isTypeKey(String key) { + return converter.getTypeMapper().isTypeKey(key); } /** * Returns whether the given {@link String} is a MongoDB keyword. 
The default implementation will check against the - * set of registered keywords returned by {@link #getKeywords()}. + * set of registered keywords. * * @param candidate * @return @@ -601,6 +894,42 @@ protected boolean isKeyword(String candidate) { return candidate.startsWith("$"); } + /** + * Convert the given field value into its desired + * {@link org.springframework.data.mongodb.core.mapping.Field#targetType() target type} before applying further + * conversions. In case of a {@link Collection} (used eg. for {@code $in} queries) the individual values will be + * converted one by one. + * + * @param documentField the field and its metadata + * @param value the actual value. Can be {@literal null}. + * @return the potentially converted target value. + */ + @Nullable + private Object applyFieldTargetTypeHintToValue(Field documentField, @Nullable Object value) { + + if (value == null || documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget() + || value instanceof Document || value instanceof DBObject || Quirks.skipConversion(value)) { + return value; + } + + if (!conversionService.canConvert(value.getClass(), documentField.getProperty().getFieldType())) { + return value; + } + + if (value instanceof Collection source) { + + Collection converted = new ArrayList<>(source.size()); + + for (Object o : source) { + converted.add(conversionService.convert(o, documentField.getProperty().getFieldType())); + } + + return converted; + } + + return conversionService.convert(value, documentField.getProperty().getFieldType()); + } + /** * Value object to capture a query keyword representation. 
* @@ -609,7 +938,7 @@ protected boolean isKeyword(String candidate) { */ static class Keyword { - private static final String N_OR_PATTERN = "\\$.*or"; + private static final Set NON_DBREF_CONVERTING_KEYWORDS = Set.of("$", "$size", "$slice", "$gt", "$lt"); private final String key; private final Object value; @@ -621,11 +950,14 @@ public Keyword(Bson source, String key) { public Keyword(Bson bson) { - Set keys = BsonUtils.asMap(bson).keySet(); - Assert.isTrue(keys.size() == 1, "Can only use a single value Document!"); + Map map = BsonUtils.asMap(bson); + Assert.isTrue(map.size() == 1, "Can only use a single value Document"); + + Set> entries = map.entrySet(); + Entry entry = entries.iterator().next(); - this.key = keys.iterator().next(); - this.value = BsonUtils.get(bson, key); + this.key = entry.getKey(); + this.value = entry.getValue(); } /** @@ -638,7 +970,7 @@ public boolean isExists() { } public boolean isOrOrNor() { - return key.matches(N_OR_PATTERN); + return key.equalsIgnoreCase("$or") || key.equalsIgnoreCase("$nor"); } /** @@ -674,6 +1006,14 @@ public T getValue() { return (T) value; } + /** + * @return {@literal true} if key may hold a DbRef. + * @since 2.1.4 + */ + public boolean mayHoldDbRef() { + return !NON_DBREF_CONVERTING_KEYWORDS.contains(key); + } + /** * Returns whether the current keyword indicates a {@literal $jsonSchema} object. * @@ -692,23 +1032,23 @@ public boolean isJsonSchema() { */ protected static class Field { - private static final String ID_KEY = "_id"; + protected static final Pattern POSITIONAL_OPERATOR = Pattern.compile("\\$\\[.*\\]"); protected final String name; /** - * Creates a new {@link DocumentField} without meta-information but the given name. + * Creates a new {@link Field} without meta-information but the given name. * * @param name must not be {@literal null} or empty. 
*/ public Field(String name) { - Assert.hasText(name, "Name must not be null!"); + Assert.hasText(name, "Name must not be null"); this.name = name; } /** - * Returns a new {@link DocumentField} with the given name. + * Returns a new {@link Field} with the given name. * * @param name must not be {@literal null} or empty. * @return @@ -723,7 +1063,7 @@ public Field with(String name) { * @return */ public boolean isIdField() { - return ID_KEY.equals(name); + return FieldName.ID.name().equals(name); } /** @@ -748,6 +1088,11 @@ public MongoPersistentEntity getPropertyEntity() { return null; } + @Nullable + MongoPersistentEntity getEntity() { + return null; + } + /** * Returns whether the field represents an association. * @@ -763,7 +1108,7 @@ public boolean isAssociation() { * @return */ public String getMappedKey() { - return isIdField() ? ID_KEY : name; + return isIdField() ? FieldName.ID.name() : name; } /** @@ -780,20 +1125,36 @@ public Association getAssociation() { return null; } + /** + * Returns whether the field references a {@link java.util.Map}. + * + * @return {@literal true} if property information is available and references a {@link java.util.Map}. + * @see PersistentProperty#isMap() + */ + public boolean isMap() { + return getProperty() != null && getProperty().isMap(); + } + public TypeInformation getTypeHint() { - return ClassTypeInformation.OBJECT; + return TypeInformation.OBJECT; + } + + public Class getFieldType() { + return Object.class; } } /** - * Extension of {@link DocumentField} to be backed with mapping metadata. + * Extension of {@link Field} to be backed with mapping metadata. * * @author Oliver Gierke * @author Thomas Darimont */ - protected static class MetadataBackedField extends Field { + public static class MetadataBackedField extends Field { - private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s! 
Associations can only be pointed to directly or via their id property!"; + private static final Pattern POSITIONAL_PARAMETER_PATTERN = Pattern.compile("\\.\\$(\\[.*?\\])?"); + private static final Pattern NUMERIC_SEGMENT = Pattern.compile("\\d+"); + private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s; Associations can only be pointed to directly or via their id property"; private final MongoPersistentEntity entity; private final MappingContext, MongoPersistentProperty> mappingContext; @@ -829,73 +1190,59 @@ public MetadataBackedField(String name, MongoPersistentEntity entity, super(name); - Assert.notNull(entity, "MongoPersistentEntity must not be null!"); + Assert.notNull(entity, "MongoPersistentEntity must not be null"); this.entity = entity; this.mappingContext = context; - this.path = getPath(name); + this.path = getPath(removePlaceholders(POSITIONAL_PARAMETER_PATTERN, name), property); this.property = path == null ? property : path.getLeafProperty(); this.association = findAssociation(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#with(java.lang.String) - */ @Override public MetadataBackedField with(String name) { return new MetadataBackedField(name, entity, mappingContext, property); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#isIdKey() - */ @Override public boolean isIdField() { + if (property != null) { + return property.getOwner().isIdProperty(property); + } + MongoPersistentProperty idProperty = entity.getIdProperty(); if (idProperty != null) { - return idProperty.getName().equals(name) || idProperty.getFieldName().equals(name); + return name.equals(idProperty.getName()) || name.equals(idProperty.getFieldName()); } return DEFAULT_ID_NAMES.contains(name); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getProperty() - */ @Override public MongoPersistentProperty 
getProperty() { return association == null ? property : association.getInverse(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getEntity() - */ @Override public MongoPersistentEntity getPropertyEntity() { MongoPersistentProperty property = getProperty(); return property == null ? null : mappingContext.getPersistentEntity(property); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#isAssociation() - */ + @Nullable + @Override + public MongoPersistentEntity getEntity() { + return entity; + } + @Override public boolean isAssociation() { return association != null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getAssociation() - */ @Override public Association getAssociation() { return association; @@ -923,12 +1270,18 @@ private Association findAssociation() { return null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getTargetKey() - */ + @Override + public Class getFieldType() { + return property.getFieldType(); + } + @Override public String getMappedKey() { + + if (getProperty() != null && getProperty().getMongoField().getName().isKey()) { + return getProperty().getFieldName(); + } + return path == null ? name : path.toDotPath(isAssociation() ? getAssociationConverter() : getPropertyConverter()); } @@ -938,41 +1291,145 @@ protected PersistentPropertyPath getPath() { } /** - * Returns the {@link PersistentPropertyPath} for the given pathExpression. + * Returns the {@link PersistentPropertyPath} for the given {@code pathExpression}. 
* * @param pathExpression * @return */ - private PersistentPropertyPath getPath(String pathExpression) { + @Nullable + private PersistentPropertyPath getPath(String pathExpression, + @Nullable MongoPersistentProperty sourceProperty) { + + if (sourceProperty != null && sourceProperty.getOwner().equals(entity)) { + return mappingContext.getPersistentPropertyPath( + PropertyPath.from(Pattern.quote(sourceProperty.getName()), entity.getTypeInformation())); + } + + String rawPath = resolvePath(pathExpression); + + PropertyPath path = forName(rawPath); + if (path == null || isPathToJavaLangClassProperty(path)) { + return null; + } + + PersistentPropertyPath propertyPath = tryToResolvePersistentPropertyPath(path); + + if (propertyPath == null) { + + if (QueryMapper.LOGGER.isInfoEnabled()) { + + String types = StringUtils.collectionToDelimitedString( + path.stream().map(it -> it.getType().getSimpleName()).collect(Collectors.toList()), " -> "); + QueryMapper.LOGGER.info(String.format( + "Could not map '%s'; Maybe a fragment in '%s' is considered a simple type; Mapper continues with %s", + path, types, pathExpression)); + } + return null; + } + + Iterator iterator = propertyPath.iterator(); + boolean associationDetected = false; + + while (iterator.hasNext()) { + + MongoPersistentProperty property = iterator.next(); + + if (property.isAssociation()) { + associationDetected = true; + continue; + } + + if (associationDetected && !property.getOwner().isIdProperty(property)) { + throw new MappingException(String.format(INVALID_ASSOCIATION_REFERENCE, pathExpression)); + } + } + + return propertyPath; + } + + @Nullable + private PersistentPropertyPath tryToResolvePersistentPropertyPath(PropertyPath path) { try { + return mappingContext.getPersistentPropertyPath(path); + } catch (MappingException e) { + return null; + } + } + + /** + * Querydsl happens to map id fields directly to {@literal _id} which breaks {@link PropertyPath} resolution. 
So if + * the first attempt fails we try to replace {@literal _id} with just {@literal id} and see if we can resolve if + * then. + * + * @param path + * @return the path or {@literal null} + */ + @Nullable + private PropertyPath forName(String path) { - PropertyPath path = PropertyPath.from(pathExpression.replaceAll("\\.\\d+", ""), entity.getTypeInformation()); - PersistentPropertyPath propertyPath = mappingContext.getPersistentPropertyPath(path); + try { - Iterator iterator = propertyPath.iterator(); - boolean associationDetected = false; + if (entity.getPersistentProperty(path) != null) { + return PropertyPath.from(Pattern.quote(path), entity.getTypeInformation()); + } - while (iterator.hasNext()) { + return PropertyPath.from(path, entity.getTypeInformation()); + } catch (PropertyReferenceException | InvalidPersistentPropertyPath e) { - MongoPersistentProperty property = iterator.next(); + if (path.endsWith("_id")) { + return forName(path.substring(0, path.length() - 3) + "id"); + } - if (property.isAssociation()) { - associationDetected = true; - continue; - } + // Ok give it another try quoting + try { + return PropertyPath.from(Pattern.quote(path), entity.getTypeInformation()); + } catch (PropertyReferenceException | InvalidPersistentPropertyPath ex) { - if (associationDetected && !property.isIdProperty()) { - throw new MappingException(String.format(INVALID_ASSOCIATION_REFERENCE, pathExpression)); - } } - return propertyPath; - } catch (PropertyReferenceException e) { return null; } } + private boolean isPathToJavaLangClassProperty(PropertyPath path) { + + return (path.getType() == Class.class || path.getType().equals(Object.class)) + && path.getLeafProperty().getType() == Class.class; + } + + private static String resolvePath(String source) { + + String[] segments = source.split("\\."); + if (segments.length == 1) { + return source; + } + + List path = new ArrayList<>(segments.length); + + /* always start from a property, so we can skip the first segment. 
+ from there remove any position placeholder */ + for (int i = 1; i < segments.length; i++) { + String segment = segments[i]; + if (segment.startsWith("[") && segment.endsWith("]")) { + continue; + } + if (NUMERIC_SEGMENT.matcher(segment).matches()) { + continue; + } + path.add(segment); + } + + // when property is followed only by placeholders eg. 'values.0.3.90' + // or when there is no difference in the number of segments + if (path.isEmpty() || segments.length == path.size() + 1) { + return source; + } + + path.add(0, segments[0]); + return StringUtils.collectionToDelimitedString(path, "."); + } + /** * Return the {@link Converter} to be used to created the mapped key. Default implementation will use * {@link PropertyToFieldNameConverter}. @@ -980,7 +1437,7 @@ private PersistentPropertyPath getPath(String pathExpre * @return */ protected Converter getPropertyConverter() { - return new PositionParameterRetainingPropertyKeyConverter(name); + return new PositionParameterRetainingPropertyKeyConverter(name, mappingContext); } /** @@ -991,7 +1448,15 @@ protected Converter getPropertyConverter() { * @since 1.7 */ protected Converter getAssociationConverter() { - return new AssociationConverter(getAssociation()); + return new AssociationConverter(name, getAssociation()); + } + + protected MappingContext, MongoPersistentProperty> getMappingContext() { + return mappingContext; + } + + private static String removePlaceholders(Pattern pattern, String raw) { + return pattern.matcher(raw).replaceAll(""); } /** @@ -1002,24 +1467,17 @@ static class PositionParameterRetainingPropertyKeyConverter implements Converter private final KeyMapper keyMapper; - public PositionParameterRetainingPropertyKeyConverter(String rawKey) { - this.keyMapper = new KeyMapper(rawKey); + public PositionParameterRetainingPropertyKeyConverter(String rawKey, + MappingContext, MongoPersistentProperty> ctx) { + this.keyMapper = new KeyMapper(rawKey, ctx); } - /* - * (non-Javadoc) - * @see 
org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public String convert(MongoPersistentProperty source) { return keyMapper.mapPropertyName(source); } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#getTypeHint() - */ @Override public TypeInformation getTypeHint() { @@ -1031,7 +1489,7 @@ public TypeInformation getTypeHint() { if (property.getActualType().isInterface() || java.lang.reflect.Modifier.isAbstract(property.getActualType().getModifiers())) { - return ClassTypeInformation.OBJECT; + return TypeInformation.OBJECT; } return NESTED_DOCUMENT; @@ -1044,11 +1502,23 @@ public TypeInformation getTypeHint() { static class KeyMapper { private final Iterator iterator; + private int currentIndex; + private final List pathParts; + + public KeyMapper(String key, + MappingContext, MongoPersistentProperty> mappingContext) { - public KeyMapper(String key) { + this.pathParts = Arrays.asList(key.split("\\.")); + this.iterator = pathParts.iterator(); + this.currentIndex = 0; + } + + String nextToken() { + return pathParts.get(currentIndex + 1); + } - this.iterator = Arrays.asList(key.split("\\.")).iterator(); - this.iterator.next(); + boolean hasNexToken() { + return pathParts.size() > currentIndex + 1; } /** @@ -1060,29 +1530,40 @@ public KeyMapper(String key) { protected String mapPropertyName(MongoPersistentProperty property) { StringBuilder mappedName = new StringBuilder(PropertyToFieldNameConverter.INSTANCE.convert(property)); - boolean inspect = iterator.hasNext(); + if (!hasNexToken()) { + return mappedName.toString(); + } - while (inspect) { + String nextToken = nextToken(); + if (isPositionalParameter(nextToken)) { - String partial = iterator.next(); - boolean isPositional = (isPositionalParameter(partial) && (property.isMap() || property.isCollectionLike())); + mappedName.append(".").append(nextToken); + currentIndex += 2; + return mappedName.toString(); + } - if 
(isPositional) { - mappedName.append(".").append(partial); - } + if (property.isMap()) { - inspect = isPositional && iterator.hasNext(); + mappedName.append(".").append(nextToken); + currentIndex += 2; + return mappedName.toString(); } + currentIndex++; return mappedName.toString(); } - private static boolean isPositionalParameter(String partial) { + static boolean isPositionalParameter(String partial) { if ("$".equals(partial)) { return true; } + Matcher matcher = POSITIONAL_OPERATOR.matcher(partial); + if (matcher.find()) { + return true; + } + try { Long.valueOf(partial); return true; @@ -1100,6 +1581,7 @@ private static boolean isPositionalParameter(String partial) { */ protected static class AssociationConverter implements Converter { + private final String name; private final MongoPersistentProperty property; private boolean associationFound; @@ -1108,16 +1590,13 @@ protected static class AssociationConverter implements Converter association) { + public AssociationConverter(String name, Association association) { - Assert.notNull(association, "Association must not be null!"); + Assert.notNull(association, "Association must not be null"); this.property = association.getInverse(); + this.name = name; } - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public String convert(MongoPersistentProperty source) { @@ -1129,6 +1608,12 @@ public String convert(MongoPersistentProperty source) { associationFound = true; } + if (associationFound) { + if (name.endsWith("$") && property.isCollectionLike()) { + return source.getFieldName() + ".$"; + } + } + return source.getFieldName(); } } @@ -1136,4 +1621,35 @@ public String convert(MongoPersistentProperty source) { public MappingContext, MongoPersistentProperty> getMappingContext() { return mappingContext; } + + public MongoConverter getConverter() { + return converter; + } + + enum NoPropertyPropertyValueProvider implements PropertyValueProvider { 
+ + INSTANCE; + + @Override + public T getPropertyValue(MongoPersistentProperty property) { + throw new IllegalStateException("No enclosing property source available"); + } + } + + /* + * Types that must not be converted. + */ + static class Quirks { + + private static final Set> types = Set.of(Pattern.class, BsonRegularExpression.class); + + static boolean skipConversion(@Nullable Object value) { + + if (value == null) { + return false; + } + + return types.contains(value.getClass()); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java new file mode 100644 index 0000000000..5a1adf9114 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java @@ -0,0 +1,156 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import java.util.Collections; +import java.util.Iterator; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.lang.Nullable; + +import com.mongodb.client.MongoCollection; + +/** + * The {@link ReferenceLoader} obtains raw {@link Document documents} for linked entities via a + * {@link ReferenceLoader.DocumentReferenceQuery}. + * + * @author Christoph Strobl + * @since 3.3 + */ +public interface ReferenceLoader { + + /** + * Obtain a single {@link Document} matching the given {@literal referenceQuery} in the {@literal context}. + * + * @param referenceQuery must not be {@literal null}. + * @param context must not be {@literal null}. + * @return the matching {@link Document} or {@literal null} if none found. + */ + @Nullable + default Document fetchOne(DocumentReferenceQuery referenceQuery, ReferenceCollection context) { + + Iterator it = fetchMany(referenceQuery, context).iterator(); + return it.hasNext() ? it.next() : null; + } + + /** + * Obtain multiple {@link Document} matching the given {@literal referenceQuery} in the {@literal context}. + * + * @param referenceQuery must not be {@literal null}. + * @param context must not be {@literal null}. + * @return the matching {@link Document} or {@literal null} if none found. + */ + Iterable fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context); + + /** + * The {@link DocumentReferenceQuery} defines the criteria by which {@link Document documents} should be matched + * applying potentially given order criteria. + */ + interface DocumentReferenceQuery { + + /** + * Get the query to obtain matching {@link Document documents}. + * + * @return never {@literal null}. 
+ */ + Bson getQuery(); + + /** + * Get the sort criteria for ordering results. + * + * @return an empty {@link Document} by default. Never {@literal null}. + */ + default Bson getSort() { + return new Document(); + } + + default Iterable apply(MongoCollection collection) { + return restoreOrder(collection.find(getQuery()).sort(getSort())); + } + + /** + * Restore the order of fetched documents. + * + * @param documents must not be {@literal null}. + * @return never {@literal null}. + */ + default Iterable restoreOrder(Iterable documents) { + return documents; + } + + static DocumentReferenceQuery forSingleDocument(Bson bson) { + + return new DocumentReferenceQuery() { + + @Override + public Bson getQuery() { + return bson; + } + + @Override + public Iterable apply(MongoCollection collection) { + + Document result = collection.find(getQuery()).sort(getSort()).limit(1).first(); + return result != null ? Collections.singleton(result) : Collections.emptyList(); + } + }; + } + + static DocumentReferenceQuery forManyDocuments(Bson bson) { + + return new DocumentReferenceQuery() { + + @Override + public Bson getQuery() { + return bson; + } + + @Override + public Iterable apply(MongoCollection collection) { + return collection.find(getQuery()).sort(getSort()); + } + }; + } + + /** + * @return a {@link DocumentReferenceQuery} that will not match any documents. + * @since 4.2.5 + */ + static DocumentReferenceQuery forNoResult() { + return NoResultsFilter.INSTANCE; + } + } + + /** + * A dedicated {@link DocumentReferenceQuery} that will not match any documents. 
+ * + * @since 4.2.5 + */ + enum NoResultsFilter implements DocumentReferenceQuery { + INSTANCE; + + private static final Document NO_RESULTS_PREDICATE = new Document(FieldName.ID.name(), + new Document("$exists", false)); + + @Override + public Bson getQuery() { + return NO_RESULTS_PREDICATE; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java new file mode 100644 index 0000000000..b912cfb540 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -0,0 +1,506 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import java.lang.annotation.Annotation; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Set; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.SpELContext; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.NoResultsFilter; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.MongoEntityReader; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection; +import org.springframework.data.mongodb.core.mapping.DocumentReference; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.mongodb.util.json.ParameterBindingContext; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; +import org.springframework.data.mongodb.util.json.ValueProvider; +import org.springframework.data.mongodb.util.spel.ExpressionUtils; +import org.springframework.data.util.Streamable; +import org.springframework.expression.EvaluationContext; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.DBRef; +import com.mongodb.client.MongoCollection; + +/** + * A common delegate for {@link 
ReferenceResolver} implementations to resolve a reference to one/many target documents + * that are converted to entities. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Stefan Bildl + * @since 3.3 + */ +public final class ReferenceLookupDelegate { + + private final MappingContext, MongoPersistentProperty> mappingContext; + private final SpELContext spELContext; + private final ParameterBindingDocumentCodec codec; + + /** + * Create a new {@link ReferenceLookupDelegate}. + * + * @param mappingContext must not be {@literal null}. + * @param spELContext must not be {@literal null}. + */ + public ReferenceLookupDelegate( + MappingContext, MongoPersistentProperty> mappingContext, + SpELContext spELContext) { + + Assert.notNull(mappingContext, "MappingContext must not be null"); + Assert.notNull(spELContext, "SpELContext must not be null"); + + this.mappingContext = mappingContext; + this.spELContext = spELContext; + this.codec = new ParameterBindingDocumentCodec(); + } + + /** + * Read the reference expressed by the given property. + * + * @param property the reference defining property. Must not be {@literal null}. THe + * @param source the source value identifying to the referenced entity. Must not be {@literal null}. + * @param lookupFunction to execute a lookup query. Must not be {@literal null}. + * @param entityReader the callback to convert raw source values into actual domain types. Must not be + * {@literal null}. + * @return can be {@literal null}. + */ + @Nullable + public Object readReference(MongoPersistentProperty property, Object source, LookupFunction lookupFunction, + MongoEntityReader entityReader) { + + Object value = source instanceof DocumentReferenceSource documentReferenceSource + ? 
documentReferenceSource.getTargetSource() + : source; + + Iterable result = retrieveRawDocuments(property, source, lookupFunction, value); + + if (result == null) { + return null; + } + + if (property.isCollectionLike()) { + return entityReader.read(result, property.getTypeInformation()); + } + + if (!result.iterator().hasNext()) { + return null; + } + + Object resultValue = result.iterator().next(); + return resultValue != null ? entityReader.read(resultValue, property.getTypeInformation()) : null; + } + + @Nullable + private Iterable retrieveRawDocuments(MongoPersistentProperty property, Object source, + LookupFunction lookupFunction, Object value) { + + DocumentReferenceQuery filter = computeFilter(property, source, spELContext); + if (filter instanceof NoResultsFilter) { + return Collections.emptyList(); + } + + ReferenceCollection referenceCollection = computeReferenceContext(property, value, spELContext); + return lookupFunction.apply(filter, referenceCollection); + } + + private ReferenceCollection computeReferenceContext(MongoPersistentProperty property, Object value, + SpELContext spELContext) { + + // Use the first value as a reference for others in case of collection like + if (value instanceof Iterable iterable) { + + Iterator iterator = iterable.iterator(); + value = iterator.hasNext() ? 
iterator.next() : new Document(); + } + + // handle DBRef value + if (value instanceof DBRef dbRef) { + return ReferenceCollection.fromDBRef(dbRef); + } + + String collection = mappingContext.getRequiredPersistentEntity(property.getAssociationTargetType()).getCollection(); + + if (value instanceof Document documentPointer) { + + if (property.isDocumentReference()) { + + ParameterBindingContext bindingContext = bindingContext(property, value, spELContext); + DocumentReference documentReference = property.getDocumentReference(); + + String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, + () -> documentPointer.get("db", String.class)); + String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, + () -> documentPointer.get("collection", collection)); + return new ReferenceCollection(targetDatabase, targetCollection); + } + + return new ReferenceCollection(documentPointer.getString("db"), documentPointer.get("collection", collection)); + } + + if (property.isDocumentReference()) { + + ParameterBindingContext bindingContext = bindingContext(property, value, spELContext); + DocumentReference documentReference = property.getDocumentReference(); + + String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, () -> null); + String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, () -> collection); + + return new ReferenceCollection(targetDatabase, targetCollection); + } + + return new ReferenceCollection(null, collection); + } + + /** + * Use the given {@link ParameterBindingContext} to compute potential expressions against the value. + * + * @param value must not be {@literal null}. + * @param bindingContext must not be {@literal null}. + * @param defaultValue + * @param + * @return can be {@literal null}. 
+ */ + @SuppressWarnings("unchecked") + private T parseValueOrGet(String value, ParameterBindingContext bindingContext, Supplier defaultValue) { + + if (!StringUtils.hasText(value)) { + return defaultValue.get(); + } + + // parameter binding requires a document, since we do not have one, construct it. + if (!BsonUtils.isJsonDocument(value) && value.contains("?#{")) { + String s = "{ 'target-value' : " + value + "}"; + T evaluated = (T) codec.decode(s, bindingContext).get("target-value"); + return evaluated != null ? evaluated : defaultValue.get(); + } + + if (BsonUtils.isJsonDocument(value)) { + return (T) codec.decode(value, bindingContext); + } + + if (!value.startsWith("#") && ExpressionUtils.detectExpression(value) == null) { + return (T) value; + } + + T evaluated = (T) bindingContext.evaluateExpression(value); + return evaluated != null ? evaluated : defaultValue.get(); + } + + ParameterBindingContext bindingContext(MongoPersistentProperty property, Object source, SpELContext spELContext) { + + ValueProvider valueProvider = valueProviderFor(DocumentReferenceSource.getTargetSource(source)); + + return new ParameterBindingContext(valueProvider, spELContext.getParser(), + () -> evaluationContextFor(property, source, spELContext)); + } + + ValueProvider valueProviderFor(Object source) { + + return index -> { + if (source instanceof Document document) { + return Streamable.of(document.values()).toList().get(index); + } + return source; + }; + } + + EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object source, SpELContext spELContext) { + + Object target = source instanceof DocumentReferenceSource documentReferenceSource + ? 
documentReferenceSource.getTargetSource() + : source; + + if (target == null) { + target = new Document(); + } + + EvaluationContext ctx = spELContext.getEvaluationContext(target); + ctx.setVariable("target", target); + ctx.setVariable("self", DocumentReferenceSource.getSelf(source)); + ctx.setVariable(property.getName(), target); + + return ctx; + } + + /** + * Compute the query to retrieve linked documents. + * + * @param property must not be {@literal null}. + * @param source must not be {@literal null}. + * @param spELContext must not be {@literal null}. + * @return never {@literal null}. + */ + @SuppressWarnings("unchecked") + DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object source, SpELContext spELContext) { + + DocumentReference documentReference = property.isDocumentReference() ? property.getDocumentReference() + : ReferenceEmulatingDocumentReference.INSTANCE; + + String lookup = documentReference.lookup(); + + Object value = DocumentReferenceSource.getTargetSource(source); + + Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, source, spELContext), + Document::new); + + if (property.isCollectionLike() && (value instanceof Collection || value == null)) { + + if (value == null) { + return new ListDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, source, spELContext)), + sort); + } + + Collection objects = (Collection) value; + + // optimization: bypass query if the collection pointing to the references is empty + if (objects.isEmpty()) { + return DocumentReferenceQuery.forNoResult(); + } + + List ors = new ArrayList<>(objects.size()); + for (Object entry : objects) { + + Document decoded = codec.decode(lookup, bindingContext(property, entry, spELContext)); + ors.add(decoded); + } + + return new ListDocumentReferenceQuery(new Document("$or", ors), sort); + } + + if (property.isMap() && value instanceof Map) { + + if (ObjectUtils.isEmpty(value)) { + return 
DocumentReferenceQuery.forNoResult(); + } + + Set> entries = ((Map) value).entrySet(); + Map filterMap = new LinkedHashMap<>(entries.size()); + + for (Entry entry : entries) { + + Document decoded = codec.decode(lookup, bindingContext(property, entry.getValue(), spELContext)); + filterMap.put(entry.getKey(), decoded); + } + + return new MapDocumentReferenceQuery(new Document("$or", filterMap.values()), sort, filterMap); + } + + return new SingleDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, source, spELContext)), sort); + } + + enum ReferenceEmulatingDocumentReference implements DocumentReference { + + INSTANCE; + + @Override + public Class annotationType() { + return DocumentReference.class; + } + + @Override + public String db() { + return ""; + } + + @Override + public String collection() { + return ""; + } + + @Override + public String lookup() { + return "{ '_id' : ?#{#target} }"; + } + + @Override + public String sort() { + return ""; + } + + @Override + public boolean lazy() { + return false; + } + } + + /** + * {@link DocumentReferenceQuery} implementation fetching a single {@link Document}. + */ + static class SingleDocumentReferenceQuery implements DocumentReferenceQuery { + + private final Document query; + private final Document sort; + + public SingleDocumentReferenceQuery(Document query, Document sort) { + + this.query = query; + this.sort = sort; + } + + @Override + public Bson getQuery() { + return query; + } + + @Override + public Document getSort() { + return sort; + } + + @Override + public Iterable apply(MongoCollection collection) { + + Document result = collection.find(getQuery()).sort(getSort()).limit(1).first(); + return result != null ? Collections.singleton(result) : Collections.emptyList(); + } + } + + /** + * {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a + * {@link Map} structure. 
Restores the original map order by matching individual query documents against the actual + * values. + */ + static class MapDocumentReferenceQuery implements DocumentReferenceQuery { + + private final Document query; + private final Document sort; + private final Map filterOrderMap; + + public MapDocumentReferenceQuery(Document query, Document sort, Map filterOrderMap) { + + this.query = query; + this.sort = sort; + this.filterOrderMap = filterOrderMap; + } + + @Override + public Bson getQuery() { + return query; + } + + @Override + public Bson getSort() { + return sort; + } + + @Override + public Iterable restoreOrder(Iterable documents) { + + Map targetMap = new LinkedHashMap<>(); + List collected = documents instanceof List list ? list : Streamable.of(documents).toList(); + + for (Entry filterMapping : filterOrderMap.entrySet()) { + + Optional first = collected.stream() + .filter(it -> it.entrySet().containsAll(filterMapping.getValue().entrySet())).findFirst(); + + targetMap.put(filterMapping.getKey().toString(), first.orElse(null)); + } + return Collections.singleton(new Document(targetMap)); + } + } + + /** + * {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a + * {@link Collection} like structure. Restores the original order by matching individual query documents against the + * actual values. + */ + static class ListDocumentReferenceQuery implements DocumentReferenceQuery { + + private final Document query; + private final Document sort; + + public ListDocumentReferenceQuery(Document query, Document sort) { + + this.query = query; + this.sort = sort; + } + + @Override + public Iterable restoreOrder(Iterable documents) { + + List target = documents instanceof List list ? 
list : Streamable.of(documents).toList(); + + if (!sort.isEmpty() || !query.containsKey("$or")) { + return target; + } + + List ors = query.get("$or", List.class); + return target.stream().sorted((o1, o2) -> compareAgainstReferenceIndex(ors, o1, o2)).collect(Collectors.toList()); + } + + @Override + public Document getQuery() { + return query; + } + + @Override + public Document getSort() { + return sort; + } + + int compareAgainstReferenceIndex(List referenceList, Document document1, Document document2) { + + for (Document document : referenceList) { + + Set> entries = document.entrySet(); + if (document1.entrySet().containsAll(entries)) { + return -1; + } + if (document2.entrySet().containsAll(entries)) { + return 1; + } + } + return referenceList.size(); + } + } + + /** + * The function that can execute a given {@link DocumentReferenceQuery} within the {@link ReferenceCollection} to + * obtain raw results. + */ + @FunctionalInterface + interface LookupFunction { + + /** + * @param referenceQuery never {@literal null}. + * @param referenceCollection never {@literal null}. + * @return never {@literal null}. + */ + Iterable apply(DocumentReferenceQuery referenceQuery, ReferenceCollection referenceCollection); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java new file mode 100644 index 0000000000..715327d18e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java @@ -0,0 +1,119 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.DBRef; + +/** + * The {@link ReferenceResolver} allows to load and convert linked entities. + * + * @author Christoph Strobl + * @since 3.3 + */ +@FunctionalInterface +public interface ReferenceResolver { + + /** + * Resolve the association defined via the given property from a given source value. May return a + * {@link LazyLoadingProxy proxy instance} in case of a lazy loading association. The resolved value is assignable to + * {@link PersistentProperty#getType()}. + * + * @param property the association defining property. + * @param source the association source value. + * @param referenceLookupDelegate the lookup executing component. + * @param entityReader conversion function capable of constructing entities from raw source. + * @return can be {@literal null}. + */ + @Nullable + Object resolveReference(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader); + + /** + * {@link ReferenceCollection} is a value object that contains information about the target database and collection + * name of an association. 
+ */ + class ReferenceCollection { + + @Nullable // + private final String database; + private final String collection; + + /** + * @param database can be {@literal null} to indicate the configured default + * {@link MongoDatabaseFactory#getMongoDatabase() database} should be used. + * @param collection the target collection name. Must not be {@literal null}. + */ + public ReferenceCollection(@Nullable String database, String collection) { + + Assert.hasText(collection, "Collection must not be empty or null"); + + this.database = database; + this.collection = collection; + } + + /** + * Create a new instance of {@link ReferenceCollection} from the given {@link DBRef}. + * + * @param dbRef must not be {@literal null}. + * @return new instance of {@link ReferenceCollection}. + */ + public static ReferenceCollection fromDBRef(DBRef dbRef) { + return new ReferenceCollection(dbRef.getDatabaseName(), dbRef.getCollectionName()); + } + + /** + * Get the target collection name. + * + * @return never {@literal null}. + */ + public String getCollection() { + return collection; + } + + /** + * Get the target database name. If {@literal null} the default database should be used. + * + * @return can be {@literal null}. + */ + @Nullable + public String getDatabase() { + return database; + } + } + + /** + * Domain type conversion callback interface that allows to read the {@code source} object into a mapped object. + */ + @FunctionalInterface + interface MongoEntityReader { + + /** + * Read values from the given source into an object defined via the given {@link TypeInformation}. + * + * @param source never {@literal null}. + * @param typeInformation information about the desired target type. + * @return never {@literal null}. 
+ */ + Object read(Object source, TypeInformation typeInformation); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/UpdateMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/UpdateMapper.java index 0e02bd67d3..805bafe974 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/UpdateMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/UpdateMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,24 +15,30 @@ */ package org.springframework.data.mongodb.core.convert; +import java.util.ArrayList; import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.List; import java.util.Map.Entry; import org.bson.Document; import org.bson.conversions.Bson; import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.PropertyValueConverter; +import org.springframework.data.convert.ValueConversionContext; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Order; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.WriteOperatorContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import 
org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update.Modifier; import org.springframework.data.mongodb.core.query.Update.Modifiers; -import org.springframework.data.util.ClassTypeInformation; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; /** * A subclass of {@link QueryMapper} that retains type information on the mongo types. @@ -57,10 +63,6 @@ public UpdateMapper(MongoConverter converter) { this.converter = converter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper#getMappedObject(Bson, MongoPersistentEntity) - */ @Override public Document getMappedObject(Bson query, @Nullable MongoPersistentEntity entity) { @@ -131,19 +133,22 @@ public static boolean isUpdateObject(@Nullable Document updateObj) { */ @Override protected Object delegateConvertToMongoType(Object source, @Nullable MongoPersistentEntity entity) { + + if (entity != null && entity.isUnwrapped()) { + return converter.convertToMongoType(source, entity); + } + return converter.convertToMongoType(source, - entity == null ? ClassTypeInformation.OBJECT : getTypeHintForEntity(source, entity)); + entity == null ? TypeInformation.OBJECT : getTypeHintForEntity(source, entity)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper#getMappedObjectForField(org.springframework.data.mongodb.core.convert.QueryMapper.Field, java.lang.Object) - */ @Override protected Entry getMappedObjectForField(Field field, Object rawValue) { if (isDocument(rawValue)) { - return createMapEntry(field, convertSimpleOrDocument(rawValue, field.getPropertyEntity())); + + Object val = field.isMap() ? 
new LinkedHashMap<>((Document) rawValue) : rawValue; // unwrap to preserve field type + return createMapEntry(field, convertSimpleOrDocument(val, field.getPropertyEntity())); } if (isQuery(rawValue)) { @@ -158,33 +163,36 @@ protected Entry getMappedObjectForField(Field field, Object rawV return super.getMappedObjectForField(field, rawValue); } + protected Object convertValueWithConversionContext(Field documentField, Object sourceValue, Object value, + PropertyValueConverter> valueConverter, + MongoConversionContext conversionContext) { + + return super.convertValueWithConversionContext(documentField, sourceValue, value, valueConverter, conversionContext.forOperator(new WriteOperatorContext(documentField.name))); + } + private Entry getMappedUpdateModifier(Field field, Object rawValue) { - Object value = null; + Object value; - if (rawValue instanceof Modifier) { + if (rawValue instanceof Modifier modifier) { - value = getMappedValue(field, (Modifier) rawValue); + value = getMappedValue(field, modifier); - } else if (rawValue instanceof Modifiers) { + } else if (rawValue instanceof Modifiers modifiers) { Document modificationOperations = new Document(); - for (Modifier modifier : ((Modifiers) rawValue).getModifiers()) { + for (Modifier modifier : modifiers.getModifiers()) { modificationOperations.putAll(getMappedValue(field, modifier)); } value = modificationOperations; } else { - throw new IllegalArgumentException(String.format("Unable to map value of type '%s'!", rawValue.getClass())); + throw new IllegalArgumentException(String.format("Unable to map value of type '%s'", rawValue.getClass())); } return createMapEntry(field, value); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper#isAssociationConversionNecessary(org.springframework.data.mongodb.core.convert.QueryMapper.Field, java.lang.Object) - */ @Override protected boolean isAssociationConversionNecessary(Field documentField, @Nullable Object value) { return 
super.isAssociationConversionNecessary(documentField, value) || documentField.containsAssociation(); @@ -213,8 +221,18 @@ private Object getMappedModifier(@Nullable Field field, Modifier modifier) { : getMappedSort(sortObject, field.getPropertyEntity()); } - TypeInformation typeHint = field == null ? ClassTypeInformation.OBJECT : field.getTypeHint(); + if (isAssociationConversionNecessary(field, value)) { + if (ObjectUtils.isArray(value) || value instanceof Collection) { + List targetPointers = new ArrayList<>(); + for (Object val : converter.getConversionService().convert(value, List.class)) { + targetPointers.add(getMappedValue(field, val)); + } + return targetPointers; + } + return super.getMappedValue(field, value); + } + TypeInformation typeHint = field == null ? TypeInformation.OBJECT : field.getTypeHint(); return converter.convertToMongoType(value, typeHint); } @@ -238,10 +256,6 @@ private TypeInformation getTypeHintForEntity(@Nullable Object source, MongoPe return NESTED_DOCUMENT; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper#createPropertyField(org.springframework.data.mongodb.core.mapping.MongoPersistentEntity, java.lang.String, org.springframework.data.mapping.context.MappingContext) - */ @Override protected Field createPropertyField(MongoPersistentEntity entity, String key, MappingContext, MongoPersistentProperty> mappingContext) { @@ -269,6 +283,7 @@ private static Document getSortObject(Sort sort) { * * @author Thomas Darimont * @author Oliver Gierke + * @author Christoph Strobl */ private static class MetadataBackedUpdateField extends MetadataBackedField { @@ -286,35 +301,23 @@ private static class MetadataBackedUpdateField extends MetadataBackedField { public MetadataBackedUpdateField(MongoPersistentEntity entity, String key, MappingContext, MongoPersistentProperty> mappingContext) { - super(key.replaceAll("\\.\\$", ""), entity, mappingContext); + super(key, entity, mappingContext); this.key = key; } - 
/* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getMappedKey() - */ @Override public String getMappedKey() { return this.getPath() == null ? key : super.getMappedKey(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getPropertyConverter() - */ @Override protected Converter getPropertyConverter() { - return new PositionParameterRetainingPropertyKeyConverter(key); + return new PositionParameterRetainingPropertyKeyConverter(key, getMappingContext()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.convert.QueryMapper.MetadataBackedField#getAssociationConverter() - */ @Override protected Converter getAssociationConverter() { - return new UpdateAssociationConverter(getAssociation(), key); + return new UpdateAssociationConverter(getMappingContext(), getAssociation(), key); } /** @@ -331,16 +334,14 @@ protected static class UpdateAssociationConverter extends AssociationConverter { * * @param association must not be {@literal null}. */ - public UpdateAssociationConverter(Association association, String key) { + public UpdateAssociationConverter( + MappingContext, MongoPersistentProperty> mappingContext, + Association association, String key) { - super(association); - this.mapper = new KeyMapper(key); + super(key, association); + this.mapper = new KeyMapper(key, mappingContext); } - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public String convert(MongoPersistentProperty source) { return super.convert(source) == null ? 
null : mapper.mapPropertyName(source); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ValueResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ValueResolver.java index 662b53f8ec..0a96cc867a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ValueResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ValueResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,27 +17,30 @@ import org.bson.Document; import org.bson.conversions.Bson; -import org.springframework.data.mapping.model.SpELExpressionEvaluator; + +import org.springframework.data.mapping.model.ValueExpressionEvaluator; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.lang.Nullable; /** * Internal API to trigger the resolution of properties. * * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch */ interface ValueResolver { /** * Resolves the value for the given {@link MongoPersistentProperty} within the given {@link Document} using the given - * {@link SpELExpressionEvaluator} and {@link ObjectPath}. + * {@link ValueExpressionEvaluator} and {@link ObjectPath}. 
* * @param prop * @param bson * @param evaluator - * @param parent + * @param path * @return */ - Object getValueInternal(MongoPersistentProperty prop, Bson bson, SpELExpressionEvaluator evaluator, - ObjectPath path); + @Nullable + Object getValueInternal(MongoPersistentProperty prop, Bson bson, ValueExpressionEvaluator evaluator, ObjectPath path); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/EncryptingConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/EncryptingConverter.java new file mode 100644 index 0000000000..4097be7704 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/EncryptingConverter.java @@ -0,0 +1,66 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert.encryption; + +import org.springframework.data.mongodb.core.convert.MongoConversionContext; +import org.springframework.data.mongodb.core.convert.MongoValueConverter; +import org.springframework.data.mongodb.core.encryption.EncryptionContext; + +/** + * A specialized {@link MongoValueConverter} for {@literal encrypting} and {@literal decrypting} properties. 
+ * + * @author Christoph Strobl + * @since 4.1 + */ +public interface EncryptingConverter extends MongoValueConverter { + + @Override + default S read(Object value, MongoConversionContext context) { + return decrypt(value, buildEncryptionContext(context)); + } + + /** + * Decrypt the given encrypted source value within the given {@link EncryptionContext context}. + * + * @param encryptedValue the encrypted source. + * @param context the context to operate in. + * @return never {@literal null}. + */ + S decrypt(Object encryptedValue, EncryptionContext context); + + @Override + default T write(Object value, MongoConversionContext context) { + return encrypt(value, buildEncryptionContext(context)); + } + + /** + * Encrypt the given raw source value within the given {@link EncryptionContext context}. + * + * @param value the encrypted source. + * @param context the context to operate in. + * @return never {@literal null}. + */ + T encrypt(Object value, EncryptionContext context); + + /** + * Obtain the {@link EncryptionContext} for a given {@link MongoConversionContext value conversion context}. + * + * @param context the current MongoDB specific {@link org.springframework.data.convert.ValueConversionContext}. + * @return the {@link EncryptionContext} to operate in. + * @see org.springframework.data.convert.ValueConversionContext + */ + EncryptionContext buildEncryptionContext(MongoConversionContext context); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/ExplicitEncryptionContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/ExplicitEncryptionContext.java new file mode 100644 index 0000000000..67c30fcf94 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/ExplicitEncryptionContext.java @@ -0,0 +1,77 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert.encryption; + +import org.springframework.data.mongodb.core.convert.MongoConversionContext; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.OperatorContext; +import org.springframework.data.mongodb.core.encryption.EncryptionContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.expression.EvaluationContext; +import org.springframework.lang.Nullable; + +/** + * Default {@link EncryptionContext} implementation. 
+ * + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + */ +class ExplicitEncryptionContext implements EncryptionContext { + + private final MongoConversionContext conversionContext; + + public ExplicitEncryptionContext(MongoConversionContext conversionContext) { + this.conversionContext = conversionContext; + } + + @Override + public MongoPersistentProperty getProperty() { + return conversionContext.getProperty(); + } + + @Nullable + @Override + public Object lookupValue(String path) { + return conversionContext.getValue(path); + } + + @Override + public Object convertToMongoType(Object value) { + return conversionContext.write(value); + } + + @Override + public EvaluationContext getEvaluationContext(Object source) { + return conversionContext.getSpELContext().getEvaluationContext(source); + } + + @Override + public T read(@Nullable Object value, TypeInformation target) { + return conversionContext.read(value, target); + } + + @Override + public T write(@Nullable Object value, TypeInformation target) { + return conversionContext.write(value, target); + } + + @Override + @Nullable + public OperatorContext getOperatorContext() { + return conversionContext.getOperatorContext(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/MongoEncryptionConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/MongoEncryptionConverter.java new file mode 100644 index 0000000000..8d29847aae --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/MongoEncryptionConverter.java @@ -0,0 +1,317 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert.encryption; + +import static java.util.Arrays.*; +import static java.util.Collections.*; +import static org.springframework.data.mongodb.core.encryption.EncryptionOptions.*; + +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.BsonArray; +import org.bson.BsonBinary; +import org.bson.BsonDocument; +import org.bson.BsonValue; +import org.bson.Document; +import org.bson.types.Binary; + +import org.springframework.core.CollectionFactory; +import org.springframework.data.mongodb.core.convert.MongoConversionContext; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.OperatorContext; +import org.springframework.data.mongodb.core.encryption.Encryption; +import org.springframework.data.mongodb.core.encryption.EncryptionContext; +import org.springframework.data.mongodb.core.encryption.EncryptionKey; +import org.springframework.data.mongodb.core.encryption.EncryptionKeyResolver; +import org.springframework.data.mongodb.core.encryption.EncryptionOptions; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.Queryable; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; +import 
org.springframework.util.StringUtils; + +/** + * Default implementation of {@link EncryptingConverter}. Properties used with this converter must be annotated with + * {@link Encrypted @Encrypted} to provide key and algorithm metadata. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + */ +public class MongoEncryptionConverter implements EncryptingConverter { + + private static final Log LOGGER = LogFactory.getLog(MongoEncryptionConverter.class); + private static final List RANGE_OPERATORS = asList("$gt", "$gte", "$lt", "$lte"); + public static final String AND_OPERATOR = "$and"; + + private final Encryption encryption; + private final EncryptionKeyResolver keyResolver; + + public MongoEncryptionConverter(Encryption encryption, EncryptionKeyResolver keyResolver) { + + this.encryption = encryption; + this.keyResolver = keyResolver; + } + + @Nullable + @Override + public Object read(Object value, MongoConversionContext context) { + + Object decrypted = EncryptingConverter.super.read(value, context); + return decrypted instanceof BsonValue bsonValue ? 
BsonUtils.toJavaType(bsonValue) : decrypted; + } + + @Override + public Object decrypt(Object encryptedValue, EncryptionContext context) { + + Object decryptedValue = encryptedValue; + if (encryptedValue instanceof Binary || encryptedValue instanceof BsonBinary) { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Decrypting %s.%s.", getProperty(context).getOwner().getName(), + getProperty(context).getName())); + } + + decryptedValue = encryption.decrypt((BsonBinary) BsonUtils.simpleToBsonValue(encryptedValue)); + + // in case the driver has auto decryption (aka .bypassAutoEncryption(true)) active + // https://github.com/mongodb/mongo-java-driver/blob/master/driver-sync/src/examples/tour/ClientSideEncryptionExplicitEncryptionOnlyTour.java + if (encryptedValue == decryptedValue) { + return decryptedValue; + } + } + + MongoPersistentProperty persistentProperty = getProperty(context); + if (getProperty(context).isCollectionLike() && decryptedValue instanceof Iterable iterable) { + + int size = iterable instanceof Collection c ? 
c.size() : 10; + + if (!persistentProperty.isEntity()) { + Collection collection = CollectionFactory.createCollection(persistentProperty.getType(), size); + iterable.forEach(it -> { + if (it instanceof BsonValue bsonValue) { + collection.add(BsonUtils.toJavaType(bsonValue)); + } else { + collection.add(context.read(it, persistentProperty.getActualType())); + } + }); + + return collection; + } else { + Collection collection = CollectionFactory.createCollection(persistentProperty.getType(), size); + iterable.forEach(it -> { + if (it instanceof BsonValue bsonValue) { + collection.add(context.read(BsonUtils.toJavaType(bsonValue), persistentProperty.getActualType())); + } else { + collection.add(context.read(it, persistentProperty.getActualType())); + } + }); + return collection; + } + } + + if (!persistentProperty.isEntity() && persistentProperty.isMap()) { + if (persistentProperty.getType() != Document.class) { + if (decryptedValue instanceof BsonValue bsonValue) { + return new LinkedHashMap<>((Document) BsonUtils.toJavaType(bsonValue)); + } + if (decryptedValue instanceof Document document) { + return new LinkedHashMap<>(document); + } + if (decryptedValue instanceof Map map) { + return map; + } + } + } + + if (persistentProperty.isEntity() && decryptedValue instanceof BsonDocument bsonDocument) { + return context.read(BsonUtils.toJavaType(bsonDocument), persistentProperty.getTypeInformation().getType()); + } + + if (persistentProperty.isEntity() && decryptedValue instanceof Document document) { + return context.read(document, persistentProperty.getTypeInformation().getType()); + } + + return decryptedValue; + } + + @Override + public Object encrypt(Object value, EncryptionContext context) { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Encrypting %s.%s.", getProperty(context).getOwner().getName(), + getProperty(context).getName())); + } + + MongoPersistentProperty persistentProperty = getProperty(context); + + Encrypted annotation = 
persistentProperty.findAnnotation(Encrypted.class); + if (annotation == null) { + annotation = persistentProperty.getOwner().findAnnotation(Encrypted.class); + } + + if (annotation == null) { + throw new IllegalStateException(String.format("Property %s.%s is not annotated with @Encrypted", + persistentProperty.getOwner().getName(), persistentProperty.getName())); + } + + String algorithm = annotation.algorithm(); + EncryptionKey key = keyResolver.getKey(context); + OperatorContext operatorContext = context.getOperatorContext(); + + EncryptionOptions encryptionOptions = new EncryptionOptions(algorithm, key, + getEQOptions(persistentProperty, operatorContext)); + + if (operatorContext != null && !operatorContext.isWriteOperation() && encryptionOptions.queryableEncryptionOptions() != null + && !encryptionOptions.queryableEncryptionOptions().getQueryType().equals("equality")) { + return encryptExpression(operatorContext, value, encryptionOptions); + } else { + return encryptValue(value, context, persistentProperty, encryptionOptions); + } + } + + private static @Nullable QueryableEncryptionOptions getEQOptions(MongoPersistentProperty persistentProperty, + @Nullable OperatorContext operatorContext) { + + Queryable queryableAnnotation = persistentProperty.findAnnotation(Queryable.class); + if (queryableAnnotation == null || !StringUtils.hasText(queryableAnnotation.queryType())) { + return null; + } + + QueryableEncryptionOptions queryableEncryptionOptions = QueryableEncryptionOptions.none(); + + String queryAttributes = queryableAnnotation.queryAttributes(); + if (!queryAttributes.isEmpty()) { + queryableEncryptionOptions = queryableEncryptionOptions.attributes(Document.parse(queryAttributes)); + } + + if (queryableAnnotation.contentionFactor() >= 0) { + queryableEncryptionOptions = queryableEncryptionOptions.contentionFactor(queryableAnnotation.contentionFactor()); + } + + boolean isPartOfARangeQuery = operatorContext != null && !operatorContext.isWriteOperation(); + if 
(isPartOfARangeQuery) { + queryableEncryptionOptions = queryableEncryptionOptions.queryType(queryableAnnotation.queryType()); + } + return queryableEncryptionOptions; + } + + private BsonBinary encryptValue(Object value, EncryptionContext context, MongoPersistentProperty persistentProperty, + EncryptionOptions encryptionOptions) { + + if (!persistentProperty.isEntity()) { + + if (persistentProperty.isCollectionLike()) { + return encryption.encrypt(collectionLikeToBsonValue(value, persistentProperty, context), encryptionOptions); + } + if (persistentProperty.isMap()) { + Object convertedMap = context.write(value); + if (convertedMap instanceof Document document) { + return encryption.encrypt(document.toBsonDocument(), encryptionOptions); + } + } + return encryption.encrypt(BsonUtils.simpleToBsonValue(value), encryptionOptions); + } + + if (persistentProperty.isCollectionLike()) { + return encryption.encrypt(collectionLikeToBsonValue(value, persistentProperty, context), encryptionOptions); + } + + Object write = context.write(value); + if (write instanceof Document doc) { + return encryption.encrypt(doc.toBsonDocument(), encryptionOptions); + } + return encryption.encrypt(BsonUtils.simpleToBsonValue(write), encryptionOptions); + } + + /** + * Encrypts a range query expression. + *

          + * The mongodb-crypt {@code encryptExpression} has strict formatting requirements so this method ensures these + * requirements are met and then picks out and returns just the value for use with a range query. + * + * @param operatorContext field name and query operator. + * @param value the value of the expression to be encrypted. + * @param encryptionOptions the options. + * @return the encrypted range value for use in a range query. + */ + private BsonValue encryptExpression(OperatorContext operatorContext, Object value, + EncryptionOptions encryptionOptions) { + + BsonValue doc = BsonUtils.simpleToBsonValue(value); + + String fieldName = operatorContext.path(); + String queryOperator = operatorContext.operator(); + + if (!RANGE_OPERATORS.contains(queryOperator)) { + throw new AssertionError(String.format("Not a valid range query. Querying a range encrypted field but the " + + "query operator '%s' for field path '%s' is not a range query.", queryOperator, fieldName)); + } + + BsonDocument encryptExpression = new BsonDocument(AND_OPERATOR, + new BsonArray(singletonList(new BsonDocument(fieldName, new BsonDocument(queryOperator, doc))))); + + BsonDocument result = encryption.encryptExpression(encryptExpression, encryptionOptions); + return result.getArray(AND_OPERATOR).get(0).asDocument().getDocument(fieldName).getBinary(queryOperator); + } + + private BsonValue collectionLikeToBsonValue(Object value, MongoPersistentProperty property, + EncryptionContext context) { + + BsonArray bsonArray = new BsonArray(); + boolean isEntity = property.isEntity(); + + if (value instanceof Collection values) { + values.forEach(it -> { + + if (isEntity) { + Document document = (Document) context.write(it, property.getTypeInformation()); + bsonArray.add(document == null ? 
null : document.toBsonDocument()); + } else { + bsonArray.add(BsonUtils.simpleToBsonValue(it)); + } + }); + } else if (ObjectUtils.isArray(value)) { + + for (Object o : ObjectUtils.toObjectArray(value)) { + + if (isEntity) { + Document document = (Document) context.write(o, property.getTypeInformation()); + bsonArray.add(document == null ? null : document.toBsonDocument()); + } else { + bsonArray.add(BsonUtils.simpleToBsonValue(o)); + } + } + } + + return bsonArray; + } + + @Override + public EncryptionContext buildEncryptionContext(MongoConversionContext context) { + return new ExplicitEncryptionContext(context); + } + + protected MongoPersistentProperty getProperty(EncryptionContext context) { + return context.getProperty(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/package-info.java new file mode 100644 index 0000000000..4a6f78357a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/package-info.java @@ -0,0 +1,7 @@ +/** + * Converters integrating with + * explicit encryption + * mechanism of Client-Side Field Level Encryption. + */ +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.core.convert.encryption; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/Encryption.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/Encryption.java new file mode 100644 index 0000000000..a80a72ed1f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/Encryption.java @@ -0,0 +1,60 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import org.bson.BsonDocument; + +/** + * Component responsible for encrypting and decrypting values. + * + * @param <P> plaintext type.
+ * @param <C> ciphertext type. + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + */ +public interface Encryption<P, C> { + + /** + * Encrypt the given value. + * + * @param value must not be {@literal null}. + * @param options must not be {@literal null}. + * @return the encrypted value. + */ + C encrypt(P value, EncryptionOptions options); + + /** + * Decrypt the given value. + * + * @param value must not be {@literal null}. + * @return the decrypted value. + */ + P decrypt(C value); + + /** + * Encrypt the given expression. + * + * @param value must not be {@literal null}. + * @param options must not be {@literal null}. + * @return the encrypted expression. + * @since 4.5.0 + */ + default BsonDocument encryptExpression(BsonDocument value, EncryptionOptions options) { + throw new UnsupportedOperationException("Unsupported encryption method"); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionContext.java new file mode 100644 index 0000000000..5f5e29578d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionContext.java @@ -0,0 +1,142 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.mongodb.core.encryption; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.OperatorContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.expression.EvaluationContext; +import org.springframework.lang.Nullable; + +/** + * Context to encapsulate encryption for a specific {@link MongoPersistentProperty}. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + */ +public interface EncryptionContext { + + /** + * Returns the {@link MongoPersistentProperty} to be handled. + * + * @return will never be {@literal null}. + */ + MongoPersistentProperty getProperty(); + + /** + * Shortcut for converting a given {@literal value} into its store representation using the root + * {@code ValueConversionContext}. + * + * @param value + * @return + */ + Object convertToMongoType(Object value); + + /** + * Reads the value as an instance of the {@link PersistentProperty#getTypeInformation() property type}. + * + * @param value {@link Object value} to be read; can be {@literal null}. + * @return can be {@literal null}. + * @throws IllegalStateException if value cannot be read as an instance of {@link Class type}. + */ + default T read(@Nullable Object value) { + return (T) read(value, getProperty().getTypeInformation()); + } + + /** + * Reads the value as an instance of {@link Class type}. + * + * @param value {@link Object value} to be read; can be {@literal null}. + * @param target {@link Class type} of value to be read; must not be {@literal null}. + * @return can be {@literal null}. + * @throws IllegalStateException if value cannot be read as an instance of {@link Class type}. 
+ */ + default T read(@Nullable Object value, Class target) { + return read(value, TypeInformation.of(target)); + } + + /** + * Reads the value as an instance of {@link TypeInformation type}. + * + * @param value {@link Object value} to be read; can be {@literal null}. + * @param target {@link TypeInformation type} of value to be read; must not be {@literal null}. + * @return can be {@literal null}. + * @throws IllegalStateException if value cannot be read as an instance of {@link Class type}. + */ + T read(@Nullable Object value, TypeInformation target); + + /** + * Write the value as an instance of the {@link PersistentProperty#getTypeInformation() property type}. + * + * @param value {@link Object value} to write; can be {@literal null}. + * @return can be {@literal null}. + * @throws IllegalStateException if value cannot be written as an instance of the + * {@link PersistentProperty#getTypeInformation() property type}. + * @see PersistentProperty#getTypeInformation() + * @see #write(Object, TypeInformation) + */ + @Nullable + default T write(@Nullable Object value) { + return (T) write(value, getProperty().getTypeInformation()); + } + + /** + * Write the value as an instance of {@link Class type}. + * + * @param value {@link Object value} to write; can be {@literal null}. + * @param target {@link Class type} of value to be written; must not be {@literal null}. + * @return can be {@literal null}. + * @throws IllegalStateException if value cannot be written as an instance of {@link Class type}. + */ + @Nullable + default T write(@Nullable Object value, Class target) { + return write(value, TypeInformation.of(target)); + } + + /** + * Write the value as an instance of given {@link TypeInformation type}. + * + * @param value {@link Object value} to write; can be {@literal null}. + * @param target {@link TypeInformation type} of value to be written; must not be {@literal null}. + * @return can be {@literal null}. 
+ * @throws IllegalStateException if value cannot be written as an instance of {@link Class type}. + */ + @Nullable + T write(@Nullable Object value, TypeInformation target); + + /** + * Lookup the value for a given path within the current context. + * + * @param path the path/property name to resolve the current value for. + * @return can be {@literal null}. + */ + @Nullable + Object lookupValue(String path); + + EvaluationContext getEvaluationContext(Object source); + + /** + * The field name and field query operator + * + * @return can be {@literal null}. + */ + @Nullable + default OperatorContext getOperatorContext() { + return null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionKey.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionKey.java new file mode 100644 index 0000000000..d908a5ae26 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionKey.java @@ -0,0 +1,81 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.encryption; + +import org.bson.BsonBinary; +import org.springframework.util.Assert; + +/** + * The {@link EncryptionKey} represents a {@literal Data Encryption Key} reference that can be either direct via the + * {@link KeyId key id} or its {@link KeyAltName Key Alternative Name}. + * + * @author Christoph Strobl + * @since 4.1 + */ +public interface EncryptionKey { + + /** + * Create a new {@link EncryptionKey} that uses the keys id for reference. + * + * @param key must not be {@literal null}. + * @return new instance of {@link EncryptionKey KeyId}. + */ + static EncryptionKey keyId(BsonBinary key) { + + Assert.notNull(key, "KeyId must not be null"); + + return new KeyId(key); + } + + /** + * Create a new {@link EncryptionKey} that uses an {@literal Key Alternative Name} for reference. + * + * @param keyAltName must not be {@literal null} or empty. + * @return new instance of {@link EncryptionKey KeyAltName}. + */ + static EncryptionKey keyAltName(String keyAltName) { + + Assert.hasText(keyAltName, "Key Alternative Name must not be empty"); + + return new KeyAltName(keyAltName); + } + + /** + * @return the value that allows to reference a specific key. + */ + Object value(); + + /** + * @return the {@link Type} of reference. + */ + Type type(); + + /** + * The key reference type. + */ + enum Type { + + /** + * Key referenced via its {@literal id}. + */ + ID, + + /** + * Key referenced via an {@literal Key Alternative Name}. 
+ */ + ALT + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyResolver.java new file mode 100644 index 0000000000..a7ae7e3f3e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyResolver.java @@ -0,0 +1,100 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import org.bson.BsonBinary; +import org.bson.types.Binary; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.ExplicitEncrypted; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.mongodb.util.encryption.EncryptionUtils; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * Interface to obtain a {@link EncryptionKey Data Encryption Key} that is valid in a given {@link EncryptionContext + * context}. + *

+ * Use the {@link #annotated(EncryptionKeyResolver) based} variant which will first try to resolve a potential + * {@link ExplicitEncrypted#keyAltName() Key Alternate Name} from annotations before calling the fallback resolver. + * + * @author Christoph Strobl + * @since 4.1 + * @see EncryptionKey + */ +@FunctionalInterface +public interface EncryptionKeyResolver { + + /** + * Get the {@link EncryptionKey Data Encryption Key}. + * + * @param encryptionContext the current {@link EncryptionContext context}. + * @return never {@literal null}. + */ + EncryptionKey getKey(EncryptionContext encryptionContext); + + /** + * Obtain an {@link EncryptionKeyResolver} that evaluates {@link ExplicitEncrypted#keyAltName()} and only calls the + * fallback {@link EncryptionKeyResolver resolver} if no {@literal Key Alternate Name} is present. + * + * @param fallback must not be {@literal null}. + * @return new instance of {@link EncryptionKeyResolver}. + */ + static EncryptionKeyResolver annotated(EncryptionKeyResolver fallback) { + + Assert.notNull(fallback, "Fallback EncryptionKeyResolver must not be null"); + + return ((encryptionContext) -> { + + MongoPersistentProperty property = encryptionContext.getProperty(); + ExplicitEncrypted annotation = property.findAnnotation(ExplicitEncrypted.class); + if (annotation == null || !StringUtils.hasText(annotation.keyAltName())) { + + Encrypted encrypted = property.getOwner().findAnnotation(Encrypted.class); + if (encrypted == null) { + return fallback.getKey(encryptionContext); + } + + Object o = EncryptionUtils.resolveKeyId(encrypted.keyId()[0], + () -> encryptionContext.getEvaluationContext(new Object())); + if (o instanceof BsonBinary binary) { + return EncryptionKey.keyId(binary); + } + if (o instanceof Binary binary) { + return EncryptionKey.keyId((BsonBinary) BsonUtils.simpleToBsonValue(binary)); + } + if (o instanceof String string) { + return EncryptionKey.keyAltName(string); + } + + throw new
IllegalStateException(String.format("Cannot determine encryption key for %s.%s using key type %s", + property.getOwner().getName(), property.getName(), o == null ? "null" : o.getClass().getName())); + } + + String keyAltName = annotation.keyAltName(); + if (keyAltName.startsWith("/")) { + Object fieldValue = encryptionContext.lookupValue(keyAltName.replace("/", "")); + if (fieldValue == null) { + throw new IllegalStateException(String.format("Key Alternative Name for %s was null", keyAltName)); + } + return new KeyAltName(fieldValue.toString()); + } else { + return new KeyAltName(keyAltName); + } + }); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionOptions.java new file mode 100644 index 0000000000..73a66e4a8a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionOptions.java @@ -0,0 +1,235 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.encryption; + +import java.util.Map; +import java.util.Objects; + +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * Options used to provide additional information when {@link Encryption encrypting} values, like the + * {@link #algorithm()} to be used. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + */ +public class EncryptionOptions { + + private final String algorithm; + private final EncryptionKey key; + private final @Nullable QueryableEncryptionOptions queryableEncryptionOptions; + + public EncryptionOptions(String algorithm, EncryptionKey key) { + this(algorithm, key, null); + } + + public EncryptionOptions(String algorithm, EncryptionKey key, + @Nullable QueryableEncryptionOptions queryableEncryptionOptions) { + + Assert.hasText(algorithm, "Algorithm must not be empty"); + Assert.notNull(key, "EncryptionKey must not be empty"); + + this.key = key; + this.algorithm = algorithm; + this.queryableEncryptionOptions = queryableEncryptionOptions; + } + + public EncryptionKey key() { + return key; + } + + public String algorithm() { + return algorithm; + } + + /** + * @return {@literal null} if not set.
+ * @since 4.5 + */ + public @Nullable QueryableEncryptionOptions queryableEncryptionOptions() { + return queryableEncryptionOptions; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + EncryptionOptions that = (EncryptionOptions) o; + + if (!ObjectUtils.nullSafeEquals(algorithm, that.algorithm)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(key, that.key)) { + return false; + } + + return ObjectUtils.nullSafeEquals(queryableEncryptionOptions, that.queryableEncryptionOptions); + } + + @Override + public int hashCode() { + + int result = ObjectUtils.nullSafeHashCode(algorithm); + result = 31 * result + ObjectUtils.nullSafeHashCode(key); + result = 31 * result + ObjectUtils.nullSafeHashCode(queryableEncryptionOptions); + return result; + } + + @Override + public String toString() { + return "EncryptionOptions{" + "algorithm='" + algorithm + '\'' + ", key=" + key + ", queryableEncryptionOptions='" + + queryableEncryptionOptions + "'}"; + } + + /** + * Options, like the {@link #getQueryType()}, to apply when encrypting queryable values. + * + * @author Ross Lawley + * @author Christoph Strobl + * @since 4.5 + */ + public static class QueryableEncryptionOptions { + + private static final QueryableEncryptionOptions NONE = new QueryableEncryptionOptions(null, null, Map.of()); + + private final @Nullable String queryType; + private final @Nullable Long contentionFactor; + private final Map<String, Object> attributes; + + private QueryableEncryptionOptions(@Nullable String queryType, @Nullable Long contentionFactor, + Map<String, Object> attributes) { + + this.queryType = queryType; + this.contentionFactor = contentionFactor; + this.attributes = attributes; + } + + /** + * Create an empty {@link QueryableEncryptionOptions}. + * + * @return unmodifiable {@link QueryableEncryptionOptions} instance.
+ */ + public static QueryableEncryptionOptions none() { + return NONE; + } + + /** + * Define the {@code queryType} to be used for queryable document encryption. + * + * @param queryType can be {@literal null}. + * @return new instance of {@link QueryableEncryptionOptions}. + */ + public QueryableEncryptionOptions queryType(@Nullable String queryType) { + return new QueryableEncryptionOptions(queryType, contentionFactor, attributes); + } + + /** + * Define the {@code contentionFactor} to be used for queryable document encryption. + * + * @param contentionFactor can be {@literal null}. + * @return new instance of {@link QueryableEncryptionOptions}. + */ + public QueryableEncryptionOptions contentionFactor(@Nullable Long contentionFactor) { + return new QueryableEncryptionOptions(queryType, contentionFactor, attributes); + } + + /** + * Define the {@code rangeOptions} to be used for queryable document encryption. + * + * @param attributes can be {@literal null}. + * @return new instance of {@link QueryableEncryptionOptions}. + */ + public QueryableEncryptionOptions attributes(Map attributes) { + return new QueryableEncryptionOptions(queryType, contentionFactor, attributes); + } + + /** + * Get the {@code queryType} to apply. + * + * @return {@literal null} if not set. + */ + public @Nullable String getQueryType() { + return queryType; + } + + /** + * Get the {@code contentionFactor} to apply. + * + * @return {@literal null} if not set. + */ + public @Nullable Long getContentionFactor() { + return contentionFactor; + } + + /** + * Get the {@code rangeOptions} to apply. + * + * @return never {@literal null}. + */ + public Map getAttributes() { + return Map.copyOf(attributes); + } + + /** + * @return {@literal true} if no arguments set. 
+ */ + boolean isEmpty() { + return getQueryType() == null && getContentionFactor() == null && getAttributes().isEmpty(); + } + + @Override + public String toString() { + return "QueryableEncryptionOptions{" + "queryType='" + queryType + '\'' + ", contentionFactor=" + contentionFactor + + ", attributes=" + attributes + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + QueryableEncryptionOptions that = (QueryableEncryptionOptions) o; + + if (!ObjectUtils.nullSafeEquals(queryType, that.queryType)) { + return false; + } + + if (!ObjectUtils.nullSafeEquals(contentionFactor, that.contentionFactor)) { + return false; + } + return ObjectUtils.nullSafeEquals(attributes, that.attributes); + } + + @Override + public int hashCode() { + return Objects.hash(queryType, contentionFactor, attributes); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/KeyAltName.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/KeyAltName.java new file mode 100644 index 0000000000..3f6afc4a8c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/KeyAltName.java @@ -0,0 +1,54 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.encryption; + +import org.springframework.util.ObjectUtils; + +record KeyAltName(String value) implements EncryptionKey { + + @Override + public Type type() { + return Type.ALT; + } + + @Override + public String toString() { + + if (value().length() <= 3) { + return "KeyAltName('***')"; + } + return String.format("KeyAltName('%s***')", value.substring(0, 3)); + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + KeyAltName that = (KeyAltName) o; + return ObjectUtils.nullSafeEquals(value, that.value); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(value); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/KeyId.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/KeyId.java new file mode 100644 index 0000000000..b09a67bb0a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/KeyId.java @@ -0,0 +1,59 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.encryption; + +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.springframework.util.ObjectUtils; + +record KeyId(BsonBinary value) implements EncryptionKey { + + @Override + public Type type() { + return Type.ID; + } + + @Override + public String toString() { + + if (BsonBinarySubType.isUuid(value.getType())) { + String representation = value.asUuid().toString(); + if (representation.length() > 6) { + return String.format("KeyId('%s***')", representation.substring(0, 6)); + } + } + return "KeyId('***')"; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + org.springframework.data.mongodb.core.encryption.KeyId that = (org.springframework.data.mongodb.core.encryption.KeyId) o; + return ObjectUtils.nullSafeEquals(value, that.value); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(value); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryption.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryption.java new file mode 100644 index 0000000000..f83f98d4ac --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryption.java @@ -0,0 +1,146 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import static org.springframework.data.mongodb.util.MongoCompatibilityAdapter.rangeOptionsAdapter; + +import java.util.Map; +import java.util.function.Supplier; + +import org.bson.BsonBinary; +import org.bson.BsonDocument; +import org.bson.BsonValue; +import org.springframework.data.mongodb.core.encryption.EncryptionKey.Type; +import org.springframework.data.mongodb.core.encryption.EncryptionOptions.QueryableEncryptionOptions; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.util.Assert; + +import com.mongodb.client.model.vault.EncryptOptions; +import com.mongodb.client.model.vault.RangeOptions; +import com.mongodb.client.vault.ClientEncryption; + +/** + * {@link ClientEncryption} based {@link Encryption} implementation. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + */ +public class MongoClientEncryption implements Encryption<BsonValue, BsonBinary> { + + private final Supplier<ClientEncryption> source; + + MongoClientEncryption(Supplier<ClientEncryption> source) { + this.source = source; + } + + /** + * Create a new {@link MongoClientEncryption} instance for the given {@link ClientEncryption}. + * + * @param clientEncryption must not be {@literal null}. + * @return new instance of {@link MongoClientEncryption}.
+ */ + public static MongoClientEncryption just(ClientEncryption clientEncryption) { + + Assert.notNull(clientEncryption, "ClientEncryption must not be null"); + + return new MongoClientEncryption(() -> clientEncryption); + } + + @Override + public BsonValue decrypt(BsonBinary value) { + return getClientEncryption().decrypt(value); + } + + @Override + public BsonBinary encrypt(BsonValue value, EncryptionOptions options) { + return getClientEncryption().encrypt(value, createEncryptOptions(options)); + } + + @Override + public BsonDocument encryptExpression(BsonDocument value, EncryptionOptions options) { + return getClientEncryption().encryptExpression(value, createEncryptOptions(options)); + } + + public ClientEncryption getClientEncryption() { + return source.get(); + } + + private EncryptOptions createEncryptOptions(EncryptionOptions options) { + + EncryptOptions encryptOptions = new EncryptOptions(options.algorithm()); + + if (Type.ALT.equals(options.key().type())) { + encryptOptions = encryptOptions.keyAltName(options.key().value().toString()); + } else { + encryptOptions = encryptOptions.keyId((BsonBinary) options.key().value()); + } + + if (options.queryableEncryptionOptions() == null) { + return encryptOptions; + } + + QueryableEncryptionOptions qeOptions = options.queryableEncryptionOptions(); + if (qeOptions.getQueryType() != null) { + encryptOptions.queryType(qeOptions.getQueryType()); + } + if (qeOptions.getContentionFactor() != null) { + encryptOptions.contentionFactor(qeOptions.getContentionFactor()); + } + if (!qeOptions.getAttributes().isEmpty()) { + encryptOptions.rangeOptions(rangeOptions(qeOptions.getAttributes())); + } + return encryptOptions; + } + + protected RangeOptions rangeOptions(Map attributes) { + + RangeOptions encryptionRangeOptions = new RangeOptions(); + if (attributes.isEmpty()) { + return encryptionRangeOptions; + } + + if (attributes.containsKey("min")) { + 
encryptionRangeOptions.min(BsonUtils.simpleToBsonValue(attributes.get("min"))); + } + if (attributes.containsKey("max")) { + encryptionRangeOptions.max(BsonUtils.simpleToBsonValue(attributes.get("max"))); + } + if (attributes.containsKey("trimFactor")) { + Object trimFactor = attributes.get("trimFactor"); + Assert.isInstanceOf(Integer.class, trimFactor, () -> String + .format("Expected to find a %s but it turned out to be %s.", Integer.class, trimFactor.getClass())); + + rangeOptionsAdapter(encryptionRangeOptions).trimFactor((Integer) trimFactor); + } + + if (attributes.containsKey("sparsity")) { + Object sparsity = attributes.get("sparsity"); + Assert.isInstanceOf(Number.class, sparsity, + () -> String.format("Expected to find a %s but it turned out to be %s.", Long.class, sparsity.getClass())); + encryptionRangeOptions.sparsity(((Number) sparsity).longValue()); + } + + if (attributes.containsKey("precision")) { + Object precision = attributes.get("precision"); + Assert.isInstanceOf(Number.class, precision, () -> String + .format("Expected to find a %s but it turned out to be %s.", Integer.class, precision.getClass())); + encryptionRangeOptions.precision(((Number) precision).intValue()); + } + return encryptionRangeOptions; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/package-info.java new file mode 100644 index 0000000000..f3906d89dd --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/package-info.java @@ -0,0 +1,6 @@ +/** + * Infrastructure for explicit + * encryption mechanism of Client-Side Field Level Encryption. 
+ */ +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.core.encryption; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJson.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJson.java index d7e96af6d1..88d3d46c48 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJson.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJson.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +16,7 @@ package org.springframework.data.mongodb.core.geo; /** - * Interface definition for structures defined in GeoJSON ({@link http://geojson.org/}) format. + * Interface definition for structures defined in GeoJSON format. * * @author Christoph Strobl * @since 1.7 @@ -27,7 +27,7 @@ public interface GeoJson> { * String value representing the type of the {@link GeoJson} object. * * @return will never be {@literal null}. - * @see http://geojson.org/geojson-spec.html#geojson-objects + * @see https://geojson.org/geojson-spec.html#geojson-objects */ String getType(); @@ -36,7 +36,7 @@ public interface GeoJson> { * determined by {@link #getType()} of geometry. * * @return will never be {@literal null}. 
- * @see http://geojson.org/geojson-spec.html#geometry-objects + * @see https://geojson.org/geojson-spec.html#geometry-objects */ T getCoordinates(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonGeometryCollection.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonGeometryCollection.java index bf2719a632..2372700aec 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonGeometryCollection.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonGeometryCollection.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,7 @@ import java.util.Collections; import java.util.List; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; @@ -27,7 +28,7 @@ * * @author Christoph Strobl * @since 1.7 - * @see http://geojson.org/geojson-spec.html#geometry-collection + * @see https://geojson.org/geojson-spec.html#geometry-collection */ public class GeoJsonGeometryCollection implements GeoJson>> { @@ -38,59 +39,41 @@ public class GeoJsonGeometryCollection implements GeoJson>> /** * Creates a new {@link GeoJsonGeometryCollection} for the given {@link GeoJson} instances. * - * @param geometries + * @param geometries must not be {@literal null}. 
*/ public GeoJsonGeometryCollection(List> geometries) { - Assert.notNull(geometries, "Geometries must not be null!"); + Assert.notNull(geometries, "Geometries must not be null"); this.geometries.addAll(geometries); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() - */ @Override public String getType() { return TYPE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getCoordinates() - */ @Override public Iterable> getCoordinates() { return Collections.unmodifiableList(this.geometries); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return ObjectUtils.nullSafeHashCode(this.geometries); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof GeoJsonGeometryCollection)) { + if (!(obj instanceof GeoJsonGeometryCollection other)) { return false; } - GeoJsonGeometryCollection other = (GeoJsonGeometryCollection) obj; - return ObjectUtils.nullSafeEquals(this.geometries, other.geometries); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonLineString.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonLineString.java index da80222a60..942138fc76 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonLineString.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonLineString.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,7 +24,7 @@ * * @author Christoph Strobl * @since 1.7 - * @see http://geojson.org/geojson-spec.html#linestring + * @see https://geojson.org/geojson-spec.html#linestring */ public class GeoJsonLineString extends GeoJsonMultiPoint { @@ -50,10 +50,6 @@ public GeoJsonLineString(Point first, Point second, Point... others) { super(first, second, others); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonMultiPoint#getType() - */ @Override public String getType() { return TYPE; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonModule.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonModule.java index 2c15ec8c05..bc74a56df3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonModule.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonModule.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,7 +24,7 @@ import org.springframework.lang.Nullable; import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.Version; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonNode; @@ -34,7 +34,10 @@ import com.fasterxml.jackson.databind.node.ArrayNode; /** - * A Jackson {@link Module} to register custom {@link JsonSerializer} and {@link JsonDeserializer}s for GeoJSON types. + * A Jackson {@link Module} to register custom {@link JsonDeserializer}s for GeoJSON types. + *
          + * Use {@link #geoJsonModule()} to obtain a {@link Module} containing both {@link JsonSerializer serializers} and + * {@link JsonDeserializer deserializers}. * * @author Christoph Strobl * @author Oliver Gierke @@ -47,12 +50,87 @@ public class GeoJsonModule extends SimpleModule { public GeoJsonModule() { - addDeserializer(GeoJsonPoint.class, new GeoJsonPointDeserializer()); - addDeserializer(GeoJsonMultiPoint.class, new GeoJsonMultiPointDeserializer()); - addDeserializer(GeoJsonLineString.class, new GeoJsonLineStringDeserializer()); - addDeserializer(GeoJsonMultiLineString.class, new GeoJsonMultiLineStringDeserializer()); - addDeserializer(GeoJsonPolygon.class, new GeoJsonPolygonDeserializer()); - addDeserializer(GeoJsonMultiPolygon.class, new GeoJsonMultiPolygonDeserializer()); + registerDeserializersIn(this); + // TODO: add serializers as of next major version (4.0). + } + + /** + * Obtain a {@link Module} containing {@link JsonDeserializer deserializers} for the following {@link GeoJson} types: + *

            + *
          • {@link GeoJsonPoint}
          • + *
          • {@link GeoJsonMultiPoint}
          • + *
          • {@link GeoJsonLineString}
          • + *
          • {@link GeoJsonMultiLineString}
          • + *
          • {@link GeoJsonPolygon}
          • + *
          • {@link GeoJsonMultiPolygon}
          • + *
          + * + * @return a {@link Module} containing {@link JsonDeserializer deserializers} for {@link GeoJson} types. + * @since 3.2 + */ + public static Module deserializers() { + + SimpleModule module = new SimpleModule("Spring Data MongoDB GeoJson - Deserializers", + new Version(3, 2, 0, null, "org.springframework.data", "spring-data-mongodb-geojson")); + registerDeserializersIn(module); + return module; + } + + /** + * Obtain a {@link Module} containing {@link JsonSerializer serializers} for the following {@link GeoJson} types: + *
            + *
          • {@link GeoJsonPoint}
          • + *
          • {@link GeoJsonMultiPoint}
          • + *
          • {@link GeoJsonLineString}
          • + *
          • {@link GeoJsonMultiLineString}
          • + *
          • {@link GeoJsonPolygon}
          • + *
          • {@link GeoJsonMultiPolygon}
          • + *
          + * + * @return a {@link Module} containing {@link JsonSerializer serializers} for {@link GeoJson} types. + * @since 3.2 + */ + public static Module serializers() { + + SimpleModule module = new SimpleModule("Spring Data MongoDB GeoJson - Serializers", + new Version(3, 2, 0, null, "org.springframework.data", "spring-data-mongodb-geojson")); + GeoJsonSerializersModule.registerSerializersIn(module); + return module; + } + + /** + * Obtain a {@link Module} containing {@link JsonSerializer serializers} and {@link JsonDeserializer deserializers} + * for the following {@link GeoJson} types: + *
            + *
          • {@link GeoJsonPoint}
          • + *
          • {@link GeoJsonMultiPoint}
          • + *
          • {@link GeoJsonLineString}
          • + *
          • {@link GeoJsonMultiLineString}
          • + *
          • {@link GeoJsonPolygon}
          • + *
          • {@link GeoJsonMultiPolygon}
          • + *
          + * + * @return a {@link Module} containing {@link JsonSerializer serializers} and {@link JsonDeserializer deserializers} + * for {@link GeoJson} types. + * @since 3.2 + */ + public static Module geoJsonModule() { + + SimpleModule module = new SimpleModule("Spring Data MongoDB GeoJson", + new Version(3, 2, 0, null, "org.springframework.data", "spring-data-mongodb-geojson")); + GeoJsonSerializersModule.registerSerializersIn(module); + registerDeserializersIn(module); + return module; + } + + private static void registerDeserializersIn(SimpleModule module) { + + module.addDeserializer(GeoJsonPoint.class, new GeoJsonPointDeserializer()); + module.addDeserializer(GeoJsonMultiPoint.class, new GeoJsonMultiPointDeserializer()); + module.addDeserializer(GeoJsonLineString.class, new GeoJsonLineStringDeserializer()); + module.addDeserializer(GeoJsonMultiLineString.class, new GeoJsonMultiLineStringDeserializer()); + module.addDeserializer(GeoJsonPolygon.class, new GeoJsonPolygonDeserializer()); + module.addDeserializer(GeoJsonMultiPolygon.class, new GeoJsonMultiPolygonDeserializer()); } /** @@ -61,14 +139,9 @@ public GeoJsonModule() { */ private static abstract class GeoJsonDeserializer> extends JsonDeserializer { - /* - * (non-Javadoc) - * @see com.fasterxml.jackson.databind.JsonDeserializer#deserialize(com.fasterxml.jackson.core.JsonParser, com.fasterxml.jackson.databind.DeserializationContext) - */ @Nullable @Override - public T deserialize(@Nullable JsonParser jp, @Nullable DeserializationContext ctxt) - throws IOException, JsonProcessingException { + public T deserialize(@Nullable JsonParser jp, @Nullable DeserializationContext ctxt) throws IOException { JsonNode node = jp.readValueAsTree(); JsonNode coordinates = node.get("coordinates"); @@ -134,7 +207,7 @@ protected List toPoints(@Nullable ArrayNode node) { return Collections.emptyList(); } - List points = new ArrayList(node.size()); + List points = new ArrayList<>(node.size()); for (JsonNode coordinatePair : 
node) { if (coordinatePair.isArray()) { @@ -145,7 +218,7 @@ protected List toPoints(@Nullable ArrayNode node) { } protected GeoJsonLineString toLineString(ArrayNode node) { - return new GeoJsonLineString(toPoints((ArrayNode) node)); + return new GeoJsonLineString(toPoints(node)); } } @@ -163,10 +236,6 @@ protected GeoJsonLineString toLineString(ArrayNode node) { */ private static class GeoJsonPointDeserializer extends GeoJsonDeserializer { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonModule.GeoJsonDeserializer#doDeserialize(com.fasterxml.jackson.databind.node.ArrayNode) - */ @Nullable @Override protected GeoJsonPoint doDeserialize(ArrayNode coordinates) { @@ -193,10 +262,6 @@ protected GeoJsonPoint doDeserialize(ArrayNode coordinates) { */ private static class GeoJsonLineStringDeserializer extends GeoJsonDeserializer { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonModule.GeoJsonDeserializer#doDeserialize(com.fasterxml.jackson.databind.node.ArrayNode) - */ @Override protected GeoJsonLineString doDeserialize(ArrayNode coordinates) { return new GeoJsonLineString(toPoints(coordinates)); @@ -222,10 +287,6 @@ protected GeoJsonLineString doDeserialize(ArrayNode coordinates) { */ private static class GeoJsonMultiPointDeserializer extends GeoJsonDeserializer { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonModule.GeoJsonDeserializer#doDeserialize(com.fasterxml.jackson.databind.node.ArrayNode) - */ @Override protected GeoJsonMultiPoint doDeserialize(ArrayNode coordinates) { return new GeoJsonMultiPoint(toPoints(coordinates)); @@ -252,14 +313,10 @@ protected GeoJsonMultiPoint doDeserialize(ArrayNode coordinates) { */ private static class GeoJsonMultiLineStringDeserializer extends GeoJsonDeserializer { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonModule.GeoJsonDeserializer#doDeserialize(com.fasterxml.jackson.databind.node.ArrayNode) - */ 
@Override protected GeoJsonMultiLineString doDeserialize(ArrayNode coordinates) { - List lines = new ArrayList(coordinates.size()); + List lines = new ArrayList<>(coordinates.size()); for (JsonNode lineString : coordinates) { if (lineString.isArray()) { @@ -290,10 +347,6 @@ protected GeoJsonMultiLineString doDeserialize(ArrayNode coordinates) { */ private static class GeoJsonPolygonDeserializer extends GeoJsonDeserializer { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonModule.GeoJsonDeserializer#doDeserialize(com.fasterxml.jackson.databind.node.ArrayNode) - */ @Nullable @Override protected GeoJsonPolygon doDeserialize(ArrayNode coordinates) { @@ -329,14 +382,10 @@ protected GeoJsonPolygon doDeserialize(ArrayNode coordinates) { */ private static class GeoJsonMultiPolygonDeserializer extends GeoJsonDeserializer { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJsonModule.GeoJsonDeserializer#doDeserialize(com.fasterxml.jackson.databind.node.ArrayNode) - */ @Override protected GeoJsonMultiPolygon doDeserialize(ArrayNode coordinates) { - List polygones = new ArrayList(coordinates.size()); + List polygones = new ArrayList<>(coordinates.size()); for (JsonNode polygon : coordinates) { for (JsonNode ring : polygon) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiLineString.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiLineString.java index 73c5cb37ea..8dafe9ea00 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiLineString.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiLineString.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,6 +20,7 @@ import java.util.List; import org.springframework.data.geo.Point; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; @@ -28,7 +29,7 @@ * * @author Christoph Strobl * @since 1.7 - * @see http://geojson.org/geojson-spec.html#multilinestring + * @see https://geojson.org/geojson-spec.html#multilinestring */ public class GeoJsonMultiLineString implements GeoJson> { @@ -43,7 +44,7 @@ public class GeoJsonMultiLineString implements GeoJson... lines) { - Assert.notEmpty(lines, "Points for MultiLineString must not be null!"); + Assert.notEmpty(lines, "Points for MultiLineString must not be null"); for (List line : lines) { this.coordinates.add(new GeoJsonLineString(line)); @@ -57,53 +58,37 @@ public GeoJsonMultiLineString(List... 
lines) { */ public GeoJsonMultiLineString(List lines) { - Assert.notNull(lines, "Lines for MultiLineString must not be null!"); + Assert.notNull(lines, "Lines for MultiLineString must not be null"); this.coordinates.addAll(lines); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() - */ @Override public String getType() { return TYPE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getCoordinates() - */ @Override public Iterable getCoordinates() { return Collections.unmodifiableList(this.coordinates); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return ObjectUtils.nullSafeHashCode(this.coordinates); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof GeoJsonMultiLineString)) { + if (!(obj instanceof GeoJsonMultiLineString other)) { return false; } - return ObjectUtils.nullSafeEquals(this.coordinates, ((GeoJsonMultiLineString) obj).coordinates); + return ObjectUtils.nullSafeEquals(this.coordinates, other.coordinates); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java index c9fa82abf9..bcb4c3e79e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,6 +21,7 @@ import java.util.List; import org.springframework.data.geo.Point; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; @@ -28,8 +29,9 @@ * {@link GeoJsonMultiPoint} is defined as list of {@link Point}s. * * @author Christoph Strobl + * @author Ivan Volzhev * @since 1.7 - * @see http://geojson.org/geojson-spec.html#multipoint + * @see https://geojson.org/geojson-spec.html#multipoint */ public class GeoJsonMultiPoint implements GeoJson> { @@ -37,17 +39,31 @@ public class GeoJsonMultiPoint implements GeoJson> { private final List points; + /** + * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}. + * + * @param point must not be {@literal null}. + * @since 3.2.5 + */ + public GeoJsonMultiPoint(Point point) { + + Assert.notNull(point, "Point must not be null"); + + this.points = new ArrayList<>(); + this.points.add(point); + } + /** * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}s. * - * @param points points must not be {@literal null} and have at least 2 entries. + * @param points points must not be {@literal null} and not empty */ public GeoJsonMultiPoint(List points) { - Assert.notNull(points, "Points must not be null."); - Assert.isTrue(points.size() >= 2, "Minimum of 2 Points required."); + Assert.notNull(points, "Points must not be null"); + Assert.notEmpty(points, "Points must contain at least one point"); - this.points = new ArrayList(points); + this.points = new ArrayList<>(points); } /** @@ -59,58 +75,42 @@ public GeoJsonMultiPoint(List points) { */ public GeoJsonMultiPoint(Point first, Point second, Point... 
others) { - Assert.notNull(first, "First point must not be null!"); - Assert.notNull(second, "Second point must not be null!"); - Assert.notNull(others, "Additional points must not be null!"); + Assert.notNull(first, "First point must not be null"); + Assert.notNull(second, "Second point must not be null"); + Assert.notNull(others, "Additional points must not be null"); - this.points = new ArrayList(); + this.points = new ArrayList<>(); this.points.add(first); this.points.add(second); this.points.addAll(Arrays.asList(others)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() - */ @Override public String getType() { return TYPE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getCoordinates() - */ @Override public List getCoordinates() { return Collections.unmodifiableList(this.points); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return ObjectUtils.nullSafeHashCode(this.points); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof GeoJsonMultiPoint)) { + if (!(obj instanceof GeoJsonMultiPoint other)) { return false; } - return ObjectUtils.nullSafeEquals(this.points, ((GeoJsonMultiPoint) obj).points); + return ObjectUtils.nullSafeEquals(this.points, other.points); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPolygon.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPolygon.java index 3d3e94a0ba..12b9de9da4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPolygon.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPolygon.java @@ -1,11 +1,11 @@ /* - * 
Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,7 @@ import java.util.Collections; import java.util.List; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; @@ -41,53 +42,37 @@ public class GeoJsonMultiPolygon implements GeoJson> { */ public GeoJsonMultiPolygon(List polygons) { - Assert.notNull(polygons, "Polygons for MultiPolygon must not be null!"); + Assert.notNull(polygons, "Polygons for MultiPolygon must not be null"); this.coordinates.addAll(polygons); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() - */ @Override public String getType() { return TYPE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getCoordinates() - */ @Override public List getCoordinates() { return Collections.unmodifiableList(this.coordinates); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return ObjectUtils.nullSafeHashCode(this.coordinates); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof GeoJsonMultiPolygon)) { + if (!(obj instanceof GeoJsonMultiPolygon other)) { return false; } - return ObjectUtils.nullSafeEquals(this.coordinates, ((GeoJsonMultiPolygon) obj).coordinates); + return 
ObjectUtils.nullSafeEquals(this.coordinates, other.coordinates); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPoint.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPoint.java index f8416ce71d..7bd4e1203c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPoint.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPoint.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,11 +21,12 @@ import org.springframework.data.geo.Point; /** - * {@link GeoJson} representation of {@link Point}. + * {@link GeoJson} representation of {@link Point}. Uses {@link Point#getX()} as {@literal longitude} and + * {@link Point#getY()} as {@literal latitude}. * * @author Christoph Strobl * @since 1.7 - * @see http://geojson.org/geojson-spec.html#point + * @see https://geojson.org/geojson-spec.html#point */ public class GeoJsonPoint extends Point implements GeoJson> { @@ -36,8 +37,8 @@ public class GeoJsonPoint extends Point implements GeoJson> { /** * Creates {@link GeoJsonPoint} for given coordinates. * - * @param x - * @param y + * @param x longitude between {@literal -180} and {@literal 180} (inclusive). + * @param y latitude between {@literal -90} and {@literal 90} (inclusive). 
*/ public GeoJsonPoint(double x, double y) { super(x, y); @@ -45,6 +46,8 @@ public GeoJsonPoint(double x, double y) { /** * Creates {@link GeoJsonPoint} for given {@link Point}. + *

          + * {@link Point#getX()} translates to {@literal longitude}, {@link Point#getY()} to {@literal latitude}. * * @param point must not be {@literal null}. */ @@ -52,21 +55,18 @@ public GeoJsonPoint(Point point) { super(point); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() - */ @Override public String getType() { return TYPE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getCoordinates() + /** + * Obtain the coordinates (x/longitude, y/latitude) array. + * + * @return the coordinates putting {@link #getX() x/longitude} first, and {@link #getY() y/latitude} second. */ @Override public List getCoordinates() { - return Arrays.asList(Double.valueOf(getX()), Double.valueOf(getY())); + return Arrays.asList(getX(), getY()); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPolygon.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPolygon.java index f96359d834..166a10df08 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPolygon.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonPolygon.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,7 +23,9 @@ import org.springframework.data.geo.Point; import org.springframework.data.geo.Polygon; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; /** * {@link GeoJson} representation of {@link Polygon}. Unlike {@link Polygon} the {@link GeoJsonPolygon} requires a @@ -32,7 +34,7 @@ * @author Christoph Strobl * @author Mark Paluch * @since 1.7 - * @see http://geojson.org/geojson-spec.html#polygon + * @see https://geojson.org/geojson-spec.html#polygon */ public class GeoJsonPolygon extends Polygon implements GeoJson> { @@ -99,7 +101,7 @@ public GeoJsonPolygon withInnerRing(List points) { */ public GeoJsonPolygon withInnerRing(GeoJsonLineString lineString) { - Assert.notNull(lineString, "LineString must not be null!"); + Assert.notNull(lineString, "LineString must not be null"); Iterator it = this.coordinates.iterator(); GeoJsonPolygon polygon = new GeoJsonPolygon(it.next().getCoordinates()); @@ -112,19 +114,11 @@ public GeoJsonPolygon withInnerRing(GeoJsonLineString lineString) { return polygon; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() - */ @Override public String getType() { return TYPE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.geo.GeoJson#getCoordinates() - */ @Override public List getCoordinates() { return Collections.unmodifiableList(this.coordinates); @@ -142,4 +136,28 @@ private static List asList(Point first, Point second, Point third, Point return result; } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; 
+ } + if (!super.equals(o)) { + return false; + } + + GeoJsonPolygon that = (GeoJsonPolygon) o; + + return ObjectUtils.nullSafeEquals(this.coordinates, that.coordinates); + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + ObjectUtils.nullSafeHashCode(coordinates); + return result; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonSerializersModule.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonSerializersModule.java new file mode 100644 index 0000000000..5b80720da9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonSerializersModule.java @@ -0,0 +1,309 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.geo; + +import java.io.IOException; + +import org.springframework.data.geo.Point; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.Module; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.module.SimpleModule; + +/** + * A Jackson {@link Module} to register custom {@link JsonSerializer}s for GeoJSON types. 
+ * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ +class GeoJsonSerializersModule extends SimpleModule { + + private static final long serialVersionUID = 1340494654898895610L; + + GeoJsonSerializersModule() { + registerSerializersIn(this); + } + + + static void registerSerializersIn(SimpleModule module) { + + module.addSerializer(GeoJsonPoint.class, new GeoJsonPointSerializer()); + module.addSerializer(GeoJsonMultiPoint.class, new GeoJsonMultiPointSerializer()); + module.addSerializer(GeoJsonLineString.class, new GeoJsonLineStringSerializer()); + module.addSerializer(GeoJsonMultiLineString.class, new GeoJsonMultiLineStringSerializer()); + module.addSerializer(GeoJsonPolygon.class, new GeoJsonPolygonSerializer()); + module.addSerializer(GeoJsonMultiPolygon.class, new GeoJsonMultiPolygonSerializer()); + } + + /** + * @param + * @author Christoph Strobl + */ + private static abstract class GeoJsonSerializer> extends JsonSerializer { + + @Override + public void serialize(T shape, JsonGenerator jsonGenerator, SerializerProvider serializers) throws IOException { + + jsonGenerator.writeStartObject(); + jsonGenerator.writeStringField("type", shape.getType()); + jsonGenerator.writeArrayFieldStart("coordinates"); + + doSerialize(shape, jsonGenerator); + + jsonGenerator.writeEndArray(); + jsonGenerator.writeEndObject(); + } + + /** + * Perform the actual serialization given the {@literal shape} as {@link GeoJson}. + * + * @param shape + * @param jsonGenerator + * @return + */ + protected abstract void doSerialize(T shape, JsonGenerator jsonGenerator) throws IOException; + + /** + * Write a {@link Point} as array.
          + * {@code [10.0, 20.0]} + * + * @param point + * @param jsonGenerator + * @throws IOException + */ + protected void writePoint(Point point, JsonGenerator jsonGenerator) throws IOException { + + jsonGenerator.writeStartArray(); + writeRawCoordinates(point, jsonGenerator); + jsonGenerator.writeEndArray(); + } + + /** + * Write the {@link Point} coordinates.
          + * {@code 10.0, 20.0} + * + * @param point + * @param jsonGenerator + * @throws IOException + */ + protected void writeRawCoordinates(Point point, JsonGenerator jsonGenerator) throws IOException { + + jsonGenerator.writeNumber(point.getX()); + jsonGenerator.writeNumber(point.getY()); + } + + /** + * Write an {@link Iterable} of {@link Point} as array.
          + * {@code [ [10.0, 20.0], [30.0, 40.0], [50.0, 60.0] ]} + * + * @param points + * @param jsonGenerator + * @throws IOException + */ + protected void writeLine(Iterable points, JsonGenerator jsonGenerator) throws IOException { + + jsonGenerator.writeStartArray(); + writeRawLine(points, jsonGenerator); + jsonGenerator.writeEndArray(); + } + + /** + * Write an {@link Iterable} of {@link Point}.
          + * {@code [10.0, 20.0], [30.0, 40.0], [50.0, 60.0]} + * + * @param points + * @param jsonGenerator + * @throws IOException + */ + protected void writeRawLine(Iterable points, JsonGenerator jsonGenerator) throws IOException { + + for (Point point : points) { + writePoint(point, jsonGenerator); + } + } + } + + /** + * {@link JsonSerializer} converting {@link GeoJsonPoint} to: + * + *

          +	 * 
          +	 * { "type": "Point", "coordinates": [10.0, 20.0] }
          +	 * 
          +	 * 
          + * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ + static class GeoJsonPointSerializer extends GeoJsonSerializer { + + @Override + protected void doSerialize(GeoJsonPoint value, JsonGenerator jsonGenerator) throws IOException { + writeRawCoordinates(value, jsonGenerator); + } + } + + /** + * {@link JsonSerializer} converting {@link GeoJsonLineString} to: + * + *
          +	 * 
          +	 * {
          +	 *   "type": "LineString",
          +	 *   "coordinates": [
          +	 *     [10.0, 20.0], [30.0, 40.0], [50.0, 60.0]
          +	 *   ]
          +	 * }
          +	 * 
          +	 * 
          + * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ + static class GeoJsonLineStringSerializer extends GeoJsonSerializer { + + @Override + protected void doSerialize(GeoJsonLineString value, JsonGenerator jsonGenerator) throws IOException { + writeRawLine(value.getCoordinates(), jsonGenerator); + } + } + + /** + * {@link JsonSerializer} converting {@link GeoJsonMultiPoint} to: + * + *
          +	 * 
          +	 * {
          +	 *   "type": "MultiPoint",
          +	 *   "coordinates": [
          +	 *     [10.0, 20.0], [30.0, 40.0], [50.0, 60.0]
          +	 *   ]
          +	 * }
          +	 * 
          +	 * 
          + * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ + static class GeoJsonMultiPointSerializer extends GeoJsonSerializer { + + @Override + protected void doSerialize(GeoJsonMultiPoint value, JsonGenerator jsonGenerator) throws IOException { + writeRawLine(value.getCoordinates(), jsonGenerator); + } + } + + /** + * {@link JsonSerializer} converting {@link GeoJsonMultiLineString} to: + * + *
          +	 * 
          +	 * {
          +	 *   "type": "MultiLineString",
          +	 *   "coordinates": [
          +	 *     [ [10.0, 20.0], [30.0, 40.0] ],
          +	 *     [ [50.0, 60.0] , [70.0, 80.0] ]
          +	 *   ]
          +	 * }
          +	 * 
          +	 * 
          + * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ + static class GeoJsonMultiLineStringSerializer extends GeoJsonSerializer { + + @Override + protected void doSerialize(GeoJsonMultiLineString value, JsonGenerator jsonGenerator) throws IOException { + + for (GeoJsonLineString lineString : value.getCoordinates()) { + writeLine(lineString.getCoordinates(), jsonGenerator); + } + } + } + + /** + * {@link JsonSerializer} converting {@link GeoJsonPolygon} to: + * + *
          +	 * 
          +	 * {
          +	 *   "type": "Polygon",
          +	 *   "coordinates": [
          +	 *     [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ]
          +	 *   ]
          +	 * }
          +	 * 
          +	 * 
          + * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ + static class GeoJsonPolygonSerializer extends GeoJsonSerializer { + + @Override + protected void doSerialize(GeoJsonPolygon value, JsonGenerator jsonGenerator) throws IOException { + + for (GeoJsonLineString lineString : value.getCoordinates()) { + writeLine(lineString.getCoordinates(), jsonGenerator); + } + } + } + + /** + * {@link JsonSerializer} converting {@link GeoJsonMultiPolygon} to: + * + *
          +	 * 
          +	 * {
          +	 *   "type": "MultiPolygon",
          +	 *   "coordinates": [
          +	 *     [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],
          +	 *     [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],
          +	 *     [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]
          +	 *   ]
          +	 * }
          +	 * 
          +	 * 
          + * + * @author Bjorn Harvold + * @author Christoph Strobl + * @since 3.2 + */ + static class GeoJsonMultiPolygonSerializer extends GeoJsonSerializer { + + @Override + protected void doSerialize(GeoJsonMultiPolygon value, JsonGenerator jsonGenerator) throws IOException { + + for (GeoJsonPolygon polygon : value.getCoordinates()) { + + jsonGenerator.writeStartArray(); + for (GeoJsonLineString lineString : polygon.getCoordinates()) { + writeLine(lineString.getCoordinates(), jsonGenerator); + } + jsonGenerator.writeEndArray(); + } + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/Sphere.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/Sphere.java index 2f1f6d18f4..a482c136e7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/Sphere.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/Sphere.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,11 +18,12 @@ import java.util.Arrays; import java.util.List; -import org.springframework.data.annotation.PersistenceConstructor; +import org.springframework.data.annotation.PersistenceCreator; import org.springframework.data.geo.Circle; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Point; import org.springframework.data.geo.Shape; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** @@ -44,12 +45,12 @@ public class Sphere implements Shape { * @param center must not be {@literal null}. * @param radius must not be {@literal null}. */ - @PersistenceConstructor + @PersistenceCreator public Sphere(Point center, Distance radius) { - Assert.notNull(center, "Center point must not be null!"); - Assert.notNull(radius, "Radius must not be null!"); - Assert.isTrue(radius.getValue() >= 0, "Radius must not be negative!"); + Assert.notNull(center, "Center point must not be null"); + Assert.notNull(radius, "Radius must not be null"); + Assert.isTrue(radius.getValue() >= 0, "Radius must not be negative"); this.center = center; this.radius = radius; @@ -58,7 +59,7 @@ public Sphere(Point center, Distance radius) { /** * Creates a Sphere around the given center {@link Point} with the given radius. * - * @param center + * @param center must not be {@literal null}. * @param radius */ public Sphere(Point center, double radius) { @@ -68,7 +69,7 @@ public Sphere(Point center, double radius) { /** * Creates a Sphere from the given {@link Circle}. * - * @param circle + * @param circle must not be {@literal null}. 
*/ public Sphere(Circle circle) { this(circle.getCenter(), circle.getRadius()); @@ -86,44 +87,31 @@ public Point getCenter() { /** * Returns the radius of the {@link Circle}. * - * @return + * @return never {@literal null}. */ public Distance getRadius() { return radius; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return String.format("Sphere [center=%s, radius=%s]", center, radius); } - /* (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (obj == null || !(obj instanceof Sphere)) { + if (!(obj instanceof Sphere other)) { return false; } - Sphere that = (Sphere) obj; - - return this.center.equals(that.center) && this.radius.equals(that.radius); + return this.center.equals(other.center) && this.radius.equals(other.radius); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { int result = 17; @@ -136,7 +124,7 @@ public int hashCode() { * Returns the {@link Shape} as a list of usually {@link Double} or {@link List}s of {@link Double}s. Wildcard bound * to allow implementations to return a more concrete element type. * - * @return + * @return never {@literal null}. */ public List asList() { return Arrays.asList(Arrays.asList(center.getX(), center.getY()), this.radius.getValue()); @@ -145,7 +133,7 @@ public List asList() { /** * Returns the command to be used to create the {@literal $within} criterion. * - * @return + * @return never {@literal null}. 
*/ public String getCommand() { return COMMAND; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndex.java index c2866457e2..05cf13f66b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndex.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndex.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,102 +17,127 @@ import java.lang.annotation.Documented; import java.lang.annotation.ElementType; +import java.lang.annotation.Repeatable; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.mongodb.core.annotation.Collation; +import org.springframework.data.mongodb.core.mapping.Document; + /** - * Mark a class to use compound indexes. + * Mark a class to use compound indexes.
          + *

          + * NOTE: This annotation is repeatable according to Java 8 conventions using {@link CompoundIndexes#value()} as + * container. + * + *

          + * @Document
          + * @CompoundIndex(def = "{'firstname': 1, 'lastname': 1}")
          + * @CompoundIndex(def = "{'address.city': 1, 'address.street': 1}")
          + * class Person {
          + * 	String firstname;
          + * 	String lastname;
          + *
          + * 	Address address;
          + * }
          + * 
          * * @author Jon Brisbin * @author Oliver Gierke * @author Philipp Schneider * @author Johno Crawford * @author Christoph Strobl + * @author Dave Perryman + * @author Stefan Tirea */ +@Collation @Target({ ElementType.TYPE }) @Documented +@Repeatable(CompoundIndexes.class) @Retention(RetentionPolicy.RUNTIME) public @interface CompoundIndex { /** - * The actual index definition in JSON format. The keys of the JSON document are the fields to be indexed, the values - * define the index direction (1 for ascending, -1 for descending).
          + * The actual index definition in JSON format or a {@link org.springframework.expression.spel.standard.SpelExpression + * template expression} resolving to either a JSON String or a {@link org.bson.Document}. The keys of the JSON + * document are the fields to be indexed, the values define the index direction (1 for ascending, -1 for descending). + *
          * If left empty on nested document, the whole document will be indexed. * - * @return - */ - String def() default ""; - - /** - * It does not actually make sense to use that attribute as the direction has to be defined in the {@link #def()} - * attribute actually. + *
          +	 * @Document
          +	 * @CompoundIndex(def = "{'h1': 1, 'h2': 1}")
          +	 * class JsonStringIndexDefinition {
          +	 *   String h1, h2;
          +	 * }
          +	 *
          +	 * @Document
          +	 * @CompoundIndex(def = "#{T(org.bson.Document).parse("{ 'h1': 1, 'h2': 1 }")}")
          +	 * class ExpressionIndexDefinition {
          +	 *   String h1, h2;
          +	 * }
          +	 * 
          * - * @return + * @return empty String by default. */ - @Deprecated - IndexDirection direction() default IndexDirection.ASCENDING; + String def() default ""; /** - * @return - * @see https://docs.mongodb.org/manual/core/index-unique/ + * @return {@literal false} by default. + * @see https://docs.mongodb.org/manual/core/index-unique/ */ boolean unique() default false; /** - * If set to true index will skip over any document that is missing the indexed field. + * If set to true index will skip over any document that is missing the indexed field.
          + * Must not be used with {@link #partialFilter()}. * - * @return - * @see https://docs.mongodb.org/manual/core/index-sparse/ + * @return {@literal false} by default. + * @see https://docs.mongodb.org/manual/core/index-sparse/ */ boolean sparse() default false; /** - * @return - * @see https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping - */ - boolean dropDups() default false; - - /** - * The name of the index to be created.
          + * Index name of the index to be created either as plain value or as + * {@link org.springframework.expression.spel.standard.SpelExpression template expression}.
          *
          * The name will only be applied as is when defined on root level. For usage on nested or embedded structures the * provided name will be prefixed with the path leading to the entity.
          *
          * The structure below * - *
          -	 * 
          +	 * 
           	 * @Document
           	 * class Root {
          -	 *   Hybrid hybrid;
          -	 *   Nested nested;
          +	 * 	Hybrid hybrid;
          +	 * 	Nested nested;
           	 * }
           	 *
           	 * @Document
           	 * @CompoundIndex(name = "compound_index", def = "{'h1': 1, 'h2': 1}")
           	 * class Hybrid {
          -	 *   String h1, h2;
          +	 * 	String h1, h2;
           	 * }
           	 *
           	 * @CompoundIndex(name = "compound_index", def = "{'n1': 1, 'n2': 1}")
           	 * class Nested {
          -	 *   String n1, n2;
          +	 * 	String n1, n2;
           	 * }
          -	 * 
           	 * 
          * * resolves in the following index structures * - *
          -	 * 
          +	 * 
           	 * db.root.createIndex( { hybrid.h1: 1, hybrid.h2: 1 } , { name: "hybrid.compound_index" } )
           	 * db.root.createIndex( { nested.n1: 1, nested.n2: 1 } , { name: "nested.compound_index" } )
           	 * db.hybrid.createIndex( { h1: 1, h2: 1 } , { name: "compound_index" } )
          -	 * 
           	 * 
          * - * @return + * @return empty String by default. */ String name() default ""; @@ -120,7 +145,7 @@ * If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults * to {@literal false}. * - * @return + * @return {@literal false} by default * @since 1.5 */ boolean useGeneratedName() default false; @@ -128,9 +153,36 @@ /** * If {@literal true} the index will be created in the background. * - * @return - * @see https://docs.mongodb.org/manual/core/indexes/#background-construction + * @return {@literal false} by default. + * @see https://docs.mongodb.org/manual/core/indexes/#background-construction */ boolean background() default false; + /** + * Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}.
          + * Must not be used with {@link #sparse() sparse = true}. + * + * @return empty by default. + * @see https://docs.mongodb.com/manual/core/index-partial/ + * @since 3.1 + */ + String partialFilter() default ""; + + /** + * The actual collation definition in JSON format or a + * {@link org.springframework.expression.spel.standard.SpelExpression template expression} resolving to either a JSON + * String or a {@link org.bson.Document}. The keys of the JSON document are configuration options for the collation + * (language-specific rules for string comparison) to be applied on string properties being part of the index. + *

          + * NOTE: Overrides {@link Document#collation()}. + * + * @return empty String by default. + * @see https://www.mongodb.com/docs/manual/reference/collation/ + * @since 4.0 + */ + @AliasFor(annotation = Collation.class, attribute = "value") + String collation() default ""; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexDefinition.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexDefinition.java index c6a1fafe12..62f4495408 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexDefinition.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexDefinition.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -35,14 +35,10 @@ public class CompoundIndexDefinition extends Index { */ public CompoundIndexDefinition(Document keys) { - Assert.notNull(keys, "Keys must not be null!"); + Assert.notNull(keys, "Keys must not be null"); this.keys = keys; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.Index#getIndexKeys() - */ @Override public Document getIndexKeys() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexes.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexes.java index 51a68b4ebc..d9195969d9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexes.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/CompoundIndexes.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,24 @@ */ package org.springframework.data.mongodb.core.index; +import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** - * @author Jon Brisbin + * Container annotation that allows to collect multiple {@link CompoundIndex} annotations. + *

          + * Can be used natively, declaring several nested {@link CompoundIndex} annotations. Can also be used in conjunction + * with Java 8's support for repeatable annotations, where {@link CompoundIndex} can simply be declared several + * times on the same {@linkplain ElementType#TYPE type}, implicitly generating this container annotation. + * + * @author Jon Brisbin + * @author Christoph Strobl */ @Target({ ElementType.TYPE }) +@Documented @Retention(RetentionPolicy.RUNTIME) public @interface CompoundIndexes { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/DefaultSearchIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/DefaultSearchIndexOperations.java new file mode 100644 index 0000000000..225bb41ac8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/DefaultSearchIndexOperations.java @@ -0,0 +1,125 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.index; + +import java.util.ArrayList; +import java.util.List; + +import org.bson.BsonString; +import org.bson.Document; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +import com.mongodb.client.model.SearchIndexModel; +import com.mongodb.client.model.SearchIndexType; + +/** + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.5 + */ +public class DefaultSearchIndexOperations implements SearchIndexOperations { + + private final MongoOperations mongoOperations; + private final String collectionName; + private final TypeInformation entityTypeInformation; + + public DefaultSearchIndexOperations(MongoOperations mongoOperations, Class type) { + this(mongoOperations, mongoOperations.getCollectionName(type), type); + } + + public DefaultSearchIndexOperations(MongoOperations mongoOperations, String collectionName, @Nullable Class type) { + + this.collectionName = collectionName; + + if (type != null) { + + MappingContext, MongoPersistentProperty> mappingContext = mongoOperations + .getConverter().getMappingContext(); + entityTypeInformation = mappingContext.getRequiredPersistentEntity(type).getTypeInformation(); + } else { + entityTypeInformation = null; + } + + this.mongoOperations = mongoOperations; + } + + @Override + public String createIndex(SearchIndexDefinition indexDefinition) { + + Document index = indexDefinition.getIndexDocument(entityTypeInformation, + mongoOperations.getConverter().getMappingContext()); + + mongoOperations.getCollection(collectionName) + .createSearchIndexes(List.of(new 
SearchIndexModel(indexDefinition.getName(), + index.get("definition", Document.class), SearchIndexType.of(new BsonString(indexDefinition.getType()))))); + + return indexDefinition.getName(); + } + + @Override + public void updateIndex(SearchIndexDefinition indexDefinition) { + + Document indexDocument = indexDefinition.getIndexDocument(entityTypeInformation, + mongoOperations.getConverter().getMappingContext()); + + mongoOperations.getCollection(collectionName).updateSearchIndex(indexDefinition.getName(), indexDocument); + } + + @Override + public boolean exists(String indexName) { + return getSearchIndex(indexName) != null; + } + + @Override + public SearchIndexStatus status(String indexName) { + + Document searchIndex = getSearchIndex(indexName); + return searchIndex != null ? SearchIndexStatus.valueOf(searchIndex.getString("status")) + : SearchIndexStatus.DOES_NOT_EXIST; + } + + @Override + public void dropAllIndexes() { + getSearchIndexes(null).forEach(indexInfo -> dropIndex(indexInfo.getString("name"))); + } + + @Override + public void dropIndex(String indexName) { + mongoOperations.getCollection(collectionName).dropSearchIndex(indexName); + } + + @Nullable + private Document getSearchIndex(String indexName) { + + List indexes = getSearchIndexes(indexName); + return indexes.isEmpty() ? null : indexes.iterator().next(); + } + + private List getSearchIndexes(@Nullable String indexName) { + + Document filter = StringUtils.hasText(indexName) ? 
new Document("name", indexName) : new Document(); + + return mongoOperations.getCollection(collectionName).aggregate(List.of(new Document("$listSearchIndexes", filter))) + .into(new ArrayList<>()); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexType.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexType.java index d909762f08..073f18c40b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexType.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexType.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexed.java index 1e796edfd1..3fb797559b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexed.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeoSpatialIndexed.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -34,8 +34,8 @@ public @interface GeoSpatialIndexed { /** - * Index name.
          - *
          + * Index name either as plain value or as {@link org.springframework.expression.spel.standard.SpelExpression template + * expression}.
          * The name will only be applied as is when defined on root level. For usage on nested or embedded structures the * provided name will be prefixed with the path leading to the entity.
          *
          @@ -52,6 +52,7 @@ * @Document * class Hybrid { * @GeoSpatialIndexed(name="index") Point h1; + * @GeoSpatialIndexed(name="#{@myBean.indexName}") Point h2; * } * * class Nested { @@ -67,10 +68,11 @@ * db.root.createIndex( { hybrid.h1: "2d" } , { name: "hybrid.index" } ) * db.root.createIndex( { nested.n1: "2d" } , { name: "nested.index" } ) * db.hybrid.createIndex( { h1: "2d" } , { name: "index" } ) + * db.hybrid.createIndex( { h2: "2d"} , { name: the value myBean.getIndexName() returned } ) * *

          * - * @return + * @return empty {@link String} by default. */ String name() default ""; @@ -78,7 +80,7 @@ * If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults * to {@literal false}. * - * @return + * @return {@literal false} by default. * @since 1.5 */ boolean useGeneratedName() default false; @@ -86,21 +88,21 @@ /** * Minimum value for indexed values. * - * @return + * @return {@literal -180} by default. */ int min() default -180; /** * Maximum value for indexed values. * - * @return + * @return {@literal +180} by default. */ int max() default 180; /** * Bits of precision for boundary calculations. * - * @return + * @return {@literal 26} by default. */ int bits() default 26; @@ -108,7 +110,7 @@ * The type of the geospatial index. Default is {@link GeoSpatialIndexType#GEO_2D} * * @since 1.4 - * @return + * @return {@link GeoSpatialIndexType#GEO_2D} by default. */ GeoSpatialIndexType type() default GeoSpatialIndexType.GEO_2D; @@ -116,15 +118,17 @@ * The bucket size for {@link GeoSpatialIndexType#GEO_HAYSTACK} indexes, in coordinate units. * * @since 1.4 - * @return + * @return {@literal 1.0} by default. + * @deprecated since MongoDB server version 4.4 */ + @Deprecated double bucketSize() default 1.0; /** * The name of the additional field to use for {@link GeoSpatialIndexType#GEO_HAYSTACK} indexes * * @since 1.4 - * @return + * @return empty {@link String} by default. 
*/ String additionalField() default ""; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeospatialIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeospatialIndex.java index b25849c5a6..0949506195 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeospatialIndex.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/GeospatialIndex.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,7 @@ import org.bson.Document; import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.util.MongoClientVersion; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -40,7 +41,7 @@ public class GeospatialIndex implements IndexDefinition { private @Nullable Integer max; private @Nullable Integer bits; private GeoSpatialIndexType type = GeoSpatialIndexType.GEO_2D; - private Double bucketSize = 1.0; + private Double bucketSize = MongoClientVersion.isVersion5orNewer() ? 
null : 1.0; private @Nullable String additionalField; private Optional filter = Optional.empty(); private Optional collation = Optional.empty(); @@ -52,14 +53,14 @@ public class GeospatialIndex implements IndexDefinition { */ public GeospatialIndex(String field) { - Assert.hasText(field, "Field must have text!"); + Assert.hasText(field, "Field must have text"); this.field = field; } /** * @param name must not be {@literal null} or empty. - * @return + * @return this. */ public GeospatialIndex named(String name) { @@ -69,38 +70,38 @@ public GeospatialIndex named(String name) { /** * @param min - * @return + * @return this. */ public GeospatialIndex withMin(int min) { - this.min = Integer.valueOf(min); + this.min = min; return this; } /** * @param max - * @return + * @return this. */ public GeospatialIndex withMax(int max) { - this.max = Integer.valueOf(max); + this.max = max; return this; } /** * @param bits - * @return + * @return this. */ public GeospatialIndex withBits(int bits) { - this.bits = Integer.valueOf(bits); + this.bits = bits; return this; } /** * @param type must not be {@literal null}. - * @return + * @return this. */ public GeospatialIndex typed(GeoSpatialIndexType type) { - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); this.type = type; return this; @@ -108,16 +109,18 @@ public GeospatialIndex typed(GeoSpatialIndexType type) { /** * @param bucketSize - * @return + * @return this. + * @deprecated since MongoDB server version 4.4 */ + @Deprecated public GeospatialIndex withBucketSize(double bucketSize) { this.bucketSize = bucketSize; return this; } /** - * @param fieldName. - * @return + * @param fieldName + * @return this. 
*/ public GeospatialIndex withAdditionalField(String fieldName) { this.additionalField = fieldName; @@ -128,7 +131,7 @@ public GeospatialIndex withAdditionalField(String fieldName) { * Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}. * * @param filter can be {@literal null}. - * @return + * @return this. * @see https://docs.mongodb.com/manual/core/index-partial/ * @since 1.10 @@ -146,7 +149,7 @@ public GeospatialIndex partial(@Nullable IndexFilter filter) { * index. * * @param collation can be {@literal null}. - * @return + * @return this. * @since 2.0 */ public GeospatialIndex collation(@Nullable Collation collation) { @@ -155,36 +158,28 @@ public GeospatialIndex collation(@Nullable Collation collation) { return this; } + @Override public Document getIndexKeys() { Document document = new Document(); switch (type) { - - case GEO_2D: - document.put(field, "2d"); - break; - - case GEO_2DSPHERE: - document.put(field, "2dsphere"); - break; - - case GEO_HAYSTACK: + case GEO_2D -> document.put(field, "2d"); + case GEO_2DSPHERE -> document.put(field, "2dsphere"); + case GEO_HAYSTACK -> { document.put(field, "geoHaystack"); if (!StringUtils.hasText(additionalField)) { - throw new IllegalArgumentException("When defining geoHaystack index, an additionnal field must be defined"); + throw new IllegalArgumentException("When defining geoHaystack index, an additional field must be defined"); } document.put(additionalField, 1); - break; - - default: - throw new IllegalArgumentException("Unsupported geospatial index " + type); + } + default -> throw new IllegalArgumentException("Unsupported geospatial index " + type); } return document; } - @Nullable + @Override public Document getIndexOptions() { Document document = new Document(); @@ -213,7 +208,9 @@ public Document getIndexOptions() { case GEO_HAYSTACK: - document.put("bucketSize", bucketSize); + if (bucketSize != null) { + document.put("bucketSize", bucketSize); + } break; } 
@@ -223,10 +220,6 @@ public Document getIndexOptions() { return document; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return String.format("Geo index: %s - Options: %s", getIndexKeys(), getIndexOptions()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/HashIndexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/HashIndexed.java new file mode 100644 index 0000000000..ce7a29cfc8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/HashIndexed.java @@ -0,0 +1,64 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation for a property that should be used as key for a + * Hashed Index. If used on a simple property, the + * index uses a hashing function to compute the hash of the value of the index field. Added to a property of complex + * type the embedded document is collapsed and the hash computed for the entire object. + *
          + * + *
          + * @Document
          + * public class DomainType {
          + *
          + * 	@HashIndexed @Id String id;
          + * }
          + * 
          + * + * {@link HashIndexed} can also be used as meta {@link java.lang.annotation.Annotation} to create composed annotations: + * + *
          + * @Indexed
          + * @HashIndexed
          + * @Retention(RetentionPolicy.RUNTIME)
          + * public @interface IndexAndHash {
          + *
          + * 	@AliasFor(annotation = Indexed.class, attribute = "name")
          + * 	String name() default "";
          + * }
          + *
          + * @Document
          + * public class DomainType {
          + *
          + * 	@IndexAndHash(name = "idx-name") String value;
          + * }
          + * 
          + * + * @author Christoph Strobl + * @since 2.2 + * @see HashedIndex + */ +@Target({ ElementType.ANNOTATION_TYPE, ElementType.FIELD }) +@Retention(RetentionPolicy.RUNTIME) +public @interface HashIndexed { +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/HashedIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/HashedIndex.java new file mode 100644 index 0000000000..4542834110 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/HashedIndex.java @@ -0,0 +1,58 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import org.bson.Document; +import org.springframework.util.Assert; + +/** + * {@link IndexDefinition} implementation for MongoDB + * Hashed Indexes maintaining entries with hashes of + * the values of the indexed field. + * + * @author Christoph Strobl + * @since 2.2 + */ +public class HashedIndex implements IndexDefinition { + + private final String field; + + private HashedIndex(String field) { + + Assert.hasText(field, "Field must not be null nor empty"); + this.field = field; + } + + /** + * Creates a new {@link HashedIndex} for the given field. + * + * @param field must not be {@literal null} nor empty. + * @return new instance of {@link HashedIndex}. 
+ */ + public static HashedIndex hashed(String field) { + return new HashedIndex(field); + } + + @Override + public Document getIndexKeys() { + return new Document(field, "hashed"); + } + + @Override + public Document getIndexOptions() { + return new Document(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Index.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Index.java index d2abadb8a3..95f4226e28 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Index.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Index.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,7 @@ */ package org.springframework.data.mongodb.core.index; +import java.time.Duration; import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; @@ -23,6 +24,7 @@ import org.bson.Document; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.index.IndexOptions.Unique; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.lang.Nullable; import org.springframework.util.Assert; @@ -36,17 +38,11 @@ @SuppressWarnings("deprecation") public class Index implements IndexDefinition { - public enum Duplicates { - RETAIN - } - private final Map fieldSpec = new LinkedHashMap(); private @Nullable String name; - private boolean unique = false; - 
private boolean dropDuplicates = false; private boolean sparse = false; private boolean background = false; - private long expire = -1; + private final IndexOptions options = IndexOptions.none(); private Optional filter = Optional.empty(); private Optional collation = Optional.empty(); @@ -69,19 +65,20 @@ public Index named(String name) { /** * Reject all documents that contain a duplicate value for the indexed field. * - * @return + * @return this. * @see https://docs.mongodb.org/manual/core/index-unique/ */ public Index unique() { - this.unique = true; + + this.options.setUnique(Unique.YES); return this; } /** * Skip over any document that is missing the indexed field. * - * @return + * @return this. * @see https://docs.mongodb.org/manual/core/index-sparse/ */ @@ -93,7 +90,7 @@ public Index sparse() { /** * Build the index in background (non blocking). * - * @return + * @return this. * @since 1.5 */ public Index background() { @@ -102,29 +99,57 @@ public Index background() { return this; } + /** + * Hidden indexes are not visible to the query planner and cannot be used to support a query. + * + * @return this. + * @see https://www.mongodb.com/docs/manual/core/index-hidden/ + * @since 4.1 + */ + public Index hidden() { + + options.setHidden(true); + return this; + } + /** * Specifies TTL in seconds. * * @param value - * @return + * @return this. * @since 1.5 */ public Index expire(long value) { return expire(value, TimeUnit.SECONDS); } + /** + * Specifies the TTL. + * + * @param timeout must not be {@literal null}. + * @return this. + * @throws IllegalArgumentException if given {@literal timeout} is {@literal null}. + * @since 2.2 + */ + public Index expire(Duration timeout) { + + Assert.notNull(timeout, "Timeout must not be null"); + return expire(timeout.getSeconds()); + } + /** * Specifies TTL with given {@link TimeUnit}. * * @param value - * @param unit - * @return + * @param unit must not be {@literal null}. + * @return this. 
* @since 1.5 */ public Index expire(long value, TimeUnit unit) { - Assert.notNull(unit, "TimeUnit for expiration must not be null."); - this.expire = unit.toSeconds(value); + Assert.notNull(unit, "TimeUnit for expiration must not be null"); + options.setExpire(Duration.ofSeconds(unit.toSeconds(value))); return this; } @@ -132,7 +157,7 @@ public Index expire(long value, TimeUnit unit) { * Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}. * * @param filter can be {@literal null}. - * @return + * @return this. * @see https://docs.mongodb.com/manual/core/index-partial/ * @since 1.10 @@ -150,7 +175,7 @@ public Index partial(@Nullable IndexFilter filter) { * index. * * @param collation can be {@literal null}. - * @return + * @return this. * @since 2.0 */ public Index collation(@Nullable Collation collation) { @@ -159,10 +184,6 @@ public Index collation(@Nullable Collation collation) { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexKeys() - */ public Document getIndexKeys() { Document document = new Document(); @@ -180,21 +201,13 @@ public Document getIndexOptions() { if (StringUtils.hasText(name)) { document.put("name", name); } - if (unique) { - document.put("unique", true); - } - if (dropDuplicates) { - document.put("dropDups", true); - } if (sparse) { document.put("sparse", true); } if (background) { document.put("background", true); } - if (expire >= 0) { - document.put("expireAfterSeconds", expire); - } + document.putAll(options.toDocument()); filter.ifPresent(val -> document.put("partialFilterExpression", val.getFilterObject())); collation.ifPresent(val -> document.append("collation", val.toDocument())); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDefinition.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDefinition.java index 666349dae0..663577f420 
100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDefinition.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDefinition.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,7 +24,17 @@ */ public interface IndexDefinition { + /** + * Get the {@link Document} containing properties covered by the index. + * + * @return never {@literal null}. + */ Document getIndexKeys(); + /** + * Get the index properties such as {@literal unique},... + * + * @return never {@literal null}. + */ Document getIndexOptions(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDirection.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDirection.java index 863e68bf8d..65fcf85a36 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDirection.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexDirection.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,5 +19,5 @@ * @author Jon Brisbin */ public enum IndexDirection { - ASCENDING, DESCENDING; + ASCENDING, DESCENDING } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java index 78630d00ad..a5cbf6c896 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -26,11 +26,25 @@ * @author Oliver Gierke * @author Christoph Strobl */ -@SuppressWarnings("deprecation") public final class IndexField { enum Type { - GEO, TEXT, DEFAULT; + GEO, TEXT, DEFAULT, + + /** + * @since 2.2 + */ + HASH, + + /** + * @since 3.3 + */ + WILDCARD, + + /** + * @since ?.? 
+ */ + VECTOR } private final String key; @@ -47,9 +61,11 @@ private IndexField(String key, @Nullable Direction direction, @Nullable Type typ Assert.hasText(key, "Key must not be null or empty"); if (Type.GEO.equals(type) || Type.TEXT.equals(type)) { - Assert.isNull(direction, "Geo/Text indexes must not have a direction!"); + Assert.isNull(direction, "Geo/Text indexes must not have a direction"); } else { - Assert.notNull(direction, "Default indexes require a direction"); + if (!(Type.HASH.equals(type) || Type.WILDCARD.equals(type) || Type.VECTOR.equals(type))) { + Assert.notNull(direction, "Default indexes require a direction"); + } } this.key = key; @@ -60,16 +76,43 @@ private IndexField(String key, @Nullable Direction direction, @Nullable Type typ public static IndexField create(String key, Direction order) { - Assert.notNull(order, "Direction must not be null!"); + Assert.notNull(order, "Direction must not be null"); return new IndexField(key, order, Type.DEFAULT); } + public static IndexField vector(String key) { + return new IndexField(key, null, Type.VECTOR); + } + + /** + * Creates a {@literal hashed} {@link IndexField} for the given key. + * + * @param key must not be {@literal null} or empty. + * @return new instance of {@link IndexField}. + * @since 2.2 + */ + static IndexField hashed(String key) { + return new IndexField(key, null, Type.HASH); + } + + /** + * Creates a {@literal wildcard} {@link IndexField} for the given key. The {@code key} must follow the + * {@code fieldName.$**} notation. + * + * @param key must not be {@literal null} or empty. + * @return new instance of {@link IndexField}. + * @since 3.3 + */ + static IndexField wildcard(String key) { + return new IndexField(key, null, Type.WILDCARD); + } + /** * Creates a geo {@link IndexField} for the given key. * * @param key must not be {@literal null} or empty. - * @return + * @return new instance of {@link IndexField}. 
*/ public static IndexField geo(String key) { return new IndexField(key, null, Type.GEO); @@ -120,31 +163,41 @@ public boolean isText() { return Type.TEXT.equals(type); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) + /** + * Returns whether the {@link IndexField} is {@literal hashed}. + * + * @return {@literal true} if {@link IndexField} is hashed. + * @since 2.2 */ + public boolean isHashed() { + return Type.HASH.equals(type); + } + + /** + * Returns whether the {@link IndexField} contains a {@literal wildcard} expression. + * + * @return {@literal true} if {@link IndexField} contains a wildcard {@literal $**}. + * @since 3.3 + */ + public boolean isWildcard() { + return Type.WILDCARD.equals(type); + } + @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof IndexField)) { + if (!(obj instanceof IndexField other)) { return false; } - IndexField that = (IndexField) obj; - - return this.key.equals(that.key) && ObjectUtils.nullSafeEquals(this.direction, that.direction) - && this.type == that.type; + return this.key.equals(other.key) && ObjectUtils.nullSafeEquals(this.direction, other.direction) + && this.type == other.type; } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -156,10 +209,6 @@ public int hashCode() { return result; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return String.format("IndexField [ key: %s, direction: %s, type: %s, weight: %s]", key, direction, type, weight); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexFilter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexFilter.java index f2bf54506a..097075811b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexFilter.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexFilter.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -29,7 +29,7 @@ public interface IndexFilter { /** * Get the raw (unmapped) filter expression. * - * @return + * @return never {@literal null}. */ Document getFilterObject(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java index 1f253d60d7..de7153bfb5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,19 +17,25 @@ import static org.springframework.data.domain.Sort.Direction.*; +import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Optional; +import java.util.stream.Collectors; import org.bson.Document; +import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.NumberUtils; import org.springframework.util.ObjectUtils; /** + * Index information for a MongoDB index. + * * @author Mark Pollack * @author Oliver Gierke * @author Christoph Strobl @@ -37,8 +43,8 @@ */ public class IndexInfo { - private static final Double ONE = Double.valueOf(1); - private static final Double MINUS_ONE = Double.valueOf(-1); + private static final Double ONE = 1.0; + private static final Double MINUS_ONE = (double) -1; private static final Collection TWO_D_IDENTIFIERS = Arrays.asList("2d", "2dsphere"); private final List indexFields; @@ -47,8 +53,11 @@ public class IndexInfo { private final boolean unique; private final boolean sparse; private final String language; + private final boolean hidden; + private @Nullable Duration expireAfter; private @Nullable String partialFilterExpression; private @Nullable Document collation; + private @Nullable Document wildcardProjection; public IndexInfo(List indexFields, String name, boolean unique, boolean sparse, String language) { @@ -57,13 +66,25 @@ public IndexInfo(List indexFields, String name, boolean unique, bool this.unique = unique; this.sparse = sparse; this.language = language; + this.hidden = false; + } + + public 
IndexInfo(List indexFields, String name, boolean unique, boolean sparse, String language, + boolean hidden) { + + this.indexFields = Collections.unmodifiableList(indexFields); + this.name = name; + this.unique = unique; + this.sparse = sparse; + this.language = language; + this.hidden = hidden; } /** * Creates new {@link IndexInfo} parsing required properties from the given {@literal sourceDocument}. * - * @param sourceDocument - * @return + * @param sourceDocument never {@literal null}. + * @return new instance of {@link IndexInfo}. * @since 1.10 */ public static IndexInfo indexInfoOf(Document sourceDocument) { @@ -91,31 +112,65 @@ public static IndexInfo indexInfoOf(Document sourceDocument) { } else { - Double keyValue = new Double(value.toString()); + if (ObjectUtils.nullSafeEquals("hashed", value)) { + indexFields.add(IndexField.hashed(key)); + } else if (key.endsWith("$**")) { + indexFields.add(IndexField.wildcard(key)); + } else { + + Double keyValue = Double.valueOf(value.toString()); - if (ONE.equals(keyValue)) { - indexFields.add(IndexField.create(key, ASC)); - } else if (MINUS_ONE.equals(keyValue)) { - indexFields.add(IndexField.create(key, DESC)); + if (ONE.equals(keyValue)) { + indexFields.add(IndexField.create(key, ASC)); + } else if (MINUS_ONE.equals(keyValue)) { + indexFields.add(IndexField.create(key, DESC)); + } } } } String name = sourceDocument.get("name").toString(); - boolean unique = sourceDocument.containsKey("unique") ? (Boolean) sourceDocument.get("unique") : false; - boolean sparse = sourceDocument.containsKey("sparse") ? (Boolean) sourceDocument.get("sparse") : false; - String language = sourceDocument.containsKey("default_language") ? (String) sourceDocument.get("default_language") + boolean unique = sourceDocument.get("unique", false); + boolean sparse = sourceDocument.get("sparse", false); + boolean hidden = sourceDocument.getBoolean("hidden", false); + String language = sourceDocument.containsKey("default_language") ? 
sourceDocument.getString("default_language") : ""; - String partialFilter = sourceDocument.containsKey("partialFilterExpression") - ? ((Document) sourceDocument.get("partialFilterExpression")).toJson() : ""; - IndexInfo info = new IndexInfo(indexFields, name, unique, sparse, language); + String partialFilter = extractPartialFilterString(sourceDocument); + + IndexInfo info = new IndexInfo(indexFields, name, unique, sparse, language, hidden); info.partialFilterExpression = partialFilter; info.collation = sourceDocument.get("collation", Document.class); + + if (sourceDocument.containsKey("expireAfterSeconds")) { + + Number expireAfterSeconds = sourceDocument.get("expireAfterSeconds", Number.class); + info.expireAfter = Duration.ofSeconds(NumberUtils.convertNumberToTargetClass(expireAfterSeconds, Long.class)); + } + + if (sourceDocument.containsKey("wildcardProjection")) { + info.wildcardProjection = sourceDocument.get("wildcardProjection", Document.class); + } + return info; } + /** + * @param sourceDocument never {@literal null}. + * @return the {@link String} representation of the partial filter {@link Document}. + * @since 2.1.11 + */ + @Nullable + private static String extractPartialFilterString(Document sourceDocument) { + + if (!sourceDocument.containsKey("partialFilterExpression")) { + return null; + } + + return BsonUtils.toJson(sourceDocument.get("partialFilterExpression", Document.class)); + } + /** * Returns the individual index fields of the index. 
* @@ -133,15 +188,9 @@ public List getIndexFields() { */ public boolean isIndexForFields(Collection keys) { - Assert.notNull(keys, "Collection of keys must not be null!"); - - List indexKeys = new ArrayList(indexFields.size()); - - for (IndexField field : indexFields) { - indexKeys.add(field.getKey()); - } + Assert.notNull(keys, "Collection of keys must not be null"); - return indexKeys.containsAll(keys); + return this.indexFields.stream().map(IndexField::getKey).collect(Collectors.toSet()).containsAll(keys); } public String getName() { @@ -183,30 +232,72 @@ public Optional getCollation() { return Optional.ofNullable(collation); } + /** + * Get {@literal wildcardProjection} information. + * + * @return {@link Optional#empty() empty} if not set. + * @since 3.3 + */ + public Optional getWildcardProjection() { + return Optional.ofNullable(wildcardProjection); + } + + /** + * Get the duration after which documents within the index expire. + * + * @return the expiration time if set, {@link Optional#empty()} otherwise. + * @since 2.2 + */ + public Optional getExpireAfter() { + return Optional.ofNullable(expireAfter); + } + + /** + * @return {@literal true} if a hashed index field is present. + * @since 2.2 + */ + public boolean isHashed() { + return getIndexFields().stream().anyMatch(IndexField::isHashed); + } + + /** + * @return {@literal true} if a wildcard index field is present. 
+ * @since 3.3 + */ + public boolean isWildcard() { + return getIndexFields().stream().anyMatch(IndexField::isWildcard); + } + + public boolean isHidden() { + return hidden; + } + @Override public String toString() { + return "IndexInfo [indexFields=" + indexFields + ", name=" + name + ", unique=" + unique + ", sparse=" + sparse + ", language=" + language + ", partialFilterExpression=" + partialFilterExpression + ", collation=" + collation - + "]"; + + ", expireAfterSeconds=" + ObjectUtils.nullSafeToString(expireAfter) + ", hidden=" + hidden + "]"; } @Override public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ObjectUtils.nullSafeHashCode(indexFields); - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + (sparse ? 1231-2018 : 1237); - result = prime * result + (unique ? 1231-2018 : 1237); - result = prime * result + ObjectUtils.nullSafeHashCode(language); - result = prime * result + ObjectUtils.nullSafeHashCode(partialFilterExpression); - result = prime * result + ObjectUtils.nullSafeHashCode(collation); + int result = 17; + result += 31 * ObjectUtils.nullSafeHashCode(indexFields); + result += 31 * ObjectUtils.nullSafeHashCode(name); + result += 31 * ObjectUtils.nullSafeHashCode(unique); + result += 31 * ObjectUtils.nullSafeHashCode(sparse); + result += 31 * ObjectUtils.nullSafeHashCode(language); + result += 31 * ObjectUtils.nullSafeHashCode(partialFilterExpression); + result += 31 * ObjectUtils.nullSafeHashCode(collation); + result += 31 * ObjectUtils.nullSafeHashCode(expireAfter); + result += 31 * ObjectUtils.nullSafeHashCode(hidden); return result; } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } @@ -243,10 +334,16 @@ public boolean equals(Object obj) { if (!ObjectUtils.nullSafeEquals(partialFilterExpression, other.partialFilterExpression)) { return false; } - - if 
(!ObjectUtils.nullSafeEquals(collation, collation)) { + if (!ObjectUtils.nullSafeEquals(collation, other.collation)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(expireAfter, other.expireAfter)) { + return false; + } + if (hidden != other.hidden) { return false; } return true; } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperations.java index 645c0fb69c..88e6d7a815 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -32,9 +32,33 @@ public interface IndexOperations { * class. If not it will be created. * * @param indexDefinition must not be {@literal null}. + * @return the index name. + * @deprecated since 4.5, in favor of {@link #createIndex(IndexDefinition)}. */ + @Deprecated(since = "4.5", forRemoval = true) String ensureIndex(IndexDefinition indexDefinition); + /** + * Create the index for the provided {@link IndexDefinition} exists for the collection indicated by the entity class. + * If not it will be created. + * + * @param indexDefinition must not be {@literal null}. + * @return the index name. 
+ * @since 4.5 + */ + default String createIndex(IndexDefinition indexDefinition) { + return ensureIndex(indexDefinition); + } + + /** + * Alters the index with given {@literal name}. + * + * @param name name of index to change. + * @param options index options. + * @since 4.1 + */ + void alterIndex(String name, IndexOptions options); + /** * Drops an index from this collection. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsAdapter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsAdapter.java index 7690e1186f..613a3dc4f4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsAdapter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsAdapter.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -36,7 +36,7 @@ public interface IndexOperationsAdapter extends IndexOperations { */ static IndexOperationsAdapter blocking(ReactiveIndexOperations reactiveIndexOperations) { - Assert.notNull(reactiveIndexOperations, "ReactiveIndexOperations must not be null!"); + Assert.notNull(reactiveIndexOperations, "ReactiveIndexOperations must not be null"); return new IndexOperationsAdapter() { @@ -50,6 +50,11 @@ public void dropIndex(String name) { reactiveIndexOperations.dropIndex(name).block(); } + @Override + public void alterIndex(String name, IndexOptions options) { + reactiveIndexOperations.alterIndex(name, options); + } + @Override public void dropAllIndexes() { reactiveIndexOperations.dropAllIndexes().block(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsProvider.java index b0c8d935af..ca3d951c94 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsProvider.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOperationsProvider.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,22 +13,38 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb.core.index; +import org.springframework.lang.Nullable; + /** - * TODO: Revisit for a better pattern. + * Provider interface to obtain {@link IndexOperations} by MongoDB collection name or entity type. * * @author Mark Paluch * @author Jens Schauder * @since 2.0 */ +@FunctionalInterface public interface IndexOperationsProvider { /** - * Returns the operations that can be performed on indexes + * Returns the operations that can be performed on indexes. + * + * @param collectionName name of the MongoDB collection, must not be {@literal null}. + * @return index operations on the named collection + */ + default IndexOperations indexOps(String collectionName) { + return indexOps(collectionName, null); + } + + /** + * Returns the operations that can be performed on indexes. * + * @param collectionName name of the MongoDB collection, must not be {@literal null}. + * @param type the type used for field mapping. Can be {@literal null}. 
* @return index operations on the named collection + * @since 3.2 */ - IndexOperations indexOps(String collectionName); + IndexOperations indexOps(String collectionName, @Nullable Class type); + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOptions.java new file mode 100644 index 0000000000..887542cb0c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexOptions.java @@ -0,0 +1,160 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.time.Duration; + +import org.bson.Document; +import org.springframework.lang.Nullable; + +/** + * Changeable properties of an index. Can be used for index creation and modification. + * + * @author Christoph Strobl + * @since 4.1 + */ +public class IndexOptions { + + @Nullable + private Duration expire; + + @Nullable + private Boolean hidden; + + @Nullable + private Unique unique; + + public enum Unique { + + NO, + + /** + * When unique is true the index rejects duplicate entries. + */ + YES, + + /** + * An existing index is not checked for pre-existing, duplicate index entries but inserting new duplicate entries + * fails. 
+ */ + PREPARE + } + + /** + * @return new empty instance of {@link IndexOptions}. + */ + public static IndexOptions none() { + return new IndexOptions(); + } + + /** + * @return new instance of {@link IndexOptions} having the {@link Unique#YES} flag set. + */ + public static IndexOptions unique() { + + IndexOptions options = new IndexOptions(); + options.unique = Unique.YES; + return options; + } + + /** + * @return new instance of {@link IndexOptions} having the hidden flag set. + */ + public static IndexOptions hidden() { + + IndexOptions options = new IndexOptions(); + options.hidden = true; + return options; + } + + /** + * @return new instance of {@link IndexOptions} with given expiration. + */ + public static IndexOptions expireAfter(Duration duration) { + + IndexOptions options = new IndexOptions(); + options.expire = duration; + return options; + } + + /** + * @return the expiration time. A {@link Duration#isNegative() negative value} represents no expiration, {@literal null} if not set. + */ + @Nullable + public Duration getExpire() { + return expire; + } + + /** + * @param expire must not be {@literal null}. + */ + public void setExpire(Duration expire) { + this.expire = expire; + } + + /** + * @return {@literal true} if hidden, {@literal null} if not set. + */ + @Nullable + public Boolean isHidden() { + return hidden; + } + + /** + * @param hidden + */ + public void setHidden(boolean hidden) { + this.hidden = hidden; + } + + /** + * @return the unique property value, {@literal null} if not set. + */ + @Nullable + public Unique getUnique() { + return unique; + } + + /** + * @param unique must not be {@literal null}. 
+ */ + public void setUnique(Unique unique) { + this.unique = unique; + } + + /** + * @return the store native representation + */ + public Document toDocument() { + + Document document = new Document(); + if(unique != null) { + switch (unique) { + case NO -> document.put("unique", false); + case YES -> document.put("unique", true); + case PREPARE -> document.put("prepareUnique", true); + } + } + if(hidden != null) { + document.put("hidden", hidden); + } + + if (expire != null && !expire.isNegative()) { + document.put("expireAfterSeconds", expire.getSeconds()); + } + return document; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexPredicate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexPredicate.java index 912ff727d3..362247725f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexPredicate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexPredicate.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexResolver.java index c374350cd1..8428941474 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,25 +15,67 @@ */ package org.springframework.data.mongodb.core.index; -import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.util.TypeInformation; +import org.springframework.util.Assert; /** * {@link IndexResolver} finds those {@link IndexDefinition}s to be created for a given class. + *

          + * The {@link IndexResolver} considers index annotations like {@link Indexed}, {@link GeoSpatialIndexed}, + * {@link HashIndexed}, {@link TextIndexed} and {@link WildcardIndexed} on properties as well as {@link CompoundIndex} + * and {@link WildcardIndexed} on types. + *

          + * Unless specified otherwise the index name will be created out of the keys/path involved in the index.
          + * {@link TextIndexed} properties are collected into a single index that covers the detected fields.
          + * {@link java.util.Map} like structures, unless annotated with {@link WildcardIndexed}, are skipped because the + * {@link java.util.Map.Entry#getKey() map key}, which cannot be resolved from static metadata, needs to be part of the + * index. * * @author Christoph Strobl * @author Thomas Darimont + * @author Mark Paluch * @since 1.5 */ -interface IndexResolver { +public interface IndexResolver { /** - * Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s are created - * for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}. + * Creates a new {@link IndexResolver} given {@link MongoMappingContext}. * - * @param typeInformation + * @param mappingContext must not be {@literal null}. + * @return the new {@link IndexResolver}. + * @since 2.2 + */ + static IndexResolver create( + MappingContext, MongoPersistentProperty> mappingContext) { + + Assert.notNull(mappingContext, "MongoMappingContext must not be null"); + + return new MongoPersistentEntityIndexResolver(mappingContext); + } + + /** + * Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s + * are created for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}. + * + * @param typeInformation must not be {@literal null}. + * @return Empty {@link Iterable} in case no {@link IndexDefinition} could be resolved for type. + */ + Iterable resolveIndexFor(TypeInformation typeInformation); + + /** + * Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s + * are created for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}. + * + * @param entityType must not be {@literal null}. * @return Empty {@link Iterable} in case no {@link IndexDefinition} could be resolved for type. 
+ * @see 2.2 */ - Iterable resolveIndexFor(TypeInformation typeInformation); + default Iterable resolveIndexFor(Class entityType) { + return resolveIndexFor(TypeInformation.of(entityType)); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Indexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Indexed.java index d8618bac8a..0866556c10 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Indexed.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/Indexed.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,6 +20,10 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.mongodb.core.annotation.Collation; +import org.springframework.data.mongodb.core.mapping.Document; + /** * Mark a field to be indexed using MongoDB's indexing feature. * @@ -30,37 +34,43 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Jordi Llach + * @author Mark Paluch + * @author Stefan Tirea */ -@Target({ElementType.ANNOTATION_TYPE, ElementType.FIELD}) +@Collation +@Target({ ElementType.ANNOTATION_TYPE, ElementType.FIELD }) @Retention(RetentionPolicy.RUNTIME) public @interface Indexed { /** * If set to true reject all documents that contain a duplicate value for the indexed field. 
* - * @return - * @see https://docs.mongodb.org/manual/core/index-unique/ + * @return {@literal false} by default. + * @see https://docs.mongodb.org/manual/core/index-unique/ */ boolean unique() default false; - IndexDirection direction() default IndexDirection.ASCENDING; - /** - * If set to true index will skip over any document that is missing the indexed field. + * The index sort direction. * - * @return - * @see https://docs.mongodb.org/manual/core/index-sparse/ + * @return {@link IndexDirection#ASCENDING} by default. */ - boolean sparse() default false; + IndexDirection direction() default IndexDirection.ASCENDING; /** - * @return - * @see https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping + * If set to true index will skip over any document that is missing the indexed field.
          + * Must not be used with {@link #partialFilter()}. + * + * @return {@literal false} by default. + * @see https://docs.mongodb.org/manual/core/index-sparse/ */ - boolean dropDups() default false; + boolean sparse() default false; /** - * Index name.
          + * Index name either as plain value or as {@link org.springframework.expression.spel.standard.SpelExpression template + * expression}.
          *
          * The name will only be applied as is when defined on root level. For usage on nested or embedded structures the * provided name will be prefixed with the path leading to the entity.
          @@ -78,6 +88,7 @@ * @Document * class Hybrid { * @Indexed(name="index") String h1; + * @Indexed(name="#{@myBean.indexName}") String h2; * } * * class Nested { @@ -93,10 +104,11 @@ * db.root.createIndex( { hybrid.h1: 1 } , { name: "hybrid.index" } ) * db.root.createIndex( { nested.n1: 1 } , { name: "nested.index" } ) * db.hybrid.createIndex( { h1: 1} , { name: "index" } ) + * db.hybrid.createIndex( { h2: 1} , { name: the value myBean.getIndexName() returned } ) * *

          * - * @return + * @return empty String by default. */ String name() default ""; @@ -104,7 +116,7 @@ * If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults * to {@literal false}. * - * @return + * @return {@literal false} by default. * @since 1.5 */ boolean useGeneratedName() default false; @@ -112,16 +124,72 @@ /** * If {@literal true} the index will be created in the background. * - * @return - * @see https://docs.mongodb.org/manual/core/indexes/#background-construction + * @return {@literal false} by default. + * @see https://docs.mongodb.org/manual/core/indexes/#background-construction */ boolean background() default false; /** * Configures the number of seconds after which the collection should expire. Defaults to -1 for no expiry. * - * @return - * @see https://docs.mongodb.org/manual/tutorial/expire-data/ + * @return {@literal -1} by default. + * @see https://docs.mongodb.org/manual/tutorial/expire-data/ + * @deprecated since 4.4 - Please use {@link #expireAfter()} instead. */ + @Deprecated(since="4.4", forRemoval = true) int expireAfterSeconds() default -1; + + /** + * Alternative for {@link #expireAfterSeconds()} to configure the timeout after which the document should expire. + * Defaults to an empty {@link String} for no expiry. Accepts numeric values followed by their unit of measure: + *
            + *
          • d: Days
          • + *
          • h: Hours
          • + *
          • m: Minutes
          • + *
          • s: Seconds
          • + *
          • Alternatively: A Spring {@literal template expression}. The expression can result in a + * {@link java.time.Duration} or a valid expiration {@link String} according to the already mentioned + * conventions.
          • + *
          + * Supports ISO-8601 style. + * + *
          +	 * @Indexed(expireAfter = "10s") String expireAfterTenSeconds;
          +	 * @Indexed(expireAfter = "1d") String expireAfterOneDay;
          +	 * @Indexed(expireAfter = "P2D") String expireAfterTwoDays;
          +	 * @Indexed(expireAfter = "#{@mySpringBean.timeout}") String expireAfterTimeoutObtainedFromSpringBean;
          +	 * 
          + * + * @return empty by default. + * @since 2.2 + */ + String expireAfter() default ""; + + /** + * Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}.
          + * Must not be used with {@link #sparse() sparse = true}. + * + * @return empty by default. + * @see https://docs.mongodb.com/manual/core/index-partial/ + * @since 3.1 + */ + String partialFilter() default ""; + + /** + * The actual collation definition in JSON format or a + * {@link org.springframework.expression.spel.standard.SpelExpression template expression} resolving to either a JSON + * String or a {@link org.bson.Document}. The keys of the JSON document are configuration options for the collation + * (language-specific rules for string comparison) applied to the indexed based on the field value. + *

          + * NOTE: Overrides {@link Document#collation()}. + * + * @return empty by default. + * @see https://www.mongodb.com/docs/manual/reference/collation/ + * @since 4.0 + */ + @AliasFor(annotation = Collation.class, attribute = "value") + String collation() default ""; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoMappingEventPublisher.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoMappingEventPublisher.java index b43992dc0e..940d676fdc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoMappingEventPublisher.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoMappingEventPublisher.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,10 +18,9 @@ import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.ApplicationListener; import org.springframework.data.mapping.context.MappingContextEvent; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; import org.springframework.util.Assert; @@ -38,7 +37,20 @@ */ public class MongoMappingEventPublisher implements ApplicationEventPublisher { - private final MongoPersistentEntityIndexCreator indexCreator; + private final ApplicationListener> indexCreator; + + /** + * Creates a new {@link MongoMappingEventPublisher} for the given {@link ApplicationListener}. + * + * @param indexCreator must not be {@literal null}. + * @since 2.1 + */ + public MongoMappingEventPublisher(ApplicationListener> indexCreator) { + + Assert.notNull(indexCreator, "ApplicationListener must not be null"); + + this.indexCreator = indexCreator; + } /** * Creates a new {@link MongoMappingEventPublisher} for the given {@link MongoPersistentEntityIndexCreator}. 
@@ -47,24 +59,17 @@ public class MongoMappingEventPublisher implements ApplicationEventPublisher { */ public MongoMappingEventPublisher(MongoPersistentEntityIndexCreator indexCreator) { - Assert.notNull(indexCreator, "MongoPersistentEntityIndexCreator must not be null!"); + Assert.notNull(indexCreator, "MongoPersistentEntityIndexCreator must not be null"); + this.indexCreator = indexCreator; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationEventPublisher#publishEvent(org.springframework.context.ApplicationEvent) - */ @SuppressWarnings("unchecked") public void publishEvent(ApplicationEvent event) { - if (event instanceof MappingContextEvent) { - indexCreator.onApplicationEvent((MappingContextEvent, MongoPersistentProperty>) event); + if (event instanceof MappingContextEvent mappingContextEvent) { + indexCreator.onApplicationEvent(mappingContextEvent); } } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationEventPublisher#publishEvent(java.lang.Object) - */ public void publishEvent(Object event) {} } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreator.java index 38323979c5..e20b0704cc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreator.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,15 +19,15 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + import org.springframework.context.ApplicationListener; import org.springframework.dao.DataIntegrityViolationException; import org.springframework.data.mapping.PersistentEntity; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mapping.context.MappingContextEvent; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.UncategorizedMongoDbException; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; @@ -53,7 +53,7 @@ */ public class MongoPersistentEntityIndexCreator implements ApplicationListener> { - private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class); + private static final Log LOGGER = LogFactory.getLog(MongoPersistentEntityIndexCreator.class); private final Map, Boolean> classesSeen = new ConcurrentHashMap, Boolean>(); private final IndexOperationsProvider indexOperationsProvider; @@ -62,28 +62,30 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener event) { if (!event.wasEmittedBy(mappingContext)) { @@ -107,8 +105,9 @@ public void onApplicationEvent(MappingContextEvent event) { PersistentEntity entity = event.getPersistentEntity(); // 
Double check type as Spring infrastructure does not consider nested generics - if (entity instanceof MongoPersistentEntity) { - checkForIndexes((MongoPersistentEntity) entity); + if (entity instanceof MongoPersistentEntity mongoPersistentEntity) { + + checkForIndexes(mongoPersistentEntity); } } @@ -121,7 +120,7 @@ private void checkForIndexes(final MongoPersistentEntity entity) { this.classesSeen.put(type, Boolean.TRUE); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Analyzing class " + type + " for index information."); + LOGGER.debug("Analyzing class " + type + " for index information"); } checkForAndCreateIndexes(entity); @@ -131,7 +130,15 @@ private void checkForIndexes(final MongoPersistentEntity entity) { private void checkForAndCreateIndexes(MongoPersistentEntity entity) { if (entity.isAnnotationPresent(Document.class)) { - for (IndexDefinitionHolder indexToCreate : indexResolver.resolveIndexFor(entity.getTypeInformation())) { + + String collection = entity.getCollection(); + + for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(entity.getTypeInformation())) { + + IndexDefinitionHolder indexToCreate = indexDefinition instanceof IndexDefinitionHolder definitionHolder + ? 
definitionHolder + : new IndexDefinitionHolder("", indexDefinition, collection); + createIndex(indexToCreate); } } @@ -144,16 +151,16 @@ void createIndex(IndexDefinitionHolder indexDefinition) { IndexOperations indexOperations = indexOperationsProvider.indexOps(indexDefinition.getCollection()); indexOperations.ensureIndex(indexDefinition); - } catch (UncategorizedMongoDbException ex) { + } catch (DataIntegrityViolationException ex) { - if (ex.getCause() instanceof MongoException && - MongoDbErrorCodes.isDataIntegrityViolationCode(((MongoException) ex.getCause()).getCode())) { + if (ex.getCause() instanceof MongoException mongoException + && MongoDbErrorCodes.isDataIntegrityViolationCode(mongoException.getCode())) { IndexInfo existingIndex = fetchIndexInformation(indexDefinition); - String message = "Cannot create index for '%s' in collection '%s' with keys '%s' and options '%s'."; + String message = "Cannot create index for '%s' in collection '%s' with keys '%s' and options '%s'"; if (existingIndex != null) { - message += " Index already defined as '%s'."; + message += " Index already defined as '%s'"; } throw new DataIntegrityViolationException( @@ -196,8 +203,10 @@ private IndexInfo fetchIndexInformation(@Nullable IndexDefinitionHolder indexDef orElse(null); } catch (Exception e) { - LOGGER.debug( - String.format("Failed to load index information for collection '%s'.", indexDefinition.getCollection()), e); + if(LOGGER.isDebugEnabled()) { + LOGGER.debug( + String.format("Failed to load index information for collection '%s'", indexDefinition.getCollection()), e); + } } return null; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java index 59949f367f..a5988b8c1d 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,8 @@ */ package org.springframework.data.mongodb.core.index; -import lombok.AccessLevel; -import lombok.EqualsAndHashCode; -import lombok.RequiredArgsConstructor; - +import java.lang.annotation.Annotation; +import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -26,31 +24,47 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; +import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.core.annotation.MergedAnnotation; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.domain.Sort; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.AssociationHandler; import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.PersistentEntity; import 
org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mapping.PropertyHandler; +import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.CycleGuard.Path; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.TextIndexIncludeOptions.IncludeStrategy; import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexDefinitionBuilder; import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexedFieldSpec; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.Document; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.mongodb.util.DotPath; +import org.springframework.data.mongodb.util.DurationUtil; +import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.data.mongodb.util.spel.ExpressionUtils; +import org.springframework.data.spel.EvaluationContextProvider; import org.springframework.data.util.TypeInformation; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.spel.standard.SpelExpressionParser; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; /** @@ -63,76 +77,108 @@ * @author Thomas Darimont * @author Martin Macko * @author Mark Paluch + * @author Dave Perryman + * @author Stefan Tirea * @since 1.5 */ public class 
MongoPersistentEntityIndexResolver implements IndexResolver { - private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexResolver.class); + private static final Log LOGGER = LogFactory.getLog(MongoPersistentEntityIndexResolver.class); + private static final SpelExpressionParser PARSER = new SpelExpressionParser(); - private final MongoMappingContext mappingContext; + private final MappingContext, MongoPersistentProperty> mappingContext; + private EvaluationContextProvider evaluationContextProvider = EvaluationContextProvider.DEFAULT; /** * Create new {@link MongoPersistentEntityIndexResolver}. * * @param mappingContext must not be {@literal null}. */ - public MongoPersistentEntityIndexResolver(MongoMappingContext mappingContext) { + public MongoPersistentEntityIndexResolver( + MappingContext, MongoPersistentProperty> mappingContext) { Assert.notNull(mappingContext, "Mapping context must not be null in order to resolve index definitions"); this.mappingContext = mappingContext; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexResolver#resolveIndexForClass(org.springframework.data.util.TypeInformation) - */ @Override public Iterable resolveIndexFor(TypeInformation typeInformation) { return resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(typeInformation)); } /** - * Resolve the {@link IndexDefinition}s for given {@literal root} entity by traversing {@link MongoPersistentProperty} - * scanning for index annotations {@link Indexed}, {@link CompoundIndex} and {@link GeospatialIndex}. The given - * {@literal root} has therefore to be annotated with {@link Document}. + * Resolve the {@link IndexDefinition}s for a given {@literal root} entity by traversing + * {@link MongoPersistentProperty} scanning for index annotations {@link Indexed}, {@link CompoundIndex} and + * {@link GeospatialIndex}. The given {@literal root} has therefore to be annotated with {@link Document}. 
* * @param root must not be null. * @return List of {@link IndexDefinitionHolder}. Will never be {@code null}. * @throws IllegalArgumentException in case of missing {@link Document} annotation marking root entities. */ - public List resolveIndexForEntity(final MongoPersistentEntity root) { + public List resolveIndexForEntity(MongoPersistentEntity root) { - Assert.notNull(root, "Index cannot be resolved for given 'null' entity."); + Assert.notNull(root, "MongoPersistentEntity must not be null"); Document document = root.findAnnotation(Document.class); - Assert.notNull(document, "Given entity is not collection root."); + Assert.notNull(document, () -> String + .format("Entity %s is not a collection root; Make sure to annotate it with @Document", root.getName())); + + verifyWildcardIndexedProjection(root); - final List indexInformation = new ArrayList<>(); - indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", root.getCollection(), root)); - indexInformation.addAll(potentiallyCreateTextIndexDefinition(root)); + List indexInformation = new ArrayList<>(); + String collection = root.getCollection(); + indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", collection, root)); + indexInformation.addAll(potentiallyCreateWildcardIndexDefinitions("", collection, root)); + indexInformation.addAll(potentiallyCreateTextIndexDefinition(root, collection)); root.doWithProperties((PropertyHandler) property -> this .potentiallyAddIndexForProperty(root, property, indexInformation, new CycleGuard())); - indexInformation.addAll(resolveIndexesForDbrefs("", root.getCollection(), root)); + indexInformation.addAll(resolveIndexesForDbrefs("", collection, root)); return indexInformation; } + private void verifyWildcardIndexedProjection(MongoPersistentEntity entity) { + + entity.doWithAll(it -> { + + if (it.isAnnotationPresent(WildcardIndexed.class)) { + + WildcardIndexed indexed = it.getRequiredAnnotation(WildcardIndexed.class); + + if 
(!ObjectUtils.isEmpty(indexed.wildcardProjection())) { + + throw new MappingException(String.format( + "WildcardIndexed.wildcardProjection cannot be used on nested paths; Offending property: %s.%s", + entity.getName(), it.getName())); + } + } + }); + } + private void potentiallyAddIndexForProperty(MongoPersistentEntity root, MongoPersistentProperty persistentProperty, List indexes, CycleGuard guard) { try { + if (isMapWithoutWildcardIndex(persistentProperty)) { + return; + } + if (persistentProperty.isEntity()) { - indexes.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(), - persistentProperty.getFieldName(), Path.of(persistentProperty), root.getCollection(), guard)); + indexes.addAll(resolveIndexForEntity(mappingContext.getPersistentEntity(persistentProperty), + persistentProperty.isUnwrapped() ? "" : persistentProperty.getFieldName(), Path.of(persistentProperty), + root.getCollection(), guard)); } - IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty( + List indexDefinitions = createIndexDefinitionHolderForProperty( persistentProperty.getFieldName(), root.getCollection(), persistentProperty); - if (indexDefinitionHolder != null) { - indexes.add(indexDefinitionHolder); + if (!indexDefinitions.isEmpty()) { + indexes.addAll(indexDefinitions); } } catch (CyclicPropertyReferenceException e) { - LOGGER.info(e.getMessage()); + if (LOGGER.isInfoEnabled()) { + LOGGER.info(e.getMessage()); + } } } @@ -147,58 +193,88 @@ private void potentiallyAddIndexForProperty(MongoPersistentEntity root, Mongo * @return List of {@link IndexDefinitionHolder} representing indexes for given type and its referenced property * types. Will never be {@code null}. 
*/ - private List resolveIndexForClass(final TypeInformation type, final String dotPath, - final Path path, final String collection, final CycleGuard guard) { + private List resolveIndexForClass(TypeInformation type, String dotPath, Path path, + String collection, CycleGuard guard) { + + return resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(type), dotPath, path, collection, guard); + } - MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(type); + private List resolveIndexForEntity(MongoPersistentEntity entity, String dotPath, Path path, + String collection, CycleGuard guard) { - final List indexInformation = new ArrayList<>(); + List indexInformation = new ArrayList<>(); indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions(dotPath, collection, entity)); + indexInformation.addAll(potentiallyCreateWildcardIndexDefinitions(dotPath, collection, entity)); entity.doWithProperties((PropertyHandler) property -> this - .guradAndPotentiallyAddIndexForProperty(property, dotPath, path, collection, indexInformation, guard)); + .guardAndPotentiallyAddIndexForProperty(property, dotPath, path, collection, indexInformation, guard)); indexInformation.addAll(resolveIndexesForDbrefs(dotPath, collection, entity)); return indexInformation; } - private void guradAndPotentiallyAddIndexForProperty(MongoPersistentProperty persistentProperty, String dotPath, + private void guardAndPotentiallyAddIndexForProperty(MongoPersistentProperty persistentProperty, String dotPath, Path path, String collection, List indexes, CycleGuard guard) { - String propertyDotPath = (StringUtils.hasText(dotPath) ? dotPath + "." 
: "") + persistentProperty.getFieldName(); + DotPath propertyDotPath = DotPath.from(dotPath); + + if (!persistentProperty.isUnwrapped()) { + propertyDotPath = propertyDotPath.append(persistentProperty.getFieldName()); + } Path propertyPath = path.append(persistentProperty); guard.protect(persistentProperty, propertyPath); + if (isMapWithoutWildcardIndex(persistentProperty)) { + return; + } + if (persistentProperty.isEntity()) { try { - indexes.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(), propertyDotPath, - propertyPath, collection, guard)); + indexes.addAll(resolveIndexForEntity(mappingContext.getPersistentEntity(persistentProperty), + propertyDotPath.toString(), propertyPath, collection, guard)); } catch (CyclicPropertyReferenceException e) { LOGGER.info(e.getMessage()); } } - IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(propertyDotPath, collection, - persistentProperty); + List indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(), + collection, persistentProperty); - if (indexDefinitionHolder != null) { - indexes.add(indexDefinitionHolder); + if (!indexDefinitions.isEmpty()) { + indexes.addAll(indexDefinitions); } } - @Nullable - private IndexDefinitionHolder createIndexDefinitionHolderForProperty(String dotPath, String collection, + private List createIndexDefinitionHolderForProperty(String dotPath, String collection, MongoPersistentProperty persistentProperty) { + List indices = new ArrayList<>(2); + + if (persistentProperty.isUnwrapped() && (persistentProperty.isAnnotationPresent(Indexed.class) + || persistentProperty.isAnnotationPresent(HashIndexed.class) + || persistentProperty.isAnnotationPresent(GeoSpatialIndexed.class))) { + throw new InvalidDataAccessApiUsageException( + String.format("Index annotation not allowed on unwrapped object for path '%s'", dotPath)); + } + if (persistentProperty.isAnnotationPresent(Indexed.class)) { - return 
createIndexDefinition(dotPath, collection, persistentProperty); + indices.add(createIndexDefinition(dotPath, collection, persistentProperty)); } else if (persistentProperty.isAnnotationPresent(GeoSpatialIndexed.class)) { - return createGeoSpatialIndexDefinition(dotPath, collection, persistentProperty); + indices.add(createGeoSpatialIndexDefinition(dotPath, collection, persistentProperty)); } - return null; + if (persistentProperty.isAnnotationPresent(HashIndexed.class)) { + indices.add(createHashedIndexDefinition(dotPath, collection, persistentProperty)); + } + if (persistentProperty.isAnnotationPresent(WildcardIndexed.class)) { + indices.add(createWildcardIndexDefinition(dotPath, collection, + persistentProperty.getRequiredAnnotation(WildcardIndexed.class), + mappingContext.getPersistentEntity(persistentProperty))); + } + + return indices; } private List potentiallyCreateCompoundIndexDefinitions(String dotPath, String collection, @@ -211,8 +287,20 @@ private List potentiallyCreateCompoundIndexDefinitions(St return createCompoundIndexDefinitions(dotPath, collection, entity); } + private List potentiallyCreateWildcardIndexDefinitions(String dotPath, String collection, + MongoPersistentEntity entity) { + + if (!entity.isAnnotationPresent(WildcardIndexed.class)) { + return Collections.emptyList(); + } + + return Collections.singletonList(new IndexDefinitionHolder(dotPath, + createWildcardIndexDefinition(dotPath, collection, entity.getRequiredAnnotation(WildcardIndexed.class), entity), + collection)); + } + private Collection potentiallyCreateTextIndexDefinition( - MongoPersistentEntity root) { + MongoPersistentEntity root, String collection) { String name = root.getType().getSimpleName() + "_TextIndex"; if (name.getBytes().length > 127) { @@ -236,26 +324,29 @@ private Collection potentiallyCreateTextIndexDe } try { - appendTextIndexInformation("", Path.empty(), indexDefinitionBuilder, root, + appendTextIndexInformation(DotPath.empty(), Path.empty(), 
indexDefinitionBuilder, root, new TextIndexIncludeOptions(IncludeStrategy.DEFAULT), new CycleGuard()); } catch (CyclicPropertyReferenceException e) { LOGGER.info(e.getMessage()); } + if (root.hasCollation()) { + indexDefinitionBuilder.withSimpleCollation(); + } + TextIndexDefinition indexDefinition = indexDefinitionBuilder.build(); if (!indexDefinition.hasFieldSpec()) { return Collections.emptyList(); } - IndexDefinitionHolder holder = new IndexDefinitionHolder("", indexDefinition, root.getCollection()); + IndexDefinitionHolder holder = new IndexDefinitionHolder("", indexDefinition, collection); return Collections.singletonList(holder); } - private void appendTextIndexInformation(final String dotPath, final Path path, - final TextIndexDefinitionBuilder indexDefinitionBuilder, final MongoPersistentEntity entity, - final TextIndexIncludeOptions includeOptions, final CycleGuard guard) { + private void appendTextIndexInformation(DotPath dotPath, Path path, TextIndexDefinitionBuilder indexDefinitionBuilder, + MongoPersistentEntity entity, TextIndexIncludeOptions includeOptions, CycleGuard guard) { entity.doWithProperties(new PropertyHandler() { @@ -264,16 +355,19 @@ public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) guard.protect(persistentProperty, path); - if (persistentProperty.isExplicitLanguageProperty() && !StringUtils.hasText(dotPath)) { + if (persistentProperty.isExplicitLanguageProperty() && dotPath.isEmpty()) { indexDefinitionBuilder.withLanguageOverride(persistentProperty.getFieldName()); } + if (persistentProperty.isMap()) { + return; + } + TextIndexed indexed = persistentProperty.findAnnotation(TextIndexed.class); if (includeOptions.isForce() || indexed != null || persistentProperty.isEntity()) { - String propertyDotPath = (StringUtils.hasText(dotPath) ? dotPath + "." 
: "") - + persistentProperty.getFieldName(); + DotPath propertyDotPath = dotPath.append(persistentProperty.getFieldName()); Path propertyPath = path.append(persistentProperty); @@ -286,7 +380,7 @@ public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) TextIndexIncludeOptions optionsForNestedType = includeOptions; if (!IncludeStrategy.FORCE.equals(includeOptions.getStrategy()) && indexed != null) { optionsForNestedType = new TextIndexIncludeOptions(IncludeStrategy.FORCE, - new TextIndexedFieldSpec(propertyDotPath, weight)); + new TextIndexedFieldSpec(propertyDotPath.toString(), weight)); } try { @@ -295,11 +389,11 @@ public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) } catch (CyclicPropertyReferenceException e) { LOGGER.info(e.getMessage()); } catch (InvalidDataAccessApiUsageException e) { - LOGGER.info(String.format("Potentially invalid index structure discovered. Breaking operation for %s.", + LOGGER.info(String.format("Potentially invalid index structure discovered; Breaking operation for %s", entity.getName()), e); } } else if (includeOptions.isForce() || indexed != null) { - indexDefinitionBuilder.onField(propertyDotPath, weight); + indexDefinitionBuilder.onField(propertyDotPath.toString(), weight); } } @@ -309,11 +403,12 @@ public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) } /** - * Create {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} for {@link CompoundIndexes} of given type. + * Create {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} for {@link CompoundIndexes} of a given + * type. * * @param dotPath The properties {@literal "dot"} path representation from its document root. 
* @param fallbackCollection - * @param type + * @param entity * @return */ protected List createCompoundIndexDefinitions(String dotPath, String fallbackCollection, @@ -337,15 +432,14 @@ protected List createCompoundIndexDefinitions(String dotP return indexDefinitions; } - @SuppressWarnings("deprecation") protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, String collection, CompoundIndex index, MongoPersistentEntity entity) { CompoundIndexDefinition indexDefinition = new CompoundIndexDefinition( - resolveCompoundIndexKeyFromStringDefinition(dotPath, index.def())); + resolveCompoundIndexKeyFromStringDefinition(dotPath, index.def(), entity)); if (!index.useGeneratedName()) { - indexDefinition.named(pathAwareIndexName(index.name(), dotPath, null)); + indexDefinition.named(pathAwareIndexName(index.name(), dotPath, entity, null)); } if (index.unique()) { @@ -360,20 +454,51 @@ protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, St indexDefinition.background(); } + if (StringUtils.hasText(index.partialFilter())) { + indexDefinition.partial(evaluatePartialFilter(index.partialFilter(), entity)); + } + + indexDefinition.collation(resolveCollation(index, entity)); return new IndexDefinitionHolder(dotPath, indexDefinition, collection); } - private org.bson.Document resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString) { + protected IndexDefinitionHolder createWildcardIndexDefinition(String dotPath, String collection, + WildcardIndexed index, @Nullable MongoPersistentEntity entity) { + + WildcardIndex indexDefinition = new WildcardIndex(dotPath); + + if (StringUtils.hasText(index.wildcardProjection()) && ObjectUtils.isEmpty(dotPath)) { + indexDefinition.wildcardProjection(evaluateWildcardProjection(index.wildcardProjection(), entity)); + } + + if (!index.useGeneratedName()) { + indexDefinition.named(pathAwareIndexName(index.name(), dotPath, entity, null)); + } + + if 
(StringUtils.hasText(index.partialFilter())) { + indexDefinition.partial(evaluatePartialFilter(index.partialFilter(), entity)); + } + + indexDefinition.collation(resolveCollation(index, entity)); + return new IndexDefinitionHolder(dotPath, indexDefinition, collection); + } + + private org.bson.Document resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString, + PersistentEntity entity) { if (!StringUtils.hasText(dotPath) && !StringUtils.hasText(keyDefinitionString)) { - throw new InvalidDataAccessApiUsageException("Cannot create index on root level for empty keys."); + throw new InvalidDataAccessApiUsageException("Cannot create index on root level for empty keys"); } if (!StringUtils.hasText(keyDefinitionString)) { return new org.bson.Document(dotPath, 1); } - org.bson.Document dbo = org.bson.Document.parse(keyDefinitionString); + Object keyDefToUse = ExpressionUtils.evaluate(keyDefinitionString, () -> getEvaluationContextForProperty(entity)); + + org.bson.Document dbo = (keyDefToUse instanceof org.bson.Document document) ? document + : org.bson.Document.parse(ObjectUtils.nullSafeToString(keyDefToUse)); + if (!StringUtils.hasText(dotPath)) { return dbo; } @@ -387,19 +512,19 @@ private org.bson.Document resolveCompoundIndexKeyFromStringDefinition(String dot } /** - * Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link Indexed} for given + * Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link Indexed} for a given * {@link MongoPersistentProperty}. * * @param dotPath The properties {@literal "dot"} path representation from its document root. 
* @param collection - * @param persitentProperty + * @param persistentProperty * @return */ @Nullable protected IndexDefinitionHolder createIndexDefinition(String dotPath, String collection, - MongoPersistentProperty persitentProperty) { + MongoPersistentProperty persistentProperty) { - Indexed index = persitentProperty.findAnnotation(Indexed.class); + Indexed index = persistentProperty.findAnnotation(Indexed.class); if (index == null) { return null; @@ -409,7 +534,8 @@ protected IndexDefinitionHolder createIndexDefinition(String dotPath, String col IndexDirection.ASCENDING.equals(index.direction()) ? Sort.Direction.ASC : Sort.Direction.DESC); if (!index.useGeneratedName()) { - indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persitentProperty)); + indexDefinition + .named(pathAwareIndexName(index.name(), dotPath, persistentProperty.getOwner(), persistentProperty)); } if (index.unique()) { @@ -428,9 +554,135 @@ protected IndexDefinitionHolder createIndexDefinition(String dotPath, String col indexDefinition.expire(index.expireAfterSeconds(), TimeUnit.SECONDS); } + if (StringUtils.hasText(index.expireAfter())) { + + if (index.expireAfterSeconds() >= 0) { + throw new IllegalStateException(String.format( + "@Indexed already defines an expiration timeout of %s seconds via Indexed#expireAfterSeconds; Please make to use either expireAfterSeconds or expireAfter", + index.expireAfterSeconds())); + } + + Duration timeout = computeIndexTimeout(index.expireAfter(), + () -> getEvaluationContextForProperty(persistentProperty.getOwner())); + if (!timeout.isNegative()) { + indexDefinition.expire(timeout); + } + } + + if (StringUtils.hasText(index.partialFilter())) { + indexDefinition.partial(evaluatePartialFilter(index.partialFilter(), persistentProperty.getOwner())); + } + + indexDefinition.collation(resolveCollation(index, persistentProperty.getOwner())); return new IndexDefinitionHolder(dotPath, indexDefinition, collection); } + private PartialIndexFilter 
evaluatePartialFilter(String filterExpression, PersistentEntity entity) { + + Object result = ExpressionUtils.evaluate(filterExpression, () -> getEvaluationContextForProperty(entity)); + + if (result instanceof org.bson.Document document) { + return PartialIndexFilter.of(document); + } + + return PartialIndexFilter.of(BsonUtils.parse(filterExpression, null)); + } + + private org.bson.Document evaluateWildcardProjection(String projectionExpression, PersistentEntity entity) { + + Object result = ExpressionUtils.evaluate(projectionExpression, () -> getEvaluationContextForProperty(entity)); + + if (result instanceof org.bson.Document document) { + return document; + } + + return BsonUtils.parse(projectionExpression, null); + } + + private Collation evaluateCollation(String collationExpression, PersistentEntity entity) { + + Object result = ExpressionUtils.evaluate(collationExpression, () -> getEvaluationContextForProperty(entity)); + if (result instanceof org.bson.Document document) { + return Collation.from(document); + } + if (result instanceof Collation collation) { + return collation; + } + if (result instanceof String stringValue) { + return Collation.parse(stringValue); + } + if (result instanceof Map) { + return Collation.from(new org.bson.Document((Map) result)); + } + throw new IllegalStateException("Cannot parse collation " + result); + + } + + /** + * Creates {@link HashedIndex} wrapped in {@link IndexDefinitionHolder} out of {@link HashIndexed} for a given + * {@link MongoPersistentProperty}. + * + * @param dotPath The properties {@literal "dot"} path representation from its document root. 
+ * @param collection + * @param persistentProperty + * @return + * @since 2.2 + */ + @Nullable + protected IndexDefinitionHolder createHashedIndexDefinition(String dotPath, String collection, + MongoPersistentProperty persistentProperty) { + + HashIndexed index = persistentProperty.findAnnotation(HashIndexed.class); + + if (index == null) { + return null; + } + + return new IndexDefinitionHolder(dotPath, HashedIndex.hashed(dotPath), collection); + } + + /** + * Get the default {@link EvaluationContext}. + * + * @return never {@literal null}. + * @since 2.2 + */ + protected EvaluationContext getEvaluationContext() { + return evaluationContextProvider.getEvaluationContext(null); + } + + /** + * Get the {@link EvaluationContext} for a given {@link PersistentEntity entity} the default one. + * + * @param persistentEntity can be {@literal null} + * @return + */ + private EvaluationContext getEvaluationContextForProperty(@Nullable PersistentEntity persistentEntity) { + + if (persistentEntity == null || !(persistentEntity instanceof BasicMongoPersistentEntity)) { + return getEvaluationContext(); + } + + EvaluationContext contextFromEntity = ((BasicMongoPersistentEntity) persistentEntity).getEvaluationContext(null); + + if (contextFromEntity != null && !EvaluationContextProvider.DEFAULT.equals(contextFromEntity)) { + return contextFromEntity; + } + + return getEvaluationContext(); + } + + /** + * Set the {@link EvaluationContextProvider} used for obtaining the {@link EvaluationContext} used to compute + * {@link org.springframework.expression.spel.standard.SpelExpression expressions}. + * + * @param evaluationContextProvider must not be {@literal null}. 
+ * @since 2.2 + */ + public void setEvaluationContextProvider(EvaluationContextProvider evaluationContextProvider) { + this.evaluationContextProvider = evaluationContextProvider; + } + /** * Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link GeoSpatialIndexed} for * {@link MongoPersistentProperty}. @@ -455,17 +707,44 @@ protected IndexDefinitionHolder createGeoSpatialIndexDefinition(String dotPath, indexDefinition.withMin(index.min()).withMax(index.max()); if (!index.useGeneratedName()) { - indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persistentProperty)); + indexDefinition + .named(pathAwareIndexName(index.name(), dotPath, persistentProperty.getOwner(), persistentProperty)); + } + + if (MongoClientVersion.isVersion5orNewer()) { + + Optional defaultBucketSize = MergedAnnotation.of(GeoSpatialIndexed.class).getDefaultValue("bucketSize", + Double.class); + if (!defaultBucketSize.isPresent() || index.bucketSize() != defaultBucketSize.get()) { + indexDefinition.withBucketSize(index.bucketSize()); + } else { + if (LOGGER.isInfoEnabled()) { + LOGGER.info( + "GeoSpatialIndexed.bucketSize no longer supported by Mongo Client 5 or newer. Ignoring bucketSize for path %s." 
+ .formatted(dotPath)); + } + } + } else { + indexDefinition.withBucketSize(index.bucketSize()); } - indexDefinition.typed(index.type()).withBucketSize(index.bucketSize()).withAdditionalField(index.additionalField()); + indexDefinition.typed(index.type()).withAdditionalField(index.additionalField()); return new IndexDefinitionHolder(dotPath, indexDefinition, collection); } - private String pathAwareIndexName(String indexName, String dotPath, @Nullable MongoPersistentProperty property) { + private String pathAwareIndexName(String indexName, String dotPath, @Nullable PersistentEntity entity, + @Nullable MongoPersistentProperty property) { + + String nameToUse = ""; + if (StringUtils.hasText(indexName)) { - String nameToUse = StringUtils.hasText(indexName) ? indexName : ""; + Object result = ExpressionUtils.evaluate(indexName, () -> getEvaluationContextForProperty(entity)); + + if (result != null) { + nameToUse = ObjectUtils.nullSafeToString(result); + } + } if (!StringUtils.hasText(dotPath) || (property != null && dotPath.equals(property.getFieldName()))) { return StringUtils.hasText(nameToUse) ? nameToUse : dotPath; @@ -495,22 +774,61 @@ private void resolveAndAddIndexesForAssociation(Association indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(), + collection, property); - if (indexDefinitionHolder != null) { - indexes.add(indexDefinitionHolder); + if (!indexDefinitions.isEmpty()) { + indexes.addAll(indexDefinitions); } } + /** + * Compute the index timeout value by evaluating a potential + * {@link org.springframework.expression.spel.standard.SpelExpression} and parsing the final value. + * + * @param timeoutValue must not be {@literal null}. + * @param evaluationContext must not be {@literal null}. + * @return never {@literal null} + * @since 2.2 + * @throws IllegalArgumentException for invalid duration values. 
+ */ + private static Duration computeIndexTimeout(String timeoutValue, Supplier evaluationContext) { + return DurationUtil.evaluate(timeoutValue, evaluationContext); + } + + /** + * Resolve the "collation" attribute from a given {@link Annotation} if present. + * + * @param annotation + * @param entity + * @return the collation present on either the annotation or the entity as a fallback. Might be {@literal null}. + * @since 4.0 + */ + @Nullable + private Collation resolveCollation(Annotation annotation, @Nullable PersistentEntity entity) { + return MergedAnnotation.from(annotation).getValue("collation", String.class).filter(StringUtils::hasText) + .map(it -> evaluateCollation(it, entity)).orElseGet(() -> { + + if (entity instanceof MongoPersistentEntity mongoPersistentEntity + && mongoPersistentEntity.hasCollation()) { + return mongoPersistentEntity.getCollation(); + } + return null; + }); + } + + private static boolean isMapWithoutWildcardIndex(MongoPersistentProperty property) { + return property.isMap() && !property.isAnnotationPresent(WildcardIndexed.class); + } + /** * {@link CycleGuard} holds information about properties and the paths for accessing those. This information is used * to detect potential cycles within the references. @@ -570,8 +888,6 @@ private String createMapKey(MongoPersistentProperty property) { * @author Christoph Strobl * @author Mark Paluch */ - @RequiredArgsConstructor(access = AccessLevel.PRIVATE) - @EqualsAndHashCode static class Path { private static final Path EMPTY = new Path(Collections.emptyList(), false); @@ -579,6 +895,11 @@ static class Path { private final List> elements; private final boolean cycle; + private Path(List> elements, boolean cycle) { + this.elements = elements; + this.cycle = cycle; + } + /** * @return an empty {@link Path}. 
* @since 1.10.8 @@ -622,10 +943,6 @@ public boolean isCycle() { return cycle; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return this.elements.isEmpty() ? "(empty)" : toPath(this.elements.iterator()); @@ -680,6 +997,28 @@ private static String toPath(Iterator> iterator) { return builder.toString(); } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + Path that = (Path) o; + + if (this.cycle != that.cycle) { + return false; + } + return ObjectUtils.nullSafeEquals(this.elements, that.elements); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(elements); + result = 31 * result + (cycle ? 1 : 0); + return result; + } } } @@ -702,10 +1041,6 @@ public CyclicPropertyReferenceException(String propertyName, @Nullable Class this.dotPath = dotPath; } - /* - * (non-Javadoc) - * @see java.lang.Throwable#getMessage() - */ @Override public String getMessage() { return String.format("Found cycle for field '%s' in type '%s' for path '%s'", propertyName, @@ -760,23 +1095,20 @@ public IndexDefinition getIndexDefinition() { return indexDefinition; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexKeys() - */ @Override public org.bson.Document getIndexKeys() { return indexDefinition.getIndexKeys(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexOptions() - */ @Override public org.bson.Document getIndexOptions() { return indexDefinition.getIndexOptions(); } + + @Override + public String toString() { + return "IndexDefinitionHolder{" + "indexKeys=" + getIndexKeys() + '}'; + } } /** @@ -786,7 +1118,7 @@ public org.bson.Document getIndexOptions() { static class TextIndexIncludeOptions { enum IncludeStrategy { - FORCE, DEFAULT; + FORCE, DEFAULT } private final IncludeStrategy 
strategy; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/PartialIndexFilter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/PartialIndexFilter.java index 5d8e95e4da..8b835f72c5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/PartialIndexFilter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/PartialIndexFilter.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,29 +16,29 @@ package org.springframework.data.mongodb.core.index; import org.bson.Document; - -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - import org.springframework.data.mongodb.core.query.CriteriaDefinition; - -import com.mongodb.DBObject; +import org.springframework.util.Assert; /** - * {@link IndexFilter} implementation for usage with plain {@link DBObject} as well as {@link CriteriaDefinition} filter + * {@link IndexFilter} implementation for usage with plain {@link Document} as well as {@link CriteriaDefinition} filter * expressions. 
* * @author Christoph Strobl * @since 1.10 */ -@RequiredArgsConstructor(access = AccessLevel.PRIVATE) public class PartialIndexFilter implements IndexFilter { - private final @NonNull Object filterExpression; + private final Object filterExpression; + + private PartialIndexFilter(Object filterExpression) { + + Assert.notNull(filterExpression, "FilterExpression must not be null"); + + this.filterExpression = filterExpression; + } /** - * Create new {@link PartialIndexFilter} for given {@link DBObject filter expression}. + * Create new {@link PartialIndexFilter} for given {@link Document filter expression}. * * @param where must not be {@literal null}. * @return @@ -57,21 +57,17 @@ public static PartialIndexFilter of(CriteriaDefinition where) { return new PartialIndexFilter(where); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexFilter#getFilterObject() - */ public Document getFilterObject() { - if (filterExpression instanceof Document) { - return (Document) filterExpression; + if (filterExpression instanceof Document document) { + return document; } - if (filterExpression instanceof CriteriaDefinition) { - return ((CriteriaDefinition) filterExpression).getCriteriaObject(); + if (filterExpression instanceof CriteriaDefinition criteriaDefinition) { + return criteriaDefinition.getCriteriaObject(); } throw new IllegalArgumentException( - String.format("Unknown type %s used as filter expression.", filterExpression.getClass())); + String.format("Unknown type %s used as filter expression", filterExpression.getClass())); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveIndexOperations.java index b96d92f2e1..15b110c08a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveIndexOperations.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveIndexOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -32,9 +32,33 @@ public interface ReactiveIndexOperations { * class. If not it will be created. * * @param indexDefinition must not be {@literal null}. + * @return a {@link Mono} emitting the name of the index on completion. + * @deprecated since 4.5, in favor of {@link #createIndex(IndexDefinition)}. */ + @Deprecated(since = "4.5", forRemoval = true) Mono ensureIndex(IndexDefinition indexDefinition); + /** + * Create the index for the provided {@link IndexDefinition} exists for the collection indicated by the entity class. + * If not it will be created. + * + * @param indexDefinition must not be {@literal null}. + * @return the index name. + * @since 4.5 + */ + default Mono createIndex(IndexDefinition indexDefinition) { + return ensureIndex(indexDefinition); + } + + /** + * Alters the index with given {@literal name}. + * + * @param name name of index to change. + * @param options index options. + * @since 4.1 + */ + Mono alterIndex(String name, IndexOptions options); + /** * Drops an index from this collection. 
* diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveIndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveIndexOperationsProvider.java new file mode 100644 index 0000000000..70dcfa0fbb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveIndexOperationsProvider.java @@ -0,0 +1,34 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +/** + * Provider interface to obtain {@link ReactiveIndexOperations} by MongoDB collection name. + * + * @author Mark Paluch + * @since 2.1 + */ +@FunctionalInterface +public interface ReactiveIndexOperationsProvider { + + /** + * Returns the operations that can be performed on indexes. + * + * @param collectionName name of the MongoDB collection, must not be {@literal null}. 
+ * @return index operations on the named collection + */ + ReactiveIndexOperations indexOps(String collectionName); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveMongoPersistentEntityIndexCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveMongoPersistentEntityIndexCreator.java new file mode 100644 index 0000000000..0d818e19d9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/ReactiveMongoPersistentEntityIndexCreator.java @@ -0,0 +1,196 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.index; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.UncategorizedMongoDbException; +import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.util.MongoDbErrorCodes; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +import com.mongodb.MongoException; + +/** + * Component that inspects {@link MongoPersistentEntity} instances contained in the given {@link MongoMappingContext} + * for indexing metadata and ensures the indexes to be available using reactive infrastructure. + * + * @author Mark Paluch + * @since 2.1 + */ +public class ReactiveMongoPersistentEntityIndexCreator { + + private static final Log LOGGER = LogFactory.getLog(ReactiveMongoPersistentEntityIndexCreator.class); + + private final Map, Boolean> classesSeen = new ConcurrentHashMap, Boolean>(); + private final MongoMappingContext mappingContext; + private final ReactiveIndexOperationsProvider operationsProvider; + private final IndexResolver indexResolver; + + /** + * Creates a new {@link ReactiveMongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext}, + * {@link ReactiveIndexOperationsProvider}. + * + * @param mappingContext must not be {@literal null}. 
+ * @param operationsProvider must not be {@literal null}. + */ + public ReactiveMongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, + ReactiveIndexOperationsProvider operationsProvider) { + this(mappingContext, operationsProvider, IndexResolver.create(mappingContext)); + } + + /** + * Creates a new {@link ReactiveMongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext}, + * {@link ReactiveIndexOperationsProvider}, and {@link IndexResolver}. + * + * @param mappingContext must not be {@literal null}. + * @param operationsProvider must not be {@literal null}. + * @param indexResolver must not be {@literal null}. + */ + public ReactiveMongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, + ReactiveIndexOperationsProvider operationsProvider, IndexResolver indexResolver) { + + Assert.notNull(mappingContext, "MongoMappingContext must not be null"); + Assert.notNull(operationsProvider, "ReactiveIndexOperations must not be null"); + Assert.notNull(indexResolver, "IndexResolver must not be null"); + + this.mappingContext = mappingContext; + this.operationsProvider = operationsProvider; + this.indexResolver = indexResolver; + } + + /** + * Returns whether the current index creator was registered for the given {@link MappingContext}. + * + * @param context + * @return + */ + public boolean isIndexCreatorFor(MappingContext context) { + return this.mappingContext.equals(context); + } + + /** + * Inspect entities for index creation. + * + * @return a {@link Mono} that completes without value after indexes were created. 
+ */ + public Mono checkForIndexes(MongoPersistentEntity entity) { + + Class type = entity.getType(); + + if (!classesSeen.containsKey(type)) { + + if (this.classesSeen.put(type, Boolean.TRUE) == null) { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("Analyzing class " + type + " for index information"); + } + + return checkForAndCreateIndexes(entity); + } + } + + return Mono.empty(); + } + + private Mono checkForAndCreateIndexes(MongoPersistentEntity entity) { + + List> publishers = new ArrayList<>(); + + if (entity.isAnnotationPresent(Document.class)) { + + String collection = entity.getCollection(); + for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(entity.getTypeInformation())) { + + IndexDefinitionHolder indexToCreate = indexDefinition instanceof IndexDefinitionHolder definitionHolder + ? definitionHolder + : new IndexDefinitionHolder("", indexDefinition, collection); + + publishers.add(createIndex(indexToCreate)); + } + } + + return publishers.isEmpty() ? Mono.empty() : Flux.merge(publishers).then(); + } + + Mono createIndex(IndexDefinitionHolder indexDefinition) { + + return operationsProvider.indexOps(indexDefinition.getCollection()).ensureIndex(indexDefinition) // + .onErrorResume(ReactiveMongoPersistentEntityIndexCreator::isDataIntegrityViolation, + e -> translateException(e, indexDefinition)); + + } + + private Mono translateException(Throwable e, IndexDefinitionHolder indexDefinition) { + + Mono existingIndex = fetchIndexInformation(indexDefinition); + + Mono defaultError = Mono.error(new DataIntegrityViolationException( + String.format("Cannot create index for '%s' in collection '%s' with keys '%s' and options '%s'", + indexDefinition.getPath(), indexDefinition.getCollection(), indexDefinition.getIndexKeys(), + indexDefinition.getIndexOptions()), + e.getCause())); + + return existingIndex.flatMap(it -> { + return Mono. 
error(new DataIntegrityViolationException( + String.format("Index already defined as '%s'", indexDefinition.getPath()), e.getCause())); + }).switchIfEmpty(defaultError); + } + + private Mono fetchIndexInformation(IndexDefinitionHolder indexDefinition) { + + Object indexNameToLookUp = indexDefinition.getIndexOptions().get("name"); + + Flux existingIndexes = operationsProvider.indexOps(indexDefinition.getCollection()).getIndexInfo(); + + return existingIndexes // + .filter(indexInfo -> ObjectUtils.nullSafeEquals(indexNameToLookUp, indexInfo.getName())) // + .next() // + .doOnError(e -> { + if(LOGGER.isDebugEnabled()) { + LOGGER.debug( + String.format("Failed to load index information for collection '%s'", indexDefinition.getCollection()), + e); + } + }); + } + + private static boolean isDataIntegrityViolation(Throwable t) { + + if (t instanceof UncategorizedMongoDbException) { + + return t.getCause() instanceof MongoException mongoException + && MongoDbErrorCodes.isDataIntegrityViolationCode(mongoException.getCode()); + } + + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexDefinition.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexDefinition.java new file mode 100644 index 0000000000..9d4315beae --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexDefinition.java @@ -0,0 +1,87 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import org.bson.Document; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; + +/** + * Definition for an Atlas Search Index (Search Index or Vector Index). + * + * @author Marcin Grzejszczak + * @author Mark Paluch + * @since 4.5 + */ +public interface SearchIndexDefinition { + + /** + * @return the name of the index. + */ + String getName(); + + /** + * @return the type of the index. Typically, {@code search} or {@code vectorSearch}. + */ + String getType(); + + /** + * Returns the index document for this index without any potential entity context resolving field name mappings. The + * resulting document contains the index name, type and {@link #getDefinition(TypeInformation, MappingContext) + * definition}. + * + * @return never {@literal null}. + */ + default Document getRawIndexDocument() { + return getIndexDocument(null, null); + } + + /** + * Returns the index document for this index in the context of a potential entity to resolve field name mappings. The + * resulting document contains the index name, type and {@link #getDefinition(TypeInformation, MappingContext) + * definition}. + * + * @param entity can be {@literal null}. + * @param mappingContext + * @return never {@literal null}. 
+ */ + default Document getIndexDocument(@Nullable TypeInformation entity, + @Nullable MappingContext, MongoPersistentProperty> mappingContext) { + + Document document = new Document(); + document.put("name", getName()); + document.put("type", getType()); + document.put("definition", getDefinition(entity, mappingContext)); + + return document; + } + + /** + * Returns the actual index definition for this index in the context of a potential entity to resolve field name + * mappings. Entity and context can be {@literal null} to create a generic index definition without applying field + * name mapping. + * + * @param entity can be {@literal null}. + * @param mappingContext can be {@literal null}. + * @return never {@literal null}. + */ + Document getDefinition(@Nullable TypeInformation entity, + @Nullable MappingContext, MongoPersistentProperty> mappingContext); + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexInfo.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexInfo.java new file mode 100644 index 0000000000..1a657ecf0b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexInfo.java @@ -0,0 +1,129 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.index; + +import java.util.function.Supplier; + +import org.bson.Document; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.Lazy; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; + +/** + * Index information for a MongoDB Search Index. + * + * @author Christoph Strobl + * @since 4.5 + */ +public class SearchIndexInfo { + + private final @Nullable Object id; + private final SearchIndexStatus status; + private final Lazy indexDefinition; + + SearchIndexInfo(@Nullable Object id, SearchIndexStatus status, Supplier indexDefinition) { + this.id = id; + this.status = status; + this.indexDefinition = Lazy.of(indexDefinition); + } + + /** + * Parse a BSON document describing an index into a {@link SearchIndexInfo}. + * + * @param source BSON document describing the index. + * @return a new {@link SearchIndexInfo} instance. + */ + public static SearchIndexInfo parse(String source) { + return of(Document.parse(source)); + } + + /** + * Create an index from its BSON {@link Document} representation into a {@link SearchIndexInfo}. + * + * @param indexDocument BSON document describing the index. + * @return a new {@link SearchIndexInfo} instance. + */ + public static SearchIndexInfo of(Document indexDocument) { + + Object id = indexDocument.get("id"); + SearchIndexStatus status = SearchIndexStatus + .valueOf(indexDocument.get("status", SearchIndexStatus.DOES_NOT_EXIST.name())); + + return new SearchIndexInfo(id, status, () -> readIndexDefinition(indexDocument)); + } + + /** + * The id of the index. Can be {@literal null}, eg. for an index not yet created. + * + * @return can be {@literal null}. 
+ */ + @Nullable + public Object getId() { + return id; + } + + /** + * @return the current status of the index. + */ + public SearchIndexStatus getStatus() { + return status; + } + + /** + * @return the current index definition. + */ + public SearchIndexDefinition getIndexDefinition() { + return indexDefinition.get(); + } + + private static SearchIndexDefinition readIndexDefinition(Document document) { + + String type = document.get("type", "search"); + if (type.equals("vectorSearch")) { + return VectorIndex.of(document); + } + + return new SearchIndexDefinition() { + + @Override + public String getName() { + return document.getString("name"); + } + + @Override + public String getType() { + return type; + } + + @Override + public Document getDefinition(@Nullable TypeInformation entity, + @Nullable MappingContext, MongoPersistentProperty> mappingContext) { + if (document.containsKey("latestDefinition")) { + return document.get("latestDefinition", new Document()); + } + return document.get("definition", new Document()); + } + + @Override + public String toString() { + return getDefinition(null, null).toJson(); + } + }; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexOperations.java new file mode 100644 index 0000000000..ee3f59cf95 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexOperations.java @@ -0,0 +1,75 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import org.springframework.dao.DataAccessException; + +/** + * Search Index operations on a collection for Atlas Search. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.5 + * @see VectorIndex + */ +public interface SearchIndexOperations { + + /** + * Create the index for the given {@link SearchIndexDefinition} in the collection indicated by the entity class. + * + * @param indexDefinition must not be {@literal null}. + * @return the index name. + */ + String createIndex(SearchIndexDefinition indexDefinition); + + /** + * Alters the search index matching the index {@link SearchIndexDefinition#getName() name}. + *

          + * Atlas Search might not support updating indices which raises a {@link DataAccessException}. + * + * @param indexDefinition the index definition. + */ + void updateIndex(SearchIndexDefinition indexDefinition); + + /** + * Check whether an index with the given {@code indexName} exists for the collection indicated by the entity class. To + * ensure an existing index is queryable it is recommended to check its {@link #status(String) status}. + * + * @param indexName name of index to check for presence. + * @return {@literal true} if the index exists; {@literal false} otherwise. + */ + boolean exists(String indexName); + + /** + * Check the actual {@link SearchIndexStatus status} of an index. + * + * @param indexName name of index to get the status for. + * @return the current status of the index or {@link SearchIndexStatus#DOES_NOT_EXIST} if the index cannot be found. + */ + SearchIndexStatus status(String indexName); + + /** + * Drops an index from the collection indicated by the entity class. + * + * @param indexName name of index to drop. + */ + void dropIndex(String indexName); + + /** + * Drops all search indices from the collection indicated by the entity class. + */ + void dropAllIndexes(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexOperationsProvider.java new file mode 100644 index 0000000000..ee87c8d61e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexOperationsProvider.java @@ -0,0 +1,51 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +/** + * Provider interface to obtain {@link SearchIndexOperations} by MongoDB collection name or entity type. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.5 + */ +public interface SearchIndexOperationsProvider { + + /** + * Returns the operations that can be performed on search indexes. + * + * @param collectionName name of the MongoDB collection, must not be {@literal null}. + * @return index operations on the named collection + */ + SearchIndexOperations searchIndexOps(String collectionName); + + /** + * Returns the operations that can be performed on search indexes. + * + * @param type the type used for field mapping. + * @return index operations on the named collection + */ + SearchIndexOperations searchIndexOps(Class type); + + /** + * Returns the operations that can be performed on search indexes. + * + * @param collectionName name of the MongoDB collection, must not be {@literal null}. + * @param type the type used for field mapping. Can be {@literal null}. 
+ * @return index operations on the named collection + */ + SearchIndexOperations searchIndexOps(Class type, String collectionName); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexStatus.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexStatus.java new file mode 100644 index 0000000000..91143d73c6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/SearchIndexStatus.java @@ -0,0 +1,46 @@ +/* + * Copyright 2025. the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +/** + * Representation of different conditions a search index can be in. 
+ * + * @author Christoph Strobl + * @since 4.5 + */ +public enum SearchIndexStatus { + + /** building or re-building the index - might be queryable */ + BUILDING, + + /** nothing to be seen here - not queryable */ + DOES_NOT_EXIST, + + /** will cease to exist - no longer queryable */ + DELETING, + + /** well, this one is broken - not queryable */ + FAILED, + + /** busy with other things, check back later - not queryable */ + PENDING, + + /** ask me anything - queryable */ + READY, + + /** ask me anything about outdated data - still queryable */ + STALE +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexDefinition.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexDefinition.java index 9b9bcde4a4..a87b15de45 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexDefinition.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexDefinition.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,6 +21,7 @@ import org.bson.Document; import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; @@ -40,9 +41,10 @@ public class TextIndexDefinition implements IndexDefinition { private @Nullable String defaultLanguage; private @Nullable String languageOverride; private @Nullable IndexFilter filter; + private @Nullable Collation collation; TextIndexDefinition() { - fieldSpecs = new LinkedHashSet(); + fieldSpecs = new LinkedHashSet<>(); } /** @@ -86,10 +88,6 @@ public boolean hasFieldSpec() { return !fieldSpecs.isEmpty(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexKeys() - */ @Override public Document getIndexKeys() { @@ -101,10 +99,6 @@ public Document getIndexKeys() { return keys; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexOptions() - */ @Override public Document getIndexOptions() { @@ -116,6 +110,10 @@ public Document getIndexOptions() { options.put("default_language", defaultLanguage); } + if (collation != null) { + options.put("collation", collation.toDocument()); + } + Document weightsDocument = new Document(); for (TextIndexedFieldSpec fieldSpec : fieldSpecs) { if (fieldSpec.isWeighted()) { @@ -163,7 +161,7 @@ public TextIndexedFieldSpec(String fieldname) { */ public TextIndexedFieldSpec(String fieldname, @Nullable Float weight) { - Assert.hasText(fieldname, "Text index field cannot be blank."); + Assert.hasText(fieldname, "Text index field cannot be blank"); 
this.fieldname = fieldname; this.weight = weight != null ? weight : 1.0F; } @@ -199,7 +197,7 @@ public int hashCode() { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -207,12 +205,10 @@ public boolean equals(Object obj) { if (obj == null) { return false; } - if (!(obj instanceof TextIndexedFieldSpec)) { + if (!(obj instanceof TextIndexedFieldSpec other)) { return false; } - TextIndexedFieldSpec other = (TextIndexedFieldSpec) obj; - return ObjectUtils.nullSafeEquals(this.fieldname, other.fieldname); } @@ -246,14 +242,14 @@ public TextIndexDefinitionBuilder named(String name) { /** * Define the index to span all fields using wildcard.
          - * NOTE {@link TextIndexDefinition} cannot contain any other fields when defined with wildcard. + * NOTE: {@link TextIndexDefinition} cannot contain any other fields when defined with wildcard. * * @return */ public TextIndexDefinitionBuilder onAllFields() { if (!instance.fieldSpecs.isEmpty()) { - throw new InvalidDataAccessApiUsageException("Cannot add wildcard fieldspect to non empty."); + throw new InvalidDataAccessApiUsageException("Cannot add wildcard fieldspect to non empty"); } this.instance.fieldSpecs.add(ALL_FIELDS); @@ -294,7 +290,7 @@ public TextIndexDefinitionBuilder onField(String fieldname, Float weight) { if (this.instance.fieldSpecs.contains(ALL_FIELDS)) { throw new InvalidDataAccessApiUsageException( - String.format("Cannot add %s to field spec for all fields.", fieldname)); + String.format("Cannot add %s to field spec for all fields", fieldname)); } this.instance.fieldSpecs.add(new TextIndexedFieldSpec(fieldname, weight)); @@ -325,7 +321,7 @@ public TextIndexDefinitionBuilder withLanguageOverride(String fieldname) { if (StringUtils.hasText(this.instance.languageOverride)) { throw new InvalidDataAccessApiUsageException( - String.format("Cannot set language override on %s as it is already defined on %s.", fieldname, + String.format("Cannot set language override on %s as it is already defined on %s", fieldname, this.instance.languageOverride)); } @@ -348,6 +344,17 @@ public TextIndexDefinitionBuilder partial(@Nullable IndexFilter filter) { return this; } + /** + * Configure to use simple {@link Collation}. Required if the collection uses a non-simple collation. 
+ * + * @since 2.2 + */ + public TextIndexDefinitionBuilder withSimpleCollation() { + + this.instance.collation = Collation.simple(); + return this; + } + public TextIndexDefinition build() { return this.instance; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexed.java index cdffd1b0f4..61fc4c05f3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexed.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/TextIndexed.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/VectorIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/VectorIndex.java new file mode 100644 index 0000000000..b46dbf4d0c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/VectorIndex.java @@ -0,0 +1,349 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Consumer; + +import org.bson.Document; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Contract; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * {@link SearchIndexDefinition} for creating MongoDB + * Vector Index required to + * run {@code $vectorSearch} queries. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.5 + */ +public class VectorIndex implements SearchIndexDefinition { + + private final String name; + private final List fields = new ArrayList<>(); + + /** + * Create a new {@link VectorIndex} instance. + * + * @param name The name of the index. + */ + public VectorIndex(String name) { + this.name = name; + } + + /** + * Add a filter field. + * + * @param path dot notation to field/property used for filtering. + * @return this. 
+ */ + @Contract("_ -> this") + public VectorIndex addFilter(String path) { + + Assert.hasText(path, "Path must not be null or empty"); + + return addField(new VectorFilterField(path, "filter")); + } + + /** + * Add a vector field and accept a {@link VectorFieldBuilder} customizer. + * + * @param path dot notation to field/property used for filtering. + * @param customizer customizer function. + * @return this. + */ + @Contract("_, _ -> this") + public VectorIndex addVector(String path, Consumer customizer) { + + Assert.hasText(path, "Path must not be null or empty"); + + VectorFieldBuilder builder = new VectorFieldBuilder(path, "vector"); + customizer.accept(builder); + return addField(builder.build()); + } + + @Override + public String getName() { + return name; + } + + @Override + public String getType() { + return "vectorSearch"; + } + + @Override + public Document getDefinition(@Nullable TypeInformation entity, + @Nullable MappingContext, MongoPersistentProperty> mappingContext) { + + MongoPersistentEntity persistentEntity = entity != null + ? (mappingContext != null ? 
mappingContext.getPersistentEntity(entity) : null) + : null; + + Document definition = new Document(); + List fields = new ArrayList<>(); + definition.put("fields", fields); + + for (SearchField field : this.fields) { + + Document filter = new Document("type", field.type()); + filter.put("path", resolvePath(field.path(), persistentEntity, mappingContext)); + + if (field instanceof VectorIndexField vif) { + + filter.put("numDimensions", vif.dimensions()); + filter.put("similarity", vif.similarity()); + if (StringUtils.hasText(vif.quantization)) { + filter.put("quantization", vif.quantization()); + } + } + fields.add(filter); + } + + return definition; + } + + @Contract("_ -> this") + private VectorIndex addField(SearchField filterField) { + + fields.add(filterField); + return this; + } + + @Override + public String toString() { + return "VectorIndex{" + "name='" + name + '\'' + ", fields=" + fields + ", type='" + getType() + '\'' + '}'; + } + + /** + * Parse the {@link Document} into a {@link VectorIndex}. + */ + static VectorIndex of(Document document) { + + VectorIndex index = new VectorIndex(document.getString("name")); + + String definitionKey = document.containsKey("latestDefinition") ? 
"latestDefinition" : "definition"; + Document definition = document.get(definitionKey, Document.class); + + for (Object entry : definition.get("fields", List.class)) { + if (entry instanceof Document field) { + if (field.get("type").equals("vector")) { + index.addField(new VectorIndexField(field.getString("path"), "vector", field.getInteger("numDimensions"), + field.getString("similarity"), field.getString("quantization"))); + } else { + index.addField(new VectorFilterField(field.getString("path"), "filter")); + } + } + } + + return index; + } + + private String resolvePath(String path, @Nullable MongoPersistentEntity persistentEntity, + @Nullable MappingContext, MongoPersistentProperty> mappingContext) { + + if (persistentEntity == null || mappingContext == null) { + return path; + } + + QueryMapper.MetadataBackedField mbf = new QueryMapper.MetadataBackedField(path, persistentEntity, mappingContext); + + return mbf.getMappedKey(); + } + + interface SearchField { + + String path(); + + String type(); + } + + record VectorFilterField(String path, String type) implements SearchField { + } + + record VectorIndexField(String path, String type, int dimensions, @Nullable String similarity, + @Nullable String quantization) implements SearchField { + } + + /** + * Builder to create a vector field + */ + public static class VectorFieldBuilder { + + private final String path; + private final String type; + + private int dimensions; + private @Nullable String similarity; + private @Nullable String quantization; + + VectorFieldBuilder(String path, String type) { + + this.path = path; + this.type = type; + } + + /** + * Number of vector dimensions enforced at index- & query-time. + * + * @param dimensions value between {@code 0} and {@code 4096}. + * @return this. + */ + @Contract("_ -> this") + public VectorFieldBuilder dimensions(int dimensions) { + this.dimensions = dimensions; + return this; + } + + /** + * Use similarity based on the angle between vectors. 
+ * + * @return new instance of {@link VectorIndex}. + */ + @Contract(" -> this") + public VectorFieldBuilder cosine() { + return similarity(SimilarityFunction.COSINE); + } + + /** + * Use similarity based the distance between vector ends. + */ + @Contract(" -> this") + public VectorFieldBuilder euclidean() { + return similarity(SimilarityFunction.EUCLIDEAN); + } + + /** + * Use similarity based on both angle and magnitude of the vectors. + * + * @return new instance of {@link VectorIndex}. + */ + @Contract(" -> this") + public VectorFieldBuilder dotProduct() { + return similarity(SimilarityFunction.DOT_PRODUCT); + } + + /** + * Similarity function used. + * + * @param similarity should be one of {@literal euclidean | cosine | dotProduct}. + * @return this. + * @see SimilarityFunction + * @see #similarity(SimilarityFunction) + */ + @Contract("_ -> this") + public VectorFieldBuilder similarity(String similarity) { + + this.similarity = similarity; + return this; + } + + /** + * Similarity function used. + * + * @param similarity must not be {@literal null}. + * @return this. + */ + @Contract("_ -> this") + public VectorFieldBuilder similarity(SimilarityFunction similarity) { + + return similarity(similarity.getFunctionName()); + } + + /** + * Quantization used. + * + * @param quantization should be one of {@literal none | scalar | binary}. + * @return this. + * @see Quantization + * @see #quantization(Quantization) + */ + public VectorFieldBuilder quantization(String quantization) { + + this.quantization = quantization; + return this; + } + + /** + * Quantization used. + * + * @param quantization must not be {@literal null}. + * @return this. 
+ */ + public VectorFieldBuilder quantization(Quantization quantization) { + return quantization(quantization.getQuantizationName()); + } + + VectorIndexField build() { + return new VectorIndexField(this.path, this.type, this.dimensions, this.similarity, this.quantization); + } + } + + /** + * Similarity function used to calculate vector distance. + */ + public enum SimilarityFunction { + + DOT_PRODUCT("dotProduct"), COSINE("cosine"), EUCLIDEAN("euclidean"); + + final String functionName; + + SimilarityFunction(String functionName) { + this.functionName = functionName; + } + + public String getFunctionName() { + return functionName; + } + } + + /** + * Vector quantization. Quantization reduce vector sizes while preserving performance. + */ + public enum Quantization { + + NONE("none"), + + /** + * Converting a float point into an integer. + */ + SCALAR("scalar"), + + /** + * Converting a float point into a single bit. + */ + BINARY("binary"); + + final String quantizationName; + + Quantization(String quantizationName) { + this.quantizationName = quantizationName; + } + + public String getQuantizationName() { + return quantizationName; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java new file mode 100644 index 0000000000..dcd2b7c022 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java @@ -0,0 +1,199 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.time.Duration; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import org.bson.Document; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.lang.Nullable; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; + +/** + * {@link WildcardIndex} is a specific {@link Index} that can be used to include all fields into an index based on the + * {@code $**" : 1} pattern on a root object (the one typically carrying the + * {@link org.springframework.data.mongodb.core.mapping.Document} annotation). On those it is possible to use + * {@link #wildcardProjectionInclude(String...)} and {@link #wildcardProjectionExclude(String...)} to define specific + * paths for in-/exclusion. + *
          + * It can also be used to define an index on a specific field path and its subfields, e.g. + * {@code "path.to.field.$**" : 1}.
          + * Note that {@literal wildcardProjections} are not allowed in this case. + *
          + * LIMITATIONS
          + *

+ * <ul>
+ * <li>{@link #unique() Unique} and {@link #expire(long) ttl} options are not supported.</li>
+ * <li>Keys used for sharding must not be included</li>
+ * <li>Cannot be used to generate any type of geo index.</li>
+ * </ul>
+ *
          + * + * @author Christoph Strobl + * @see MongoDB Reference Documentation: Wildcard + * Indexes/ + * @since 3.3 + */ +public class WildcardIndex extends Index { + + private @Nullable String fieldName; + private final Map wildcardProjection = new LinkedHashMap<>(); + + /** + * Create a new instance of {@link WildcardIndex} using {@code $**}. + */ + public WildcardIndex() {} + + /** + * Create a new instance of {@link WildcardIndex} for the given {@literal path}. If no {@literal path} is provided the + * index will be considered a root one using {@code $**}.
          + * NOTE: {@link #wildcardProjectionInclude(String...)}, {@link #wildcardProjectionExclude(String...)} + * can only be used for top level index definitions having an {@literal empty} or {@literal null} path. + * + * @param path can be {@literal null}. If {@literal null} all fields will be indexed. + */ + public WildcardIndex(@Nullable String path) { + this.fieldName = path; + } + + /** + * Include the {@code _id} field in {@literal wildcardProjection}. + * + * @return this. + */ + public WildcardIndex includeId() { + + wildcardProjection.put(FieldName.ID.name(), 1); + return this; + } + + /** + * Set the index name to use. + * + * @param name + * @return this. + */ + @Override + public WildcardIndex named(String name) { + + super.named(name); + return this; + } + + /** + * Unique option is not supported. + * + * @throws UnsupportedOperationException not supported for wildcard indexes. + */ + @Override + public Index unique() { + throw new UnsupportedOperationException("Wildcard Index does not support 'unique'"); + } + + /** + * ttl option is not supported. + * + * @throws UnsupportedOperationException not supported for wildcard indexes. + */ + @Override + public Index expire(long seconds) { + throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'"); + } + + /** + * ttl option is not supported. + * + * @throws UnsupportedOperationException not supported for wildcard indexes. + */ + @Override + public Index expire(long value, TimeUnit timeUnit) { + throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'"); + } + + /** + * ttl option is not supported. + * + * @throws UnsupportedOperationException not supported for wildcard indexes. + */ + @Override + public Index expire(Duration duration) { + throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'"); + } + + /** + * Add fields to be included from indexing via {@code wildcardProjection}.
          + * This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes. + * + * @param paths must not be {@literal null}. + * @return this. + */ + public WildcardIndex wildcardProjectionInclude(String... paths) { + + for (String path : paths) { + wildcardProjection.put(path, 1); + } + return this; + } + + /** + * Add fields to be excluded from indexing via {@code wildcardProjection}.
          + * This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes. + * + * @param paths must not be {@literal null}. + * @return this. + */ + public WildcardIndex wildcardProjectionExclude(String... paths) { + + for (String path : paths) { + wildcardProjection.put(path, 0); + } + return this; + } + + /** + * Set the fields to be in-/excluded from indexing via {@code wildcardProjection}.
          + * This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes. + * + * @param includeExclude must not be {@literal null}. + * @return this. + */ + public WildcardIndex wildcardProjection(Map includeExclude) { + + wildcardProjection.putAll(includeExclude); + return this; + } + + private String getTargetFieldName() { + return StringUtils.hasText(fieldName) ? (fieldName + ".$**") : "$**"; + } + + @Override + public Document getIndexKeys() { + return new Document(getTargetFieldName(), 1); + } + + @Override + public Document getIndexOptions() { + + Document options = new Document(super.getIndexOptions()); + if (!CollectionUtils.isEmpty(wildcardProjection)) { + options.put("wildcardProjection", new Document(wildcardProjection)); + } + return options; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java new file mode 100644 index 0000000000..e7eaf3bf15 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java @@ -0,0 +1,135 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.index; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.mongodb.core.annotation.Collation; + +/** + * Annotation for an entity or property that should be used as key for a + * Wildcard Index.
          + * If placed on a {@link ElementType#TYPE type} that is a root level domain entity (one having an + * {@link org.springframework.data.mongodb.core.mapping.Document} annotation) will advise the index creator to create a + * wildcard index for it. + * + *
          + *
          + * @Document
          + * @WildcardIndexed
          + * public class Product {
          + *     ...
          + * }
          + *
          + * db.product.createIndex({ "$**" : 1 } , {})
          + * 
          + * + * {@literal wildcardProjection} can be used to specify keys to in-/exclude in the index. + * + *
          + *
          + * @Document
          + * @WildcardIndexed(wildcardProjection = "{ 'userMetadata.age' : 0 }")
          + * public class User {
          + *     private @Id String id;
          + *     private UserMetadata userMetadata;
          + * }
          + *
          + *
          + * db.user.createIndex(
          + *   { "$**" : 1 },
          + *   { "wildcardProjection" :
          + *     { "userMetadata.age" : 0 }
          + *   }
          + * )
          + * 
          + * + * Wildcard indexes can also be expressed by adding the annotation directly to the field. Please note that + * {@literal wildcardProjection} is not allowed on nested paths. + * + *
          + * @Document
          + * public class User {
          + *
          + *     private @Id String id;
          + *
          + *     @WildcardIndexed
          + *     private UserMetadata userMetadata;
          + * }
          + *
          + *
          + * db.user.createIndex({ "userMetadata.$**" : 1 }, {})
          + * 
          + * + * @author Christoph Strobl + * @since 3.3 + */ +@Collation +@Documented +@Target({ ElementType.TYPE, ElementType.FIELD }) +@Retention(RetentionPolicy.RUNTIME) +public @interface WildcardIndexed { + + /** + * Index name either as plain value or as {@link org.springframework.expression.spel.standard.SpelExpression template + * expression}.
          + *
          + * The name will only be applied as is when defined on root level. For usage on nested or embedded structures the + * provided name will be prefixed with the path leading to the entity. + * + * @return empty by default. + */ + String name() default ""; + + /** + * If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults + * to {@literal false}. + * + * @return {@literal false} by default. + */ + boolean useGeneratedName() default false; + + /** + * Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}.
          + * + * @return empty by default. + * @see https://docs.mongodb.com/manual/core/index-partial/ + */ + String partialFilter() default ""; + + /** + * Explicitly specify sub fields to be in-/excluded as a {@link org.bson.Document#parse(String) prasable} String. + *
          + * NOTE: Can only be applied on root level documents. + * + * @return empty by default. + */ + String wildcardProjection() default ""; + + /** + * Defines the collation to apply. + * + * @return an empty {@link String} by default. + */ + @AliasFor(annotation = Collation.class, attribute = "value") + String collation() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java index da45966bb3..3d68dbaac2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,31 +17,35 @@ import java.lang.reflect.Field; import java.lang.reflect.Modifier; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.List; import java.util.Map; -import org.springframework.beans.BeansException; -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; -import org.springframework.context.expression.BeanFactoryAccessor; -import org.springframework.context.expression.BeanFactoryResolver; import org.springframework.data.annotation.Id; +import org.springframework.data.expression.ValueEvaluationContext; +import org.springframework.data.expression.ValueExpression; +import org.springframework.data.expression.ValueExpressionParser; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.AssociationHandler; import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.PropertyHandler; import org.springframework.data.mapping.model.BasicPersistentEntity; import org.springframework.data.mongodb.MongoCollectionUtils; +import org.springframework.data.mongodb.util.encryption.EncryptionUtils; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.data.util.Lazy; import org.springframework.data.util.TypeInformation; +import org.springframework.expression.EvaluationContext; import org.springframework.expression.Expression; -import org.springframework.expression.ParserContext; -import org.springframework.expression.common.LiteralExpression; import org.springframework.expression.spel.standard.SpelExpressionParser; 
-import org.springframework.expression.spel.support.StandardEvaluationContext; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; /** @@ -55,16 +59,20 @@ * @author Mark Paluch */ public class BasicMongoPersistentEntity extends BasicPersistentEntity - implements MongoPersistentEntity, ApplicationContextAware { + implements MongoPersistentEntity { - private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected! Both %s and %s map to the same field name %s! Disambiguate using @Field annotation!"; - private static final SpelExpressionParser PARSER = new SpelExpressionParser(); + private static final String AMBIGUOUS_FIELD_MAPPING = "Ambiguous field mapping detected; Both %s and %s map to the same field name %s; Disambiguate using @Field annotation"; + private static final ValueExpressionParser PARSER = ValueExpressionParser.create(SpelExpressionParser::new); private final String collection; private final String language; - private final StandardEvaluationContext context; - private final @Nullable Expression expression; + private final @Nullable ValueExpression expression; + + private final @Nullable String collation; + private final @Nullable ValueExpression collationExpression; + + private final ShardKey shardKey; /** * Creates a new {@link BasicMongoPersistentEntity} with the given {@link TypeInformation}. Will default the @@ -79,73 +87,98 @@ public BasicMongoPersistentEntity(TypeInformation typeInformation) { Class rawType = typeInformation.getType(); String fallback = MongoCollectionUtils.getPreferredCollectionName(rawType); - this.context = new StandardEvaluationContext(); - if (this.isAnnotationPresent(Document.class)) { Document document = this.getRequiredAnnotation(Document.class); this.collection = StringUtils.hasText(document.collection()) ? 
document.collection() : fallback; this.language = StringUtils.hasText(document.language()) ? document.language() : ""; - this.expression = detectExpression(document); + this.expression = detectExpression(document.collection()); + this.collation = document.collation(); + this.collationExpression = detectExpression(document.collation()); } else { this.collection = fallback; this.language = ""; this.expression = null; + this.collation = null; + this.collationExpression = null; } + + this.shardKey = detectShardKey(); } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) - */ - public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + private ShardKey detectShardKey() { + + if (!isAnnotationPresent(Sharded.class)) { + return ShardKey.none(); + } + + Sharded sharded = getRequiredAnnotation(Sharded.class); - context.addPropertyAccessor(new BeanFactoryAccessor()); - context.setBeanResolver(new BeanFactoryResolver(applicationContext)); - context.setRootObject(applicationContext); + String[] keyProperties = sharded.shardKey(); + if (ObjectUtils.isEmpty(keyProperties)) { + keyProperties = new String[] { FieldName.ID.name() }; + } + + ShardKey shardKey = ShardingStrategy.HASH.equals(sharded.shardingStrategy()) ? ShardKey.hash(keyProperties) + : ShardKey.range(keyProperties); + + return sharded.immutableKey() ? ShardKey.immutable(shardKey) : shardKey; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#getCollection() - */ + @Override public String getCollection() { - return expression == null ? collection : expression.getValue(context, String.class); + + return expression == null // + ? 
collection // + : ObjectUtils.nullSafeToString(expression.evaluate(getValueEvaluationContext(null))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#getLanguage() - */ @Override public String getLanguage() { return this.language; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#getTextScoreProperty() - */ @Nullable @Override public MongoPersistentProperty getTextScoreProperty() { return getPersistentProperty(TextScore.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentEntity#hasTextScoreProperty() - */ @Override public boolean hasTextScoreProperty() { return getTextScoreProperty() != null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.BasicPersistentEntity#verify() - */ + @Override + public org.springframework.data.mongodb.core.query.Collation getCollation() { + + Object collationValue = collationExpression != null + ? collationExpression.evaluate(getValueEvaluationContext(null)) + : this.collation; + + if (collationValue == null) { + return null; + } + + if (collationValue instanceof org.bson.Document document) { + return org.springframework.data.mongodb.core.query.Collation.from(document); + } + + if (collationValue instanceof org.springframework.data.mongodb.core.query.Collation collation) { + return collation; + } + + return StringUtils.hasText(collationValue.toString()) + ? 
org.springframework.data.mongodb.core.query.Collation.parse(collationValue.toString()) + : null; + } + + @Override + public ShardKey getShardKey() { + return shardKey; + } + @Override public void verify() { @@ -155,6 +188,26 @@ public void verify() { verifyFieldTypes(); } + @Override + public EvaluationContext getEvaluationContext(Object rootObject) { + return super.getEvaluationContext(rootObject); + } + + @Override + public EvaluationContext getEvaluationContext(Object rootObject, ExpressionDependencies dependencies) { + return super.getEvaluationContext(rootObject, dependencies); + } + + @Override + public ValueEvaluationContext getValueEvaluationContext(Object rootObject) { + return super.getValueEvaluationContext(rootObject); + } + + @Override + public ValueEvaluationContext getValueEvaluationContext(Object rootObject, ExpressionDependencies dependencies) { + return super.getValueEvaluationContext(rootObject, dependencies); + } + private void verifyFieldUniqueness() { AssertFieldNameUniquenessHandler handler = new AssertFieldNameUniquenessHandler(); @@ -172,14 +225,10 @@ private void verifyFieldTypes() { * * @author Oliver Gierke */ - static enum MongoPersistentPropertyComparator implements Comparator { + enum MongoPersistentPropertyComparator implements Comparator { INSTANCE; - /* - * (non-Javadoc) - * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object) - */ public int compare(@Nullable MongoPersistentProperty o1, @Nullable MongoPersistentProperty o2) { if (o1 != null && o1.getFieldOrder() == Integer.MAX_VALUE) { @@ -205,12 +254,12 @@ public int compare(@Nullable MongoPersistentProperty o1, @Nullable MongoPersiste * ambiguity a @see {@link MappingException} is thrown. * * @param property - the new id property candidate - * @return + * @return can be {@literal null}. 
*/ @Override protected MongoPersistentProperty returnPropertyIfBetterIdPropertyCandidateOrNull(MongoPersistentProperty property) { - Assert.notNull(property, "MongoPersistentProperty must not be null!"); + Assert.notNull(property, "MongoPersistentProperty must not be null"); if (!property.isIdProperty()) { return null; @@ -220,7 +269,7 @@ protected MongoPersistentProperty returnPropertyIfBetterIdPropertyCandidateOrNul boolean currentIdPropertyIsSet = currentIdProperty != null; @SuppressWarnings("null") - boolean currentIdPropertyIsExplicit = currentIdPropertyIsSet ? currentIdProperty.isExplicitIdProperty() : false; + boolean currentIdPropertyIsExplicit = currentIdPropertyIsSet && currentIdProperty.isExplicitIdProperty(); boolean newIdPropertyIsExplicit = property.isExplicitIdProperty(); if (!currentIdPropertyIsSet) { @@ -234,7 +283,7 @@ protected MongoPersistentProperty returnPropertyIfBetterIdPropertyCandidateOrNul if (newIdPropertyIsExplicit && currentIdPropertyIsExplicit) { throw new MappingException( String.format("Attempt to add explicit id property %s but already have an property %s registered " - + "as explicit id. Check your mapping configuration!", property.getField(), currentIdPropertyField)); + + "as explicit id; Check your mapping configuration", property.getField(), currentIdPropertyField)); } else if (newIdPropertyIsExplicit && !currentIdPropertyIsExplicit) { // explicit id property takes precedence over implicit id property @@ -246,32 +295,28 @@ protected MongoPersistentProperty returnPropertyIfBetterIdPropertyCandidateOrNul } else { throw new MappingException( String.format("Attempt to add id property %s but already have an property %s registered " - + "as id. 
Check your mapping configuration!", property.getField(), currentIdPropertyField)); + + "as id; Check your mapping configuration", property.getField(), currentIdPropertyField)); } return null; } /** - * Returns a SpEL {@link Expression} frór the collection String expressed in the given {@link Document} annotation if - * present or {@literal null} otherwise. Will also return {@literal null} it the collection {@link String} evaluates - * to a {@link LiteralExpression} (indicating that no subsequent evaluation is necessary). + * Returns a Value {@link Expression} if the given {@link String} is actually an expression that does not evaluate to + * a literal expression (indicating that no subsequent evaluation is necessary). * - * @param document can be {@literal null} - * @return + * @param potentialExpression can be {@literal null} + * @return can be {@literal null}. */ @Nullable - private static Expression detectExpression(Document document) { + private static ValueExpression detectExpression(@Nullable String potentialExpression) { - String collection = document.collection(); - - if (!StringUtils.hasText(collection)) { + if (!StringUtils.hasText(potentialExpression)) { return null; } - Expression expression = PARSER.parseExpression(document.collection(), ParserContext.TEMPLATE_EXPRESSION); - - return expression instanceof LiteralExpression ? null : expression; + ValueExpression expression = PARSER.parse(potentialExpression); + return expression.isLiteral() ? 
null : expression; } /** @@ -299,24 +344,45 @@ private void assertUniqueness(MongoPersistentProperty property) { MongoPersistentProperty existingProperty = properties.get(fieldName); if (existingProperty != null) { - throw new MappingException( - String.format(AMBIGUOUS_FIELD_MAPPING, property.toString(), existingProperty.toString(), fieldName)); + throw new MappingException(String.format(AMBIGUOUS_FIELD_MAPPING, property, existingProperty, fieldName)); } properties.put(fieldName, property); } } + @Override + public Collection getEncryptionKeyIds() { + + Encrypted encrypted = findAnnotation(Encrypted.class); + if (encrypted == null) { + return null; + } + + if (ObjectUtils.isEmpty(encrypted.keyId())) { + return Collections.emptySet(); + } + + Lazy evaluationContext = Lazy.of(() -> { + + EvaluationContext ctx = getEvaluationContext(null); + ctx.setVariable("target", getType().getSimpleName()); + return ctx; + }); + + List target = new ArrayList<>(); + for (String keyId : encrypted.keyId()) { + target.add(EncryptionUtils.resolveKeyId(keyId, evaluationContext)); + } + return target; + } + /** * @author Christoph Strobl * @since 1.6 */ private static class PropertyTypeAssertionHandler implements PropertyHandler { - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.PropertyHandler#doWithPersistentProperty(org.springframework.data.mapping.PersistentProperty) - */ @Override public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) { @@ -343,9 +409,9 @@ private static void potentiallyAssertDBRefTargetType(MongoPersistentProperty per if (persistentProperty.isDbReference() && persistentProperty.getDBRef().lazy()) { if (persistentProperty.isArray() || Modifier.isFinal(persistentProperty.getActualType().getModifiers())) { - throw new MappingException(String.format( - "Invalid lazy DBRef property for %s. 
Found %s which must not be an array nor a final class.", - persistentProperty.getField(), persistentProperty.getActualType())); + throw new MappingException( + String.format("Invalid lazy DBRef property for %s; Found %s which must not be an array nor a final class", + persistentProperty.getField(), persistentProperty.getActualType())); } } } @@ -359,7 +425,7 @@ private static void assertPropertyType(MongoPersistentProperty persistentPropert } throw new MappingException( - String.format("Missmatching types for %s. Found %s expected one of %s.", persistentProperty.getField(), + String.format("Mismatching types for %s; Found %s expected one of %s", persistentProperty.getField(), persistentProperty.getActualType(), StringUtils.arrayToCommaDelimitedString(validMatches))); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java index 59de91d0d6..5c3b4e6532 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,17 @@ */ package org.springframework.data.mongodb.core.mapping; -import java.math.BigInteger; -import java.util.HashSet; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; import java.util.Set; -import org.bson.types.ObjectId; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.data.annotation.Id; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ValueEvaluationContext; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.model.AnnotationBasedPersistentProperty; @@ -30,7 +33,14 @@ import org.springframework.data.mapping.model.Property; import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.mongodb.core.mapping.FieldName.Type; +import org.springframework.data.mongodb.core.mapping.MongoField.MongoFieldBuilder; +import org.springframework.data.mongodb.util.encryption.EncryptionUtils; +import org.springframework.data.util.Lazy; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.spel.support.StandardEvaluationContext; import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; /** @@ -41,37 +51,26 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Divya Srivastava */ public 
class BasicMongoPersistentProperty extends AnnotationBasedPersistentProperty implements MongoPersistentProperty { - private static final Logger LOG = LoggerFactory.getLogger(BasicMongoPersistentProperty.class); + private static final Log LOG = LogFactory.getLog(BasicMongoPersistentProperty.class); - private static final String ID_FIELD_NAME = "_id"; + public static final String ID_FIELD_NAME = FieldName.ID.name(); private static final String LANGUAGE_FIELD_NAME = "language"; - private static final Set> SUPPORTED_ID_TYPES = new HashSet>(); - private static final Set SUPPORTED_ID_PROPERTY_NAMES = new HashSet(); - - static { - - SUPPORTED_ID_TYPES.add(ObjectId.class); - SUPPORTED_ID_TYPES.add(String.class); - SUPPORTED_ID_TYPES.add(BigInteger.class); - - SUPPORTED_ID_PROPERTY_NAMES.add("id"); - SUPPORTED_ID_PROPERTY_NAMES.add("_id"); - } + private static final Set SUPPORTED_ID_PROPERTY_NAMES = Set.of("id", ID_FIELD_NAME); private final FieldNamingStrategy fieldNamingStrategy; /** * Creates a new {@link BasicMongoPersistentProperty}. * - * @param field - * @param propertyDescriptor - * @param owner - * @param simpleTypeHolder - * @param fieldNamingStrategy + * @param property the source property. + * @param owner the owing entity. + * @param simpleTypeHolder must not be {@literal null}. + * @param fieldNamingStrategy can be {@literal null}. */ public BasicMongoPersistentProperty(Property property, MongoPersistentEntity owner, SimpleTypeHolder simpleTypeHolder, @Nullable FieldNamingStrategy fieldNamingStrategy) { @@ -79,17 +78,12 @@ public BasicMongoPersistentProperty(Property property, MongoPersistentEntity super(property, owner, simpleTypeHolder); this.fieldNamingStrategy = fieldNamingStrategy == null ? PropertyNameFieldNamingStrategy.INSTANCE : fieldNamingStrategy; - - if (isIdProperty() && getFieldName() != ID_FIELD_NAME) { - LOG.warn("Customizing field name for id property not allowed! 
Custom name will not be considered!"); - } } /** * Also considers fields as id that are of supported id type and name. * * @see #SUPPORTED_ID_PROPERTY_NAMES - * @see #SUPPORTED_ID_TYPES */ @Override public boolean isIdProperty() { @@ -103,13 +97,9 @@ public boolean isIdProperty() { return SUPPORTED_ID_PROPERTY_NAMES.contains(getName()) && !hasExplicitFieldName(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isExplicitIdProperty() - */ @Override public boolean isExplicitIdProperty() { - return isAnnotationPresent(Id.class); + return super.isIdProperty(); } /** @@ -117,31 +107,40 @@ public boolean isExplicitIdProperty() { * * @return */ + @Override public String getFieldName() { + return getMongoField().getName().name(); + } - if (isIdProperty()) { + @Override + public Class getFieldType() { - if (getOwner().getIdProperty() == null) { - return ID_FIELD_NAME; - } + Field fieldAnnotation = findAnnotation(Field.class); - if (getOwner().isIdProperty(this)) { - return ID_FIELD_NAME; + if (!getOwner().isIdProperty(this)) { + + if (fieldAnnotation == null || fieldAnnotation.targetType() == FieldType.IMPLICIT) { + return getType(); } + + return fieldAnnotation.targetType().getJavaClass(); } - if (hasExplicitFieldName()) { - return getAnnotatedFieldName(); + if (fieldAnnotation == null) { + return FieldType.OBJECT_ID.getJavaClass(); } - String fieldName = fieldNamingStrategy.getFieldName(this); + FieldType fieldType = getMongoField().getFieldType(); + if (fieldType == FieldType.IMPLICIT) { - if (!StringUtils.hasText(fieldName)) { - throw new MappingException(String.format("Invalid (null or empty) field name returned for property %s by %s!", - this, fieldNamingStrategy.getClass())); + if (isEntity()) { + return org.bson.Document.class; + } + + return getType(); } - return fieldName; + return fieldType.getJavaClass(); } /** @@ -149,7 +148,8 @@ public String getFieldName() { * {@link 
org.springframework.data.mongodb.core.mapping.Field#value()} present. * @since 1.7 */ - protected boolean hasExplicitFieldName() { + @Override + public boolean hasExplicitFieldName() { return StringUtils.hasText(getAnnotatedFieldName()); } @@ -162,68 +162,191 @@ private String getAnnotatedFieldName() { return annotation != null ? annotation.value() : null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#getFieldOrder() - */ + @Override public int getFieldOrder() { + return getMongoField().getOrder(); + } + + @Override + public boolean writeNullValues() { org.springframework.data.mongodb.core.mapping.Field annotation = findAnnotation( org.springframework.data.mongodb.core.mapping.Field.class); - return annotation != null ? annotation.order() : Integer.MAX_VALUE; + return annotation != null && annotation.write() == Field.Write.ALWAYS; } - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.AbstractPersistentProperty#createAssociation() - */ @Override protected Association createAssociation() { - return new Association(this, null); + return new Association<>(this, null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isDbReference() - */ + @Override public boolean isDbReference() { return isAnnotationPresent(DBRef.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#getDBRef() - */ + @Override + public boolean isDocumentReference() { + return isAnnotationPresent(DocumentReference.class); + } + + @Override @Nullable public DBRef getDBRef() { return findAnnotation(DBRef.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isLanguageProperty() - */ + @Nullable + @Override + public DocumentReference getDocumentReference() { + return findAnnotation(DocumentReference.class); + } + @Override public boolean isLanguageProperty() { 
return getFieldName().equals(LANGUAGE_FIELD_NAME) || isExplicitLanguageProperty(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isExplicitLanguageProperty() - */ @Override public boolean isExplicitLanguageProperty() { return isAnnotationPresent(Language.class); - }; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isTextScoreProperty() - */ @Override public boolean isTextScoreProperty() { return isAnnotationPresent(TextScore.class); } + + /** + * Obtain the {@link EvaluationContext} for a specific root object. + * + * @param rootObject can be {@literal null}. + * @return never {@literal null}. + * @since 3.3 + */ + public EvaluationContext getEvaluationContext(@Nullable Object rootObject) { + + if (getOwner() instanceof BasicMongoPersistentEntity mongoPersistentEntity) { + return mongoPersistentEntity.getEvaluationContext(rootObject); + } + return rootObject != null ? new StandardEvaluationContext(rootObject) : new StandardEvaluationContext(); + } + + /** + * Obtain the {@link EvaluationContext} for a specific root object. + * + * @param rootObject can be {@literal null}. + * @return never {@literal null}. + * @since 3.3 + */ + public ValueEvaluationContext getValueEvaluationContext(@Nullable Object rootObject) { + + if (getOwner() instanceof BasicMongoPersistentEntity mongoPersistentEntity) { + return mongoPersistentEntity.getValueEvaluationContext(rootObject); + } + + StandardEvaluationContext standardEvaluationContext = rootObject != null ? 
new StandardEvaluationContext(rootObject) + : new StandardEvaluationContext(); + + return ValueEvaluationContext.of(new StandardEnvironment(), standardEvaluationContext); + } + + @Override + public MongoField getMongoField() { + return doGetMongoField(); + } + + @Override + public Collection getEncryptionKeyIds() { + + Encrypted encrypted = findAnnotation(Encrypted.class); + if (encrypted == null) { + return null; + } + + if (ObjectUtils.isEmpty(encrypted.keyId())) { + return Collections.emptySet(); + } + + Lazy evaluationContext = Lazy.of(() -> { + EvaluationContext ctx = getEvaluationContext(null); + ctx.setVariable("target", getOwner().getType().getSimpleName() + "." + getName()); + return ctx; + }); + + List target = new ArrayList<>(); + for (String keyId : encrypted.keyId()) { + target.add(EncryptionUtils.resolveKeyId(keyId, evaluationContext)); + } + return target; + } + + protected MongoField doGetMongoField() { + + MongoFieldBuilder builder = MongoField.builder(); + if (isAnnotationPresent(Field.class) && Type.KEY.equals(findAnnotation(Field.class).nameType())) { + builder.name(doGetFieldName()); + } else { + builder.path(doGetFieldName()); + } + builder.fieldType(doGetFieldType()); + builder.order(doGetFieldOrder()); + return builder.build(); + } + + private String doGetFieldName() { + + if (isIdProperty()) { + + if (getOwner().getIdProperty() == null) { + return ID_FIELD_NAME; + } + + if (getOwner().isIdProperty(this)) { + return ID_FIELD_NAME; + } + } + + if (hasExplicitFieldName()) { + return getAnnotatedFieldName(); + } + + String fieldName = fieldNamingStrategy.getFieldName(this); + + if (!StringUtils.hasText(fieldName)) { + throw new MappingException(String.format("Invalid (null or empty) field name returned for property %s by %s", + this, fieldNamingStrategy.getClass())); + } + + return fieldName; + } + + private FieldType doGetFieldType() { + + Field fieldAnnotation = findAnnotation(Field.class); + return fieldAnnotation != null ? 
fieldAnnotation.targetType() : FieldType.IMPLICIT; + } + + private int doGetFieldOrder() { + + Field annotation = findAnnotation(Field.class); + return annotation != null ? annotation.order() : Integer.MAX_VALUE; + } + + protected void validate() { + + if (isIdProperty() && hasExplicitFieldName()) { + + String annotatedName = getAnnotatedFieldName(); + if (!ID_FIELD_NAME.equals(annotatedName)) { + if (LOG.isWarnEnabled()) { + LOG.warn(String.format( + "Customizing field name for id property '%s.%s' is not allowed; Custom name ('%s') will not be considered", + getOwner().getName(), getName(), annotatedName)); + } + } + } + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/CachingMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/CachingMongoPersistentProperty.java index 4b82a9e81f..105c38b288 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/CachingMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/CachingMongoPersistentProperty.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,100 +18,131 @@ import org.springframework.data.mapping.model.FieldNamingStrategy; import org.springframework.data.mapping.model.Property; import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.util.Lazy; import org.springframework.lang.Nullable; /** * {@link MongoPersistentProperty} caching access to {@link #isIdProperty()} and {@link #getFieldName()}. * * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl */ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty { - private @Nullable Boolean isIdProperty; - private @Nullable Boolean isAssociation; - private @Nullable String fieldName; - private @Nullable Boolean usePropertyAccess; - private @Nullable Boolean isTransient; + private final Lazy isEntity = Lazy.of(super::isEntity); + private final Lazy isUnwrapped = Lazy.of(super::isUnwrapped); + private final Lazy isIdProperty = Lazy.of(super::isIdProperty); + private final Lazy isAssociation = Lazy.of(super::isAssociation); + private final Lazy dbref = Lazy.of(super::getDBRef); + private final Lazy fieldName = Lazy.of(super::getFieldName); + private final Lazy hasExplicitFieldName = Lazy.of(super::hasExplicitFieldName); + private final Lazy writeNullValues = Lazy.of(super::writeNullValues); + private final Lazy> fieldType = Lazy.of(super::getFieldType); + private final Lazy usePropertyAccess = Lazy.of(super::usePropertyAccess); + private final Lazy isTransient = Lazy.of(super::isTransient); + private final Lazy mongoField = Lazy.of(super::getMongoField); + private final Lazy isTextScoreProperty = Lazy.of(super::isTextScoreProperty); + private final Lazy isLanguageProperty = 
Lazy.of(super::isLanguageProperty); + private final Lazy isExplicitLanguageProperty = Lazy.of(super::isExplicitLanguageProperty); + private final Lazy documentReference = Lazy.of(super::getDocumentReference); /** * Creates a new {@link CachingMongoPersistentProperty}. * - * @param field - * @param propertyDescriptor - * @param owner - * @param simpleTypeHolder - * @param fieldNamingStrategy + * @param property must not be {@literal null}. + * @param owner must not be {@literal null}. + * @param simpleTypeHolder must not be {@literal null}. + * @param fieldNamingStrategy can be {@literal null}. */ public CachingMongoPersistentProperty(Property property, MongoPersistentEntity owner, - SimpleTypeHolder simpleTypeHolder, FieldNamingStrategy fieldNamingStrategy) { + SimpleTypeHolder simpleTypeHolder, @Nullable FieldNamingStrategy fieldNamingStrategy) { super(property, owner, simpleTypeHolder, fieldNamingStrategy); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#isIdProperty() - */ @Override - public boolean isIdProperty() { + public boolean isEntity() { + return isEntity.get(); + } - if (this.isIdProperty == null) { - this.isIdProperty = super.isIdProperty(); - } + @Override + public boolean isUnwrapped() { + return isUnwrapped.get(); + } - return this.isIdProperty; + @Override + public boolean isIdProperty() { + return isIdProperty.get(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#isAssociation() - */ @Override public boolean isAssociation() { - if (this.isAssociation == null) { - this.isAssociation = super.isAssociation(); - } - return this.isAssociation; + return isAssociation.get(); + } + + @Override + public boolean hasExplicitFieldName() { + return hasExplicitFieldName.get(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#getFieldName() - */ @Override public String 
getFieldName() { + return fieldName.get(); + } - if (this.fieldName == null) { - this.fieldName = super.getFieldName(); - } + @Override + public boolean writeNullValues() { + return writeNullValues.get(); + } - return this.fieldName; + @Override + public Class getFieldType() { + return fieldType.get(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.AnnotationBasedPersistentProperty#usePropertyAccess() - */ @Override public boolean usePropertyAccess() { + return usePropertyAccess.get(); + } + + @Override + public boolean isTransient() { + return isTransient.get(); + } - if (this.usePropertyAccess == null) { - this.usePropertyAccess = super.usePropertyAccess(); - } + @Override + public boolean isTextScoreProperty() { + return isTextScoreProperty.get(); + } - return this.usePropertyAccess; + @Override + public boolean isDbReference() { + return getDBRef() != null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.AnnotationBasedPersistentProperty#isTransient() - */ @Override - public boolean isTransient() { + public DBRef getDBRef() { + return dbref.getNullable(); + } - if (this.isTransient == null) { - this.isTransient = super.isTransient(); - } + @Override + public DocumentReference getDocumentReference() { + return documentReference.getNullable(); + } - return this.isTransient; + @Override + public boolean isLanguageProperty() { + return isLanguageProperty.get(); + } + + @Override + public boolean isExplicitLanguageProperty() { + return isExplicitLanguageProperty.get(); } + + @Override + public MongoField getMongoField() { + return mongoField.get(); + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DBRef.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DBRef.java index 088b0a1197..7a861829a4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DBRef.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DBRef.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -39,14 +39,14 @@ /** * The database the referred entity resides in. * - * @return + * @return empty String by default. */ String db() default ""; /** * Controls whether the referenced entity should be loaded lazily. This defaults to {@literal false}. * - * @return + * @return {@literal false} by default. */ boolean lazy() default false; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Document.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Document.java index 4d31b1e187..ef4980fab6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Document.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Document.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,6 +23,7 @@ import org.springframework.core.annotation.AliasFor; import org.springframework.data.annotation.Persistent; +import org.springframework.data.mongodb.core.annotation.Collation; /** * Identifies a domain object to be persisted to MongoDB. @@ -32,6 +33,7 @@ * @author Christoph Strobl */ @Persistent +@Collation @Inherited @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.TYPE }) @@ -60,9 +62,18 @@ /** * Defines the default language to be used with this document. * + * @return an empty String by default. * @since 1.6 - * @return */ String language() default ""; + /** + * Defines the collation to apply when executing a query or creating indexes. + * + * @return an empty {@link String} by default. + * @since 2.2 + */ + @AliasFor(annotation = Collation.class, attribute = "value") + String collation() default ""; + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentPointer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentPointer.java new file mode 100644 index 0000000000..90da5dd87d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentPointer.java @@ -0,0 +1,35 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +/** + * A custom pointer to a linked document to be used along with {@link DocumentReference} for storing the linkage value. + * + * @author Christoph Strobl + * @since 3.3 + */ +@FunctionalInterface +public interface DocumentPointer { + + /** + * The actual pointer value. This can be any simple type, like a {@link String} or {@link org.bson.types.ObjectId} or + * a {@link org.bson.Document} holding more information like the target collection, multiple fields forming the key, + * etc. + * + * @return the value stored in MongoDB and used for constructing the {@link DocumentReference#lookup() lookup query}. + */ + T getPointer(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java new file mode 100644 index 0000000000..5b8a74f40e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java @@ -0,0 +1,132 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.data.annotation.Reference; +import org.springframework.data.mongodb.MongoDatabaseFactory; + +/** + * A {@link DocumentReference} allows referencing entities in MongoDB using a flexible schema. While the goal is the + * same as when using {@link DBRef}, the store representation is different. The reference can be anything, a single + * value, an entire {@link org.bson.Document}, basically everything that can be stored in MongoDB. By default, the + * mapping layer will use the referenced entities {@literal id} value for storage and retrieval. + * + *
          + * public class Account {
          + *   private String id;
          + *   private Float total;
          + * }
          + *
          + * public class Person {
          + *   private String id;
          + *   @DocumentReference
          + *   private List<Account> accounts;
          + * }
          + *
          + * Account account = ...
          + *
          + * mongoTemplate.insert(account);
          + *
          + * template.update(Person.class)
          + *   .matching(where("id").is(...))
          + *   .apply(new Update().push("accounts").value(account))
          + *   .first();
          + * 
          + * + * {@link #lookup()} allows defining a query filter that is independent from the {@literal _id} field and in combination + * with {@link org.springframework.data.convert.WritingConverter writing converters} offers a flexible way of defining + * references between entities. + * + *
          + * public class Book {
          + * 	private ObjectId id;
          + * 	private String title;
          + *
          + * 	@Field("publisher_ac") @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") private Publisher publisher;
          + * }
          + *
          + * public class Publisher {
          + *
          + * 	private ObjectId id;
          + * 	private String acronym;
          + * 	private String name;
          + *
          + * 	@DocumentReference(lazy = true) private List<Book> books;
          + * }
          + *
          + * @WritingConverter
          + * public class PublisherReferenceConverter implements Converter<Publisher, DocumentPointer<String>> {
          + *
          + * 	public DocumentPointer<String> convert(Publisher source) {
          + * 		return () -> source.getAcronym();
          + * 	}
          + * }
          + * 
          + * + * @author Christoph Strobl + * @since 3.3 + * @see MongoDB + * Reference Documentation + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.FIELD }) +@Reference +public @interface DocumentReference { + + /** + * The database the referenced entity resides in. Uses the default database provided by + * {@link org.springframework.data.mongodb.MongoDatabaseFactory} if empty. + * + * @see MongoDatabaseFactory#getMongoDatabase() + * @see MongoDatabaseFactory#getMongoDatabase(String) + */ + String db() default ""; + + /** + * The collection the referenced entity resides in. Defaults to the collection of the referenced entity type. + * + * @see MongoPersistentEntity#getCollection() + */ + String collection() default ""; + + /** + * The single document lookup query. In case of an {@link java.util.Collection} or {@link java.util.Map} property the + * individual lookups are combined via an {@code $or} operator. {@code target} points to the source value (or + * document) stored at the reference property. Properties of {@code target} can be used to define the reference query. + * + * @return an {@literal _id} based lookup. + */ + String lookup() default "{ '_id' : ?#{#target} }"; + + /** + * A specific sort. + */ + String sort() default ""; + + /** + * Controls whether the referenced entity should be loaded lazily. This defaults to {@literal false}. + * + * @return {@literal false} by default. + */ + boolean lazy() default false; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java new file mode 100644 index 0000000000..3e169026a9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java @@ -0,0 +1,112 @@ +/* + * Copyright 2021-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * {@link Encrypted} provides data required for MongoDB Client Side Field Level Encryption that is applied during schema + * resolution. It can be applied on top level (typically those types annotated with {@link Document} to provide the + * {@literal encryptMetadata}. + * + *
          + * @Document
          + * @Encrypted(keyId = "4fPYFM9qSgyRAjgQ2u+IMQ==")
          + * public class Patient {
          + * 	 private ObjectId id;
          + * 	 private String name;
          + *
          + * 	 @Field("publisher_ac")
          + * 	 @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") private Publisher publisher;
          + * }
          + *
          + * "encryptMetadata": {
          + *    "keyId": [
          + *      {
          + *        "$binary": {
          + *          "base64": "4fPYFM9qSgyRAjgQ2u+IMQ==",
          + *          "subType": "04"
          + *        }
          + *      }
          + *    ]
          + *  }
          + * 
          + * + *
          + * On property level it is used for deriving field specific {@literal encrypt} settings. + * + *
          + * public class Patient {
          + * 	 private ObjectId id;
          + * 	 private String name;
          + *
          + * 	 @Encrypted(keyId = "4fPYFM9qSgyRAjgQ2u+IMQ==", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
          + * 	 private String ssn;
          + * }
          + *
          + * "ssn" : {
          + *   "encrypt": {
          + *      "keyId": [
          + *        {
          + *          "$binary": {
          + *            "base64": "4fPYFM9qSgyRAjgQ2u+IMQ==",
          + *            "subType": "04"
          + *          }
          + *        }
          + *      ],
          + *      "algorithm" : "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic",
          + *      "bsonType" : "string"
          + *    }
          + *  }
          + * 
          + * + * @author Christoph Strobl + * @since 3.3 + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.FIELD }) +public @interface Encrypted { + + /** + * Get the {@code keyId} to use. The value must resolve to either the UUID representation of the key or a base64 + * encoded value representing the UUID value. + *
          + * On {@link ElementType#TYPE} level the {@link #keyId()} can be left empty if explicitly set for fields.
          + * On {@link ElementType#FIELD} level the {@link #keyId()} can be left empty if inherited from + * {@literal encryptMetadata}. + * + * @return the key id to use. May contain a parsable {@link org.springframework.expression.Expression expression}. In + * this case the {@code #target} variable will hold the target element name. + */ + String[] keyId() default {}; + + /** + * Set the algorithm to use. + *
          + * On {@link ElementType#TYPE} level the {@link #algorithm()} can be left empty if explicitly set for fields.
          + * On {@link ElementType#FIELD} level the {@link #algorithm()} can be left empty if inherited from + * {@literal encryptMetadata}. + * + * @return the encryption algorithm. + * @see org.springframework.data.mongodb.core.EncryptionAlgorithms + */ + String algorithm() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ExplicitEncrypted.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ExplicitEncrypted.java new file mode 100644 index 0000000000..37d1019f62 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ExplicitEncrypted.java @@ -0,0 +1,97 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.convert.PropertyValueConverter; +import org.springframework.data.convert.ValueConverter; +import org.springframework.data.mongodb.core.convert.encryption.EncryptingConverter; +import org.springframework.data.mongodb.core.convert.encryption.MongoEncryptionConverter; + +/** + * {@link ExplicitEncrypted} is a {@link ElementType#FIELD field} level {@link ValueConverter} annotation that indicates + * the target element is subject to encryption during the mapping process, in which a given domain type is converted + * into the store specific format. + *

          + * The {@link #value()} attribute, defines the bean type to look up within the + * {@link org.springframework.context.ApplicationContext} to obtain the {@link EncryptingConverter} responsible for the + * actual {@literal en-/decryption} while {@link #algorithm()} and {@link #keyAltName()} can be used to define aspects + * of the encryption process. + * + *

          + * public class Patient {
          + * 	private ObjectId id;
          + * 	private String name;
          + *
          + * 	@ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "secret-key-alternative-name") //
          + * 	private String ssn;
          + * }
          + * 
          + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.1 + * @see ValueConverter + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.FIELD) +@Encrypted +@ValueConverter +public @interface ExplicitEncrypted { + + /** + * Define the algorithm to use. + *

          + * A {@literal Deterministic} algorithm ensures that a given input value always encrypts to the same output while a + * {@literal randomized} one will produce different results every time. A {@literal range} algorithm allows for + * the value to be queried whilst encrypted. + *

          + * Please make sure to use an algorithm that is in line with MongoDB's encryption rules for simple types, complex + * objects and arrays as well as the query limitations that come with each of them. + * + * @return the string representation of the encryption algorithm to use. + * @see org.springframework.data.mongodb.core.EncryptionAlgorithms + */ + @AliasFor(annotation = Encrypted.class, value = "algorithm") + String algorithm() default ""; + + /** + * Set the {@literal Key Alternate Name} that references the {@literal Data Encryption Key} to be used. + *

          + * An empty String indicates that no alternative key name was configured. + *

          + * It is possible to use the {@literal "/"} character as a prefix to access a particular field value in the same + * domain type. In this case {@code "/name"} references the value of the {@literal name} field. Please note that + * update operations will require the full object to resolve those values. + * + * @return the {@literal Key Alternate Name} if set or an empty {@link String}. + */ + String keyAltName() default ""; + + /** + * The {@link EncryptingConverter} type handling the {@literal en-/decryption} of the annotated property. + * + * @return the configured {@link EncryptingConverter}. A {@link MongoEncryptionConverter} by default. + */ + @AliasFor(annotation = ValueConverter.class, value = "value") + Class value() default MongoEncryptionConverter.class; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java index d7e65310b8..68ff4bb976 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,10 +21,15 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.mongodb.core.mapping.FieldName.Type; + /** * Annotation to define custom metadata for document fields. * * @author Oliver Gierke + * @author Christoph Strobl + * @author Divya Srivastava */ @Documented @Retention(RetentionPolicy.RUNTIME) @@ -32,16 +37,75 @@ public @interface Field { /** - * The key to be used to store the field inside the document. + * The key to be used to store the field inside the document. Alias for {@link #name()}. * - * @return + * @return an empty {@link String} by default. + * @see #name() */ + @AliasFor("name") String value() default ""; + /** + * The key to be used to store the field inside the document. Alias for {@link #value()}. The name may contain MongoDB + * special characters like dot ({@literal .}). In this case the name is by default treated as a {@link Type#PATH + * path}. To preserve dots within the name set the {@link #nameType()} attribute to {@link Type#KEY}. + * + * @return an empty {@link String} by default. + * @since 2.2 + */ + @AliasFor("value") + String name() default ""; + + /** + * The used {@link Type type} has impact on how a given {@link #name()} is treated if it contains dot ({@literal .}) + * characters. + * + * @return {@link Type#PATH} by default. + * @since 4.2 + */ + Type nameType() default Type.PATH; + /** * The order in which various fields shall be stored. Has to be a positive integer. * * @return the order the field shall have in the document or -1 if undefined. 
*/ int order() default Integer.MAX_VALUE; + + /** + * The actual desired target type the field should be stored as. + * + * @return {@link FieldType#IMPLICIT} by default. + * @since 2.2 + */ + FieldType targetType() default FieldType.IMPLICIT; + + /** + * Write rules when to include a property value upon conversion. If set to {@link Write#NON_NULL} (default) + * {@literal null} values are not written to the target {@code Document}. Setting the value to {@link Write#ALWAYS} + * explicitly adds an entry for the given field holding {@literal null} as a value {@code 'fieldName' : null }.
          + * NOTE: Setting the value to {@link Write#ALWAYS} may lead to increased document size. + * + * @return {@link Write#NON_NULL} by default. + * @since 3.3 + */ + Write write() default Write.NON_NULL; + + /** + * Enumeration of write strategies to define when a property is included for write conversion. + * + * @since 3.3 + */ + enum Write { + + /** + * Value that indicates that property is to be always included, independent of value of the property. + */ + ALWAYS, + + /** + * Value that indicates that only properties with non-{@literal null} values are to be included. + */ + NON_NULL + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldName.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldName.java new file mode 100644 index 0000000000..2efb50a42f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldName.java @@ -0,0 +1,140 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import org.springframework.util.ObjectUtils; + +/** + * Value Object representing a field name that should be used to read/write fields within the MongoDB document. 
+ * {@link FieldName Field names} field names may contain special characters (such as dot ({@literal .})) but may be + * treated differently depending on their {@link Type type}. + * + * @author Christoph Strobl + * @since 4.2 + */ +public record FieldName(String name, Type type, String[] parts) { + + public FieldName(String name, Type type) { + this(name, type, name.split("\\.")); + } + + private static final String ID_KEY = "_id"; + + public static final FieldName ID = new FieldName(ID_KEY, Type.KEY); + + /** + * Create a new {@link FieldName} that treats the given {@literal value} as is. + * + * @param value must not be {@literal null}. + * @return new instance of {@link FieldName}. + */ + public static FieldName name(String value) { + return new FieldName(value, Type.KEY); + } + + /** + * Create a new {@link FieldName} that treats the given {@literal value} as a path. If the {@literal value} contains + * dot ({@literal .}) characters, they are considered deliminators in a path. + * + * @param value must not be {@literal null}. + * @return new instance of {@link FieldName}. + */ + public static FieldName path(String value) { + return new FieldName(value, Type.PATH); + } + + /** + * Get the parts the field name consists of. If the {@link FieldName} is a {@link Type#KEY} or a {@link Type#PATH} + * that does not contain dot ({@literal .}) characters an array containing a single element is returned. Otherwise the + * {@link #name()} is split into segments using dot ({@literal .}) as a separator. + * + * @return never {@literal null}. + */ + public String[] parts() { + + if (isKey()) { + return new String[] { name }; + } + + return parts; + } + + /** + * @param type return true if the given {@link Type} is equal to {@link #type()}. + * @return {@literal true} if values are equal. + */ + public boolean isOfType(Type type) { + return ObjectUtils.nullSafeEquals(type(), type); + } + + /** + * @return whether the field name represents a key (i.e. as-is name). 
+ */ + public boolean isKey() { + return isOfType(Type.KEY); + } + + /** + * @return whether the field name represents a path (i.e. dot-path). + */ + public boolean isPath() { + return isOfType(Type.PATH); + } + + @Override + public String toString() { + return "FieldName{%s=%s}".formatted(isKey() ? "key" : "path", name); + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + FieldName fieldName = (FieldName) o; + return ObjectUtils.nullSafeEquals(name, fieldName.name) && type == fieldName.type; + } + + @Override + public int hashCode() { + + int hashCode = ObjectUtils.nullSafeHashCode(name); + return 31 * hashCode + ObjectUtils.nullSafeHashCode(type); + } + + /** + * The {@link FieldName.Type type} defines how to treat a {@link FieldName} that contains special characters. + * + * @author Christoph Strobl + * @since 4.2 + */ + public enum Type { + + /** + * Dot ({@literal .}) characters are treated as separators for segments in a path. + */ + PATH, + + /** + * Values are used as is. + */ + KEY + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldType.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldType.java new file mode 100644 index 0000000000..7fc4199dd9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldType.java @@ -0,0 +1,86 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.util.Date; +import java.util.regex.Pattern; + +import org.bson.types.BSONTimestamp; +import org.bson.types.Binary; +import org.bson.types.Code; +import org.bson.types.Decimal128; +import org.bson.types.ObjectId; + +/** + * Enumeration of field value types that can be used to represent a {@link org.bson.Document} field value. This + * enumeration contains a subset of {@link org.bson.BsonType} that is supported by the mapping and conversion + * components. + *
          + * Bson types are identified by a {@code byte} {@link #getBsonType() value}. This enumeration typically returns the + * according bson type value except for {@link #IMPLICIT} which is a marker to derive the field type from a property. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.2 + * @see org.bson.BsonType + */ +public enum FieldType { + + /** + * Implicit type that is derived from the property value. + */ + IMPLICIT(-1, Object.class), // + DOUBLE(1, Double.class), // + STRING(2, String.class), // + ARRAY(4, Object[].class), // + BINARY(5, Binary.class), // + OBJECT_ID(7, ObjectId.class), // + BOOLEAN(8, Boolean.class), // + DATE_TIME(9, Date.class), // + PATTERN(11, Pattern.class), // + SCRIPT(13, Code.class), // + INT32(15, Integer.class), // + TIMESTAMP(16, BSONTimestamp.class), // + INT64(17, Long.class), // + DECIMAL128(18, Decimal128.class); + + private final int bsonType; + private final Class javaClass; + + FieldType(int bsonType, Class javaClass) { + + this.bsonType = bsonType; + this.javaClass = javaClass; + } + + /** + * Returns the BSON type identifier. Can be {@code -1} if {@link FieldType} maps to a synthetic Bson type. + * + * @return the BSON type identifier. Can be {@code -1} if {@link FieldType} maps to a synthetic Bson type. + */ + public int getBsonType() { + return bsonType; + } + + /** + * Returns the Java class used to represent the type. + * + * @return the Java class used to represent the type. 
+ */ + public Class getJavaClass() { + return javaClass; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Language.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Language.java index bed291dce2..db8cd1790d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Language.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Language.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoField.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoField.java new file mode 100644 index 0000000000..6f0e1ae4c3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoField.java @@ -0,0 +1,211 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import org.springframework.data.mongodb.core.mapping.FieldName.Type; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * Value Object for representing a field to read/write within a MongoDB {@link org.bson.Document}. + * + * @author Christoph Strobl + * @since 4.2 + */ +public class MongoField { + + private final FieldName name; + private final FieldType fieldType; + private final int order; + + protected MongoField(FieldName name, Class targetFieldType, int fieldOrder) { + this(name, FieldType.valueOf(targetFieldType.getSimpleName()), fieldOrder); + } + + protected MongoField(FieldName name, FieldType fieldType, int fieldOrder) { + + this.name = name; + this.fieldType = fieldType; + this.order = fieldOrder; + } + + /** + * Create a new {@link MongoField} with given {@literal name}. + * + * @param name the name to be used as is (with all its potentially special characters). + * @return new instance of {@link MongoField}. + */ + public static MongoField fromKey(String name) { + return builder().name(name).build(); + } + + /** + * Create a new {@link MongoField} with given {@literal name}. + * + * @param name the name to be used path expression. + * @return new instance of {@link MongoField}. + */ + public static MongoField fromPath(String name) { + return builder().path(name).build(); + } + + /** + * @return new instance of {@link MongoFieldBuilder}. + */ + public static MongoFieldBuilder builder() { + return new MongoFieldBuilder(); + } + + /** + * @return never {@literal null}. + */ + public FieldName getName() { + return name; + } + + /** + * Get the position of the field within the target document. + * + * @return {@link Integer#MAX_VALUE} if undefined. 
+ */ + public int getOrder() { + return order; + } + + /** + * @param prefix a prefix to the current name. + * @return new instance of {@link MongoField} with prefix appended to current field name. + */ + MongoField withPrefix(String prefix) { + return new MongoField(new FieldName(prefix + name.name(), name.type()), fieldType, order); + } + + /** + * Get the fields target type if defined. + * + * @return never {@literal null}. + */ + public FieldType getFieldType() { + return fieldType; + } + + @Override + public boolean equals(Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + MongoField that = (MongoField) o; + + if (order != that.order) + return false; + if (!ObjectUtils.nullSafeEquals(name, that.name)) { + return false; + } + return fieldType == that.fieldType; + } + + @Override + public int hashCode() { + + int result = ObjectUtils.nullSafeHashCode(name); + result = 31 * result + ObjectUtils.nullSafeHashCode(fieldType); + result = 31 * result + order; + return result; + } + + @Override + public String toString() { + return name.toString(); + } + + /** + * Builder for {@link MongoField}. + */ + public static class MongoFieldBuilder { + + private String name; + private Type nameType = Type.PATH; + private FieldType type = FieldType.IMPLICIT; + private int order = Integer.MAX_VALUE; + + /** + * Configure the field type. + * + * @param fieldType + * @return + */ + public MongoFieldBuilder fieldType(FieldType fieldType) { + + this.type = fieldType; + return this; + } + + /** + * Configure the field name as key. Key field names are used as-is without applying path segmentation splitting + * rules. + * + * @param fieldName + * @return + */ + public MongoFieldBuilder name(String fieldName) { + + Assert.hasText(fieldName, "Field name must not be empty"); + + this.name = fieldName; + this.nameType = Type.KEY; + return this; + } + + /** + * Configure the field name as path. 
Path field names are applied as paths potentially pointing into subdocuments. + * + * @param path + * @return + */ + public MongoFieldBuilder path(String path) { + + Assert.hasText(path, "Field path (name) must not be empty"); + + this.name = path; + this.nameType = Type.PATH; + return this; + } + + /** + * Configure the field order, defaulting to {@link Integer#MAX_VALUE} (undefined). + * + * @param order + * @return + */ + public MongoFieldBuilder order(int order) { + + this.order = order; + return this; + } + + /** + * Build a new {@link MongoField}. + * + * @return a new {@link MongoField}. + */ + public MongoField build() { + return new MongoField(new FieldName(name, nameType), type, order); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoId.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoId.java new file mode 100644 index 0000000000..6e1eb40324 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoId.java @@ -0,0 +1,62 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.annotation.Id; + +/** + * {@link MongoId} represents a MongoDB specific {@link Id} annotation that allows customizing {@literal id} conversion. + * Id properties use {@link org.springframework.data.mongodb.core.mapping.FieldType#IMPLICIT} as the default + * {@literal id's} target type. This means that the actual property value is used. No conversion attempts to any other + * type are made.
          + * In contrast to {@link Id @Id}, {@link String} {@literal id's} are stored as the such even when the actual value + * represents a valid {@link org.bson.types.ObjectId#isValid(String) ObjectId hex String}. To trigger {@link String} to + * {@link org.bson.types.ObjectId} conversion use {@link MongoId#targetType() @MongoId(FieldType.OBJECT_ID)}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + */ +@Id +@Field +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.FIELD, ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +public @interface MongoId { + + /** + * @return the preferred id type. + * @see #targetType() + */ + @AliasFor(annotation = Field.class, attribute="targetType") + FieldType value() default FieldType.IMPLICIT; + + /** + * Get the preferred {@literal _id} type to be used. Defaults to {@link FieldType#IMPLICIT} which uses the property's + * type. If defined different, the given value is attempted to be converted into the desired target type via + * {@link org.springframework.data.mongodb.core.convert.MongoConverter#convertId(Object, Class)}. + * + * @return the preferred {@literal id} type. {@link FieldType#IMPLICIT} by default. + */ + @AliasFor(annotation = Field.class, attribute="targetType") + FieldType targetType() default FieldType.IMPLICIT; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java index 5ddb3d8e25..76c0269861 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -26,6 +26,7 @@ import org.springframework.data.mapping.model.Property; import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.util.NullableWrapperConverters; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; @@ -35,14 +36,18 @@ * * @author Jon Brisbin * @author Oliver Gierke + * @author Christoph Strobl */ -public class MongoMappingContext extends AbstractMappingContext, MongoPersistentProperty> +public class MongoMappingContext extends AbstractMappingContext, MongoPersistentProperty> implements ApplicationContextAware { private static final FieldNamingStrategy DEFAULT_NAMING_STRATEGY = PropertyNameFieldNamingStrategy.INSTANCE; private FieldNamingStrategy fieldNamingStrategy = DEFAULT_NAMING_STRATEGY; - private @Nullable ApplicationContext context; + private boolean autoIndexCreation = false; + + @Nullable + private ApplicationContext applicationContext; /** * Creates a new {@link MongoMappingContext}. @@ -62,47 +67,75 @@ public void setFieldNamingStrategy(@Nullable FieldNamingStrategy fieldNamingStra this.fieldNamingStrategy = fieldNamingStrategy == null ? 
DEFAULT_NAMING_STRATEGY : fieldNamingStrategy; } - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.context.AbstractMappingContext#shouldCreatePersistentEntityFor(org.springframework.data.util.TypeInformation) - */ @Override protected boolean shouldCreatePersistentEntityFor(TypeInformation type) { + + if (NullableWrapperConverters.supports(type.getType())) { + return false; + } + return !MongoSimpleTypes.HOLDER.isSimpleType(type.getType()) && !AbstractMap.class.isAssignableFrom(type.getType()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.AbstractMappingContext#createPersistentProperty(java.lang.reflect.Field, java.beans.PropertyDescriptor, org.springframework.data.mapping.MutablePersistentEntity, org.springframework.data.mapping.SimpleTypeHolder) - */ @Override - public MongoPersistentProperty createPersistentProperty(Property property, BasicMongoPersistentEntity owner, + public MongoPersistentProperty createPersistentProperty(Property property, MongoPersistentEntity owner, SimpleTypeHolder simpleTypeHolder) { - return new CachingMongoPersistentProperty(property, owner, simpleTypeHolder, fieldNamingStrategy); + + CachingMongoPersistentProperty cachingMongoPersistentProperty = new CachingMongoPersistentProperty(property, owner, simpleTypeHolder, fieldNamingStrategy); + cachingMongoPersistentProperty.validate(); + return cachingMongoPersistentProperty; } - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.BasicMappingContext#createPersistentEntity(org.springframework.data.util.TypeInformation, org.springframework.data.mapping.model.MappingContext) - */ @Override protected BasicMongoPersistentEntity createPersistentEntity(TypeInformation typeInformation) { + return new BasicMongoPersistentEntity<>(typeInformation); + } - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity(typeInformation); + @Override + public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { - if 
(context != null) { - entity.setApplicationContext(context); - } + this.applicationContext = applicationContext; + super.setApplicationContext(applicationContext); + } - return entity; + /** + * Returns whether auto-index creation is enabled or disabled.
          + * NOTE: Index creation should happen at a well-defined time that is ideally controlled by the + * application itself. + * + * @return {@literal true} when auto-index creation is enabled; {@literal false} otherwise.
          + * INFO: As of 3.x the default will is set to {@literal false} was {@literal true} in 2.x. + * @since 2.2 + * @see org.springframework.data.mongodb.core.index.Indexed + */ + public boolean isAutoIndexCreation() { + return autoIndexCreation; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) + /** + * Enables/disables auto-index creation.
          + * NOTE:Index creation should happen at a well-defined time that is ideally controlled by the + * application itself. + * + * @param autoCreateIndexes set to {@literal true} to enable auto-index creation. + * @since 2.2 + * @see org.springframework.data.mongodb.core.index.Indexed */ + public void setAutoIndexCreation(boolean autoCreateIndexes) { + this.autoIndexCreation = autoCreateIndexes; + } + + @Nullable @Override - public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { - this.context = applicationContext; + public MongoPersistentEntity getPersistentEntity(MongoPersistentProperty persistentProperty) { + + MongoPersistentEntity entity = super.getPersistentEntity(persistentProperty); + + if (entity == null || !persistentProperty.isUnwrapped()) { + return entity; + } + + return new UnwrappedMongoPersistentEntity<>(entity, new UnwrapEntityContext(persistentProperty)); } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java index 1926412028..e02bd00c8d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,10 @@ */ package org.springframework.data.mongodb.core.mapping; +import java.util.Collection; + import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.model.MutablePersistentEntity; import org.springframework.lang.Nullable; /** @@ -24,7 +27,7 @@ * @author Oliver Gierke * @author Christoph Strobl */ -public interface MongoPersistentEntity extends PersistentEntity { +public interface MongoPersistentEntity extends MutablePersistentEntity { /** * Returns the collection the entity shall be persisted to. @@ -36,17 +39,17 @@ public interface MongoPersistentEntity extends PersistentEntity extends PersistentEntity getEncryptionKeyIds(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java index 65419d2968..e75ac015aa 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,13 @@ */ package org.springframework.data.mongodb.core.mapping; +import java.util.Collection; + import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; import org.springframework.data.mapping.PersistentEntity; import org.springframework.data.mapping.PersistentProperty; +import org.springframework.lang.NonNull; import org.springframework.lang.Nullable; /** @@ -28,6 +31,7 @@ * @author Patryk Wasik * @author Thomas Darimont * @author Christoph Strobl + * @author Divya Srivastava */ public interface MongoPersistentProperty extends PersistentProperty { @@ -38,6 +42,22 @@ public interface MongoPersistentProperty extends PersistentProperty getFieldType(); + /** * Returns the order of the field if defined. Will return -1 if undefined. * @@ -45,6 +65,15 @@ public interface MongoPersistentProperty extends PersistentProperty { - - INSTANCE; - - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ - public String convert(MongoPersistentProperty source) { - return source.getFieldName(); - } - } + @Nullable + DocumentReference getDocumentReference(); /** * Returns whether property access shall be used for reading the property value. This means it will use the getter @@ -121,4 +151,54 @@ public String convert(MongoPersistentProperty source) { * @return */ boolean usePropertyAccess(); + + /** + * @return {@literal true} if the property defines an explicit {@link Field#targetType() target type}. 
+ * @since 2.2 + */ + default boolean hasExplicitWriteTarget() { + + Field field = findAnnotation(Field.class); + return field != null && !FieldType.IMPLICIT.equals(field.targetType()); + } + + /** + * @return {@literal true} if the property should be unwrapped. + * @since 3.2 + */ + default boolean isUnwrapped() { + return isEntity() && isAnnotationPresent(Unwrapped.class); + } + + /** + * @return the resolved encryption keyIds if applicable. An empty {@link Collection} if no keyIds specified. + * {@literal null} no {@link Encrypted} annotation found. + * @since 3.3 + */ + Collection getEncryptionKeyIds(); + + /** + * @return the {@link MongoField} representing the raw field to read/write in a MongoDB document. + * @since 4.2 + */ + MongoField getMongoField(); + + /** + * Simple {@link Converter} implementation to transform a {@link MongoPersistentProperty} into its field name. + * + * @author Oliver Gierke + */ + enum PropertyToFieldNameConverter implements Converter { + + INSTANCE; + + @NonNull + @Override + public String convert(MongoPersistentProperty source) { + if (!source.isUnwrapped()) { + return source.getFieldName(); + } + return ""; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoSimpleTypes.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoSimpleTypes.java index 9069e018d2..3b3a520bc3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoSimpleTypes.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoSimpleTypes.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,59 +16,67 @@ package org.springframework.data.mongodb.core.mapping; import java.math.BigInteger; -import java.util.Collections; -import java.util.HashSet; +import java.time.Instant; import java.util.Set; import java.util.UUID; import java.util.regex.Pattern; -import org.bson.BsonObjectId; +import org.bson.*; import org.bson.types.Binary; +import org.bson.types.Code; import org.bson.types.CodeWScope; +import org.bson.types.CodeWithScope; +import org.bson.types.Decimal128; import org.bson.types.ObjectId; +import org.bson.types.Symbol; import org.springframework.data.mapping.model.SimpleTypeHolder; -import org.springframework.data.mongodb.util.MongoClientVersion; -import org.springframework.util.ClassUtils; import com.mongodb.DBRef; +import com.mongodb.client.model.geojson.Geometry; +import com.mongodb.client.model.geojson.GeometryCollection; +import com.mongodb.client.model.geojson.LineString; +import com.mongodb.client.model.geojson.MultiLineString; +import com.mongodb.client.model.geojson.MultiPoint; +import com.mongodb.client.model.geojson.MultiPolygon; +import com.mongodb.client.model.geojson.Point; +import com.mongodb.client.model.geojson.Polygon; /** * Simple constant holder for a {@link SimpleTypeHolder} enriched with Mongo specific simple types. 
* * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch */ public abstract class MongoSimpleTypes { - public static final Set> AUTOGENERATED_ID_TYPES; + public static final Set> AUTOGENERATED_ID_TYPES = Set.of(ObjectId.class, String.class, BigInteger.class); + private static final Set> MONGO_SIMPLE_TYPES = Set.of(Binary.class, DBRef.class, Decimal128.class, + org.bson.Document.class, Code.class, CodeWScope.class, CodeWithScope.class, ObjectId.class, Pattern.class, + Symbol.class, UUID.class, Instant.class, BinaryVector.class, BsonValue.class, BsonNumber.class, BsonType.class, + BsonArray.class, BsonSymbol.class, BsonUndefined.class, BsonMinKey.class, BsonMaxKey.class, BsonNull.class, + BsonBinary.class, BsonBoolean.class, BsonDateTime.class, BsonDbPointer.class, BsonDecimal128.class, + BsonDocument.class, BsonDouble.class, BsonInt32.class, BsonInt64.class, BsonJavaScript.class, + BsonJavaScriptWithScope.class, BsonObjectId.class, BsonRegularExpression.class, BsonString.class, + BsonTimestamp.class, Geometry.class, GeometryCollection.class, LineString.class, MultiLineString.class, + MultiPoint.class, MultiPolygon.class, Point.class, Polygon.class); - static { - Set> classes = new HashSet>(); - classes.add(ObjectId.class); - classes.add(String.class); - classes.add(BigInteger.class); - AUTOGENERATED_ID_TYPES = Collections.unmodifiableSet(classes); + public static final SimpleTypeHolder HOLDER = new SimpleTypeHolder(MONGO_SIMPLE_TYPES, true) { - Set> simpleTypes = new HashSet>(); - simpleTypes.add(DBRef.class); - simpleTypes.add(ObjectId.class); - simpleTypes.add(BsonObjectId.class); - simpleTypes.add(CodeWScope.class); - simpleTypes.add(org.bson.Document.class); - simpleTypes.add(Pattern.class); - simpleTypes.add(Binary.class); - simpleTypes.add(UUID.class); + @Override + public boolean isSimpleType(Class type) { - if (MongoClientVersion.isMongo34Driver()) { - simpleTypes - .add(ClassUtils.resolveClassName("org.bson.types.Decimal128", 
MongoSimpleTypes.class.getClassLoader())); - } + if (type.isEnum()) { + return true; + } - MONGO_SIMPLE_TYPES = Collections.unmodifiableSet(simpleTypes); - } + if (type.getName().startsWith("java.time")) { + return false; + } - private static final Set> MONGO_SIMPLE_TYPES; - public static final SimpleTypeHolder HOLDER = new SimpleTypeHolder(MONGO_SIMPLE_TYPES, true); + return super.isSimpleType(type); + } + }; private MongoSimpleTypes() {} } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoVector.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoVector.java new file mode 100644 index 0000000000..f7e0d1ee3f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoVector.java @@ -0,0 +1,220 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import org.bson.BinaryVector; +import org.bson.Float32BinaryVector; +import org.bson.Int8BinaryVector; +import org.bson.PackedBitBinaryVector; + +import org.springframework.data.domain.Vector; +import org.springframework.util.ObjectUtils; + +/** + * MongoDB-specific extension to {@link Vector} based on Mongo's {@link BinaryVector}. Note that only {@code float32} + * and {@code int8} variants can be represented as floating-point numbers. 
{@code int1} throws + * {@link UnsupportedOperationException} when calling {@link #toFloatArray()} and {@link #toDoubleArray()}. + * + * @author Mark Paluch + * @since 4.5 + */ +public class MongoVector implements Vector { + + private final BinaryVector v; + + MongoVector(BinaryVector v) { + this.v = v; + } + + /** + * Creates a new binary {@link MongoVector} using the given {@link BinaryVector}. + * + * @param v binary vector representation. + * @return the {@link MongoVector} wrapping {@link BinaryVector}. + */ + public static MongoVector of(BinaryVector v) { + return new MongoVector(v); + } + + /** + * Creates a new binary {@link MongoVector} using the given {@code data}. + *

          + * A {@link BinaryVector.DataType#INT8} vector is a vector of 8-bit signed integers where each byte in the vector + * represents an element of a vector, with values in the range {@code [-128, 127]}. + *

          + * NOTE: The byte array is not copied; changes to the provided array will be referenced in the created + * {@code MongoVector} instance. + * + * @param data the byte array representing the {@link BinaryVector.DataType#INT8} vector data. + * @return the {@link MongoVector} containing the given vector values to be represented as binary {@code int8}. + */ + public static MongoVector ofInt8(byte[] data) { + return of(BinaryVector.int8Vector(data)); + } + + /** + * Creates a new binary {@link MongoVector} using the given {@code data}. + *

          + * A {@link BinaryVector.DataType#FLOAT32} vector is a vector of floating-point numbers, where each element in the + * vector is a {@code float}. + *

          + * NOTE: The float array is not copied; changes to the provided array will be referenced in the created + * {@code MongoVector} instance. + * + * @param data the float array representing the {@link BinaryVector.DataType#FLOAT32} vector data. + * @return the {@link MongoVector} containing the given vector values to be represented as binary {@code float32}. + */ + public static MongoVector ofFloat(float... data) { + return of(BinaryVector.floatVector(data)); + } + + /** + * Creates a new binary {@link MongoVector} from the given {@link Vector}. + *

          + * A {@link BinaryVector.DataType#FLOAT32} vector is a vector of floating-point numbers, where each element in the + * vector is a {@code float}. The given {@link Vector} must be able to return a {@link Vector#toFloatArray() float} + * array. + *

          + * NOTE: The float array is not copied; changes to the provided array will be referenced in the created + * {@code MongoVector} instance. + * + * @param v the + * @return the {@link MongoVector} using vector values from the given {@link Vector} to be represented as binary + * float32. + */ + public static MongoVector fromFloat(Vector v) { + return of(BinaryVector.floatVector(v.toFloatArray())); + } + + @Override + public Class getType() { + + if (v instanceof Float32BinaryVector) { + return Float.class; + } + + if (v instanceof Int8BinaryVector) { + return Byte.class; + } + + if (v instanceof PackedBitBinaryVector) { + return Byte.class; + } + + return Number.class; + } + + @Override + public BinaryVector getSource() { + return v; + } + + @Override + public int size() { + + if (v instanceof Float32BinaryVector f) { + return f.getData().length; + } + + if (v instanceof Int8BinaryVector i) { + return i.getData().length; + } + + if (v instanceof PackedBitBinaryVector p) { + return p.getData().length; + } + + return 0; + } + + /** + * {@inheritDoc} + * + * @throws UnsupportedOperationException if the underlying data type is {@code int1} {@link PackedBitBinaryVector}. + */ + @Override + public float[] toFloatArray() { + + if (v instanceof Float32BinaryVector f) { + + float[] result = new float[f.getData().length]; + System.arraycopy(f.getData(), 0, result, 0, result.length); + return result; + } + + if (v instanceof Int8BinaryVector i) { + + byte[] data = i.getData(); + float[] result = new float[data.length]; + for (int j = 0; j < data.length; j++) { + result[j] = data[j]; + } + return result; + } + + throw new UnsupportedOperationException("Cannot return float array for " + v.getClass()); + } + + /** + * {@inheritDoc} + * + * @throws UnsupportedOperationException if the underlying data type is {@code int1} {@link PackedBitBinaryVector}. 
+ */ + @Override + public double[] toDoubleArray() { + + if (v instanceof Float32BinaryVector f) { + + float[] data = f.getData(); + double[] result = new double[data.length]; + for (int i = 0; i < data.length; i++) { + result[i] = data[i]; + } + + return result; + } + + if (v instanceof Int8BinaryVector i) { + + byte[] data = i.getData(); + double[] result = new double[data.length]; + for (int j = 0; j < data.length; j++) { + result[j] = data[j]; + } + return result; + } + + throw new UnsupportedOperationException("Cannot return double array for " + v.getClass()); + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof MongoVector that)) { + return false; + } + return ObjectUtils.nullSafeEquals(v, that.v); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(v); + } + + @Override + public String toString() { + return "MV[" + v + "]"; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/PersistentPropertyTranslator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/PersistentPropertyTranslator.java new file mode 100644 index 0000000000..d78494d23b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/PersistentPropertyTranslator.java @@ -0,0 +1,92 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping; + +import java.util.function.Predicate; + +import org.springframework.data.util.Predicates; +import org.springframework.lang.Nullable; + +/** + * Utility to translate a {@link MongoPersistentProperty} into a corresponding property from a different + * {@link MongoPersistentEntity} by looking it up by name. + *

          + * Mainly used within the framework. + * + * @author Mark Paluch + * @since 3.4 + */ +public class PersistentPropertyTranslator { + + /** + * Translate a {@link MongoPersistentProperty} into a corresponding property from a different + * {@link MongoPersistentEntity}. + * + * @param property must not be {@literal null}. + * @return the translated property. Can be the original {@code property}. + */ + public MongoPersistentProperty translate(MongoPersistentProperty property) { + return property; + } + + /** + * Create a new {@link PersistentPropertyTranslator}. + * + * @param targetEntity must not be {@literal null}. + * @return the property translator to use. + */ + public static PersistentPropertyTranslator create(@Nullable MongoPersistentEntity targetEntity) { + return create(targetEntity, Predicates.isTrue()); + } + + /** + * Create a new {@link PersistentPropertyTranslator} accepting a {@link Predicate filter predicate} whether the + * translation should happen at all. + * + * @param targetEntity must not be {@literal null}. + * @param translationFilter must not be {@literal null}. + * @return the property translator to use. + */ + public static PersistentPropertyTranslator create(@Nullable MongoPersistentEntity targetEntity, + Predicate translationFilter) { + return targetEntity != null ? 
new EntityPropertyTranslator(targetEntity, translationFilter) + : new PersistentPropertyTranslator(); + } + + private static class EntityPropertyTranslator extends PersistentPropertyTranslator { + + private final MongoPersistentEntity targetEntity; + private final Predicate translationFilter; + + EntityPropertyTranslator(MongoPersistentEntity targetEntity, + Predicate translationFilter) { + this.targetEntity = targetEntity; + this.translationFilter = translationFilter; + } + + @Override + public MongoPersistentProperty translate(MongoPersistentProperty property) { + + if (!translationFilter.test(property)) { + return property; + } + + MongoPersistentProperty targetProperty = targetEntity.getPersistentProperty(property.getName()); + return targetProperty != null ? targetProperty : property; + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Queryable.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Queryable.java new file mode 100644 index 0000000000..a0c67f7187 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Queryable.java @@ -0,0 +1,48 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @author Christoph Strobl + * @since 4.5 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE }) +public @interface Queryable { + + /** + * @return empty {@link String} if not set. + */ + String queryType() default ""; + + /** + * @return empty {@link String} if not set. + */ + String queryAttributes() default ""; + + /** + * Set the contention factor + * + * @return the contention factor + */ + long contentionFactor() default -1; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/RangeEncrypted.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/RangeEncrypted.java new file mode 100644 index 0000000000..8b2eccb6ca --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/RangeEncrypted.java @@ -0,0 +1,57 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.5 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.FIELD) +@Encrypted(algorithm = "Range") +@Queryable(queryType = "range") +public @interface RangeEncrypted { + + /** + * Set the contention factor. + * + * @return the contention factor + */ + @AliasFor(annotation = Queryable.class, value = "contentionFactor") + long contentionFactor() default -1; + + /** + * Set the {@literal range} options. + *

          + * Should be valid extended {@link org.bson.Document#parse(String) JSON} representing the range options and including + * the following values: {@code min}, {@code max}, {@code trimFactor} and {@code sparsity}. + *

          + * Please note that values are data type sensitive and may require proper identification via eg. {@code $numberLong}. + * + * @return the {@link org.bson.Document#parse(String) JSON} representation of range options. + */ + @AliasFor(annotation = Queryable.class, value = "queryAttributes") + String rangeOptions() default ""; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ShardKey.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ShardKey.java new file mode 100644 index 0000000000..28a114a918 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ShardKey.java @@ -0,0 +1,148 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +/** + * Value object representing an entities Shard + * Key used to distribute documents across a sharded MongoDB cluster. + *
          + * {@link ShardKey#isImmutable() Immutable} shard keys indicates a fixed value that is not updated (see + * MongoDB + * Reference: Change a Document's Shard Key Value), which allows to skip server round trips in cases where a + * potential shard key change might have occurred. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +public class ShardKey { + + private static final ShardKey NONE = new ShardKey(Collections.emptyList(), null, true); + + private final List propertyNames; + private final @Nullable ShardingStrategy shardingStrategy; + private final boolean immutable; + + private ShardKey(List propertyNames, @Nullable ShardingStrategy shardingStrategy, boolean immutable) { + + this.propertyNames = propertyNames; + this.shardingStrategy = shardingStrategy; + this.immutable = immutable; + } + + /** + * @return the number of properties used to form the shard key. + */ + public int size() { + return propertyNames.size(); + } + + /** + * @return the unmodifiable collection of property names forming the shard key. + */ + public Collection getPropertyNames() { + return propertyNames; + } + + /** + * @return {@literal true} if the shard key of an document does not change. + * @see MongoDB + * Reference: Change a Document's Shard Key Value + */ + public boolean isImmutable() { + return immutable; + } + + /** + * Return whether the shard key represents a sharded key. Return {@literal false} if the key is not sharded. + * + * @return {@literal true} if the key is sharded; {@literal false} otherwise. + */ + public boolean isSharded() { + return !propertyNames.isEmpty(); + } + + /** + * Get the raw MongoDB representation of the {@link ShardKey}. + * + * @return never {@literal null}. 
+ */ + public Document getDocument() { + + Document doc = new Document(); + for (String field : propertyNames) { + doc.append(field, shardingValue()); + } + return doc; + } + + private Object shardingValue() { + return ObjectUtils.nullSafeEquals(ShardingStrategy.HASH, shardingStrategy) ? "hash" : 1; + } + + /** + * {@link ShardKey} indicating no shard key has been defined. + * + * @return {@link #NONE} + */ + public static ShardKey none() { + return NONE; + } + + /** + * Create a new {@link ShardingStrategy#RANGE} shard key. + * + * @param propertyNames must not be {@literal null}. + * @return new instance of {@link ShardKey}. + */ + public static ShardKey range(String... propertyNames) { + return new ShardKey(Arrays.asList(propertyNames), ShardingStrategy.RANGE, false); + } + + /** + * Create a new {@link ShardingStrategy#RANGE} shard key. + * + * @param propertyNames must not be {@literal null}. + * @return new instance of {@link ShardKey}. + */ + public static ShardKey hash(String... propertyNames) { + return new ShardKey(Arrays.asList(propertyNames), ShardingStrategy.HASH, false); + } + + /** + * Turn the given {@link ShardKey} into an {@link #isImmutable() immutable} one. + * + * @param shardKey must not be {@literal null}. + * @return new instance of {@link ShardKey} if the given shard key is not already immutable. + */ + public static ShardKey immutable(ShardKey shardKey) { + + if (shardKey.isImmutable()) { + return shardKey; + } + + return new ShardKey(shardKey.propertyNames, shardKey.shardingStrategy, true); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Sharded.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Sharded.java new file mode 100644 index 0000000000..da537f7948 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Sharded.java @@ -0,0 +1,95 @@ +/* + * Copyright 2020-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.annotation.Persistent; + +/** + * The {@link Sharded} annotation provides meta information about the actual distribution of data. The + * {@link #shardKey()} is used to distribute documents across shards.
          + * Please see the MongoDB Documentation for more information + * about requirements and limitations of sharding. + *
          + * Spring Data adds the shard key to filter queries used for + * {@link com.mongodb.client.MongoCollection#replaceOne(org.bson.conversions.Bson, Object)} operations triggered by + * {@code save} operations on {@link org.springframework.data.mongodb.core.MongoOperations} and + * {@link org.springframework.data.mongodb.core.ReactiveMongoOperations} as well as {@code update/upsert} operations + * replacing/upserting a single existing document as long as the given + * {@link org.springframework.data.mongodb.core.query.UpdateDefinition} holds a full copy of the entity. + *
          + * All other operations that require the presence of the {@literal shard key} in the filter query need to provide the + * information via the {@link org.springframework.data.mongodb.core.query.Query} parameter when invoking the method. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +@Persistent +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.ANNOTATION_TYPE }) +public @interface Sharded { + + /** + * Alias for {@link #shardKey()}. + * + * @return {@literal _id} by default. + * @see #shardKey() + */ + @AliasFor("shardKey") + String[] value() default {}; + + /** + * The shard key determines the distribution of the collection's documents among the cluster's shards. The shard key + * is either a single or multiple indexed properties that exist in every document in the collection. + *
          + * By default the {@literal id} property is used for sharding.
          + * NOTE: Required indexes are not created automatically. Create these either externally, via + * {@link org.springframework.data.mongodb.core.index.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition)} + * or by annotating your domain model with {@link org.springframework.data.mongodb.core.index.Indexed}/ + * {@link org.springframework.data.mongodb.core.index.CompoundIndex} along with enabled + * {@link org.springframework.data.mongodb.config.MongoConfigurationSupport#autoIndexCreation() auto index creation}. + * + * @return an empty key by default. Which indicates to use the entities {@literal id} property. + */ + @AliasFor("value") + String[] shardKey() default {}; + + /** + * The sharding strategy to use for distributing data across sharded clusters. + * + * @return {@link ShardingStrategy#RANGE} by default + */ + ShardingStrategy shardingStrategy() default ShardingStrategy.RANGE; + + /** + * As of MongoDB 4.2 it is possible to change the shard key using update. Using immutable shard keys avoids server + * round trips to obtain an entities actual shard key from the database. + * + * @return {@literal false} by default. + * @see MongoDB + * Reference: Change a Document's Shard Key Value + */ + boolean immutableKey() default false; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ShardingStrategy.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ShardingStrategy.java new file mode 100644 index 0000000000..6fefbf6913 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ShardingStrategy.java @@ -0,0 +1,35 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +/** + * @author Christoph Strobl + * @since 3.0 + */ +public enum ShardingStrategy { + + /** + * Ranged sharding involves dividing data into ranges based on the shard key values. Each chunk is then assigned a + * range based on the shard key values. + */ + RANGE, + + /** + * Hashed Sharding involves computing a hash of the shard key field’s value. Each chunk is then assigned a range based + * on the hashed shard key values. + */ + HASH +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TextScore.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TextScore.java index 512e2f7e33..349cc191f9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TextScore.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TextScore.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -26,9 +26,11 @@ /** * {@link TextScore} marks the property to be considered as the on server calculated {@literal textScore} when doing * full text search.
          - * NOTE Property will not be written when saving entity. + * NOTE Property will not be written when saving entity and may be {@literal null} if the document is retrieved + * by a regular (i.e. {@literal $text}) query. * * @author Christoph Strobl + * @author Mark Paluch * @since 1.6 */ @ReadOnlyProperty diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java new file mode 100644 index 0000000000..efe0cd8703 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java @@ -0,0 +1,113 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.mongodb.core.timeseries.Granularity; + +/** + * Identifies a domain object to be persisted to a MongoDB Time Series collection. 
+ * + * @author Christoph Strobl + * @author Ben Foster + * @since 3.3 + * @see https://docs.mongodb.com/manual/core/timeseries-collections + */ +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE }) +@Document +public @interface TimeSeries { + + /** + * The collection the document representing the entity is supposed to be stored in. If not configured, a default + * collection name will be derived from the type's name. The attribute supports SpEL expressions to dynamically + * calculate the collection based on a per operation basis. + * + * @return the name of the collection to be used. + * @see Document#collection() + */ + @AliasFor(annotation = Document.class, attribute = "collection") + String collection() default ""; + + /** + * Name of the property which contains the date in each time series document.
          + * Translation of property names to {@link Field#name() annotated fieldnames} will be considered during the mapping + * process. + * + * @return never {@literal null}. + */ + String timeField(); + + /** + * The name of the field which contains metadata in each time series document. Should not be the {@literal id} nor + * {@link #timeField()} nor point to an {@literal array} or {@link java.util.Collection}.
          + * Translation of property names to {@link Field#name() annotated fieldnames} will be considered during the mapping + * process. + * + * @return empty {@link String} by default. + */ + String metaField() default ""; + + /** + * Select the {@link Granularity granularity} parameter to define how data in the time series collection is organized. + * + * @return {@link Granularity#DEFAULT server default} by default. + */ + Granularity granularity() default Granularity.DEFAULT; + + /** + * Defines the collation to apply when executing a query or creating indexes. + * + * @return an empty {@link String} by default. + * @see Document#collation() + */ + @AliasFor(annotation = Document.class, attribute = "collation") + String collation() default ""; + + /** + * Configure the timeout after which the document should expire. + * Defaults to an empty {@link String} for no expiry. Accepts numeric values followed by their unit of measure: + *

            + *
          • d: Days
          • + *
          • h: Hours
          • + *
          • m: Minutes
          • + *
          • s: Seconds
          • + *
          • Alternatively: A Spring {@literal template expression}. The expression can result in a + * {@link java.time.Duration} or a valid expiration {@link String} according to the already mentioned + * conventions.
          • + *
          + * Supports ISO-8601 style. + * + *
          +	 * @TimeSeries(expireAfter = "10s") String expireAfterTenSeconds;
          +	 * @TimeSeries(expireAfter = "1d") String expireAfterOneDay;
          +	 * @TimeSeries(expireAfter = "P2D") String expireAfterTwoDays;
          +	 * @TimeSeries(expireAfter = "#{@mySpringBean.timeout}") String expireAfterTimeoutObtainedFromSpringBean;
          +	 * @TimeSeries(expireAfter = "${my.property.timeout}") String expireAfterTimeoutObtainedFromProperty;
          +	 * 
          + * + * @return empty by default. + * @since 4.4 + */ + String expireAfter() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java new file mode 100644 index 0000000000..b3b73397ff --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java @@ -0,0 +1,57 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping; + +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +/** + * @author Christoph Strobl + * @author Rogério Meneguelli Gatto + * @since 3.2 + */ +class UnwrapEntityContext { + + private final MongoPersistentProperty property; + + public UnwrapEntityContext(MongoPersistentProperty property) { + this.property = property; + } + + public MongoPersistentProperty getProperty() { + return property; + } + + @Override + public boolean equals(@Nullable Object obj) { + + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + UnwrapEntityContext that = (UnwrapEntityContext) obj; + return ObjectUtils.nullSafeEquals(property, that.property); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(property); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Unwrapped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Unwrapped.java new file mode 100644 index 0000000000..10a0639fb6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Unwrapped.java @@ -0,0 +1,136 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import javax.annotation.meta.When; + +import org.springframework.core.annotation.AliasFor; + +/** + * The annotation to configure a value object as flattened out in the target document. + *
          + * Depending on the {@link OnEmpty value} of {@link #onEmpty()} the property is set to {@literal null} or an empty + * instance in the case all unwrapped values are {@literal null} when reading from the result set. + * + * @author Christoph Strobl + * @since 3.2 + */ +@Documented +@Retention(value = RetentionPolicy.RUNTIME) +@Target(value = { ElementType.ANNOTATION_TYPE, ElementType.FIELD, ElementType.METHOD }) +public @interface Unwrapped { + + /** + * Set the load strategy for the unwrapped object if all contained fields yield {@literal null} values. + *
          + * {@link Nullable @Unwrapped.Nullable} and {@link Empty @Unwrapped.Empty} offer shortcuts for this. + * + * @return never {@link} null. + */ + OnEmpty onEmpty(); + + /** + * @return prefix for columns in the unwrapped value object. An empty {@link String} by default. + */ + String prefix() default ""; + + /** + * Load strategy to be used {@link Unwrapped#onEmpty()}. + * + * @author Christoph Strobl + */ + enum OnEmpty { + USE_NULL, USE_EMPTY + } + + /** + * Shortcut for a nullable unwrapped property. + * + *
          +	 * @Unwrapped.Nullable private Address address;
          +	 * 
          + * + * as alternative to the more verbose + * + *
          +	 * @Unwrapped(onEmpty = USE_NULL) @javax.annotation.Nonnull(when = When.MAYBE) private Address address;
          +	 * 
          + * + * @author Christoph Strobl + * @see Unwrapped#onEmpty() + */ + @Unwrapped(onEmpty = OnEmpty.USE_NULL) + @Documented + @Retention(RetentionPolicy.RUNTIME) + @Target({ ElementType.FIELD, ElementType.METHOD }) + @javax.annotation.Nonnull(when = When.MAYBE) + @interface Nullable { + + /** + * @return prefix for columns in the unwrapped value object. An empty {@link String} by default. + */ + @AliasFor(annotation = Unwrapped.class, attribute = "prefix") + String prefix() default ""; + + /** + * @return value for columns in the unwrapped value object. An empty {@link String} by default. + */ + @AliasFor(annotation = Unwrapped.class, attribute = "prefix") + String value() default ""; + } + + /** + * Shortcut for an empty unwrapped property. + * + *
          +	 * @Unwrapped.Empty private Address address;
          +	 * 
          + * + * as alternative to the more verbose + * + *
          +	 * @Unwrapped(onEmpty = USE_EMPTY) @javax.annotation.Nonnull(when = When.NEVER) private Address address;
          +	 * 
          + * + * @author Christoph Strobl + * @see Unwrapped#onEmpty() + */ + @Unwrapped(onEmpty = OnEmpty.USE_EMPTY) + @Documented + @Retention(RetentionPolicy.RUNTIME) + @Target({ ElementType.FIELD, ElementType.METHOD }) + @javax.annotation.Nonnull(when = When.NEVER) + @interface Empty { + + /** + * @return prefix for columns in the unwrapped value object. An empty {@link String} by default. + */ + @AliasFor(annotation = Unwrapped.class, attribute = "prefix") + String prefix() default ""; + + /** + * @return value for columns in the unwrapped value object. An empty {@link String} by default. + */ + @AliasFor(annotation = Unwrapped.class, attribute = "prefix") + String value() default ""; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java new file mode 100644 index 0000000000..fed08815b8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java @@ -0,0 +1,344 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.Annotation; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Spliterator; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import org.springframework.core.env.Environment; +import org.springframework.data.mapping.*; +import org.springframework.data.mapping.model.PersistentPropertyAccessorFactory; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.spel.EvaluationContextProvider; +import org.springframework.data.util.Streamable; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; + +/** + * Unwrapped variant of {@link MongoPersistentEntity}. + * + * @author Christoph Strobl + * @since 3.2 + * @see Unwrapped + */ +class UnwrappedMongoPersistentEntity implements MongoPersistentEntity { + + private final UnwrapEntityContext context; + private final MongoPersistentEntity delegate; + + public UnwrappedMongoPersistentEntity(MongoPersistentEntity delegate, UnwrapEntityContext context) { + + this.context = context; + this.delegate = delegate; + } + + @Override + public String getCollection() { + return delegate.getCollection(); + } + + @Override + public String getLanguage() { + return delegate.getLanguage(); + } + + @Override + @Nullable + public MongoPersistentProperty getTextScoreProperty() { + return delegate.getTextScoreProperty(); + } + + @Override + public boolean hasTextScoreProperty() { + return delegate.hasTextScoreProperty(); + } + + @Override + @Nullable + public Collation getCollation() { + return delegate.getCollation(); + } + + @Override + public boolean hasCollation() { + return delegate.hasCollation(); + } + + @Override + public ShardKey getShardKey() { + return delegate.getShardKey(); + } + + @Override + public boolean isSharded() { + return delegate.isSharded(); + } + + 
@Override + public String getName() { + return delegate.getName(); + } + + @Override + @Nullable + @Deprecated + public PreferredConstructor getPersistenceConstructor() { + return delegate.getPersistenceConstructor(); + } + + @Override + public InstanceCreatorMetadata getInstanceCreatorMetadata() { + return delegate.getInstanceCreatorMetadata(); + } + + @Override + public boolean isCreatorArgument(PersistentProperty property) { + return delegate.isCreatorArgument(property); + } + + @Override + public boolean isIdProperty(PersistentProperty property) { + return delegate.isIdProperty(property); + } + + @Override + public boolean isVersionProperty(PersistentProperty property) { + return delegate.isVersionProperty(property); + } + + @Override + @Nullable + public MongoPersistentProperty getIdProperty() { + return delegate.getIdProperty(); + } + + @Override + public MongoPersistentProperty getRequiredIdProperty() { + return delegate.getRequiredIdProperty(); + } + + @Override + @Nullable + public MongoPersistentProperty getVersionProperty() { + return delegate.getVersionProperty(); + } + + @Override + public MongoPersistentProperty getRequiredVersionProperty() { + return delegate.getRequiredVersionProperty(); + } + + @Override + @Nullable + public MongoPersistentProperty getPersistentProperty(String name) { + return wrap(delegate.getPersistentProperty(name)); + } + + @Override + public MongoPersistentProperty getRequiredPersistentProperty(String name) { + + MongoPersistentProperty persistentProperty = getPersistentProperty(name); + if (persistentProperty != null) { + return persistentProperty; + } + + throw new IllegalStateException(String.format("Required property %s not found for %s", name, getType())); + } + + @Override + @Nullable + public MongoPersistentProperty getPersistentProperty(Class annotationType) { + return wrap(delegate.getPersistentProperty(annotationType)); + } + + @Override + public Iterable getPersistentProperties(Class annotationType) { + return 
Streamable.of(delegate.getPersistentProperties(annotationType)).stream().map(this::wrap) + .collect(Collectors.toList()); + } + + @Override + public boolean hasIdProperty() { + return delegate.hasIdProperty(); + } + + @Override + public boolean hasVersionProperty() { + return delegate.hasVersionProperty(); + } + + @Override + public Class getType() { + return delegate.getType(); + } + + @Override + public Alias getTypeAlias() { + return delegate.getTypeAlias(); + } + + @Override + public TypeInformation getTypeInformation() { + return delegate.getTypeInformation(); + } + + @Override + public void doWithProperties(PropertyHandler handler) { + + delegate.doWithProperties((PropertyHandler) property -> { + handler.doWithPersistentProperty(wrap(property)); + }); + } + + @Override + public void doWithProperties(SimplePropertyHandler handler) { + + delegate.doWithProperties((SimplePropertyHandler) property -> { + if (property instanceof MongoPersistentProperty mongoPersistentProperty) { + handler.doWithPersistentProperty(wrap(mongoPersistentProperty)); + } else { + handler.doWithPersistentProperty(property); + } + }); + } + + @Override + public void doWithAssociations(AssociationHandler handler) { + delegate.doWithAssociations(handler); + } + + @Override + public void doWithAssociations(SimpleAssociationHandler handler) { + delegate.doWithAssociations(handler); + } + + @Override + @Nullable + public A findAnnotation(Class annotationType) { + return delegate.findAnnotation(annotationType); + } + + @Override + public A getRequiredAnnotation(Class annotationType) throws IllegalStateException { + return delegate.getRequiredAnnotation(annotationType); + } + + @Override + public boolean isAnnotationPresent(Class annotationType) { + return delegate.isAnnotationPresent(annotationType); + } + + @Override + public PersistentPropertyAccessor getPropertyAccessor(B bean) { + return delegate.getPropertyAccessor(bean); + } + + @Override + public PersistentPropertyPathAccessor 
getPropertyPathAccessor(B bean) { + return delegate.getPropertyPathAccessor(bean); + } + + @Override + public IdentifierAccessor getIdentifierAccessor(Object bean) { + return delegate.getIdentifierAccessor(bean); + } + + @Override + public boolean isNew(Object bean) { + return delegate.isNew(bean); + } + + @Override + public boolean isImmutable() { + return delegate.isImmutable(); + } + + @Override + public boolean requiresPropertyPopulation() { + return delegate.requiresPropertyPopulation(); + } + + @Override + public Iterator iterator() { + + List target = new ArrayList<>(); + delegate.iterator().forEachRemaining(it -> target.add(wrap(it))); + return target.iterator(); + } + + @Override + public void forEach(Consumer action) { + delegate.forEach(it -> action.accept(wrap(it))); + } + + @Override + public Spliterator spliterator() { + return delegate.spliterator(); + } + + private MongoPersistentProperty wrap(MongoPersistentProperty source) { + if (source == null) { + return source; + } + return new UnwrappedMongoPersistentProperty(source, context); + } + + @Override + public void addPersistentProperty(MongoPersistentProperty property) { + + } + + @Override + public void addAssociation(Association association) { + + } + + @Override + public void verify() throws MappingException { + + } + + @Override + public void setPersistentPropertyAccessorFactory(PersistentPropertyAccessorFactory factory) { + + } + + @Override + public void setEvaluationContextProvider(EvaluationContextProvider provider) { + + } + + @Override + public void setEnvironment(Environment environment) { + + } + + @Override + public boolean isUnwrapped() { + return context.getProperty().isUnwrapped(); + } + + @Override + public Collection getEncryptionKeyIds() { + return delegate.getEncryptionKeyIds(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java new file mode 100644 index 0000000000..1d4877478f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java @@ -0,0 +1,387 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.Annotation; +import java.lang.reflect.Field; +import java.lang.reflect.Method; +import java.util.Collection; + +import org.springframework.data.mapping.Association; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +/** + * Unwrapped variant of {@link MongoPersistentProperty}. 
+ * + * @author Christoph Strobl + * @author Rogério Meneguelli Gatto + * @since 3.2 + * @see Unwrapped + */ +class UnwrappedMongoPersistentProperty implements MongoPersistentProperty { + + private final MongoPersistentProperty delegate; + private final UnwrapEntityContext context; + + public UnwrappedMongoPersistentProperty(MongoPersistentProperty delegate, UnwrapEntityContext context) { + + this.delegate = delegate; + this.context = context; + } + + @Override + public String getFieldName() { + + if (!context.getProperty().isUnwrapped()) { + return delegate.getFieldName(); + } + + return context.getProperty().findAnnotation(Unwrapped.class).prefix() + delegate.getFieldName(); + } + + @Override + public boolean hasExplicitFieldName() { + return delegate.hasExplicitFieldName() + || !ObjectUtils.isEmpty(context.getProperty().findAnnotation(Unwrapped.class).prefix()); + } + + @Override + public Class getFieldType() { + return delegate.getFieldType(); + } + + @Override + public int getFieldOrder() { + return delegate.getFieldOrder(); + } + + @Override + public boolean writeNullValues() { + return delegate.writeNullValues(); + } + + @Override + public boolean isDbReference() { + return delegate.isDbReference(); + } + + @Override + public boolean isDocumentReference() { + return delegate.isDocumentReference(); + } + + @Override + public boolean isExplicitIdProperty() { + return delegate.isExplicitIdProperty(); + } + + @Override + public boolean isLanguageProperty() { + return delegate.isLanguageProperty(); + } + + @Override + public boolean isExplicitLanguageProperty() { + return delegate.isExplicitLanguageProperty(); + } + + @Override + public boolean isTextScoreProperty() { + return delegate.isTextScoreProperty(); + } + + @Override + @Nullable + public DBRef getDBRef() { + return delegate.getDBRef(); + } + + @Override + @Nullable + public DocumentReference getDocumentReference() { + return delegate.getDocumentReference(); + } + + @Override + public boolean 
usePropertyAccess() { + return delegate.usePropertyAccess(); + } + + @Override + public boolean hasExplicitWriteTarget() { + return delegate.hasExplicitWriteTarget(); + } + + @Override + public PersistentEntity getOwner() { + return delegate.getOwner(); + } + + @Override + public String getName() { + return delegate.getName(); + } + + @Override + public Class getType() { + return delegate.getType(); + } + + @Override + public MongoField getMongoField() { + + if (!context.getProperty().isUnwrapped()) { + return delegate.getMongoField(); + } + + return delegate.getMongoField().withPrefix(context.getProperty().findAnnotation(Unwrapped.class).prefix()); + } + + @Override + public TypeInformation getTypeInformation() { + return delegate.getTypeInformation(); + } + + @Override + public Iterable> getPersistentEntityTypeInformation() { + return delegate.getPersistentEntityTypeInformation(); + } + + @Override + @Nullable + public Method getGetter() { + return delegate.getGetter(); + } + + @Override + public Method getRequiredGetter() { + return delegate.getRequiredGetter(); + } + + @Override + @Nullable + public Method getSetter() { + return delegate.getSetter(); + } + + @Override + public Method getRequiredSetter() { + return delegate.getRequiredSetter(); + } + + @Override + @Nullable + public Method getWither() { + return delegate.getWither(); + } + + @Override + public Method getRequiredWither() { + return delegate.getRequiredWither(); + } + + @Override + @Nullable + public Field getField() { + return delegate.getField(); + } + + @Override + public Field getRequiredField() { + return delegate.getRequiredField(); + } + + @Override + @Nullable + public String getSpelExpression() { + return delegate.getSpelExpression(); + } + + @Override + @Nullable + public Association getAssociation() { + return delegate.getAssociation(); + } + + @Override + public Association getRequiredAssociation() { + return delegate.getRequiredAssociation(); + } + + @Override + public boolean 
isEntity() { + return delegate.isEntity(); + } + + @Override + public boolean isIdProperty() { + return delegate.isIdProperty(); + } + + @Override + public boolean isVersionProperty() { + return delegate.isVersionProperty(); + } + + @Override + public boolean isCollectionLike() { + return delegate.isCollectionLike(); + } + + @Override + public boolean isMap() { + return delegate.isMap(); + } + + @Override + public boolean isArray() { + return delegate.isArray(); + } + + @Override + public boolean isTransient() { + return delegate.isTransient(); + } + + @Override + public boolean isWritable() { + return delegate.isWritable(); + } + + @Override + public boolean isReadable() { + return delegate.isReadable(); + } + + @Override + public boolean isImmutable() { + return delegate.isImmutable(); + } + + @Override + public boolean isAssociation() { + return delegate.isAssociation(); + } + + @Override + public boolean isUnwrapped() { + return delegate.isUnwrapped(); + } + + @Override + public Collection getEncryptionKeyIds() { + return delegate.getEncryptionKeyIds(); + } + + @Override + @Nullable + public Class getComponentType() { + return delegate.getComponentType(); + } + + @Override + public Class getRawType() { + return delegate.getRawType(); + } + + @Override + @Nullable + public Class getMapValueType() { + return delegate.getMapValueType(); + } + + @Override + public Class getActualType() { + return delegate.getActualType(); + } + + @Override + @Nullable + public A findAnnotation(Class annotationType) { + return delegate.findAnnotation(annotationType); + } + + @Override + public A getRequiredAnnotation(Class annotationType) throws IllegalStateException { + return delegate.getRequiredAnnotation(annotationType); + } + + @Override + @Nullable + public A findPropertyOrOwnerAnnotation(Class annotationType) { + return delegate.findPropertyOrOwnerAnnotation(annotationType); + } + + @Override + public boolean isAnnotationPresent(Class annotationType) { + return 
delegate.isAnnotationPresent(annotationType); + } + + @Override + public boolean hasActualTypeAnnotation(Class annotationType) { + return delegate.hasActualTypeAnnotation(annotationType); + } + + @Override + @Nullable + public Class getAssociationTargetType() { + return delegate.getAssociationTargetType(); + } + + @Override + public TypeInformation getAssociationTargetTypeInformation() { + return delegate.getAssociationTargetTypeInformation(); + } + + @Override + public PersistentPropertyAccessor getAccessorForOwner(T owner) { + return delegate.getAccessorForOwner(owner); + } + + @Override + public boolean equals(@Nullable Object obj) { + + if (this == obj) { + return true; + } + + if (obj == delegate) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + UnwrappedMongoPersistentProperty that = (UnwrappedMongoPersistentProperty) obj; + if (!ObjectUtils.nullSafeEquals(delegate, that.delegate)) { + return false; + } + return ObjectUtils.nullSafeEquals(context, that.context); + } + + @Override + public int hashCode() { + + int result = ObjectUtils.nullSafeHashCode(delegate); + result = 31 * result + ObjectUtils.nullSafeHashCode(context); + return result; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractDeleteEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractDeleteEvent.java index 096343faa5..73f4890dec 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractDeleteEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractDeleteEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -47,7 +47,7 @@ public AbstractDeleteEvent(Document document, @Nullable Class type, String co /** * Returns the type for which the {@link AbstractDeleteEvent} shall be invoked for. * - * @return + * @return can be {@literal null}. */ @Nullable public Class getType() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListener.java index 4bea58fded..4e1de58c7a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListener.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,12 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + import org.springframework.context.ApplicationListener; import org.springframework.core.GenericTypeResolver; +import org.springframework.data.mongodb.core.query.SerializationUtils; /** * Base class to implement domain class specific {@link ApplicationListener}s. @@ -30,7 +32,7 @@ */ public abstract class AbstractMongoEventListener implements ApplicationListener> { - private static final Logger LOG = LoggerFactory.getLogger(AbstractMongoEventListener.class); + private static final Log LOG = LogFactory.getLog(AbstractMongoEventListener.class); private final Class domainClass; /** @@ -41,16 +43,11 @@ public AbstractMongoEventListener() { this.domainClass = typeArgument == null ? 
Object.class : typeArgument; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent) - */ @SuppressWarnings({ "rawtypes", "unchecked" }) @Override public void onApplicationEvent(MongoMappingEvent event) { - if (event instanceof AfterLoadEvent) { - AfterLoadEvent afterLoadEvent = (AfterLoadEvent) event; + if (event instanceof AfterLoadEvent afterLoadEvent) { if (domainClass.isAssignableFrom(afterLoadEvent.getType())) { onAfterLoad((AfterLoadEvent) event); @@ -59,16 +56,16 @@ public void onApplicationEvent(MongoMappingEvent event) { return; } - if (event instanceof AbstractDeleteEvent) { + if (event instanceof AbstractDeleteEvent deleteEvent) { - Class eventDomainType = ((AbstractDeleteEvent) event).getType(); + Class eventDomainType = deleteEvent.getType(); if (eventDomainType != null && domainClass.isAssignableFrom(eventDomainType)) { - if (event instanceof BeforeDeleteEvent) { - onBeforeDelete((BeforeDeleteEvent) event); + if (event instanceof BeforeDeleteEvent beforeDeleteEvent) { + onBeforeDelete(beforeDeleteEvent); } - if (event instanceof AfterDeleteEvent) { - onAfterDelete((AfterDeleteEvent) event); + if (event instanceof AfterDeleteEvent afterDeleteEvent) { + onAfterDelete(afterDeleteEvent); } } @@ -83,14 +80,14 @@ public void onApplicationEvent(MongoMappingEvent event) { return; } - if (event instanceof BeforeConvertEvent) { - onBeforeConvert((BeforeConvertEvent) event); - } else if (event instanceof BeforeSaveEvent) { - onBeforeSave((BeforeSaveEvent) event); - } else if (event instanceof AfterSaveEvent) { - onAfterSave((AfterSaveEvent) event); - } else if (event instanceof AfterConvertEvent) { - onAfterConvert((AfterConvertEvent) event); + if (event instanceof BeforeConvertEvent beforeConvertEvent) { + onBeforeConvert(beforeConvertEvent); + } else if (event instanceof BeforeSaveEvent beforeSaveEvent) { + onBeforeSave(beforeSaveEvent); + } else if (event instanceof 
AfterSaveEvent afterSaveEvent) { + onAfterSave(afterSaveEvent); + } else if (event instanceof AfterConvertEvent afterConvertEvent) { + onAfterConvert(afterConvertEvent); } } @@ -103,7 +100,7 @@ public void onApplicationEvent(MongoMappingEvent event) { public void onBeforeConvert(BeforeConvertEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onBeforeConvert({})", event.getSource()); + LOG.debug(String.format("onBeforeConvert(%s)", SerializationUtils.serializeToJsonSafely(event.getSource()))); } } @@ -116,7 +113,7 @@ public void onBeforeConvert(BeforeConvertEvent event) { public void onBeforeSave(BeforeSaveEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onBeforeSave({}, {})", event.getSource(), event.getDocument()); + LOG.debug(String.format("onBeforeSave(%s, %s)", SerializationUtils.serializeToJsonSafely(event.getSource()), SerializationUtils.serializeToJsonSafely(event.getDocument()))); } } @@ -129,7 +126,7 @@ public void onBeforeSave(BeforeSaveEvent event) { public void onAfterSave(AfterSaveEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onAfterSave({}, {})", event.getSource(), event.getDocument()); + LOG.debug(String.format("onAfterSave(%s, %s)", SerializationUtils.serializeToJsonSafely(event.getSource()), SerializationUtils.serializeToJsonSafely(event.getDocument()))); } } @@ -142,7 +139,7 @@ public void onAfterSave(AfterSaveEvent event) { public void onAfterLoad(AfterLoadEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onAfterLoad({})", event.getDocument()); + LOG.debug(String.format("onAfterLoad(%s)", SerializationUtils.serializeToJsonSafely(event.getDocument()))); } } @@ -155,7 +152,7 @@ public void onAfterLoad(AfterLoadEvent event) { public void onAfterConvert(AfterConvertEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onAfterConvert({}, {})", event.getDocument(), event.getSource()); + LOG.debug(String.format("onAfterConvert(%s, %s)", SerializationUtils.serializeToJsonSafely(event.getDocument()), 
SerializationUtils.serializeToJsonSafely(event.getSource()))); } } @@ -168,7 +165,7 @@ public void onAfterConvert(AfterConvertEvent event) { public void onAfterDelete(AfterDeleteEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onAfterDelete({})", event.getDocument()); + LOG.debug(String.format("onAfterDelete(%s)", SerializationUtils.serializeToJsonSafely(event.getDocument()))); } } @@ -181,7 +178,7 @@ public void onAfterDelete(AfterDeleteEvent event) { public void onBeforeDelete(BeforeDeleteEvent event) { if (LOG.isDebugEnabled()) { - LOG.debug("onBeforeDelete({})", event.getDocument()); + LOG.debug(String.format("onBeforeDelete(%s)", SerializationUtils.serializeToJsonSafely(event.getDocument()))); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertCallback.java new file mode 100644 index 0000000000..be6d6fb5e4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertCallback.java @@ -0,0 +1,42 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping.event; + +import org.bson.Document; +import org.springframework.data.mapping.callback.EntityCallback; + +/** + * Callback being invoked after a domain object is materialized from a {@link Document} when reading results. + * + * @author Roman Puchkovskiy + * @author Mark Paluch + * @since 3.0 + * @see org.springframework.data.mapping.callback.EntityCallbacks + */ +@FunctionalInterface +public interface AfterConvertCallback extends EntityCallback { + + /** + * Entity callback method invoked after a domain object is materialized from a {@link Document}. Can return either the + * same or a modified instance of the domain object. + * + * @param entity the domain object (the result of the conversion). + * @param document must not be {@literal null}. + * @param collection name of the collection. + * @return the domain object that is the result of reading it from the {@link Document}. + */ + T onAfterConvert(T entity, Document document, String collection); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertEvent.java index 9c4dba5a21..9421e9184a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterConvertEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterDeleteEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterDeleteEvent.java index 2bd7fc4259..55ccaa5f3f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterDeleteEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterDeleteEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,7 +20,7 @@ /** * Event being thrown after a single or a set of documents has/have been deleted. The {@link Document} held in the event - * will be the query document after it has been mapped onto the domain type handled. + * will be the query document after it has been mapped onto the domain type handled. * * @author Martin Baumgartner * @author Christoph Strobl @@ -33,7 +33,7 @@ public class AfterDeleteEvent extends AbstractDeleteEvent { /** * Creates a new {@link AfterDeleteEvent} for the given {@link Document}, type and collectionName. * - * @param dbo must not be {@literal null}. + * @param document must not be {@literal null}. 
* @param type may be {@literal null}. * @param collectionName must not be {@literal null}. * @since 1.8 diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterLoadEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterLoadEvent.java index b58f52dbc8..bd808bfecf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterLoadEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterLoadEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -44,14 +44,14 @@ public AfterLoadEvent(Document document, Class type, String collectionName) { super(document, document, collectionName); - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); this.type = type; } /** * Returns the type for which the {@link AfterLoadEvent} shall be invoked for. * - * @return + * @return never {@literal null}. 
*/ public Class getType() { return type; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveCallback.java new file mode 100644 index 0000000000..3489cfce03 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveCallback.java @@ -0,0 +1,40 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import org.bson.Document; +import org.springframework.data.mapping.callback.EntityCallback; + +/** + * Entity callback triggered after save of a {@link Document}. + * + * @author Roman Puchkovskiy + * @since 3.0 + */ +@FunctionalInterface +public interface AfterSaveCallback extends EntityCallback { + + /** + * Entity callback method invoked after a domain object is saved. Can return either the same or a modified instance of + * the domain object. + * + * @param entity the domain object that was saved. + * @param document {@link Document} representing the {@code entity}. + * @param collection name of the collection. + * @return the domain object that was persisted. 
+ */ + T onAfterSave(T entity, Document document, String collection); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveEvent.java index 44584c5e3d..a2786ff011 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AuditingEventListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AuditingEntityCallback.java similarity index 54% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AuditingEventListener.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AuditingEntityCallback.java index 5b72f81cf9..df5ecc1e92 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AuditingEventListener.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/AuditingEntityCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. 
+ * Copyright 2019-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,53 +15,41 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import java.util.Optional; - import org.springframework.beans.factory.ObjectFactory; -import org.springframework.context.ApplicationListener; import org.springframework.core.Ordered; import org.springframework.data.auditing.AuditingHandler; import org.springframework.data.auditing.IsNewAwareAuditingHandler; +import org.springframework.data.mapping.callback.EntityCallback; import org.springframework.data.mapping.context.MappingContext; import org.springframework.util.Assert; /** - * Event listener to populate auditing related fields on an entity about to be saved. + * {@link EntityCallback} to populate auditing related fields on an entity about to be saved. * - * @author Oliver Gierke - * @author Thomas Darimont + * @author Mark Paluch + * @since 2.2 */ -public class AuditingEventListener implements ApplicationListener>, Ordered { +public class AuditingEntityCallback implements BeforeConvertCallback, Ordered { private final ObjectFactory auditingHandlerFactory; /** - * Creates a new {@link AuditingEventListener} using the given {@link MappingContext} and {@link AuditingHandler} + * Creates a new {@link AuditingEntityCallback} using the given {@link MappingContext} and {@link AuditingHandler} * provided by the given {@link ObjectFactory}. * * @param auditingHandlerFactory must not be {@literal null}. 
*/ - public AuditingEventListener(ObjectFactory auditingHandlerFactory) { + public AuditingEntityCallback(ObjectFactory auditingHandlerFactory) { - Assert.notNull(auditingHandlerFactory, "IsNewAwareAuditingHandler must not be null!"); + Assert.notNull(auditingHandlerFactory, "IsNewAwareAuditingHandler must not be null"); this.auditingHandlerFactory = auditingHandlerFactory; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent) - */ @Override - public void onApplicationEvent(BeforeConvertEvent event) { - - Optional.ofNullable(event.getSource())// - .ifPresent(it -> auditingHandlerFactory.getObject().markAudited(it)); + public Object onBeforeConvert(Object entity, String collection) { + return auditingHandlerFactory.getObject().markAudited(entity); } - /* - * (non-Javadoc) - * @see org.springframework.core.Ordered#getOrder() - */ @Override public int getOrder() { return 100; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeanValidationDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeanValidationDelegate.java new file mode 100644 index 0000000000..91107834f3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeanValidationDelegate.java @@ -0,0 +1,72 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import jakarta.validation.ConstraintViolation; +import jakarta.validation.Validator; + +import java.util.Set; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import org.springframework.util.Assert; + +/** + * Delegate to handle common calls to Bean {@link Validator Validation}. + * + * @author Mark Paluch + * @since 4.5 + */ +class BeanValidationDelegate { + + private static final Log LOG = LogFactory.getLog(BeanValidationDelegate.class); + + private final Validator validator; + + /** + * Creates a new {@link BeanValidationDelegate} using the given {@link Validator}. + * + * @param validator must not be {@literal null}. + */ + public BeanValidationDelegate(Validator validator) { + Assert.notNull(validator, "Validator must not be null"); + this.validator = validator; + } + + /** + * Validate the given object. + * + * @param object the object to validate. + * @return set of constraint violations. + */ + public Set<ConstraintViolation<Object>> validate(Object object) { + + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("Validating object: %s", object)); + } + + Set<ConstraintViolation<Object>> violations = validator.validate(object); + + if (!violations.isEmpty()) { + if (LOG.isDebugEnabled()) { + // log at debug to match the isDebugEnabled() guard above; using info here would suppress the message whenever debug logging is off + LOG.debug(String.format("During object: %s validation violations found: %s", object, violations)); + } + } + + return violations; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertCallback.java new file mode 100644 index 0000000000..3315a1d360 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertCallback.java @@ -0,0 +1,39 @@ +/* + * Copyright 2019-2025 the original author or authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import org.springframework.data.mapping.callback.EntityCallback; + +/** + * Callback being invoked before a domain object is converted to be persisted. + * + * @author Mark Paluch + * @since 2.2 + * @see org.springframework.data.mapping.callback.EntityCallbacks + */ +@FunctionalInterface +public interface BeforeConvertCallback extends EntityCallback { + + /** + * Entity callback method invoked before a domain object is converted to be persisted. Can return either the same or a + * modified instance of the domain object. + * + * @param entity the domain object to save. + * @param collection name of the collection. + * @return the domain object to be persisted. + */ + T onBeforeConvert(T entity, String collection); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertEvent.java index cb425f6f94..33d992d9ab 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeConvertEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeDeleteEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeDeleteEvent.java index d3dceb578c..49d509fb43 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeDeleteEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeDeleteEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveCallback.java new file mode 100644 index 0000000000..27ea5dce69 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveCallback.java @@ -0,0 +1,46 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import org.bson.Document; +import org.springframework.data.mapping.callback.EntityCallback; + +/** + * Entity callback triggered before save of a document. + * + * @author Mark Paluch + * @author Michael J. Simons + * @since 2.2 + * @see org.springframework.data.mapping.callback.EntityCallbacks + */ +@FunctionalInterface +public interface BeforeSaveCallback extends EntityCallback { + + /** + * Entity callback method invoked before a domain object is saved. Can return either the same or a modified instance + * of the domain object and can modify {@link Document} contents. This method is called after converting the + * {@code entity} to a {@link Document} so effectively the document is used as outcome of invoking this callback. + * Changes to the domain object are not taken into account for saving, only changes to the document. Only transient + * fields of the entity should be changed in this callback. To change persistent the entity before being converted, + * use the {@link BeforeConvertCallback}. + * + * @param entity the domain object to save. + * @param document {@link Document} representing the {@code entity}. + * @param collection name of the collection. + * @return the domain object to be persisted. 
+ */ + T onBeforeSave(T entity, Document document, String collection); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveEvent.java index 6c053d9a6a..2aa3317f9e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/BeforeSaveEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListener.java index 09c3925d89..c36e33e777 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListener.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,8 +17,9 @@ import static org.springframework.data.mongodb.core.query.SerializationUtils.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + import org.springframework.context.ApplicationListener; /** @@ -31,68 +32,54 @@ */ public class LoggingEventListener extends AbstractMongoEventListener { - private static final Logger LOGGER = LoggerFactory.getLogger(LoggingEventListener.class); + private static final Log LOGGER = LogFactory.getLog(LoggingEventListener.class); - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeConvert(org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent) - */ @Override public void onBeforeConvert(BeforeConvertEvent event) { - LOGGER.info("onBeforeConvert: {}", event.getSource()); + if(LOGGER.isInfoEnabled()) { + LOGGER.info(String.format("onBeforeConvert: %s", event.getSource())); + } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeSave(org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent) - */ @Override public void onBeforeSave(BeforeSaveEvent event) { - LOGGER.info("onBeforeSave: {}, {}", event.getSource(), serializeToJsonSafely(event.getDocument())); + if(LOGGER.isInfoEnabled()) { + LOGGER.info(String.format("onBeforeSave: %s, %s", event.getSource(), serializeToJsonSafely(event.getDocument()))); + } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onAfterSave(org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent) - */ 
@Override public void onAfterSave(AfterSaveEvent event) { - LOGGER.info("onAfterSave: {}, {}", event.getSource(), serializeToJsonSafely(event.getDocument())); + if(LOGGER.isInfoEnabled()) { + LOGGER.info(String.format("onAfterSave: %s, %s", event.getSource(), serializeToJsonSafely(event.getDocument()))); + } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onAfterLoad(org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent) - */ @Override public void onAfterLoad(AfterLoadEvent event) { - LOGGER.info("onAfterLoad: {}", serializeToJsonSafely(event.getDocument())); + if(LOGGER.isInfoEnabled()) { + LOGGER.info(String.format("onAfterLoad: %s", serializeToJsonSafely(event.getDocument()))); + } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onAfterConvert(org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent) - */ @Override public void onAfterConvert(AfterConvertEvent event) { - LOGGER.info("onAfterConvert: {}, {}", serializeToJsonSafely(event.getDocument()), event.getSource()); + if(LOGGER.isInfoEnabled()) { + LOGGER.info(String.format("onAfterConvert: %s, %s", serializeToJsonSafely(event.getDocument()), event.getSource())); + } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onAfterDelete(org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent) - */ @Override public void onAfterDelete(AfterDeleteEvent event) { - LOGGER.info("onAfterDelete: {}", serializeToJsonSafely(event.getDocument())); + if(LOGGER.isInfoEnabled()) { + LOGGER.info(String.format("onAfterDelete: %s", serializeToJsonSafely(event.getDocument()))); + } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeDelete(org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent) - */ @Override public void 
onBeforeDelete(BeforeDeleteEvent event) { - LOGGER.info("onBeforeDelete: {}", serializeToJsonSafely(event.getDocument())); + if(LOGGER.isInfoEnabled()) { + LOGGER.info(String.format("onBeforeDelete: %s", serializeToJsonSafely(event.getDocument()))); + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/MongoMappingEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/MongoMappingEvent.java index a59ab657f9..eec9a3edf1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/MongoMappingEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/MongoMappingEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,8 @@ */ package org.springframework.data.mongodb.core.mapping.event; +import java.util.function.Function; + import org.bson.Document; import org.springframework.context.ApplicationEvent; import org.springframework.lang.Nullable; @@ -63,13 +65,24 @@ public MongoMappingEvent(T source, @Nullable Document document, @Nullable String return collectionName; } - /* - * (non-Javadoc) - * @see java.util.EventObject#getSource() - */ @SuppressWarnings({ "unchecked" }) @Override public T getSource() { return (T) super.getSource(); } + + /** + * Allows client code to change the underlying source instance by applying the given {@link Function}. 
+ * + * @param mapper the {@link Function} to apply, will only be applied if the source is not {@literal null}. + * @since 2.1 + */ + final void mapSource(Function mapper) { + + if (source == null) { + return; + } + + this.source = mapper.apply(getSource()); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAfterConvertCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAfterConvertCallback.java new file mode 100644 index 0000000000..5dc1f7c69f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAfterConvertCallback.java @@ -0,0 +1,44 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import org.bson.Document; +import org.reactivestreams.Publisher; + +import org.springframework.data.mapping.callback.EntityCallback; + +/** + * Callback being invoked after a domain object is materialized from a {@link Document} when reading results. 
+ * + * @author Roman Puchkovskiy + * @author Mark Paluch + * @since 3.0 + * @see org.springframework.data.mapping.callback.ReactiveEntityCallbacks + */ +@FunctionalInterface +public interface ReactiveAfterConvertCallback extends EntityCallback { + + /** + * Entity callback method invoked after a domain object is materialized from a {@link Document}. Can return either the + * same or a modified instance of the domain object. + * + * @param entity the domain object (the result of the conversion). + * @param document must not be {@literal null}. + * @param collection name of the collection. + * @return a {@link Publisher} emitting the domain object that is the result of reading it from the {@link Document}. + */ + Publisher onAfterConvert(T entity, Document document, String collection); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAfterSaveCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAfterSaveCallback.java new file mode 100644 index 0000000000..4e8302d84c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAfterSaveCallback.java @@ -0,0 +1,44 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping.event; + +import org.bson.Document; +import org.reactivestreams.Publisher; + +import org.springframework.data.mapping.callback.EntityCallback; +import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; + +/** + * Entity callback triggered after save of a {@link Document}. + * + * @author Roman Puchkovskiy + * @since 3.0 + * @see ReactiveEntityCallbacks + */ +@FunctionalInterface +public interface ReactiveAfterSaveCallback extends EntityCallback { + + /** + * Entity callback method invoked after a domain object is saved. Can return either the same or a modified instance of + * the domain object. + * + * @param entity the domain object that was saved. + * @param document {@link Document} representing the {@code entity}. + * @param collection name of the collection. + * @return a {@link Publisher} emitting the domain object to be returned to the caller. + */ + Publisher onAfterSave(T entity, Document document, String collection); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAuditingEntityCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAuditingEntityCallback.java new file mode 100644 index 0000000000..62fe054145 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveAuditingEntityCallback.java @@ -0,0 +1,59 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import org.reactivestreams.Publisher; + +import org.springframework.beans.factory.ObjectFactory; +import org.springframework.core.Ordered; +import org.springframework.data.auditing.AuditingHandler; +import org.springframework.data.auditing.ReactiveIsNewAwareAuditingHandler; +import org.springframework.data.mapping.callback.EntityCallback; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.util.Assert; + +/** + * Reactive {@link EntityCallback} to populate auditing related fields on an entity about to be saved. + * + * @author Mark Paluch + * @since 2.2 + */ +public class ReactiveAuditingEntityCallback implements ReactiveBeforeConvertCallback, Ordered { + + private final ObjectFactory auditingHandlerFactory; + + /** + * Creates a new {@link ReactiveAuditingEntityCallback} using the given {@link MappingContext} and + * {@link AuditingHandler} provided by the given {@link ObjectFactory}. + * + * @param auditingHandlerFactory must not be {@literal null}. 
+ */ + public ReactiveAuditingEntityCallback(ObjectFactory auditingHandlerFactory) { + + Assert.notNull(auditingHandlerFactory, "IsNewAwareAuditingHandler must not be null"); + this.auditingHandlerFactory = auditingHandlerFactory; + } + + @Override + public Publisher onBeforeConvert(Object entity, String collection) { + return auditingHandlerFactory.getObject().markAudited(entity); + } + + @Override + public int getOrder() { + return 100; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveBeforeConvertCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveBeforeConvertCallback.java new file mode 100644 index 0000000000..842c734744 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveBeforeConvertCallback.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import org.reactivestreams.Publisher; + +import org.springframework.data.mapping.callback.EntityCallback; + +/** + * Callback being invoked before a domain object is converted to be persisted. 
+ * + * @author Mark Paluch + * @since 2.2 + * @see org.springframework.data.mapping.callback.ReactiveEntityCallbacks + */ +@FunctionalInterface +public interface ReactiveBeforeConvertCallback<T> extends EntityCallback<T> { + + /** + * Entity callback method invoked before a domain object is converted to be persisted. Can return either the same or a + * modified instance of the domain object. + * + * @param entity the domain object to save. + * @param collection name of the collection. + * @return a {@link Publisher} emitting the domain object to be persisted. + */ + Publisher<T> onBeforeConvert(T entity, String collection); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveBeforeSaveCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveBeforeSaveCallback.java new file mode 100644 index 0000000000..e353cb8ecf --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveBeforeSaveCallback.java @@ -0,0 +1,44 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import org.bson.Document; +import org.reactivestreams.Publisher; + +import org.springframework.data.mapping.callback.EntityCallback; + +/** + * Entity callback triggered before save of a document.
+ * + * @author Mark Paluch + * @since 2.2 + * @see org.springframework.data.mapping.callback.ReactiveEntityCallbacks + */ +@FunctionalInterface +public interface ReactiveBeforeSaveCallback extends EntityCallback { + + /** + * Entity callback method invoked before a domain object is saved. Can return either the same or a modified instance + * of the domain object and can modify {@link Document} contents. This method is called after converting the + * {@code entity} to {@link Document} so effectively the document is used as outcome of invoking this callback. + * + * @param entity the domain object to save. + * @param document {@link Document} representing the {@code entity}. + * @param collection name of the collection. + * @return a {@link Publisher} emitting the domain object to be persisted. + */ + Publisher onBeforeSave(T entity, Document document, String collection); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveValidatingEntityCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveValidatingEntityCallback.java new file mode 100644 index 0000000000..7011da90b4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ReactiveValidatingEntityCallback.java @@ -0,0 +1,69 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping.event; + +import jakarta.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolationException; +import jakarta.validation.Validator; +import reactor.core.publisher.Mono; + +import java.util.Set; + +import org.bson.Document; + +import org.springframework.core.Ordered; + +/** + * Reactive variant of JSR-303 dependant entities validator. + *

          + * When it is registered as Spring component its automatically invoked after object to {@link Document} conversion and + * before entities are saved to the database. + * + * @author Mark Paluch + * @author Rene Felgenträger + * @since 4.5 + */ +public class ReactiveValidatingEntityCallback implements ReactiveBeforeSaveCallback<Object>, Ordered { + + private final BeanValidationDelegate delegate; + + /** + * Creates a new {@link ReactiveValidatingEntityCallback} using the given {@link Validator}. + * + * @param validator must not be {@literal null}. + */ + public ReactiveValidatingEntityCallback(Validator validator) { + this.delegate = new BeanValidationDelegate(validator); + } + + @Override + public Mono<Object> onBeforeSave(Object entity, Document document, String collection) { + + Set<ConstraintViolation<Object>> violations = delegate.validate(entity); + + if (!violations.isEmpty()) { + return Mono.error(new ConstraintViolationException(violations)); + } + + return Mono.just(entity); + } + + @Override + public int getOrder() { + return 100; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingEntityCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingEntityCallback.java new file mode 100644 index 0000000000..260652616e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingEntityCallback.java @@ -0,0 +1,68 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import jakarta.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolationException; +import jakarta.validation.Validator; + +import java.util.Set; + +import org.bson.Document; + +import org.springframework.core.Ordered; + +/** + * JSR-303 dependant entities validator. + *

          + * When it is registered as Spring component its automatically invoked after object to {@link Document} conversion and + * before entities are saved to the database. + * + * @author Rene Felgenträger + * @author Mark Paluch + * @since 4.5 + */ +public class ValidatingEntityCallback implements BeforeSaveCallback<Object>, Ordered { + + private final BeanValidationDelegate delegate; + + /** + * Creates a new {@link ValidatingEntityCallback} using the given {@link Validator}. + * + * @param validator must not be {@literal null}. + */ + public ValidatingEntityCallback(Validator validator) { + this.delegate = new BeanValidationDelegate(validator); + } + + @Override + public Object onBeforeSave(Object entity, Document document, String collection) { + + Set<ConstraintViolation<Object>> violations = delegate.validate(entity); + + if (!violations.isEmpty()) { + throw new ConstraintViolationException(violations); + } + + return entity; + } + + @Override + public int getOrder() { + return 100; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListener.java index 6e2344f81d..1854c486f8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListener.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,30 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import java.util.Set; +import jakarta.validation.ConstraintViolation; +import jakarta.validation.ConstraintViolationException; +import jakarta.validation.Validator; -import javax.validation.ConstraintViolationException; -import javax.validation.Validator; +import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.util.Assert; +import org.bson.Document; /** - * javax.validation dependant entities validator. When it is registered as Spring component its automatically invoked - * before entities are saved in database. + * JSR-303 dependant entities validator. + *

          + * When it is registered as Spring component its automatically invoked after object to {@link Document} conversion and + * before entities are saved to the database. * * @author Maciej Walkowiak * @author Oliver Gierke * @author Christoph Strobl + * @deprecated since 4.5, use {@link ValidatingEntityCallback} respectively {@link ReactiveValidatingEntityCallback} + * instead to ensure ordering and interruption of saving when encountering validation constraint violations. */ +@Deprecated(since = "4.5") public class ValidatingMongoEventListener extends AbstractMongoEventListener { - private static final Logger LOG = LoggerFactory.getLogger(ValidatingMongoEventListener.class); - - private final Validator validator; + private final BeanValidationDelegate delegate; /** * Creates a new {@link ValidatingMongoEventListener} using the given {@link Validator}. @@ -44,26 +46,17 @@ public class ValidatingMongoEventListener extends AbstractMongoEventListener event) { - LOG.debug("Validating object: {}", event.getSource()); - Set violations = validator.validate(event.getSource()); + Set> violations = delegate.validate(event.getSource()); if (!violations.isEmpty()) { - - LOG.info("During object: {} validation violations found: {}", event.getSource(), violations); throw new ConstraintViolationException(violations); } } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/GroupBy.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/GroupBy.java deleted file mode 100644 index c1a96d851a..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/GroupBy.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright 2010-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.mapreduce; - -import java.util.Optional; - -import org.bson.Document; -import org.springframework.data.mongodb.core.query.Collation; -import org.springframework.lang.Nullable; - -/** - * Collects the parameters required to perform a group operation on a collection. The query condition and the input - * collection are specified on the group method as method arguments to be consistent with other operations, e.g. - * map-reduce. - * - * @author Mark Pollack - * @author Christoph Strobl - * @author Mark Paluch - */ -public class GroupBy { - - private @Nullable Document initialDocument; - private @Nullable String reduce; - - private Optional keys = Optional.empty(); - private Optional keyFunction = Optional.empty(); - private Optional initial = Optional.empty(); - private Optional finalize = Optional.empty(); - private Optional collation = Optional.empty(); - - public GroupBy(String... keys) { - - Document document = new Document(); - for (String key : keys) { - document.put(key, 1); - } - - this.keys = Optional.of(document); - } - - // NOTE GroupByCommand does not handle keyfunction. - - public GroupBy(@Nullable String key, boolean isKeyFunction) { - - Document document = new Document(); - if (isKeyFunction) { - keyFunction = Optional.ofNullable(key); - } else { - document.put(key, 1); - keys = Optional.of(document); - } - } - - /** - * Create new {@link GroupBy} with the field to group. 
- * - * @param key - * @return - */ - public static GroupBy keyFunction(String key) { - return new GroupBy(key, true); - } - - /** - * Create new {@link GroupBy} with the fields to group. - * - * @param keys - * @return - */ - public static GroupBy key(String... keys) { - return new GroupBy(keys); - } - - /** - * Define the aggregation result document. - * - * @param initialDocument can be {@literal null}. - * @return - */ - public GroupBy initialDocument(@Nullable String initialDocument) { - - initial = Optional.ofNullable(initialDocument); - return this; - } - - /** - * Define the aggregation result document. - * - * @param initialDocument can be {@literal null}. - * @return - */ - public GroupBy initialDocument(@Nullable Document initialDocument) { - - this.initialDocument = initialDocument; - return this; - } - - /** - * Define the aggregation function that operates on the documents during the grouping operation - * - * @param reduceFunction - * @return - */ - public GroupBy reduceFunction(String reduceFunction) { - - reduce = reduceFunction; - return this; - } - - /** - * Define the function that runs each item in the result set before db.collection.group() returns the final value. - * - * @param finalizeFunction - * @return - */ - public GroupBy finalizeFunction(@Nullable String finalizeFunction) { - - finalize = Optional.ofNullable(finalizeFunction); - return this; - } - - /** - * Define the Collation specifying language-specific rules for string comparison. - * - * @param collation can be {@literal null}. - * @return - * @since 2.0 - */ - public GroupBy collation(@Nullable Collation collation) { - - this.collation = Optional.ofNullable(collation); - return this; - } - - /** - * Get the {@link Document} representation of the {@link GroupBy}. 
- * - * @return - */ - public Document getGroupByObject() { - - Document document = new Document(); - - keys.ifPresent(val -> document.append("key", val)); - keyFunction.ifPresent(val -> document.append("$keyf", val)); - - document.put("$reduce", reduce); - document.put("initial", initialDocument); - - initial.ifPresent(val -> document.append("initial", val)); - finalize.ifPresent(val -> document.append("finalize", val)); - collation.ifPresent(val -> document.append("collation", val.toDocument())); - - return document; - } - -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/GroupByResults.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/GroupByResults.java deleted file mode 100644 index 9dd913d86f..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/GroupByResults.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.mapreduce; - -import java.util.Iterator; -import java.util.List; - -import org.bson.Document; -import org.springframework.lang.Nullable; -import org.springframework.util.Assert; - -/** - * Collects the results of executing a group operation. 
- * - * @author Mark Pollack - * @author Christoph Strobl - * @author Mark Paluch - * @param The class in which the results are mapped onto, accessible via an {@link Iterator}. - */ -public class GroupByResults implements Iterable { - - private final List mappedResults; - private final Document rawResults; - - private double count; - private int keys; - private @Nullable String serverUsed; - - public GroupByResults(List mappedResults, Document rawResults) { - - Assert.notNull(mappedResults, "List of mapped results must not be null!"); - Assert.notNull(rawResults, "Raw results must not be null!"); - - this.mappedResults = mappedResults; - this.rawResults = rawResults; - - parseKeys(); - parseCount(); - parseServerUsed(); - } - - public double getCount() { - return count; - } - - public int getKeys() { - return keys; - } - - @Nullable - public String getServerUsed() { - return serverUsed; - } - - public Iterator iterator() { - return mappedResults.iterator(); - } - - public Document getRawResults() { - return rawResults; - } - - private void parseCount() { - - Object object = rawResults.get("count"); - if (object instanceof Number) { - count = ((Number) object).doubleValue(); - } - - } - - private void parseKeys() { - - Object object = rawResults.get("keys"); - if (object instanceof Number) { - keys = ((Number) object).intValue(); - } - } - - private void parseServerUsed() { - - // "serverUsed" : "127.0.0.1:27017" - Object object = rawResults.get("serverUsed"); - if (object instanceof String) { - serverUsed = (String) object; - } - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCounts.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCounts.java index 86291bbe6a..32a9ed5118 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCounts.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCounts.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,16 @@ */ package org.springframework.data.mongodb.core.mapreduce; +import org.springframework.lang.Nullable; + /** * Value object to encapsulate results of a map-reduce count. * * @author Mark Pollack * @author Oliver Gierke + * @deprecated since 3.4 in favor of {@link org.springframework.data.mongodb.core.aggregation}. */ +@Deprecated public class MapReduceCounts { public static final MapReduceCounts NONE = new MapReduceCounts(-1, -1, -1); @@ -55,20 +59,12 @@ public long getOutputCount() { return outputCount; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return "MapReduceCounts [inputCount=" + inputCount + ", emitCount=" + emitCount + ", outputCount=" + outputCount + "]"; } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -82,12 +78,8 @@ public int hashCode() { return Long.valueOf(result).intValue(); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptions.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptions.java index 0f9ce1c32e..9f34ec44e4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,22 +23,24 @@ import org.springframework.data.mongodb.core.query.Collation; import org.springframework.lang.Nullable; -import com.mongodb.MapReduceCommand; +import com.mongodb.client.model.MapReduceAction; /** * @author Mark Pollack * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @deprecated since 3.4 in favor of {@link org.springframework.data.mongodb.core.aggregation}. 
*/ +@Deprecated public class MapReduceOptions { private @Nullable String outputCollection; private Optional outputDatabase = Optional.empty(); - private MapReduceCommand.OutputType outputType = MapReduceCommand.OutputType.REPLACE; - private Map scopeVariables = new HashMap(); - private Map extraOptions = new HashMap(); + private @Nullable MapReduceAction mapReduceAction = MapReduceAction.REPLACE; + private Map scopeVariables = new HashMap<>(); + private Map extraOptions = new HashMap<>(); private @Nullable Boolean jsMode; private Boolean verbose = Boolean.TRUE; private @Nullable Integer limit; @@ -100,23 +102,26 @@ public MapReduceOptions outputDatabase(@Nullable String outputDatabase) { * results of the map-reduce will be returned within the result object. Note that this option is possible only when * the result set fits within the 16MB limit of a single document. * - * @return MapReduceOptions so that methods can be chained in a fluent API style + * @return this. + * @since 3.0 */ - public MapReduceOptions outputTypeInline() { + public MapReduceOptions actionInline() { - this.outputType = MapReduceCommand.OutputType.INLINE; + this.mapReduceAction = null; return this; } + /** * This option will merge new data into the old output collection. In other words, if the same key exists in both the * result set and the old collection, the new key will overwrite the old one. * - * @return MapReduceOptions so that methods can be chained in a fluent API style + * @return this. + * @since 3.0 */ - public MapReduceOptions outputTypeMerge() { + public MapReduceOptions actionMerge() { - this.outputType = MapReduceCommand.OutputType.MERGE; + this.mapReduceAction = MapReduceAction.MERGE; return this; } @@ -125,22 +130,25 @@ public MapReduceOptions outputTypeMerge() { * specified reduce function) will be performed on the two values and the result will be written to the output * collection. If a finalize function was provided, this will be run after the reduce as well. 
* - * @return + * @return this. + * @since 3.0 */ - public MapReduceOptions outputTypeReduce() { - this.outputType = MapReduceCommand.OutputType.REDUCE; + public MapReduceOptions actionReduce() { + + this.mapReduceAction = MapReduceAction.REDUCE; return this; } /** * The output will be inserted into a collection which will atomically replace any existing collection with the same - * name. Note, the default is MapReduceCommand.OutputType.REPLACE + * name. Note, the default is {@link MapReduceAction#REPLACE}. * * @return MapReduceOptions so that methods can be chained in a fluent API style + * @since 3.0 */ - public MapReduceOptions outputTypeReplace() { + public MapReduceOptions actionReplace() { - this.outputType = MapReduceCommand.OutputType.REPLACE; + this.mapReduceAction = MapReduceAction.REPLACE; return this; } @@ -206,23 +214,6 @@ public MapReduceOptions verbose(boolean verbose) { return this; } - /** - * Add additional extra options that may not have a method on this class. This method will help if you use a version - * of this client library with a server version that has added additional map-reduce options that do not yet have an - * method for use in setting them. options - * - * @param key The key option - * @param value The value of the option - * @return MapReduceOptions so that methods can be chained in a fluent API style - * @deprecated since 1.7. - */ - @Deprecated - public MapReduceOptions extraOption(String key, Object value) { - - extraOptions.put(key, value); - return this; - } - /** * Define the Collation specifying language-specific rules for string comparison. 
* @@ -236,15 +227,6 @@ public MapReduceOptions collation(@Nullable Collation collation) { return this; } - /** - * @return - * @deprecated since 1.7 - */ - @Deprecated - public Map getExtraOptions() { - return extraOptions; - } - public Optional getFinalizeFunction() { return this.finalizeFunction; } @@ -267,10 +249,6 @@ public Optional getOutputSharded() { return this.outputSharded; } - public MapReduceCommand.OutputType getOutputType() { - return this.outputType; - } - public Map getScopeVariables() { return this.scopeVariables; } @@ -295,6 +273,25 @@ public Optional getCollation() { return collation; } + /** + * Return the {@link MapReduceAction}. + * + * @return the mapped action or {@literal null} if the action maps to inline output. + * @since 2.0.10 + */ + @Nullable + public MapReduceAction getMapReduceAction() { + return mapReduceAction; + } + + /** + * @return {@literal true} if {@literal inline} output is used. + * @since 2.0.10 + */ + public boolean usesInlineOutput() { + return null == mapReduceAction; + } + public Document getOptionsObject() { Document cmd = new Document(); @@ -328,19 +325,14 @@ protected Document createOutObject() { Document out = new Document(); - switch (outputType) { - case INLINE: - out.put("inline", 1); - break; - case REPLACE: - out.put("replace", outputCollection); - break; - case MERGE: - out.put("merge", outputCollection); - break; - case REDUCE: - out.put("reduce", outputCollection); - break; + if (getMapReduceAction() == null) { + out.put("inline", 1); + } else { + switch (getMapReduceAction()) { + case REPLACE -> out.put("replace", outputCollection); + case MERGE -> out.put("merge", outputCollection); + case REDUCE -> out.put("reduce", outputCollection); + } } outputDatabase.ifPresent(val -> out.append("db", val)); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResults.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResults.java index b7cbd9ee4e..865a4e9438 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResults.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResults.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,8 +22,6 @@ import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.MapReduceOutput; - /** * Collects the results of performing a MapReduce operations. * @@ -32,7 +30,9 @@ * @author Christoph Strobl * @author Mark Paluch * @param The class in which the results are mapped onto, accessible via an iterator. + * @deprecated since 3.4 in favor of {@link org.springframework.data.mongodb.core.aggregation}. */ +@Deprecated public class MapReduceResults implements Iterable { private final List mappedResults; @@ -46,13 +46,11 @@ public class MapReduceResults implements Iterable { * * @param mappedResults must not be {@literal null}. * @param rawResults must not be {@literal null}. - * @deprecated since 1.7. 
Please use {@link #MapReduceResults(List, MapReduceOutput)} */ - @Deprecated public MapReduceResults(List mappedResults, Document rawResults) { - Assert.notNull(mappedResults, "List of mapped results must not be null!"); - Assert.notNull(rawResults, "Raw results must not be null!"); + Assert.notNull(mappedResults, "List of mapped results must not be null"); + Assert.notNull(rawResults, "Raw results must not be null"); this.mappedResults = mappedResults; this.rawResults = rawResults; @@ -61,29 +59,6 @@ public MapReduceResults(List mappedResults, Document rawResults) { this.outputCollection = parseOutputCollection(rawResults); } - /** - * Creates a new {@link MapReduceResults} from the given mapped results and the {@link MapReduceOutput}. - * - * @param mappedResults must not be {@literal null}. - * @param mapReduceOutput must not be {@literal null}. - * @since 1.7 - */ - public MapReduceResults(List mappedResults, MapReduceOutput mapReduceOutput) { - - Assert.notNull(mappedResults, "MappedResults must not be null!"); - Assert.notNull(mapReduceOutput, "MapReduceOutput must not be null!"); - - this.mappedResults = mappedResults; - this.rawResults = null; - this.mapReduceTiming = parseTiming(mapReduceOutput); - this.mapReduceCounts = parseCounts(mapReduceOutput); - this.outputCollection = parseOutputCollection(mapReduceOutput); - } - - /* - * (non-Javadoc) - * @see java.lang.Iterable#iterator() - */ public Iterator iterator() { return mappedResults.iterator(); } @@ -101,6 +76,7 @@ public String getOutputCollection() { return outputCollection; } + @Nullable public Document getRawResults() { return rawResults; } @@ -171,20 +147,7 @@ private static String parseOutputCollection(Document rawResults) { return null; } - return resultField instanceof Document ? ((Document) resultField).get("collection").toString() + return resultField instanceof Document document ? 
document.get("collection").toString() : resultField.toString(); } - - private static MapReduceCounts parseCounts(final MapReduceOutput mapReduceOutput) { - return new MapReduceCounts(mapReduceOutput.getInputCount(), mapReduceOutput.getEmitCount(), - mapReduceOutput.getOutputCount()); - } - - private static String parseOutputCollection(final MapReduceOutput mapReduceOutput) { - return mapReduceOutput.getCollectionName(); - } - - private static MapReduceTiming parseTiming(MapReduceOutput mapReduceOutput) { - return new MapReduceTiming(-1, -1, mapReduceOutput.getDuration()); - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTiming.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTiming.java index b983135395..28de7fe850 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTiming.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTiming.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,12 @@ */ package org.springframework.data.mongodb.core.mapreduce; +import org.springframework.lang.Nullable; + +/** + * @deprecated since 3.4 in favor of {@link org.springframework.data.mongodb.core.aggregation}. 
+ */ +@Deprecated public class MapReduceTiming { private long mapTime, emitLoopTime, totalTime; @@ -38,19 +44,11 @@ public long getTotalTime() { return totalTime; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return "MapReduceTiming [mapTime=" + mapTime + ", emitLoopTime=" + emitLoopTime + ", totalTime=" + totalTime + "]"; } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -64,24 +62,17 @@ public int hashCode() { return result; } - /* - * - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof MapReduceTiming)) { + if (!(obj instanceof MapReduceTiming that)) { return false; } - MapReduceTiming that = (MapReduceTiming) obj; - return this.emitLoopTime == that.emitLoopTime && // this.mapTime == that.mapTime && // this.totalTime == that.totalTime; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/package-info.java index 54781e78c0..65522d8613 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapreduce/package-info.java @@ -1,6 +1,8 @@ /** * Support for MongoDB map-reduce operations. 
+ * @deprecated since MongoDB server version 5.0 */ +@Deprecated @org.springframework.lang.NonNullApi package org.springframework.data.mongodb.core.mapreduce; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Cancelable.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Cancelable.java index b848fd7f7c..172ecbbe74 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Cancelable.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Cancelable.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -28,7 +28,7 @@ public interface Cancelable { /** * Abort and free resources. * - * @throws DataAccessResourceFailureException + * @throws DataAccessResourceFailureException if operation cannot be canceled. */ void cancel() throws DataAccessResourceFailureException; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamRequest.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamRequest.java index fd24ef31ad..fec7fa60ef 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamRequest.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamRequest.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. 
+ * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,9 @@ */ package org.springframework.data.mongodb.core.messaging; +import java.time.Duration; +import java.time.Instant; + import org.bson.BsonValue; import org.bson.Document; import org.springframework.data.mongodb.core.ChangeStreamOptions; @@ -27,18 +30,28 @@ import com.mongodb.client.model.changestream.ChangeStreamDocument; import com.mongodb.client.model.changestream.FullDocument; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; /** * {@link SubscriptionRequest} implementation to be used for listening to * Change Streams via a {@link MessageListenerContainer} * using the synchronous MongoDB Java driver. - *

          + *
          * The most trivial use case is subscribing to all events of a specific {@link com.mongodb.client.MongoCollection - * collection}. + * collection} + * + *

          + * 
          + *     ChangeStreamRequest<Document> request = new ChangeStreamRequest<>(System.out::println, () -> "collection-name");
          + * 
          + * 
          + * + * or {@link com.mongodb.client.MongoDatabase} which receives events from all {@link com.mongodb.client.MongoCollection + * collections} in that database. * *
            * 
          - *     ChangeStreamRequest request = new ChangeStreamRequest<>(System.out::println, () -> "collection-name");
          + *     ChangeStreamRequest<Document> request = new ChangeStreamRequest<>(System.out::println, RequestOptions.justDatabase("test"));
            * 
            * 
          * @@ -51,7 +64,7 @@ * .returnFullDocumentOnUpdate() * .build(); * - * ChangeStreamRequest request = new ChangeStreamRequest<>(System.out::println, new ChangeStreamRequestOptions("collection-name", options)); + * ChangeStreamRequest<Document> request = new ChangeStreamRequest<>(System.out::println, new ChangeStreamRequestOptions("collection-name", options)); * * * @@ -60,7 +73,7 @@ * *
            * 
          - *     ChangeStreamRequest request = ChangeStreamRequest.builder()
          + *     ChangeStreamRequest<Document> request = ChangeStreamRequest.builder()
            *         .collection("collection-name")
            *         .publishTo(System.out::println)
            *         .filter(newAggregation(match(where("age").is(7))))
          @@ -78,6 +91,7 @@
            *
            * @author Christoph Strobl
            * @author Mark Paluch
          + * @author Myroslav Kosinskyi
            * @since 2.1
            */
           public class ChangeStreamRequest
          @@ -96,28 +110,20 @@ public class ChangeStreamRequest
           	public ChangeStreamRequest(MessageListener, ? super T> messageListener,
           			RequestOptions options) {
           
          -		Assert.notNull(messageListener, "MessageListener must not be null!");
          -		Assert.notNull(options, "Options must not be null!");
          +		Assert.notNull(messageListener, "MessageListener must not be null");
          +		Assert.notNull(options, "Options must not be null");
           
          -		this.options = options instanceof ChangeStreamRequestOptions ? (ChangeStreamRequestOptions) options
          -				: ChangeStreamRequestOptions.of(options);
          +		this.options = options instanceof ChangeStreamRequestOptions changeStreamRequestOptions ?
          +				changeStreamRequestOptions : ChangeStreamRequestOptions.of(options);
           
           		this.messageListener = messageListener;
           	}
           
          -	/*
          -	 * (non-Javadoc)
          -	 * @see org.springframework.data.mongodb.monitor.SubscriptionRequest#getMessageListener()
          -	 */
           	@Override
           	public MessageListener, ? super T> getMessageListener() {
           		return messageListener;
           	}
           
          -	/*
          -	 * (non-Javadoc)
          -	 * @see org.springframework.data.mongodb.monitor.SubscriptionRequest#getRequestOptions()
          -	 */
           	@Override
           	public ChangeStreamRequestOptions getRequestOptions() {
           		return options;
          @@ -125,7 +131,7 @@ public ChangeStreamRequestOptions getRequestOptions() {
           
           	/**
           	 * Obtain a shiny new {@link ChangeStreamRequestBuilder} and start defining your {@link ChangeStreamRequest} in this
          -	 * fancy fluent way. Just don't forget to call {@link ChangeStreamRequestBuilder#build() build()} when your're done.
          +	 * fancy fluent way. Just don't forget to call {@link ChangeStreamRequestBuilder#build() build()} when done.
           	 *
           	 * @return new instance of {@link ChangeStreamRequest}.
           	 */
          @@ -135,7 +141,7 @@ public static ChangeStreamRequestBuilder builder() {
           
           	/**
           	 * Obtain a shiny new {@link ChangeStreamRequestBuilder} and start defining your {@link ChangeStreamRequest} in this
          -	 * fancy fluent way. Just don't forget to call {@link ChangeStreamRequestBuilder#build() build()} when your're done.
          +	 * fancy fluent way. Just don't forget to call {@link ChangeStreamRequestBuilder#build() build()} when done.
           	 *
           	 * @return new instance of {@link ChangeStreamRequest}.
           	 */
          @@ -154,29 +160,49 @@ public static  ChangeStreamRequestBuilder builder(
           	 */
           	public static class ChangeStreamRequestOptions implements SubscriptionRequest.RequestOptions {
           
          -		private final String collectionName;
          +		private final @Nullable String databaseName;
          +		private final @Nullable String collectionName;
          +		private final @Nullable Duration maxAwaitTime;
           		private final ChangeStreamOptions options;
           
           		/**
           		 * Create new {@link ChangeStreamRequestOptions}.
           		 *
          -		 * @param collectionName must not be {@literal null}.
          +		 * @param databaseName can be {@literal null}.
          +		 * @param collectionName can be {@literal null}.
          +		 * @param options must not be {@literal null}.
          +		 */
          +		public ChangeStreamRequestOptions(@Nullable String databaseName, @Nullable String collectionName,
          +				ChangeStreamOptions options) {
          +			this(databaseName, collectionName, null, options);
          +		}
          +
          +		/**
          +		 * Create new {@link ChangeStreamRequestOptions}.
          +		 *
          +		 * @param databaseName can be {@literal null}.
          +		 * @param collectionName can be {@literal null}.
          +		 * @param maxAwaitTime can be {@literal null}.
           		 * @param options must not be {@literal null}.
          +		 * @since 3.0
           		 */
          -		public ChangeStreamRequestOptions(String collectionName, ChangeStreamOptions options) {
          +		public ChangeStreamRequestOptions(@Nullable String databaseName, @Nullable String collectionName,
          +				@Nullable Duration maxAwaitTime, ChangeStreamOptions options) {
           
          -			Assert.notNull(collectionName, "CollectionName must not be null!");
          -			Assert.notNull(options, "Options must not be null!");
          +			Assert.notNull(options, "Options must not be null");
           
           			this.collectionName = collectionName;
          +			this.databaseName = databaseName;
          +			this.maxAwaitTime = maxAwaitTime;
           			this.options = options;
           		}
           
           		public static ChangeStreamRequestOptions of(RequestOptions options) {
           
          -			Assert.notNull(options, "Options must not be null!");
          +			Assert.notNull(options, "Options must not be null");
           
          -			return new ChangeStreamRequestOptions(options.getCollectionName(), ChangeStreamOptions.builder().build());
          +			return new ChangeStreamRequestOptions(options.getDatabaseName(), options.getCollectionName(),
          +					ChangeStreamOptions.builder().build());
           		}
           
           		/**
          @@ -188,14 +214,20 @@ public ChangeStreamOptions getChangeStreamOptions() {
           			return options;
           		}
           
          -		/*
          -		 * (non-Javadoc)
          -		 * @see org.springframework.data.mongodb.monitor.SubscriptionRequest.RequestOptions#getCollectionName()
          -		 */
           		@Override
           		public String getCollectionName() {
           			return collectionName;
           		}
          +
          +		@Override
          +		public String getDatabaseName() {
          +			return databaseName;
          +		}
          +
          +		@Override
          +		public Duration maxAwaitTime() {
          +			return maxAwaitTime != null ? maxAwaitTime : RequestOptions.super.maxAwaitTime();
          +		}
           	}
           
           	/**
          @@ -207,12 +239,28 @@ public String getCollectionName() {
           	 */
           	public static class ChangeStreamRequestBuilder {
           
          +		private @Nullable String databaseName;
           		private @Nullable String collectionName;
          +		private @Nullable Duration maxAwaitTime;
           		private @Nullable MessageListener, ? super T> listener;
          -		private ChangeStreamOptionsBuilder delegate = ChangeStreamOptions.builder();
          +		private final ChangeStreamOptionsBuilder delegate = ChangeStreamOptions.builder();
           
           		private ChangeStreamRequestBuilder() {}
           
          +		/**
          +		 * Set the name of the {@link com.mongodb.client.MongoDatabase} to listen to.
          +		 *
          +		 * @param databaseName must not be {@literal null} nor empty.
          +		 * @return this.
          +		 */
          +		public ChangeStreamRequestBuilder database(String databaseName) {
          +
          +			Assert.hasText(databaseName, "DatabaseName must not be null");
          +
          +			this.databaseName = databaseName;
          +			return this;
          +		}
          +
           		/**
           		 * Set the name of the {@link com.mongodb.client.MongoCollection} to listen to.
           		 *
          @@ -221,7 +269,7 @@ private ChangeStreamRequestBuilder() {}
           		 */
           		public ChangeStreamRequestBuilder collection(String collectionName) {
           
          -			Assert.hasText(collectionName, "CollectionName must not be null!");
          +			Assert.hasText(collectionName, "CollectionName must not be null");
           
           			this.collectionName = collectionName;
           			return this;
          @@ -236,7 +284,7 @@ public ChangeStreamRequestBuilder collection(String collectionName) {
           		public ChangeStreamRequestBuilder publishTo(
           				MessageListener, ? super T> messageListener) {
           
          -			Assert.notNull(messageListener, "MessageListener must not be null!");
          +			Assert.notNull(messageListener, "MessageListener must not be null");
           
           			this.listener = messageListener;
           			return this;
          @@ -244,13 +292,13 @@ public ChangeStreamRequestBuilder publishTo(
           
           		/**
           		 * Set the filter to apply.
          -		 * 

          + *
          * Fields on aggregation expression root level are prefixed to map to fields contained in * {@link ChangeStreamDocument#getFullDocument() fullDocument}. However {@literal operationType}, {@literal ns}, * {@literal documentKey} and {@literal fullDocument} are reserved words that will be omitted, and therefore taken * as given, during the mapping procedure. You may want to have a look at the * structure of Change Events. - *

          + *
          * Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to ensure filter expressions are * mapped to domain type fields. * @@ -262,7 +310,7 @@ public ChangeStreamRequestBuilder publishTo( */ public ChangeStreamRequestBuilder filter(Aggregation aggregation) { - Assert.notNull(aggregation, "Aggregation must not be null!"); + Assert.notNull(aggregation, "Aggregation must not be null"); this.delegate.filter(aggregation); return this; @@ -277,8 +325,8 @@ public ChangeStreamRequestBuilder filter(Aggregation aggregation) { */ public ChangeStreamRequestBuilder filter(Document... pipeline) { - Assert.notNull(pipeline, "Aggregation pipeline must not be null!"); - Assert.noNullElements(pipeline, "Aggregation pipeline must not contain null elements!"); + Assert.notNull(pipeline, "Aggregation pipeline must not be null"); + Assert.noNullElements(pipeline, "Aggregation pipeline must not contain null elements"); this.delegate.filter(pipeline); return this; @@ -292,9 +340,9 @@ public ChangeStreamRequestBuilder filter(Document... pipeline) { * @see ChangeStreamOptions#getCollation() * @see ChangeStreamOptionsBuilder#collation(Collation) */ - public ChangeStreamRequestBuilder collation(Collation collation) { + public ChangeStreamRequestBuilder collation(Collation collation) { - Assert.notNull(collation, "Collation must not be null!"); + Assert.notNull(collation, "Collation must not be null"); this.delegate.collation(collation); return this; @@ -311,37 +359,112 @@ public ChangeStreamRequestBuilder collation(Collation collation) { */ public ChangeStreamRequestBuilder resumeToken(BsonValue resumeToken) { - Assert.notNull(resumeToken, "Resume token not be null!"); + Assert.notNull(resumeToken, "Resume token not be null"); this.delegate.resumeToken(resumeToken); return this; } + /** + * Set the cluster time at which to resume listening. + * + * @param clusterTime must not be {@literal null}. + * @return this. 
+ * @see ChangeStreamOptions#getResumeTimestamp() + * @see ChangeStreamOptionsBuilder#resumeAt(java.time.Instant) + */ + public ChangeStreamRequestBuilder resumeAt(Instant clusterTime) { + + Assert.notNull(clusterTime, "ClusterTime must not be null"); + + this.delegate.resumeAt(clusterTime); + return this; + } + + /** + * Set the resume token after which to continue emitting notifications. + * + * @param resumeToken must not be {@literal null}. + * @return this. + * @since 2.2 + */ + public ChangeStreamRequestBuilder resumeAfter(BsonValue resumeToken) { + + Assert.notNull(resumeToken, "ResumeToken must not be null"); + this.delegate.resumeAfter(resumeToken); + + return this; + } + + /** + * Set the resume token after which to start emitting notifications. + * + * @param resumeToken must not be {@literal null}. + * @return this. + * @since 2.2 + */ + public ChangeStreamRequestBuilder startAfter(BsonValue resumeToken) { + + Assert.notNull(resumeToken, "ResumeToken must not be null"); + this.delegate.startAfter(resumeToken); + + return this; + } + /** * Set the {@link FullDocument} lookup to {@link FullDocument#UPDATE_LOOKUP}. * * @return this. - * @see #fullDocumentLookup(FullDocument) * @see ChangeStreamOptions#getFullDocumentLookup() * @see ChangeStreamOptionsBuilder#fullDocumentLookup(FullDocument) */ public ChangeStreamRequestBuilder fullDocumentLookup(FullDocument lookup) { - Assert.notNull(lookup, "FullDocument not be null!"); + Assert.notNull(lookup, "FullDocument not be null"); this.delegate.fullDocumentLookup(lookup); return this; } + /** + * Set the {@link FullDocumentBeforeChange} lookup to the given value. + * + * @return this. 
+ * @since 4.0 + * @see ChangeStreamOptions#getFullDocumentBeforeChangeLookup() + * @see ChangeStreamOptionsBuilder#fullDocumentBeforeChangeLookup(FullDocumentBeforeChange) + */ + public ChangeStreamRequestBuilder fullDocumentBeforeChangeLookup(FullDocumentBeforeChange lookup) { + + Assert.notNull(lookup, "FullDocumentBeforeChange not be null"); + + this.delegate.fullDocumentBeforeChangeLookup(lookup); + return this; + } + + /** + * Set the cursors maximum wait time on the server (for a new Document to be emitted). + * + * @param timeout must not be {@literal null}. + * @since 3.0 + */ + public ChangeStreamRequestBuilder maxAwaitTime(Duration timeout) { + + Assert.notNull(timeout, "timeout not be null"); + + this.maxAwaitTime = timeout; + return this; + } + /** * @return the build {@link ChangeStreamRequest}. */ public ChangeStreamRequest build() { - Assert.notNull(listener, "MessageListener must not be null!"); - Assert.hasText(collectionName, "CollectionName must not be null!"); + Assert.notNull(listener, "MessageListener must not be null"); - return new ChangeStreamRequest<>(listener, new ChangeStreamRequestOptions(collectionName, delegate.build())); + return new ChangeStreamRequest<>(listener, + new ChangeStreamRequestOptions(databaseName, collectionName, maxAwaitTime, delegate.build())); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTask.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTask.java index 5c604fbfbf..fc8372613b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTask.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTask.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,17 @@ */ package org.springframework.data.mongodb.core.messaging; -import lombok.AllArgsConstructor; - +import java.time.Instant; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; -import java.util.Optional; import java.util.Set; +import java.util.concurrent.TimeUnit; import org.bson.BsonDocument; +import org.bson.BsonTimestamp; +import org.bson.BsonValue; import org.bson.Document; import org.springframework.data.mongodb.core.ChangeStreamEvent; import org.springframework.data.mongodb.core.ChangeStreamOptions; @@ -36,29 +37,33 @@ import org.springframework.data.mongodb.core.aggregation.TypedAggregation; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.convert.QueryMapper; -import org.springframework.data.mongodb.core.messaging.ChangeStreamRequest.ChangeStreamRequestOptions; import org.springframework.data.mongodb.core.messaging.Message.MessageProperties; import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; import org.springframework.lang.Nullable; import org.springframework.util.ClassUtils; import org.springframework.util.ErrorHandler; +import org.springframework.util.StringUtils; import com.mongodb.MongoNamespace; import com.mongodb.client.ChangeStreamIterable; import com.mongodb.client.MongoCursor; +import com.mongodb.client.MongoDatabase; import com.mongodb.client.model.Collation; import com.mongodb.client.model.changestream.ChangeStreamDocument; import 
com.mongodb.client.model.changestream.FullDocument; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; /** * {@link Task} implementation for obtaining {@link ChangeStreamDocument ChangeStreamDocuments} from MongoDB. * * @author Christoph Strobl + * @author Mark Paluch + * @author Myroslav Kosinskyi * @since 2.1 */ class ChangeStreamTask extends CursorReadingTask, Object> { - private final Set blacklist = new HashSet<>( + private final Set denylist = new HashSet<>( Arrays.asList("operationType", "fullDocument", "documentKey", "updateDescription", "ns")); private final QueryMapper queryMapper; @@ -73,10 +78,6 @@ class ChangeStreamTask extends CursorReadingTask, mongoConverter = template.getConverter(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.messaging.CursorReadingTask#initCursor(org.springframework.data.mongodb.core.MongoTemplate, org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions, java.lang.Class) - */ @Override protected MongoCursor> initCursor(MongoTemplate template, RequestOptions options, Class targetType) { @@ -84,37 +85,70 @@ protected MongoCursor> initCursor(MongoTemplate t List filter = Collections.emptyList(); BsonDocument resumeToken = new BsonDocument(); Collation collation = null; - FullDocument fullDocument = FullDocument.DEFAULT; + FullDocument fullDocument = ClassUtils.isAssignable(Document.class, targetType) ? 
FullDocument.DEFAULT + : FullDocument.UPDATE_LOOKUP; + FullDocumentBeforeChange fullDocumentBeforeChange = null; + BsonTimestamp startAt = null; + boolean resumeAfter = true; - if (options instanceof ChangeStreamRequest.ChangeStreamRequestOptions) { + if (options instanceof ChangeStreamRequest.ChangeStreamRequestOptions requestOptions) { - ChangeStreamOptions changeStreamOptions = ((ChangeStreamRequestOptions) options).getChangeStreamOptions(); + ChangeStreamOptions changeStreamOptions = requestOptions.getChangeStreamOptions(); filter = prepareFilter(template, changeStreamOptions); if (changeStreamOptions.getFilter().isPresent()) { Object val = changeStreamOptions.getFilter().get(); - if (val instanceof Aggregation) { - collation = ((Aggregation) val).getOptions().getCollation() + if (val instanceof Aggregation aggregation) { + collation = aggregation.getOptions().getCollation() .map(org.springframework.data.mongodb.core.query.Collation::toMongoCollation).orElse(null); } } if (changeStreamOptions.getResumeToken().isPresent()) { + resumeToken = changeStreamOptions.getResumeToken().get().asDocument(); + resumeAfter = changeStreamOptions.isResumeAfter(); } fullDocument = changeStreamOptions.getFullDocumentLookup() .orElseGet(() -> ClassUtils.isAssignable(Document.class, targetType) ? FullDocument.DEFAULT : FullDocument.UPDATE_LOOKUP); + + fullDocumentBeforeChange = changeStreamOptions.getFullDocumentBeforeChangeLookup().orElse(null); + + startAt = changeStreamOptions.getResumeBsonTimestamp().orElse(null); + } + + MongoDatabase db = StringUtils.hasText(options.getDatabaseName()) + ? template.getMongoDatabaseFactory().getMongoDatabase(options.getDatabaseName()) + : template.getDb(); + + ChangeStreamIterable iterable; + + if (StringUtils.hasText(options.getCollectionName())) { + iterable = filter.isEmpty() ? 
db.getCollection(options.getCollectionName()).watch(Document.class) + : db.getCollection(options.getCollectionName()).watch(filter, Document.class); + + } else { + iterable = filter.isEmpty() ? db.watch(Document.class) : db.watch(filter, Document.class); } - ChangeStreamIterable iterable = filter.isEmpty() - ? template.getCollection(options.getCollectionName()).watch(Document.class) - : template.getCollection(options.getCollectionName()).watch(filter, Document.class); + if (!options.maxAwaitTime().isZero()) { + iterable = iterable.maxAwaitTime(options.maxAwaitTime().toMillis(), TimeUnit.MILLISECONDS); + } if (!resumeToken.isEmpty()) { - iterable = iterable.resumeAfter(resumeToken); + + if (resumeAfter) { + iterable = iterable.resumeAfter(resumeToken); + } else { + iterable = iterable.startAfter(resumeToken); + } + } + + if (startAt != null) { + iterable = iterable.startAtOperationTime(startAt); } if (collation != null) { @@ -122,83 +156,122 @@ protected MongoCursor> initCursor(MongoTemplate t } iterable = iterable.fullDocument(fullDocument); + if(fullDocumentBeforeChange != null) { + iterable = iterable.fullDocumentBeforeChange(fullDocumentBeforeChange); + } return iterable.iterator(); } + @SuppressWarnings("unchecked") List prepareFilter(MongoTemplate template, ChangeStreamOptions options) { if (!options.getFilter().isPresent()) { return Collections.emptyList(); } - Object filter = options.getFilter().get(); - if (filter instanceof Aggregation) { - Aggregation agg = (Aggregation) filter; - AggregationOperationContext context = agg instanceof TypedAggregation - ? new TypeBasedAggregationOperationContext(((TypedAggregation) agg).getInputType(), + Object filter = options.getFilter().orElse(null); + + if (filter instanceof Aggregation aggregation) { + AggregationOperationContext context = aggregation instanceof TypedAggregation typedAggregation + ? 
new TypeBasedAggregationOperationContext(typedAggregation.getInputType(), template.getConverter().getMappingContext(), queryMapper) : Aggregation.DEFAULT_CONTEXT; - return agg.toPipeline(new PrefixingDelegatingAggregationOperationContext(context, "fullDocument", blacklist)); - } else if (filter instanceof List) { + return aggregation.toPipeline(new PrefixingDelegatingAggregationOperationContext(context, "fullDocument", denylist)); + } + + if (filter instanceof List) { return (List) filter; - } else { - throw new IllegalArgumentException( - "ChangeStreamRequestOptions.filter mut be either an Aggregation or a plain list of Documents"); } + + throw new IllegalArgumentException( + "ChangeStreamRequestOptions.filter mut be either an Aggregation or a plain list of Documents"); } @Override protected Message, Object> createMessage(ChangeStreamDocument source, Class targetType, RequestOptions options) { - // namespace might be null for eg. OperationType.INVALIDATE - MongoNamespace namespace = Optional.ofNullable(source.getNamespace()) - .orElse(new MongoNamespace("unknown", options.getCollectionName())); + MongoNamespace namespace = source.getNamespace() != null ? source.getNamespace() + : createNamespaceFromOptions(options); return new ChangeStreamEventMessage<>(new ChangeStreamEvent<>(source, targetType, mongoConverter), MessageProperties .builder().databaseName(namespace.getDatabaseName()).collectionName(namespace.getCollectionName()).build()); } + MongoNamespace createNamespaceFromOptions(RequestOptions options) { + + String collectionName = StringUtils.hasText(options.getCollectionName()) ? options.getCollectionName() : "unknown"; + String databaseName = StringUtils.hasText(options.getDatabaseName()) ? 
options.getDatabaseName() : "unknown"; + + return new MongoNamespace(databaseName, collectionName); + } + /** * {@link Message} implementation for ChangeStreams * * @since 2.1 */ - @AllArgsConstructor static class ChangeStreamEventMessage implements Message, T> { private final ChangeStreamEvent delegate; private final MessageProperties messageProperties; - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.messaging.Message#getRaw() - */ + ChangeStreamEventMessage(ChangeStreamEvent delegate, MessageProperties messageProperties) { + + this.delegate = delegate; + this.messageProperties = messageProperties; + } + @Nullable @Override public ChangeStreamDocument getRaw() { return delegate.getRaw(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.messaging.Message#getBody() - */ @Nullable @Override public T getBody() { return delegate.getBody(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.messaging.Message#getProperties() - */ + @Nullable + @Override + public T getBodyBeforeChange() { + return delegate.getBodyBeforeChange(); + } + @Override public MessageProperties getProperties() { return this.messageProperties; } + + /** + * @return the resume token or {@literal null} if not set. + * @see ChangeStreamEvent#getResumeToken() + */ + @Nullable + BsonValue getResumeToken() { + return delegate.getResumeToken(); + } + + /** + * @return the cluster time of the event or {@literal null}. + * @see ChangeStreamEvent#getTimestamp() + */ + @Nullable + Instant getTimestamp() { + return delegate.getTimestamp(); + } + + /** + * Get the {@link ChangeStreamEvent} from the message. + * + * @return never {@literal null}. 
+ */ + ChangeStreamEvent getChangeStreamEvent() { + return delegate; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/CursorReadingTask.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/CursorReadingTask.java index c46488a949..41b5fed4f5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/CursorReadingTask.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/CursorReadingTask.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,16 +18,19 @@ import java.time.Duration; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Supplier; import org.springframework.dao.DataAccessResourceFailureException; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.messaging.Message.MessageProperties; import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.data.util.Lock; import org.springframework.lang.Nullable; +import org.springframework.util.Assert; import org.springframework.util.ErrorHandler; import com.mongodb.client.MongoCursor; -import com.mysema.commons.lang.Assert; /** * @author Christoph Strobl @@ -38,7 +41,7 @@ */ abstract class CursorReadingTask implements Task { - private final Object 
lifecycleMonitor = new Object(); + private final Lock lock = Lock.of(new ReentrantLock()); private final MongoTemplate template; private final SubscriptionRequest request; @@ -65,74 +68,73 @@ abstract class CursorReadingTask implements Task { this.errorHandler = errorHandler; } - /* - * (non-Javadoc) - * @see java.lang.Runnable - */ @Override public void run() { - start(); + try { - while (isRunning()) { - try { - T next = getNext(); - if (next != null) { - emitMessage(createMessage(next, targetType, request.getRequestOptions())); - } else { - Thread.sleep(10); - } - } catch (InterruptedException e) { + start(); - synchronized (lifecycleMonitor) { - state = State.CANCELLED; - } - Thread.interrupted(); - } catch (RuntimeException e) { + while (isRunning()) { + + try { - Exception translated = template.getExceptionTranslator().translateExceptionIfPossible(e); - Exception toHandle = translated != null ? translated : e; + T next = execute(this::getNext); - errorHandler.handleError(toHandle); + if (next != null) { + emitMessage(createMessage(next, targetType, request.getRequestOptions())); + } else { + Thread.sleep(10); + } + } catch (InterruptedException e) { + + lock.executeWithoutResult(() -> state = State.CANCELLED); + Thread.currentThread().interrupt(); + break; + } } + } catch (RuntimeException e) { + + lock.executeWithoutResult(() -> state = State.CANCELLED); + errorHandler.handleError(e); } } /** * Initialize the Task by 1st setting the current state to {@link State#STARTING starting} indicating the * initialization procedure.
          - * Moving on the underlying {@link MongoCursor} gets {@link #initCursor(MongoTemplate, RequestOptions) created} and is - * {@link #isValidCursor(MongoCursor) health checked}. Once a valid {@link MongoCursor} is created the {@link #state} - * is set to {@link State#RUNNING running}. If the health check is not passed the {@link MongoCursor} is immediately - * {@link MongoCursor#close() closed} and a new {@link MongoCursor} is requested until a valid one is retrieved or the - * {@link #state} changes. + * Moving on the underlying {@link MongoCursor} gets {@link #initCursor(MongoTemplate, RequestOptions, Class) created} + * and is {@link #isValidCursor(MongoCursor) health checked}. Once a valid {@link MongoCursor} is created the + * {@link #state} is set to {@link State#RUNNING running}. If the health check is not passed the {@link MongoCursor} + * is immediately {@link MongoCursor#close() closed} and a new {@link MongoCursor} is requested until a valid one is + * retrieved or the {@link #state} changes. 
*/ private void start() { - synchronized (lifecycleMonitor) { + lock.executeWithoutResult(() -> { if (!State.RUNNING.equals(state)) { state = State.STARTING; } - } + }); do { - boolean valid = false; - - synchronized (lifecycleMonitor) { + boolean valid = lock.execute(() -> { - if (State.STARTING.equals(state)) { + if (!State.STARTING.equals(state)) { + return false; + } - MongoCursor cursor = initCursor(template, request.getRequestOptions(), targetType); - valid = isValidCursor(cursor); - if (valid) { - this.cursor = cursor; - state = State.RUNNING; - } else { - cursor.close(); - } + MongoCursor cursor = execute(() -> initCursor(template, request.getRequestOptions(), targetType)); + boolean isValid = isValidCursor(cursor); + if (isValid) { + this.cursor = cursor; + state = State.RUNNING; + } else if (cursor != null) { + cursor.close(); } - } + return isValid; + }); if (!valid) { @@ -140,10 +142,8 @@ private void start() { Thread.sleep(100); } catch (InterruptedException e) { - synchronized (lifecycleMonitor) { - state = State.CANCELLED; - } - Thread.interrupted(); + lock.executeWithoutResult(() -> state = State.CANCELLED); + Thread.currentThread().interrupt(); } } } while (State.STARTING.equals(getState())); @@ -155,14 +155,10 @@ private void start() { protected abstract MongoCursor initCursor(MongoTemplate template, RequestOptions options, Class targetType); - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.messaging.Cancelable#cancel() - */ @Override public void cancel() throws DataAccessResourceFailureException { - synchronized (lifecycleMonitor) { + lock.executeWithoutResult(() -> { if (State.RUNNING.equals(state) || State.STARTING.equals(state)) { this.state = State.CANCELLED; @@ -170,39 +166,24 @@ public void cancel() throws DataAccessResourceFailureException { cursor.close(); } } - } + }); } - /* - * (non-Javadoc) - * @see org.springframework.scheduling.SchedulingAwareRunnable#isLongLived() - */ @Override public boolean isLongLived() { 
return true; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.messaging.Task#getState() - */ @Override public State getState() { - - synchronized (lifecycleMonitor) { - return state; - } + return lock.execute(() -> state); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.messaging.Task#awaitStart(java.time.Duration) - */ @Override public boolean awaitStart(Duration timeout) throws InterruptedException { - Assert.notNull(timeout, "Timeout must not be null!"); - Assert.isFalse(timeout.isNegative(), "Timeout must not be negative!"); + Assert.notNull(timeout, "Timeout must not be null"); + Assert.isTrue(!timeout.isNegative(), "Timeout must not be negative"); return awaitStart.await(timeout.toNanos(), TimeUnit.NANOSECONDS); } @@ -221,19 +202,22 @@ private boolean isRunning() { @SuppressWarnings("unchecked") private void emitMessage(Message message) { - request.getMessageListener().onMessage((Message) message); + try { + request.getMessageListener().onMessage((Message) message); + } catch (Exception e) { + errorHandler.handleError(e); + } } @Nullable private T getNext() { - synchronized (lifecycleMonitor) { + return lock.execute(() -> { if (State.RUNNING.equals(state)) { return cursor.tryNext(); } - } - - throw new IllegalStateException(String.format("Cursor %s is not longer open.", cursor)); + throw new IllegalStateException(String.format("Cursor %s is not longer open", cursor)); + }); } private static boolean isValidCursor(@Nullable MongoCursor cursor) { @@ -242,10 +226,29 @@ private static boolean isValidCursor(@Nullable MongoCursor cursor) { return false; } - if (cursor.getServerCursor() == null || cursor.getServerCursor().getId() == 0) { - return false; - } + return cursor.getServerCursor() != null && cursor.getServerCursor().getId() != 0; + } - return true; + /** + * Execute an operation and take care of translating exceptions using the {@link MongoTemplate templates} + * {@link 
org.springframework.data.mongodb.core.MongoExceptionTranslator} rethrowing the potentially translated + * exception. + * + * @param callback must not be {@literal null}. + * @param + * @return can be {@literal null}. + * @throws RuntimeException The potentially translated exception. + */ + @Nullable + private V execute(Supplier callback) { + + try { + return callback.get(); + } catch (RuntimeException e) { + + RuntimeException translated = template.getExceptionTranslator().translateExceptionIfPossible(e); + throw translated != null ? translated : e; + } } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainer.java index 9ad5b48b35..546f3fdd33 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainer.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,13 @@ */ package org.springframework.data.mongodb.core.messaging; -import lombok.AccessLevel; -import lombok.EqualsAndHashCode; -import lombok.RequiredArgsConstructor; - import java.time.Duration; import java.util.LinkedHashMap; import java.util.Map; import java.util.Optional; import java.util.concurrent.Executor; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -31,15 +29,16 @@ import org.springframework.dao.DataAccessResourceFailureException; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.data.util.Lock; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ErrorHandler; +import org.springframework.util.ObjectUtils; /** * Simple {@link Executor} based {@link MessageListenerContainer} implementation for running {@link Task tasks} like * listening to MongoDB Change Streams and tailable - * cursors. - *

          + * cursors.
          * This message container creates long-running tasks that are executed on {@link Executor}. * * @author Christoph Strobl @@ -52,9 +51,16 @@ public class DefaultMessageListenerContainer implements MessageListenerContainer private final TaskFactory taskFactory; private final Optional errorHandler; - private final Object lifecycleMonitor = new Object(); private final Map subscriptions = new LinkedHashMap<>(); + private final ReadWriteLock lifecycleMonitor = new ReentrantReadWriteLock(); + private final Lock lifecycleRead = Lock.of(lifecycleMonitor.readLock()); + private final Lock lifecycleWrite = Lock.of(lifecycleMonitor.writeLock()); + + private final ReadWriteLock subscriptionMonitor = new ReentrantReadWriteLock(); + private final Lock subscriptionRead = Lock.of(subscriptionMonitor.readLock()); + private final Lock subscriptionWrite = Lock.of(subscriptionMonitor.writeLock()); + private boolean running = false; /** @@ -89,27 +95,19 @@ public DefaultMessageListenerContainer(MongoTemplate template, Executor taskExec public DefaultMessageListenerContainer(MongoTemplate template, Executor taskExecutor, @Nullable ErrorHandler errorHandler) { - Assert.notNull(template, "Template must not be null!"); - Assert.notNull(taskExecutor, "TaskExecutor must not be null!"); + Assert.notNull(template, "Template must not be null"); + Assert.notNull(taskExecutor, "TaskExecutor must not be null"); this.taskExecutor = taskExecutor; this.taskFactory = new TaskFactory(template); this.errorHandler = Optional.ofNullable(errorHandler); } - /* - * (non-Javadoc) - * @see org.springframework.context.SmartLifecycle#isAutoStartup() - */ @Override public boolean isAutoStartup() { return false; } - /* - * (non-Javadoc) - * @see org.springframework.context.SmartLifecycle#stop(java.lang.Runnable) - */ @Override public void stop(Runnable callback) { @@ -117,73 +115,43 @@ public void stop(Runnable callback) { callback.run(); } - /* - * (non-Javadoc) - * @see 
org.springframework.context.Lifecycle#start() - */ @Override public void start() { - synchronized (lifecycleMonitor) { + lifecycleWrite.executeWithoutResult(() -> { + if (!this.running) { + subscriptions.values().stream() // + .filter(it -> !it.isActive()) // + .filter(TaskSubscription.class::isInstance) // + .map(TaskSubscription.class::cast) // + .map(TaskSubscription::getTask) // + .forEach(taskExecutor::execute); - if (this.running) { - return; + running = true; } - - subscriptions.values().stream() // - .filter(it -> !it.isActive()) // - .filter(it -> it instanceof TaskSubscription) // - .map(TaskSubscription.class::cast) // - .map(TaskSubscription::getTask) // - .forEach(taskExecutor::execute); - - running = true; - } + }); } - /* - * (non-Javadoc) - * @see org.springframework.context.Lifecycle#stop() - */ @Override public void stop() { - - synchronized (lifecycleMonitor) { - + lifecycleWrite.executeWithoutResult(() -> { if (this.running) { - subscriptions.values().forEach(Cancelable::cancel); - running = false; } - } + }); } - /* - * (non-Javadoc) - * @see org.springframework.context.Lifecycle#isRunning() - */ @Override public boolean isRunning() { - - synchronized (this.lifecycleMonitor) { - return running; - } + return lifecycleRead.execute(() -> running); } - /* - * (non-Javadoc) - * @see org.springframework.context.Phased#getPhase() - */ @Override public int getPhase() { return Integer.MAX_VALUE; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.monitor.MessageListenerContainer#register(org.springframework.data.mongodb.monitor.SubscriptionRequest, java.lang.Class) - */ @Override public Subscription register(SubscriptionRequest request, Class bodyType) { @@ -192,10 +160,6 @@ public Subscription register(SubscriptionRequest new DecoratingLoggingErrorHandler((exception) -> lookup(request).ifPresent(Subscription::cancel)))); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.monitor.MessageListenerContainer#register(org.springframework.data.mongodb.monitor.SubscriptionRequest, java.lang.Class, org.springframework.util.ErrorHandler) - */ @Override public Subscription register(SubscriptionRequest request, Class bodyType, ErrorHandler errorHandler) { @@ -203,46 +167,32 @@ public Subscription register(SubscriptionRequest lookup(SubscriptionRequest request) { - - synchronized (lifecycleMonitor) { - return Optional.ofNullable(subscriptions.get(request)); - } + return subscriptionRead.execute(() -> Optional.ofNullable(subscriptions.get(request))); } public Subscription register(SubscriptionRequest request, Task task) { - Subscription subscription = new TaskSubscription(task); - - synchronized (lifecycleMonitor) { - + return subscriptionWrite.execute(() -> { if (subscriptions.containsKey(request)) { return subscriptions.get(request); } + Subscription subscription = new TaskSubscription(task); this.subscriptions.put(request, subscription); - if (this.running) { + if (this.isRunning()) { taskExecutor.execute(task); } - } + return subscription; + }); - return subscription; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.monitor.MessageListenerContainer#remove(org.springframework.data.mongodb.monitor.Subscription) - */ @Override public void remove(Subscription subscription) { - - synchronized (lifecycleMonitor) { + subscriptionWrite.executeWithoutResult(() -> { if (subscriptions.containsValue(subscription)) { @@ -252,14 +202,13 @@ public void remove(Subscription subscription) { subscriptions.values().remove(subscription); } - } + }); } /** * @author Christoph Strobl * @since 2.1 */ - @EqualsAndHashCode static class TaskSubscription implements Subscription { private final Task task; @@ -286,24 +235,44 @@ public boolean await(Duration timeout) throws InterruptedException { public void cancel() throws DataAccessResourceFailureException { task.cancel(); } + + @Override + public boolean 
equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + TaskSubscription that = (TaskSubscription) o; + + return ObjectUtils.nullSafeEquals(this.task, that.task); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(task); + } } /** * @author Christoph Strobl * @since 2.1 */ - @RequiredArgsConstructor(access = AccessLevel.PACKAGE) private static class DecoratingLoggingErrorHandler implements ErrorHandler { private final Log logger = LogFactory.getLog(DecoratingLoggingErrorHandler.class); private final ErrorHandler delegate; + DecoratingLoggingErrorHandler(ErrorHandler delegate) { + this.delegate = delegate; + } + @Override public void handleError(Throwable t) { if (logger.isErrorEnabled()) { - logger.error("Unexpected error occurred while listening to MongoDB.", t); + logger.error("Unexpected error occurred while listening to MongoDB", t); } delegate.handleError(t); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/LazyMappingDelegatingMessage.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/LazyMappingDelegatingMessage.java index fa57b041dd..1c934e8302 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/LazyMappingDelegatingMessage.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/LazyMappingDelegatingMessage.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,6 @@ */ package org.springframework.data.mongodb.core.messaging; -import lombok.ToString; - import org.bson.Document; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.util.ClassUtils; @@ -26,7 +24,6 @@ * @author Mark Paluch * @since 2.1 */ -@ToString(of = { "delegate", "targetType" }) class LazyMappingDelegatingMessage implements Message { private final Message delegate; @@ -40,19 +37,11 @@ class LazyMappingDelegatingMessage implements Message { this.converter = converter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.messaging.Message#getRaw() - */ @Override public S getRaw() { return delegate.getRaw(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.messaging.Message#getBody() - */ @Override public T getBody() { @@ -74,12 +63,12 @@ public T getBody() { String.format("No converter found capable of converting %s to %s", messageBody.getClass(), targetType)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.messaging.Message#getProperties() - */ @Override public MessageProperties getProperties() { return delegate.getProperties(); } + + public String toString() { + return "LazyMappingDelegatingMessage(delegate=" + this.delegate + ", targetType=" + this.targetType + ")"; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Message.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Message.java index 7a7a36c4e5..46db068096 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Message.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Message.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,9 @@ */ package org.springframework.data.mongodb.core.messaging; -import lombok.EqualsAndHashCode; -import lombok.ToString; - import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; /** * General message abstraction for any type of Event / Message published by MongoDB server to the client. This might be @@ -33,6 +31,7 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Myroslav Kosinskyi * @see MessageProperties * @since 2.1 */ @@ -54,6 +53,17 @@ public interface Message { @Nullable T getBody(); + /** + * The converted message body before change if available. + * + * @return can be {@literal null}. + * @since 4.0 + */ + @Nullable + default T getBodyBeforeChange() { + return null; + } + /** * {@link MessageProperties} containing information about the {@link Message} origin and other metadata. * @@ -65,8 +75,6 @@ public interface Message { * @author Christoph Strobl * @since 2.1 */ - @ToString - @EqualsAndHashCode class MessageProperties { private static final MessageProperties EMPTY = new MessageProperties(); @@ -77,7 +85,7 @@ class MessageProperties { /** * The database name the message originates from. * - * @return + * @return can be {@literal null}. 
*/ @Nullable public String getDatabaseName() { @@ -87,7 +95,7 @@ public String getDatabaseName() { /** * The collection name the message originates from. * - * @return + * @return can be {@literal null}. */ @Nullable public String getCollectionName() { @@ -103,7 +111,7 @@ public static MessageProperties empty() { /** * Obtain a shiny new {@link MessagePropertiesBuilder} and start defining options in this fancy fluent way. Just - * don't forget to call {@link MessagePropertiesBuilder#build() build()} when your're done. + * don't forget to call {@link MessagePropertiesBuilder#build() build()} when done. * * @return new instance of {@link MessagePropertiesBuilder}. */ @@ -111,6 +119,34 @@ public static MessagePropertiesBuilder builder() { return new MessagePropertiesBuilder(); } + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + MessageProperties that = (MessageProperties) o; + + if (!ObjectUtils.nullSafeEquals(this.databaseName, that.databaseName)) { + return false; + } + + return ObjectUtils.nullSafeEquals(this.collectionName, that.collectionName); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(databaseName); + result = 31 * result + ObjectUtils.nullSafeHashCode(collectionName); + return result; + } + + public String toString() { + return "Message.MessageProperties(databaseName=" + this.getDatabaseName() + ", collectionName=" + + this.getCollectionName() + ")"; + } + /** * Builder for {@link MessageProperties}. 
* @@ -128,7 +164,7 @@ public static class MessagePropertiesBuilder { */ public MessagePropertiesBuilder databaseName(String dbName) { - Assert.notNull(dbName, "Database name must not be null!"); + Assert.notNull(dbName, "Database name must not be null"); this.databaseName = dbName; return this; @@ -140,7 +176,7 @@ public MessagePropertiesBuilder databaseName(String dbName) { */ public MessagePropertiesBuilder collectionName(String collectionName) { - Assert.notNull(collectionName, "Collection name must not be null!"); + Assert.notNull(collectionName, "Collection name must not be null"); this.collectionName = collectionName; return this; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListener.java index 4bb6ec7c2d..e23b6f39dc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListener.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListenerContainer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListenerContainer.java index 44ce4b327c..5d244fb171 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListenerContainer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/MessageListenerContainer.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -50,8 +50,8 @@ static MessageListenerContainer create(MongoTemplate template) { * * MessageListenerContainer container = ... * - * MessageListener, Object> messageListener = (message) -> message.... - * ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, () -> "collection-name"); + * MessageListener<ChangeStreamDocument<Document>, Object> messageListener = (message) -> message.... 
+ * ChangeStreamRequest<Object> request = new ChangeStreamRequest<>(messageListener, () -> "collection-name"); * * Subscription subscription = container.register(request); * @@ -75,8 +75,8 @@ default Subscription register(SubscriptionRequest * MessageListenerContainer container = ... * - * MessageListener, Document> messageListener = (message) -> message.getBody().toJson(); - * ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, () -> "collection-name"); + * MessageListener<ChangeStreamDocument<Document>, Document> messageListener = (message) -> message.getBody().toJson(); + * ChangeStreamRequest<Document> request = new ChangeStreamRequest<>(messageListener, () -> "collection-name"); * * Subscription subscription = container.register(request, Document.class); * @@ -84,17 +84,17 @@ default Subscription register(SubscriptionRequest + *
          * Registering the very same {@link SubscriptionRequest} more than once simply returns the already existing * {@link Subscription}. - *

          + *
          * Unless a {@link Subscription} is {@link #remove(Subscription) removed} form the container, the {@link Subscription} * is restarted once the container itself is restarted. - *

          + *
          * Errors during {@link Message} retrieval lead to {@link Subscription#cancel() cannelation} of the underlying task. * * @param request must not be {@literal null}. - * @param type the exact target or a more concrete type of the {@link Message#getBody()}. + * @param bodyType the exact target or a more concrete type of the {@link Message#getBody()}. * @return never {@literal null}. */ Subscription register(SubscriptionRequest request, Class bodyType); @@ -108,8 +108,8 @@ default Subscription register(SubscriptionRequest * MessageListenerContainer container = ... * - * MessageListener, Document> messageListener = (message) -> message.getBody().toJson(); - * ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, () -> "collection-name"); + * MessageListener<ChangeStreamDocument<Document>, Document> messageListener = (message) -> message.getBody().toJson(); + * ChangeStreamRequest<Document> request = new ChangeStreamRequest<>(messageListener, () -> "collection-name"); * * Subscription subscription = container.register(request, Document.class); * @@ -117,17 +117,17 @@ default Subscription register(SubscriptionRequest + *
          * Registering the very same {@link SubscriptionRequest} more than once simply returns the already existing * {@link Subscription}. - *

          + *
          * Unless a {@link Subscription} is {@link #remove(Subscription) removed} form the container, the {@link Subscription} * is restarted once the container itself is restarted. - *

          + *
          * Errors during {@link Message} retrieval are delegated to the given {@link ErrorHandler}. * * @param request must not be {@literal null}. - * @param type the exact target or a more concrete type of the {@link Message#getBody()}. Must not be {@literal null}. + * @param bodyType the exact target or a more concrete type of the {@link Message#getBody()}. Must not be {@literal null}. * @param errorHandler the callback to invoke when retrieving the {@link Message} from the data source fails for some * reason. * @return never {@literal null}. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SimpleMessage.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SimpleMessage.java index 2ce547389d..be5308e3cf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SimpleMessage.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SimpleMessage.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,9 @@ */ package org.springframework.data.mongodb.core.messaging; -import lombok.EqualsAndHashCode; -import lombok.ToString; - import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; /** * Trivial {@link Message} implementation. 
@@ -27,8 +25,6 @@ * @author Christoph Strobl * @since 2.1 */ -@EqualsAndHashCode -@ToString class SimpleMessage implements Message { private @Nullable final S raw; @@ -42,37 +38,56 @@ class SimpleMessage implements Message { */ SimpleMessage(@Nullable S raw, @Nullable T body, MessageProperties properties) { - Assert.notNull(properties, "Properties must not be null! Use MessageProperties.empty() instead."); + Assert.notNull(properties, "Properties must not be null Use MessageProperties.empty() instead"); this.raw = raw; this.body = body; this.properties = properties; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.messaging.Message#getRaw() - */ @Override public S getRaw() { return raw; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.messaging.Message#getBody() - */ @Override public T getBody() { return body; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.messaging.Message#getProperties() - */ @Override public MessageProperties getProperties() { return properties; } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + SimpleMessage that = (SimpleMessage) o; + + if (!ObjectUtils.nullSafeEquals(this.raw, that.raw)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.body, that.body)) { + return false; + } + return ObjectUtils.nullSafeEquals(this.properties, that.properties); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(raw); + result = 31 * result + ObjectUtils.nullSafeHashCode(body); + result = 31 * result + ObjectUtils.nullSafeHashCode(properties); + return result; + } + + public String toString() { + return "SimpleMessage(raw=" + this.getRaw() + ", body=" + this.getBody() + ", properties=" + this.getProperties() + + ")"; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Subscription.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Subscription.java index b50c8f3fd3..5e928fe49d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Subscription.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Subscription.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,10 +19,10 @@ /** * The {@link Subscription} is the link between the {@link SubscriptionRequest} and the actual running {@link Task}. - *

          + *
          * Due to the asynchronous nature of the {@link Task} execution a {@link Subscription} might not immediately become * active. {@link #isActive()} provides an answer if the underlying {@link Task} is already running. - *

          + *
          * * @author Christoph Strobl * @author Mark Paluch diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SubscriptionRequest.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SubscriptionRequest.java index 854aa92487..287ba293b6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SubscriptionRequest.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/SubscriptionRequest.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,12 @@ */ package org.springframework.data.mongodb.core.messaging; +import java.time.Duration; + +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; /** * The actual {@link SubscriptionRequest} sent to the {@link MessageListenerContainer}. This wrapper type allows passing @@ -51,8 +56,103 @@ public interface SubscriptionRequest { interface RequestOptions { /** - * @return the name of the collection to subscribe to. Never {@literal null}. + * Get the database name of the db. + * + * @return the name of the database to subscribe to. Can be {@literal null} in which case the default + * {@link MongoDatabaseFactory#getMongoDatabase() database} is used. 
+ */ + @Nullable + default String getDatabaseName() { + return null; + } + + /** + * Get the collection name. + * + * @return the name of the collection to subscribe to. Can be {@literal null}. */ + @Nullable String getCollectionName(); + + /** + * Get the maximum wait time (the time till the next Document is emitted) to apply when reading from the collection. + * + * @return never {@literal null}. {@link Duration#ZERO} by default. + * @since 3.0 + */ + default Duration maxAwaitTime() { + return Duration.ZERO; + } + + /** + * Create empty options. + * + * @return new instance of empty {@link RequestOptions}. + */ + static RequestOptions none() { + return () -> null; + } + + /** + * Create options with the provided database. + * + * @param database must not be {@literal null}. + * @return new instance of empty {@link RequestOptions}. + */ + static RequestOptions justDatabase(String database) { + + Assert.notNull(database, "Database must not be null"); + + return new RequestOptions() { + + @Override + public String getCollectionName() { + return null; + } + + @Override + public String getDatabaseName() { + return database; + } + }; + } + + /** + * Create options with the provided collection. + * + * @param collection must not be {@literal null}. + * @return new instance of empty {@link RequestOptions}. + */ + static RequestOptions justCollection(String collection) { + + Assert.notNull(collection, "Collection must not be null"); + return () -> collection; + } + + /** + * Create options with the provided database and collection. + * + * @param database must not be {@literal null}. + * @param collection must not be {@literal null}. + * @return new instance of empty {@link RequestOptions}. 
+ */ + static RequestOptions of(String database, String collection) { + + Assert.notNull(database, "Database must not be null"); + Assert.notNull(collection, "Collection must not be null"); + + return new RequestOptions() { + + @Override + public String getCollectionName() { + return collection; + } + + @Override + public String getDatabaseName() { + return database; + } + }; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequest.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequest.java index f03ac231c9..c6caef12fb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequest.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequest.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -28,13 +28,13 @@ * {@link SubscriptionRequest} implementation to be used to listen to query results in a * Capped Collection using a * Tailable Cursor. - *

          + *
          * The most trivial use case is subscribing to all events of a specific {@link com.mongodb.client.MongoCollection * collection}. * *

            * 
          - *     TailableCursorRequest request = new TailableCursorRequest<>(System.out::println, () -> "collection-name");
          + *     TailableCursorRequest<Document> request = new TailableCursorRequest<>(System.out::println, () -> "collection-name");
            * 
            * 
          * @@ -43,7 +43,7 @@ * *
            *   
          - *       TailableCursorRequest request = TailableCursorRequest.builder()
          + *       TailableCursorRequest<Document> request = TailableCursorRequest.builder()
            *           .collection("collection-name")
            *           .publishTo(System.out::println)
            *           .build();
          @@ -68,27 +68,19 @@ public class TailableCursorRequest implements SubscriptionRequest messageListener, RequestOptions options) {
           
          -		Assert.notNull(messageListener, "MessageListener must not be null!");
          -		Assert.notNull(options, "Options must not be null!");
          +		Assert.notNull(messageListener, "MessageListener must not be null");
          +		Assert.notNull(options, "Options must not be null");
           
           		this.messageListener = messageListener;
          -		this.options = options instanceof TailableCursorRequestOptions ? (TailableCursorRequestOptions) options
          -				: TailableCursorRequestOptions.of(options);
          +		this.options = options instanceof TailableCursorRequestOptions tailableCursorRequestOptions ?
          +				tailableCursorRequestOptions : TailableCursorRequestOptions.of(options);
           	}
           
          -	/*
          -	 * (non-Javadoc)
          -	 * @see org.springframework.data.mongodb.monitor.SubscriptionRequest#getMessageListener()
          -	 */
           	@Override
           	public MessageListener getMessageListener() {
           		return messageListener;
           	}
           
          -	/*
          -	 * (non-Javadoc)
          -	 * @see org.springframework.data.mongodb.monitor.SubscriptionRequest#getRequestOptions()
          -	 */
           	@Override
           	public TailableCursorRequestOptions getRequestOptions() {
           		return options;
          @@ -96,7 +88,7 @@ public TailableCursorRequestOptions getRequestOptions() {
           
           	/**
           	 * Obtain a shiny new {@link TailableCursorRequestBuilder} and start defining options in this fancy fluent way. Just
          -	 * don't forget to call {@link TailableCursorRequestBuilder#build() build()} when your're done.
          +	 * don't forget to call {@link TailableCursorRequestBuilder#build() build()} when done.
           	 *
           	 * @return new instance of {@link TailableCursorRequestBuilder}.
           	 */
          @@ -106,7 +98,7 @@ public static TailableCursorRequestBuilder builder() {
           
           	/**
           	 * Obtain a shiny new {@link TailableCursorRequestBuilder} and start defining options in this fancy fluent way. Just
          -	 * don't forget to call {@link TailableCursorRequestBuilder#build() build()} when your're done.
          +	 * don't forget to call {@link TailableCursorRequestBuilder#build() build()} when done.
           	 *
           	 * @return new instance of {@link TailableCursorRequestBuilder}.
           	 */
          @@ -135,7 +127,7 @@ public static TailableCursorRequestOptions of(RequestOptions options) {
           
           		/**
           		 * Obtain a shiny new {@link TailableCursorRequestOptionsBuilder} and start defining options in this fancy fluent
          -		 * way. Just don't forget to call {@link TailableCursorRequestOptionsBuilder#build() build()} when your're done.
          +		 * way. Just don't forget to call {@link TailableCursorRequestOptionsBuilder#build() build()} when done.
           		 *
           		 * @return new instance of {@link TailableCursorRequestOptionsBuilder}.
           		 */
          @@ -173,7 +165,7 @@ private TailableCursorRequestOptionsBuilder() {}
           			 */
           			public TailableCursorRequestOptionsBuilder collection(String collection) {
           
          -				Assert.hasText(collection, "Collection must not be null nor empty!");
          +				Assert.hasText(collection, "Collection must not be null nor empty");
           
           				this.collectionName = collection;
           				return this;
          @@ -187,7 +179,7 @@ public TailableCursorRequestOptionsBuilder collection(String collection) {
           			 */
           			public TailableCursorRequestOptionsBuilder filter(Query filter) {
           
          -				Assert.notNull(filter, "Filter must not be null!");
          +				Assert.notNull(filter, "Filter must not be null");
           
           				this.query = filter;
           				return this;
          @@ -230,7 +222,7 @@ private TailableCursorRequestBuilder() {}
           		 */
           		public TailableCursorRequestBuilder collection(String collectionName) {
           
          -			Assert.hasText(collectionName, "CollectionName must not be null!");
          +			Assert.hasText(collectionName, "CollectionName must not be null");
           
           			delegate.collection(collectionName);
           			return this;
          @@ -244,7 +236,7 @@ public TailableCursorRequestBuilder collection(String collectionName) {
           		 */
           		public TailableCursorRequestBuilder publishTo(MessageListener messageListener) {
           
          -			Assert.notNull(messageListener, "MessageListener must not be null!");
          +			Assert.notNull(messageListener, "MessageListener must not be null");
           
           			this.listener = messageListener;
           			return this;
          @@ -258,7 +250,7 @@ public TailableCursorRequestBuilder publishTo(MessageListener filter(Query filter) {
           
          -			Assert.notNull(filter, "Filter must not be null!");
          +			Assert.notNull(filter, "Filter must not be null");
           
           			delegate.filter(filter);
           			return this;
          @@ -269,7 +261,7 @@ public TailableCursorRequestBuilder filter(Query filter) {
           		 */
           		public TailableCursorRequest build() {
           
          -			Assert.notNull(listener, "MessageListener must not be null!");
          +			Assert.notNull(listener, "MessageListener must not be null");
           
           			return new TailableCursorRequest<>(listener, delegate.build());
           		}
          diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorTask.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorTask.java
          index b07e89ef9b..43bd7bd55d 100644
          --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorTask.java
          +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TailableCursorTask.java
          @@ -1,11 +1,11 @@
           /*
          - * Copyright 2018 the original author or authors.
          + * Copyright 2018-2025 the original author or authors.
            *
            * Licensed under the Apache License, Version 2.0 (the "License");
            * you may not use this file except in compliance with the License.
            * You may obtain a copy of the License at
            *
          - *      http://www.apache.org/licenses/LICENSE-2.0
          + *      https://www.apache.org/licenses/LICENSE-2.0
            *
            * Unless required by applicable law or agreed to in writing, software
            * distributed under the License is distributed on an "AS IS" BASIS,
          @@ -15,11 +15,12 @@
            */
           package org.springframework.data.mongodb.core.messaging;
           
          +import java.util.concurrent.TimeUnit;
          +
           import org.bson.Document;
           import org.springframework.data.mongodb.core.MongoTemplate;
           import org.springframework.data.mongodb.core.convert.QueryMapper;
           import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions;
          -import org.springframework.data.mongodb.core.messaging.TailableCursorRequest.TailableCursorRequestOptions;
           import org.springframework.data.mongodb.core.query.Query;
           import org.springframework.util.ErrorHandler;
           
          @@ -43,19 +44,14 @@ public TailableCursorTask(MongoTemplate template, TailableCursorRequest reque
           		queryMapper = new QueryMapper(template.getConverter());
           	}
           
          -	/* 
          -	 * (non-Javadoc)
          -	 * @see org.springframework.data.mongodb.core.messaging.CursorReadingTask#initCursor(org.springframework.data.mongodb.core.MongoTemplate, org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions, java.lang.Class)
          -	 */
           	@Override
           	protected MongoCursor initCursor(MongoTemplate template, RequestOptions options, Class targetType) {
           
           		Document filter = new Document();
           		Collation collation = null;
           
          -		if (options instanceof TailableCursorRequest.TailableCursorRequestOptions) {
          +		if (options instanceof TailableCursorRequest.TailableCursorRequestOptions requestOptions) {
           
          -			TailableCursorRequestOptions requestOptions = (TailableCursorRequestOptions) options;
           			if (requestOptions.getQuery().isPresent()) {
           
           				Query query = requestOptions.getQuery().get();
          @@ -75,6 +71,10 @@ protected MongoCursor initCursor(MongoTemplate template, RequestOption
           			iterable = iterable.collation(collation);
           		}
           
          +		if (!options.maxAwaitTime().isZero()) {
          +			iterable = iterable.maxAwaitTime(options.maxAwaitTime().toMillis(), TimeUnit.MILLISECONDS);
          +		}
          +
           		return iterable.iterator();
           	}
           }
          diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Task.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Task.java
          index 3e5101fbfc..e8b9c018b1 100644
          --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Task.java
          +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/Task.java
          @@ -1,11 +1,11 @@
           /*
          - * Copyright 2018 the original author or authors.
          + * Copyright 2018-2025 the original author or authors.
            *
            * Licensed under the Apache License, Version 2.0 (the "License");
            * you may not use this file except in compliance with the License.
            * You may obtain a copy of the License at
            *
          - *      http://www.apache.org/licenses/LICENSE-2.0
          + *      https://www.apache.org/licenses/LICENSE-2.0
            *
            * Unless required by applicable law or agreed to in writing, software
            * distributed under the License is distributed on an "AS IS" BASIS,
          @@ -57,6 +57,6 @@ default boolean isActive() {
           	 * @since 2.1
           	 */
           	enum State {
          -		CREATED, STARTING, RUNNING, CANCELLED;
          +		CREATED, STARTING, RUNNING, CANCELLED
           	}
           }
          diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TaskFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TaskFactory.java
          index 17724ec404..0bfaa1c574 100644
          --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TaskFactory.java
          +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/messaging/TaskFactory.java
          @@ -1,11 +1,11 @@
           /*
          - * Copyright 2018 the original author or authors.
          + * Copyright 2018-2025 the original author or authors.
            *
            * Licensed under the Apache License, Version 2.0 (the "License");
            * you may not use this file except in compliance with the License.
            * You may obtain a copy of the License at
            *
          - *      http://www.apache.org/licenses/LICENSE-2.0
          + *      https://www.apache.org/licenses/LICENSE-2.0
            *
            * Unless required by applicable law or agreed to in writing, software
            * distributed under the License is distributed on an "AS IS" BASIS,
          @@ -28,16 +28,16 @@
            */
           class TaskFactory {
           
          -	private final MongoTemplate tempate;
          +	private final MongoTemplate template;
           
           	/**
           	 * @param template must not be {@literal null}.
           	 */
           	TaskFactory(MongoTemplate template) {
           
          -		Assert.notNull(template, "Template must not be null!");
          +		Assert.notNull(template, "Template must not be null");
           
          -		this.tempate = template;
          +		this.template = template;
           	}
           
           	/**
          @@ -52,16 +52,16 @@ class TaskFactory {
           	 Task forRequest(SubscriptionRequest request, Class targetType,
           			ErrorHandler errorHandler) {
           
          -		Assert.notNull(request, "Request must not be null!");
          -		Assert.notNull(targetType, "TargetType must not be null!");
          +		Assert.notNull(request, "Request must not be null");
          +		Assert.notNull(targetType, "TargetType must not be null");
           
          -		if (request instanceof ChangeStreamRequest) {
          -			return new ChangeStreamTask(tempate, (ChangeStreamRequest) request, targetType, errorHandler);
          -		} else if (request instanceof TailableCursorRequest) {
          -			return new TailableCursorTask(tempate, (TailableCursorRequest) request, targetType, errorHandler);
          +		if (request instanceof ChangeStreamRequest changeStreamRequest) {
          +			return new ChangeStreamTask(template, changeStreamRequest, targetType, errorHandler);
          +		} else if (request instanceof TailableCursorRequest tailableCursorRequest) {
          +			return new TailableCursorTask(template, tailableCursorRequest, targetType, errorHandler);
           		}
           
           		throw new IllegalArgumentException(
          -				"oh wow - seems you're using some fancy new feature we do not support. Please be so kind and leave us a note in the issue tracker so we can get this fixed.\nThank you!");
          +				"oh wow - seems you're using some fancy new feature we do not support; Please be so kind and leave us a note in the issue tracker so we can get this fixed\nThank you");
           	}
           }
          diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicQuery.java
          index eac745e056..8b1620b320 100644
          --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicQuery.java
          +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicQuery.java
          @@ -1,11 +1,11 @@
           /*
          - * Copyright 2010-2018 the original author or authors.
          + * Copyright 2010-2025 the original author or authors.
            *
            * Licensed under the Apache License, Version 2.0 (the "License");
            * you may not use this file except in compliance with the License.
            * You may obtain a copy of the License at
            *
          - *      http://www.apache.org/licenses/LICENSE-2.0
          + *      https://www.apache.org/licenses/LICENSE-2.0
            *
            * Unless required by applicable law or agreed to in writing, software
            * distributed under the License is distributed on an "AS IS" BASIS,
          @@ -73,7 +73,7 @@ public BasicQuery(@Nullable String query, @Nullable String fields) {
           	 *
           	 * @param queryObject must not be {@literal null}.
           	 * @param fieldsObject must not be {@literal null}.
          -	 * @throws IllegalArgumentException when {@code sortObject} or {@code fieldsObject} is {@literal null}.
          +	 * @throws IllegalArgumentException when {@code queryObject} or {@code fieldsObject} is {@literal null}.
           	 */
           	public BasicQuery(Document queryObject, Document fieldsObject) {
           
          @@ -85,10 +85,21 @@ public BasicQuery(Document queryObject, Document fieldsObject) {
           		this.sortObject = new Document();
           	}
           
          -	/*
          -	 * (non-Javadoc)
          -	 * @see org.springframework.data.mongodb.core.query.Query#addCriteria(org.springframework.data.mongodb.core.query.CriteriaDefinition)
          +	/**
+	 * Create a new {@link BasicQuery} from the given {@link Query}. The resulting query is a copy of the given {@link Query}.
          +	 *
          +	 * @param query the query to copy.
          +	 * @since 4.4
           	 */
          +	public BasicQuery(Query query) {
          +
          +		super(query);
          +		this.queryObject = query.getQueryObject();
          +		this.setFieldsObject(query.getFieldsObject());
          +		this.setSortObject(query.getSortObject());
          +		this.setMeta(query.getMeta());
          +	}
          +
           	@Override
           	public Query addCriteria(CriteriaDefinition criteria) {
           
          @@ -97,19 +108,11 @@ public Query addCriteria(CriteriaDefinition criteria) {
           		return this;
           	}
           
          -	/*
          -	 * (non-Javadoc)
          -	 * @see org.springframework.data.mongodb.core.query.Query#getQueryObject()
          -	 */
           	@Override
           	public Document getQueryObject() {
           		return this.queryObject;
           	}
           
          -	/*
          -	 * (non-Javadoc)
          -	 * @see org.springframework.data.mongodb.core.query.Query#getFieldsObject()
          -	 */
           	@Override
           	public Document getFieldsObject() {
           
          @@ -119,10 +122,6 @@ public Document getFieldsObject() {
           		return combinedFieldsObject;
           	}
           
          -	/*
          -	 * (non-Javadoc)
          -	 * @see org.springframework.data.mongodb.core.query.Query#getSortObject()
          -	 */
           	@Override
           	public Document getSortObject() {
           
          @@ -148,6 +147,11 @@ public void setSortObject(Document sortObject) {
           		this.sortObject = sortObject;
           	}
           
          +	@Override
          +	public boolean isSorted() {
          +		return super.isSorted() || !sortObject.isEmpty();
          +	}
          +
           	/**
           	 * Set the fields (projection) {@link Document}.
           	 *
          @@ -155,40 +159,30 @@ public void setSortObject(Document sortObject) {
           	 * @throws IllegalArgumentException when {@code fieldsObject} is {@literal null}.
           	 * @since 1.6
           	 */
          -	protected void setFieldsObject(Document fieldsObject) {
          +	public void setFieldsObject(Document fieldsObject) {
           
          -		Assert.notNull(sortObject, "Field document must not be null");
          +		Assert.notNull(fieldsObject, "Field document must not be null");
           
           		this.fieldsObject = fieldsObject;
           	}
           
          -	/*
          -	 * (non-Javadoc)
          -	 * @see org.springframework.data.mongodb.core.query.Query#equals(java.lang.Object)
          -	 */
           	@Override
          -	public boolean equals(Object o) {
          +	public boolean equals(@Nullable Object o) {
           
           		if (this == o) {
           			return true;
           		}
           
          -		if (!(o instanceof BasicQuery)) {
          +		if (!(o instanceof BasicQuery that)) {
           			return false;
           		}
           
          -		BasicQuery that = (BasicQuery) o;
          -
           		return querySettingsEquals(that) && //
           				nullSafeEquals(fieldsObject, that.fieldsObject) && //
           				nullSafeEquals(queryObject, that.queryObject) && //
           				nullSafeEquals(sortObject, that.sortObject);
           	}
           
          -	/*
          -	 * (non-Javadoc)
          -	 * @see org.springframework.data.mongodb.core.query.Query#hashCode()
          -	 */
           	@Override
           	public int hashCode() {
           
          diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicUpdate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicUpdate.java
          index 8c5e505fa6..12843ce622 100644
          --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicUpdate.java
          +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/BasicUpdate.java
          @@ -1,11 +1,11 @@
           /*
          - * Copyright 2010-2018 the original author or authors.
          + * Copyright 2010-2025 the original author or authors.
            *
            * Licensed under the Apache License, Version 2.0 (the "License");
            * you may not use this file except in compliance with the License.
            * You may obtain a copy of the License at
            *
          - *      http://www.apache.org/licenses/LICENSE-2.0
          + *      https://www.apache.org/licenses/LICENSE-2.0
            *
            * Unless required by applicable law or agreed to in writing, software
            * distributed under the License is distributed on an "AS IS" BASIS,
          @@ -15,12 +15,21 @@
            */
           package org.springframework.data.mongodb.core.query;
           
          -import java.util.Arrays;
          +import java.util.ArrayList;
           import java.util.Collections;
          +import java.util.LinkedHashMap;
          +import java.util.List;
          +import java.util.Map;
          +import java.util.function.BiFunction;
           
           import org.bson.Document;
           
          +import org.springframework.lang.Nullable;
          +import org.springframework.util.ClassUtils;
          +
           /**
          + * {@link Document}-based {@link Update} variant.
          + *
            * @author Thomas Risberg
            * @author John Brisbin
            * @author Oliver Gierke
          @@ -29,85 +38,117 @@
            */
           public class BasicUpdate extends Update {
           
          -	private Document updateObject;
          +	private final Document updateObject;
           
           	public BasicUpdate(String updateString) {
          -		super();
          -		this.updateObject = Document.parse(updateString);
          +		this(Document.parse(updateString));
           	}
           
           	public BasicUpdate(Document updateObject) {
          -		super();
           		this.updateObject = updateObject;
           	}
           
           	@Override
          -	public Update set(String key, Object value) {
          -		updateObject.put("$set", Collections.singletonMap(key, value));
          +	public Update set(String key, @Nullable Object value) {
          +		setOperationValue("$set", key, value);
           		return this;
           	}
           
           	@Override
           	public Update unset(String key) {
          -		updateObject.put("$unset", Collections.singletonMap(key, 1));
          +		setOperationValue("$unset", key, 1);
           		return this;
           	}
           
           	@Override
           	public Update inc(String key, Number inc) {
          -		updateObject.put("$inc", Collections.singletonMap(key, inc));
          -		return this;
          -	}
          -
          -	@Override
          -	public Update push(String key, Object value) {
          -		updateObject.put("$push", Collections.singletonMap(key, value));
          +		setOperationValue("$inc", key, inc);
           		return this;
           	}
           
           	@Override
          -	public Update pushAll(String key, Object[] values) {
          -		Document keyValue = new Document();
          -		keyValue.put(key, values);
          -		updateObject.put("$pushAll", keyValue);
          +	public Update push(String key, @Nullable Object value) {
          +		setOperationValue("$push", key, value);
           		return this;
           	}
           
           	@Override
          -	public Update addToSet(String key, Object value) {
          -		updateObject.put("$addToSet", Collections.singletonMap(key, value));
          +	public Update addToSet(String key, @Nullable Object value) {
          +		setOperationValue("$addToSet", key, value);
           		return this;
           	}
           
           	@Override
           	public Update pop(String key, Position pos) {
          -		updateObject.put("$pop", Collections.singletonMap(key, (pos == Position.FIRST ? -1 : 1)));
          +		setOperationValue("$pop", key, (pos == Position.FIRST ? -1 : 1));
           		return this;
           	}
           
           	@Override
          -	public Update pull(String key, Object value) {
          -		updateObject.put("$pull", Collections.singletonMap(key, value));
          +	public Update pull(String key, @Nullable Object value) {
          +		setOperationValue("$pull", key, value);
           		return this;
           	}
           
           	@Override
           	public Update pullAll(String key, Object[] values) {
          -		Document keyValue = new Document();
          -		keyValue.put(key, Arrays.copyOf(values, values.length));
          -		updateObject.put("$pullAll", keyValue);
          +		setOperationValue("$pullAll", key, List.of(values), (o, o2) -> {
          +
          +			if (o instanceof List prev && o2 instanceof List currentValue) {
          +				List merged = new ArrayList<>(prev.size() + currentValue.size());
          +				merged.addAll(prev);
          +				merged.addAll(currentValue);
          +				return merged;
          +			}
          +
          +			return o2;
          +		});
           		return this;
           	}
           
           	@Override
           	public Update rename(String oldName, String newName) {
          -		updateObject.put("$rename", Collections.singletonMap(oldName, newName));
          +		setOperationValue("$rename", oldName, newName);
           		return this;
           	}
           
          +	@Override
          +	public boolean modifies(String key) {
          +		return super.modifies(key) || Update.fromDocument(getUpdateObject()).modifies(key);
          +	}
          +
           	@Override
           	public Document getUpdateObject() {
           		return updateObject;
           	}
           
          +	void setOperationValue(String operator, String key, @Nullable Object value) {
          +		setOperationValue(operator, key, value, (o, o2) -> o2);
          +	}
          +
          +	void setOperationValue(String operator, String key, @Nullable Object value,
          +			BiFunction mergeFunction) {
          +
          +		if (!updateObject.containsKey(operator)) {
          +			updateObject.put(operator, Collections.singletonMap(key, value));
          +		} else {
          +			Object o = updateObject.get(operator);
          +			if (o instanceof Map existing) {
          +				Map target = new LinkedHashMap<>(existing);
          +
          +				if (target.containsKey(key)) {
          +					target.put(key, mergeFunction.apply(target.get(key), value));
          +				} else {
          +					target.put(key, value);
          +				}
          +				updateObject.put(operator, target);
          +			} else {
          +				throw new IllegalStateException(
          +						"Cannot add ['%s' : { '%s' : ... }]. Operator already exists with value of type [%s] which is not suitable for appending"
          +								.formatted(operator, key,
          +										o != null ? ClassUtils.getShortName(o.getClass()) : "null"));
          +			}
          +		}
          +	}
          +
           }
          diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Collation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Collation.java
          index d8ec957d97..de24c0511d 100644
          --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Collation.java
          +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Collation.java
          @@ -1,11 +1,11 @@
           /*
          - * Copyright 2017-2018 the original author or authors.
          + * Copyright 2017-2025 the original author or authors.
            *
            * Licensed under the Apache License, Version 2.0 (the "License");
            * you may not use this file except in compliance with the License.
            * You may obtain a copy of the License at
            *
          - *      http://www.apache.org/licenses/LICENSE-2.0
          + *      https://www.apache.org/licenses/LICENSE-2.0
            *
            * Unless required by applicable law or agreed to in writing, software
            * distributed under the License is distributed on an "AS IS" BASIS,
          @@ -15,16 +15,12 @@
            */
           package org.springframework.data.mongodb.core.query;
           
          -import lombok.AccessLevel;
          -import lombok.AllArgsConstructor;
          -import lombok.Getter;
          -import lombok.RequiredArgsConstructor;
          -
           import java.util.Locale;
           import java.util.Optional;
           
           import org.bson.Document;
           import org.springframework.core.convert.converter.Converter;
          +import org.springframework.lang.Nullable;
           import org.springframework.util.Assert;
           import org.springframework.util.StringUtils;
           
          @@ -38,7 +34,7 @@
            * Central abstraction for MongoDB collation support. 
          * Allows fluent creation of a collation {@link Document} that can be used for creating collections & indexes as well as * querying data. - *

          + *
          * NOTE: Please keep in mind that queries will only make use of an index with collation settings if the * query itself specifies the same collation. * @@ -63,7 +59,7 @@ public class Collation { private Collation(CollationLocale locale) { - Assert.notNull(locale, "ICULocale must not be null!"); + Assert.notNull(locale, "ICULocale must not be null"); this.locale = locale; } @@ -81,11 +77,11 @@ public static Collation simple() { * {@link java.util.Locale#getVariant()}. * * @param locale must not be {@literal null}. - * @return + * @return new instance of {@link Collation}. */ public static Collation of(Locale locale) { - Assert.notNull(locale, "Locale must not be null!"); + Assert.notNull(locale, "Locale must not be null"); String format; @@ -102,7 +98,7 @@ public static Collation of(Locale locale) { * Create new {@link Collation} with locale set to the given ICU language. * * @param language must not be {@literal null}. - * @return + * @return new instance of {@link Collation}. */ public static Collation of(String language) { return of(CollationLocale.of(language)); @@ -112,23 +108,40 @@ public static Collation of(String language) { * Create new {@link Collation} with locale set to the given {@link CollationLocale}. * * @param locale must not be {@literal null}. - * @return + * @return new instance of {@link Collation}. */ public static Collation of(CollationLocale locale) { return new Collation(locale); } + /** + * Parse the given {@code collation} string into a {@link Collation}. + * + * @param collation the collation to parse. Can be a simple string like {@code en_US} or a + * {@link Document#parse(String) parsable} document like { 'locale' : '?0' } . + * @return never {@literal null}. + * @throws IllegalArgumentException if {@literal collation} is null. + * @since 2.2 + */ + public static Collation parse(String collation) { + + Assert.notNull(collation, "Collation must not be null"); + + return collation.stripLeading().startsWith("{") ? 
from(Document.parse(collation)) + : of(collation); + } + /** * Create new {@link Collation} from values in {@link Document}. * * @param source must not be {@literal null}. - * @return + * @return new instance of {@link Collation}. * @see MongoDB Reference - * Collation Document */ public static Collation from(Document source) { - Assert.notNull(source, "Source must not be null!"); + Assert.notNull(source, "Source must not be null"); Collation collation = Collation.of(source.getString("locale")); if (source.containsKey("strength")) { @@ -164,7 +177,7 @@ public static Collation from(Document source) { /** * Set the level of comparison to perform. * - * @param strength + * @param strength comparison level. * @return new {@link Collation}. */ public Collation strength(int strength) { @@ -189,7 +202,7 @@ public Collation strength(ComparisonLevel comparisonLevel) { /** * Set whether to include {@code caseLevel} comparison.
          * - * @param caseLevel + * @param caseLevel use {@literal true} to enable {@code caseLevel} comparison. * @return new {@link Collation}. */ public Collation caseLevel(boolean caseLevel) { @@ -203,7 +216,7 @@ public Collation caseLevel(boolean caseLevel) { * Set the flag that determines sort order of case differences during tertiary level comparisons. * * @param caseFirst must not be {@literal null}. - * @return + * @return new instance of {@link Collation}. */ public Collation caseFirst(String caseFirst) { return caseFirst(new CaseFirst(caseFirst)); @@ -212,8 +225,8 @@ public Collation caseFirst(String caseFirst) { /** * Set the flag that determines sort order of case differences during tertiary level comparisons. * - * @param caseFirst must not be {@literal null}. - * @return + * @param sort must not be {@literal null}. + * @return new instance of {@link Collation}. */ public Collation caseFirst(CaseFirst sort) { @@ -355,7 +368,7 @@ public Collation maxVariable(String maxVariable) { /** * Get the {@link Document} representation of the {@link Collation}. * - * @return + * @return the native MongoDB {@link Document} representation of the {@link Collation}. */ public Document toDocument() { return map(toMongoDocumentConverter()); @@ -364,7 +377,7 @@ public Document toDocument() { /** * Get the {@link com.mongodb.client.model.Collation} representation of the {@link Collation}. * - * @return + * @return he native MongoDB representation of the {@link Collation}. */ public com.mongodb.client.model.Collation toMongoCollation() { return map(toMongoCollationConverter()); @@ -373,9 +386,9 @@ public com.mongodb.client.model.Collation toMongoCollation() { /** * Transform {@code this} {@link Collation} by applying a {@link Converter}. * - * @param mapper + * @param mapper must not be {@literal null}. * @param - * @return + * @return the converted result. 
*/ public R map(Converter mapper) { return mapper.convert(this); @@ -386,6 +399,26 @@ public String toString() { return toDocument().toJson(); } + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) { + return true; + } + + if (o == null || getClass() != o.getClass()) { + return false; + } + + Collation that = (Collation) o; + return this.toDocument().equals(that.toDocument()); + } + + @Override + public int hashCode() { + return toDocument().hashCode(); + } + private Collation copy() { Collation collation = new Collation(locale); @@ -478,8 +511,6 @@ default Optional getCaseLevel() { * * @since 2.0 */ - @AllArgsConstructor(access = AccessLevel.PACKAGE) - @Getter static class ICUComparisonLevel implements ComparisonLevel { private final int level; @@ -489,6 +520,24 @@ static class ICUComparisonLevel implements ComparisonLevel { ICUComparisonLevel(int level) { this(level, Optional.empty(), Optional.empty()); } + + ICUComparisonLevel(int level, Optional caseFirst, Optional caseLevel) { + this.level = level; + this.caseFirst = caseFirst; + this.caseLevel = caseLevel; + } + + public int getLevel() { + return this.level; + } + + public Optional getCaseFirst() { + return this.caseFirst; + } + + public Optional getCaseLevel() { + return this.caseLevel; + } } /** @@ -605,7 +654,7 @@ private TertiaryICUComparisonLevel(CaseFirst caseFirst) { */ public ComparisonLevel caseFirst(CaseFirst caseFirst) { - Assert.notNull(caseFirst, "CaseFirst must not be null!"); + Assert.notNull(caseFirst, "CaseFirst must not be null"); return new TertiaryICUComparisonLevel(caseFirst); } } @@ -613,7 +662,6 @@ public ComparisonLevel caseFirst(CaseFirst caseFirst) { /** * @since 2.0 */ - @RequiredArgsConstructor(access = AccessLevel.PRIVATE) public static class CaseFirst { private static final CaseFirst UPPER = new CaseFirst("upper"); @@ -622,6 +670,10 @@ public static class CaseFirst { private final String state; + private CaseFirst(String state) { + this.state = state; + } 
+ /** * Sort uppercase before lowercase. * @@ -653,7 +705,6 @@ public static CaseFirst off() { /** * @since 2.0 */ - @RequiredArgsConstructor(access = AccessLevel.PACKAGE) public static class Alternate { private static final Alternate NON_IGNORABLE = new Alternate("non-ignorable", Optional.empty()); @@ -661,6 +712,11 @@ public static class Alternate { final String alternate; final Optional maxVariable; + Alternate(String alternate, Optional maxVariable) { + this.alternate = alternate; + this.maxVariable = maxVariable; + } + /** * Consider Whitespace and punctuation as base characters. * @@ -724,21 +780,26 @@ public Alternate space() { * @see ICU - International Components for Unicode * @since 2.0 */ - @RequiredArgsConstructor(access = AccessLevel.PRIVATE) public static class CollationLocale { private final String language; private final Optional variant; + private CollationLocale(String language, Optional variant) { + + this.language = language; + this.variant = variant; + } + /** * Create new {@link CollationLocale} for given language. * * @param language must not be {@literal null}. - * @return + * @return new instance of {@link CollationLocale}. */ public static CollationLocale of(String language) { - Assert.notNull(language, "Code must not be null!"); + Assert.notNull(language, "Code must not be null"); return new CollationLocale(language, Optional.empty()); } @@ -750,14 +811,14 @@ public static CollationLocale of(String language) { */ public CollationLocale variant(String variant) { - Assert.notNull(variant, "Variant must not be null!"); + Assert.notNull(variant, "Variant must not be null"); return new CollationLocale(language, Optional.of(variant)); } /** * Get the string representation. * - * @return + * @return the collation {@link String} in Mongo ICU format. 
*/ public String asString() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java index df83362859..8d4cb703bb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,26 +19,32 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Base64; import java.util.Collection; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import java.util.Map.Entry; import java.util.regex.Pattern; import java.util.stream.Collectors; -import org.bson.BSON; import org.bson.BsonRegularExpression; +import org.bson.BsonType; import org.bson.Document; +import org.bson.types.Binary; import org.springframework.data.domain.Example; import org.springframework.data.geo.Circle; import org.springframework.data.geo.Point; import org.springframework.data.geo.Shape; import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; +import org.springframework.data.mongodb.MongoExpression; import org.springframework.data.mongodb.core.geo.GeoJson; import org.springframework.data.mongodb.core.geo.Sphere; import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; import 
org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.util.RegexFlags; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; @@ -49,13 +55,17 @@ /** * Central class for creating queries. It follows a fluent API style so that you can easily chain together multiple - * criteria. Static import of the 'Criteria.where' method will improve readability. + * criteria. Static import of the {@link Criteria#where Criteria.where} method improves readability. * * @author Thomas Risberg * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Andreas Zink + * @author Ziemowit Stolarczyk + * @author Clément Petit + * @author James McNee */ public class Criteria implements CriteriaDefinition { @@ -88,8 +98,8 @@ protected Criteria(List criteriaChain, String key) { /** * Static factory method to create a Criteria using the provided key * - * @param key - * @return + * @param key the property or field name. + * @return new instance of {@link Criteria}. */ public static Criteria where(String key) { return new Criteria(key); @@ -99,7 +109,7 @@ public static Criteria where(String key) { * Static factory method to create a {@link Criteria} matching an example object. * * @param example must not be {@literal null}. - * @return + * @return new instance of {@link Criteria}. * @see Criteria#alike(Example) * @since 1.8 */ @@ -108,10 +118,15 @@ public static Criteria byExample(Object example) { } /** - * Static factory method to create a {@link Criteria} matching an example object. + * Static factory method to create a {@link Criteria} matching an example object.
          + * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when + * sticking with the default type key ({@code _class}), the query has restrictions such as + * _class : { $in : [com.acme.Person] } .
          + * To avoid the above-mentioned type restriction use an {@link UntypedExampleMatcher} with + * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}. * * @param example must not be {@literal null}. - * @return + * @return new instance of {@link Criteria}. * @see Criteria#alike(Example) * @since 1.8 */ @@ -133,10 +148,41 @@ public static Criteria matchingDocumentStructure(MongoJsonSchema schema) { return new Criteria().andDocumentStructureMatches(schema); } + /** + * Static factory method to create a {@link Criteria} matching a documents against the given {@link MongoExpression + * expression}. + *

          + * The {@link MongoExpression expression} can be either something that directly renders to the store native + * representation like + * + *

          +	 * expr(() -> Document.parse("{ $gt : [ '$spent', '$budget'] }")))
          +	 * 
          + * + * or an {@link org.springframework.data.mongodb.core.aggregation.AggregationExpression} which will be subject to + * context (domain type) specific field mapping. + * + *
          +	 * expr(valueOf("amountSpent").greaterThan("budget"))
          +	 * 
          + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Criteria}. + * @since 4.1 + */ + public static Criteria expr(MongoExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + Criteria criteria = new Criteria(); + criteria.criteria.put("$expr", expression); + return criteria; + } + /** * Static factory method to create a Criteria using the provided key * - * @return + * @return new instance of {@link Criteria}. */ public Criteria and(String key) { return new Criteria(this.criteriaChain, key); @@ -145,21 +191,55 @@ public Criteria and(String key) { /** * Creates a criterion using equality * - * @param o - * @return + * @param value can be {@literal null}. + * @return this. */ - public Criteria is(@Nullable Object o) { + public Criteria is(@Nullable Object value) { - if (!isValue.equals(NOT_SET)) { + if (!NOT_SET.equals(isValue)) { throw new InvalidMongoDbApiUsageException( - "Multiple 'is' values declared. You need to use 'and' with multiple criteria"); + "Multiple 'is' values declared; You need to use 'and' with multiple criteria"); } if (lastOperatorWasNot()) { - throw new InvalidMongoDbApiUsageException("Invalid query: 'not' can't be used with 'is' - use 'ne' instead."); + throw new InvalidMongoDbApiUsageException("Invalid query: 'not' can't be used with 'is' - use 'ne' instead"); } - this.isValue = o; + this.isValue = value; + return this; + } + + /** + * Creates a criterion using {@literal null} equality comparison which matches documents that either contain the item + * field whose value is {@literal null} or that do not contain the item field.
          + * Use {@link #isNullValue()} to only query for documents that contain the field whose value is equal to + * {@link org.bson.BsonType#NULL}.
          + * Use {@link #exists(boolean)} to query for documents that do (not) contain the field. + * + * @return this. + * @see Query for Null or + * Missing Fields: Equality Filter + * @since 3.3 + */ + public Criteria isNull() { + return is(null); + } + + /** + * Creates a criterion using a {@link org.bson.BsonType} comparison which matches only documents that contain the item + * field whose value is equal to {@link org.bson.BsonType#NULL}.
          + * Use {@link #isNull()} to query for documents that contain the field with a {@literal null} value or do not contain + * the field at all.
          + * Use {@link #exists(boolean)} to query for documents that do (not) contain the field. + * + * @return this. + * @see Query for Null or Missing + * Fields: Type Check + * @since 3.3 + */ + public Criteria isNullValue() { + + criteria.put("$type", BsonType.NULL.getValue()); return this; } @@ -170,124 +250,124 @@ private boolean lastOperatorWasNot() { /** * Creates a criterion using the {@literal $ne} operator. * - * @param o - * @return + * @param value can be {@literal null}. + * @return this. * @see MongoDB Query operator: $ne */ - public Criteria ne(@Nullable Object o) { - criteria.put("$ne", o); + public Criteria ne(@Nullable Object value) { + criteria.put("$ne", value); return this; } /** * Creates a criterion using the {@literal $lt} operator. * - * @param o - * @return + * @param value must not be {@literal null}. + * @return this. * @see MongoDB Query operator: $lt */ - public Criteria lt(Object o) { - criteria.put("$lt", o); + public Criteria lt(Object value) { + criteria.put("$lt", value); return this; } /** * Creates a criterion using the {@literal $lte} operator. * - * @param o - * @return + * @param value must not be {@literal null}. + * @return this. * @see MongoDB Query operator: $lte */ - public Criteria lte(Object o) { - criteria.put("$lte", o); + public Criteria lte(Object value) { + criteria.put("$lte", value); return this; } /** * Creates a criterion using the {@literal $gt} operator. * - * @param o - * @return + * @param value must not be {@literal null}. + * @return this. * @see MongoDB Query operator: $gt */ - public Criteria gt(Object o) { - criteria.put("$gt", o); + public Criteria gt(Object value) { + criteria.put("$gt", value); return this; } /** * Creates a criterion using the {@literal $gte} operator. * - * @param o - * @return + * @param value can be {@literal null}. + * @return this. 
* @see MongoDB Query operator: $gte */ - public Criteria gte(Object o) { - criteria.put("$gte", o); + public Criteria gte(Object value) { + criteria.put("$gte", value); return this; } /** * Creates a criterion using the {@literal $in} operator. * - * @param o the values to match against - * @return + * @param values the values to match against + * @return this. * @see MongoDB Query operator: $in */ - public Criteria in(Object... o) { - if (o.length > 1 && o[1] instanceof Collection) { + public Criteria in(Object... values) { + if (values.length > 1 && values[1] instanceof Collection) { throw new InvalidMongoDbApiUsageException( - "You can only pass in one argument of type " + o[1].getClass().getName()); + "You can only pass in one argument of type " + values[1].getClass().getName()); } - criteria.put("$in", Arrays.asList(o)); + criteria.put("$in", Arrays.asList(values)); return this; } /** * Creates a criterion using the {@literal $in} operator. * - * @param c the collection containing the values to match against - * @return + * @param values the collection containing the values to match against + * @return this. * @see MongoDB Query operator: $in */ - public Criteria in(Collection c) { - criteria.put("$in", c); + public Criteria in(Collection values) { + criteria.put("$in", values); return this; } /** * Creates a criterion using the {@literal $nin} operator. * - * @param o - * @return + * @param values + * @return this. * @see MongoDB Query operator: $nin */ - public Criteria nin(Object... o) { - return nin(Arrays.asList(o)); + public Criteria nin(Object... values) { + return nin(Arrays.asList(values)); } /** * Creates a criterion using the {@literal $nin} operator. * - * @param o - * @return + * @param values must not be {@literal null}. + * @return this. 
* @see MongoDB Query operator: $nin */ - public Criteria nin(Collection o) { - criteria.put("$nin", o); + public Criteria nin(Collection values) { + criteria.put("$nin", values); return this; } /** * Creates a criterion using the {@literal $mod} operator. * - * @param value - * @param remainder - * @return + * @param value must not be {@literal null}. + * @param remainder must not be {@literal null}. + * @return this. * @see MongoDB Query operator: $mod */ public Criteria mod(Number value, Number remainder) { - List l = new ArrayList(); + List l = new ArrayList<>(2); l.add(value); l.add(remainder); criteria.put("$mod", l); @@ -297,83 +377,117 @@ public Criteria mod(Number value, Number remainder) { /** * Creates a criterion using the {@literal $all} operator. * - * @param o - * @return + * @param values must not be {@literal null}. + * @return this. * @see MongoDB Query operator: $all */ - public Criteria all(Object... o) { - return all(Arrays.asList(o)); + public Criteria all(Object... values) { + return all(Arrays.asList(values)); } /** * Creates a criterion using the {@literal $all} operator. * - * @param o - * @return + * @param values must not be {@literal null}. + * @return this. * @see MongoDB Query operator: $all */ - public Criteria all(Collection o) { - criteria.put("$all", o); + public Criteria all(Collection values) { + criteria.put("$all", values); return this; } /** * Creates a criterion using the {@literal $size} operator. * - * @param s - * @return + * @param size + * @return this. * @see MongoDB Query operator: $size */ - public Criteria size(int s) { - criteria.put("$size", s); + public Criteria size(int size) { + criteria.put("$size", size); return this; } /** * Creates a criterion using the {@literal $exists} operator. * - * @param b - * @return + * @param value + * @return this. 
* @see MongoDB Query operator: $exists */ - public Criteria exists(boolean b) { - criteria.put("$exists", b); + public Criteria exists(boolean value) { + criteria.put("$exists", value); + return this; + } + + /** + * Creates a criterion using the {@literal $sampleRate} operator. + * + * @param sampleRate sample rate to determine number of documents to be randomly selected from the input. Must be + * between {@code 0} and {@code 1}. + * @return this. + * @see MongoDB Query operator: + * $sampleRate + * @since 3.3 + */ + public Criteria sampleRate(double sampleRate) { + + Assert.isTrue(sampleRate >= 0, "The sample rate must be greater than zero"); + Assert.isTrue(sampleRate <= 1, "The sample rate must not be greater than one"); + + criteria.put("$sampleRate", sampleRate); return this; } /** * Creates a criterion using the {@literal $type} operator. * - * @param t - * @return + * @param typeNumber + * @return this. * @see MongoDB Query operator: $type */ - public Criteria type(int t) { - criteria.put("$type", t); + public Criteria type(int typeNumber) { + criteria.put("$type", typeNumber); return this; } /** * Creates a criterion using the {@literal $type} operator. * - * @param type must not be {@literal null}. - * @return this + * @param types must not be {@literal null}. + * @return this. * @since 2.1 * @see MongoDB Query operator: $type */ public Criteria type(Type... types) { - Assert.notNull(types, "Types must not be null!"); - Assert.noNullElements(types, "Types must not contain null."); + Assert.notNull(types, "Types must not be null"); + Assert.noNullElements(types, "Types must not contain null"); + + return type(Arrays.asList(types)); + } + + /** + * Creates a criterion using the {@literal $type} operator. + * + * @param types must not be {@literal null}. + * @return this. 
+ * @since 3.2 + * @see MongoDB Query operator: $type + */ + public Criteria type(Collection types) { + + Assert.notNull(types, "Types must not be null"); - criteria.put("$type", Arrays.asList(types).stream().map(Type::value).collect(Collectors.toList())); + criteria.put("$type", types.stream().map(Type::toBsonType).map(Type::value).collect(Collectors.toList())); return this; } /** * Creates a criterion using the {@literal $not} meta operator which affects the clause directly following * - * @return + * @return this. * @see MongoDB Query operator: $not */ public Criteria not() { @@ -383,8 +497,8 @@ public Criteria not() { /** * Creates a criterion using the {@literal $not} operator. * - * @param value - * @return + * @param value can be {@literal null}. + * @return this. * @see MongoDB Query operator: $not */ private Criteria not(@Nullable Object value) { @@ -395,35 +509,35 @@ private Criteria not(@Nullable Object value) { /** * Creates a criterion using a {@literal $regex} operator. * - * @param re - * @return + * @param regex must not be {@literal null}. + * @return this. * @see MongoDB Query operator: $regex */ - public Criteria regex(String re) { - return regex(re, null); + public Criteria regex(String regex) { + return regex(regex, null); } /** * Creates a criterion using a {@literal $regex} and {@literal $options} operator. * - * @param re - * @param options - * @return + * @param regex must not be {@literal null}. + * @param options can be {@literal null}. + * @return this. * @see MongoDB Query operator: $regex */ - public Criteria regex(String re, @Nullable String options) { - return regex(toPattern(re, options)); + public Criteria regex(String regex, @Nullable String options) { + return regex(toPattern(regex, options)); } /** * Syntactical sugar for {@link #is(Object)} making obvious that we create a regex predicate. * - * @param pattern - * @return + * @param pattern must not be {@literal null}. + * @return this. 
*/ public Criteria regex(Pattern pattern) { - Assert.notNull(pattern, "Pattern must not be null!"); + Assert.notNull(pattern, "Pattern must not be null"); if (lastOperatorWasNot()) { return not(pattern); @@ -433,6 +547,12 @@ public Criteria regex(Pattern pattern) { return this; } + /** + * Use a MongoDB native {@link BsonRegularExpression}. + * + * @param regex must not be {@literal null}. + * @return this. + */ public Criteria regex(BsonRegularExpression regex) { if (lastOperatorWasNot()) { @@ -445,9 +565,9 @@ public Criteria regex(BsonRegularExpression regex) { private Pattern toPattern(String regex, @Nullable String options) { - Assert.notNull(regex, "Regex string must not be null!"); + Assert.notNull(regex, "Regex string must not be null"); - return Pattern.compile(regex, options == null ? 0 : BSON.regexFlags(options)); + return Pattern.compile(regex, RegexFlags.toRegexFlags(options)); } /** @@ -455,7 +575,7 @@ private Pattern toPattern(String regex, @Nullable String options) { * Mongo 2.4 and higher. * * @param circle must not be {@literal null} - * @return + * @return this. * @see MongoDB Query operator: * $geoWithin * @see MongoDB Query operator: @@ -463,7 +583,7 @@ private Pattern toPattern(String regex, @Nullable String options) { */ public Criteria withinSphere(Circle circle) { - Assert.notNull(circle, "Circle must not be null!"); + Assert.notNull(circle, "Circle must not be null"); criteria.put("$geoWithin", new GeoCommand(new Sphere(circle))); return this; @@ -472,14 +592,14 @@ public Criteria withinSphere(Circle circle) { /** * Creates a geospatial criterion using a {@literal $geoWithin} operation. * - * @param shape - * @return + * @param shape must not be {@literal null}. + * @return this. 
* @see MongoDB Query operator: * $geoWithin */ public Criteria within(Shape shape) { - Assert.notNull(shape, "Shape must not be null!"); + Assert.notNull(shape, "Shape must not be null"); criteria.put("$geoWithin", new GeoCommand(shape)); return this; @@ -489,12 +609,12 @@ public Criteria within(Shape shape) { * Creates a geospatial criterion using a {@literal $near} operation. * * @param point must not be {@literal null} - * @return + * @return this. * @see MongoDB Query operator: $near */ public Criteria near(Point point) { - Assert.notNull(point, "Point must not be null!"); + Assert.notNull(point, "Point must not be null"); criteria.put("$near", point); return this; @@ -505,13 +625,13 @@ public Criteria near(Point point) { * higher. * * @param point must not be {@literal null} - * @return + * @return this. * @see MongoDB Query operator: * $nearSphere */ public Criteria nearSphere(Point point) { - Assert.notNull(point, "Point must not be null!"); + Assert.notNull(point, "Point must not be null"); criteria.put("$nearSphere", point); return this; @@ -522,22 +642,26 @@ public Criteria nearSphere(Point point) { * structure and the documents one. Requires MongoDB 2.4 or better. * * @param geoJson must not be {@literal null}. - * @return + * @return this. * @since 1.8 */ @SuppressWarnings("rawtypes") public Criteria intersects(GeoJson geoJson) { - Assert.notNull(geoJson, "GeoJson must not be null!"); + Assert.notNull(geoJson, "GeoJson must not be null"); criteria.put("$geoIntersects", geoJson); return this; } /** - * Creates a geo-spatial criterion using a {@literal $maxDistance} operation, for use with $near + * Creates a geo-spatial criterion using a {@literal $maxDistance} operation, for use with {@literal $near} or + * {@literal $nearSphere}. + *

          + * NOTE: The unit of measure for distance may depend on the used coordinate representation (legacy + vs. geoJson) as well as the target operation. * - * @param maxDistance - * @return + * @param maxDistance radians or meters + * @return this. * @see MongoDB Query operator: * $maxDistance */ @@ -555,9 +679,12 @@ public Criteria maxDistance(double maxDistance) { /** * Creates a geospatial criterion using a {@literal $minDistance} operation, for use with {@literal $near} or * {@literal $nearSphere}. + *

          + * NOTE: The unit of measure for distance may depends on the used coordinate representation (legacy + * vs. geoJson) as well as the target operation. * - * @param minDistance - * @return + * @param minDistance radians or meters + * @return this. * @since 1.7 */ public Criteria minDistance(double minDistance) { @@ -574,28 +701,34 @@ public Criteria minDistance(double minDistance) { /** * Creates a criterion using the {@literal $elemMatch} operator * - * @param c - * @return + * @param criteria must not be {@literal null}. + * @return this. * @see MongoDB Query operator: * $elemMatch */ - public Criteria elemMatch(Criteria c) { - criteria.put("$elemMatch", c.getCriteriaObject()); + public Criteria elemMatch(Criteria criteria) { + this.criteria.put("$elemMatch", criteria.getCriteriaObject()); return this; } /** * Creates a criterion using the given object as a pattern. * - * @param sample - * @return + * @param sample must not be {@literal null}. + * @return this. * @since 1.8 */ public Criteria alike(Example sample) { - criteria.put("$example", sample); - this.criteriaChain.add(this); - return this; + if (StringUtils.hasText(this.getKey())) { + + criteria.put("$example", sample); + return this; + } + + Criteria exampleCriteria = new Criteria(); + exampleCriteria.criteria.put("$example", sample); + return registerCriteriaChainElement(exampleCriteria); } /** @@ -614,7 +747,7 @@ public Criteria alike(Example sample) { */ public Criteria andDocumentStructureMatches(MongoJsonSchema schema) { - Assert.notNull(schema, "Schema must not be null!"); + Assert.notNull(schema, "Schema must not be null"); Criteria schemaCriteria = new Criteria(); schemaCriteria.criteria.putAll(schema.toDocument()); @@ -623,43 +756,115 @@ public Criteria andDocumentStructureMatches(MongoJsonSchema schema) { } /** - * Creates an 'or' criteria using the $or operator for all of the provided criteria - *

          - * Note that mongodb doesn't support an $or operator to be wrapped in a $not operator. + * Use {@link BitwiseCriteriaOperators} as gateway to create a criterion using one of the + * bitwise operators like + * {@code $bitsAllClear}. + * + * @return new instance of {@link BitwiseCriteriaOperators}. Never {@literal null}. + * @since 2.1 + */ + public BitwiseCriteriaOperators bits() { + return new BitwiseCriteriaOperatorsImpl(this); + } + + /** + * Creates a criteria using the {@code $or} operator for all of the provided criteria. *

          + * Note that MongoDB doesn't support an {@code $nor} operator to be wrapped in a {@code $not} operator. * - * @throws IllegalArgumentException if {@link #orOperator(Criteria...)} follows a not() call directly. - * @param criteria + * @throws IllegalArgumentException if this method follows a {@link #not()} call directly. + * @param criteria must not be {@literal null}. + * @return this. */ public Criteria orOperator(Criteria... criteria) { + + Assert.notNull(criteria, "Criteria must not be null"); + + return orOperator(Arrays.asList(criteria)); + } + + /** + * Creates a criteria using the {@code $or} operator for all of the provided criteria. + *

          + * Note that MongoDB doesn't support an {@code $nor} operator to be wrapped in a {@code $not} operator. + * + * @throws IllegalArgumentException if this method follows a {@link #not()} call directly. + * @param criteria must not be {@literal null}. + * @return this. + * @since 3.2 + */ + public Criteria orOperator(Collection criteria) { + + Assert.notNull(criteria, "Criteria must not be null"); + BasicDBList bsonList = createCriteriaList(criteria); return registerCriteriaChainElement(new Criteria("$or").is(bsonList)); } /** - * Creates a 'nor' criteria using the $nor operator for all of the provided criteria. - *

          - * Note that mongodb doesn't support an $nor operator to be wrapped in a $not operator. + * Creates a criteria using the {@code $nor} operator for all of the provided criteria. *

          + * Note that MongoDB doesn't support an {@code $nor} operator to be wrapped in a {@code $not} operator. * - * @throws IllegalArgumentException if {@link #norOperator(Criteria...)} follows a not() call directly. - * @param criteria + * @throws IllegalArgumentException if this method follows a {@link #not()} call directly. + * @param criteria must not be {@literal null}. + * @return this. */ public Criteria norOperator(Criteria... criteria) { + + Assert.notNull(criteria, "Criteria must not be null"); + + return norOperator(Arrays.asList(criteria)); + } + + /** + * Creates a criteria using the {@code $nor} operator for all of the provided criteria. + *

          + * Note that MongoDB doesn't support an {@code $nor} operator to be wrapped in a {@code $not} operator. + * + * @throws IllegalArgumentException if this method follows a {@link #not()} call directly. + * @param criteria must not be {@literal null}. + * @return this. + * @since 3.2 + */ + public Criteria norOperator(Collection criteria) { + + Assert.notNull(criteria, "Criteria must not be null"); + BasicDBList bsonList = createCriteriaList(criteria); return registerCriteriaChainElement(new Criteria("$nor").is(bsonList)); } /** - * Creates an 'and' criteria using the $and operator for all of the provided criteria. - *

          - * Note that mongodb doesn't support an $and operator to be wrapped in a $not operator. + * Creates a criteria using the {@code $and} operator for all of the provided criteria. *

          + * Note that MongoDB doesn't support an {@code $and} operator to be wrapped in a {@code $not} operator. * - * @throws IllegalArgumentException if {@link #andOperator(Criteria...)} follows a not() call directly. - * @param criteria + * @throws IllegalArgumentException if this method follows a {@link #not()} call directly. + * @param criteria must not be {@literal null}. + * @return this. */ public Criteria andOperator(Criteria... criteria) { + + Assert.notNull(criteria, "Criteria must not be null"); + + return andOperator(Arrays.asList(criteria)); + } + + /** + * Creates a criteria using the {@code $and} operator for all of the provided criteria. + *

          + * Note that MongoDB doesn't support an {@code $and} operator to be wrapped in a {@code $not} operator. + * + * @throws IllegalArgumentException if this method follows a {@link #not()} call directly. + * @param criteria must not be {@literal null}. + * @return this. + * @since 3.2 + */ + public Criteria andOperator(Collection criteria) { + + Assert.notNull(criteria, "Criteria must not be null"); + BasicDBList bsonList = createCriteriaList(criteria); return registerCriteriaChainElement(new Criteria("$and").is(bsonList)); } @@ -684,10 +889,6 @@ public String getKey() { return this.key; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.CriteriaDefinition#getCriteriaObject() - */ public Document getCriteriaObject() { if (this.criteriaChain.size() == 1) { @@ -744,8 +945,16 @@ protected Document getSingleCriteriaObject() { Document queryCriteria = new Document(); if (!NOT_SET.equals(isValue)) { - queryCriteria.put(this.key, this.isValue); - queryCriteria.putAll(document); + if (document.isEmpty()) { + queryCriteria.put(this.key, this.isValue); + } else { + if (isValue instanceof Pattern || isValue instanceof BsonRegularExpression) { + document.put("$regex", isValue); + } else { + document.put("$eq", isValue); + } + queryCriteria.put(this.key, document); + } } else { queryCriteria.put(this.key, document); } @@ -753,7 +962,7 @@ protected Document getSingleCriteriaObject() { return queryCriteria; } - private BasicDBList createCriteriaList(Criteria[] criteria) { + private BasicDBList createCriteriaList(Collection criteria) { BasicDBList bsonList = new BasicDBList(); for (Criteria c : criteria) { bsonList.add(c.getCriteriaObject()); @@ -762,13 +971,15 @@ private BasicDBList createCriteriaList(Criteria[] criteria) { } private void setValue(Document document, String key, Object value) { + Object existing = document.get(key); + if (existing == null) { document.put(key, value); } else { - throw new InvalidMongoDbApiUsageException("Due to 
limitations of the com.mongodb.BasicDocument, " - + "you can't add a second '" + key + "' expression specified as '" + key + " : " + value + "'. " - + "Criteria already contains '" + key + " : " + existing + "'."); + throw new InvalidMongoDbApiUsageException("Due to limitations of the org.bson.Document, " + + "you can't add a second '" + key + "' expression specified as '" + key + " : " + value + "';" + + " Criteria already contains '" + key + " : " + existing + "'"); } } @@ -780,9 +991,9 @@ private boolean createNearCriteriaForCommand(String command, String operation, d Object existingNearOperationValue = criteria.get(command); - if (existingNearOperationValue instanceof Document) { + if (existingNearOperationValue instanceof Document document) { - ((Document) existingNearOperationValue).put(operation, maxDistance); + document.put(operation, maxDistance); return true; @@ -797,12 +1008,8 @@ private boolean createNearCriteriaForCommand(String command, String operation, d return false; } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -847,23 +1054,69 @@ private boolean simpleCriteriaEquals(Criteria left, Criteria right) { * @param right * @return */ - private boolean isEqual(Object left, Object right) { + private boolean isEqual(@Nullable Object left, @Nullable Object right) { if (left == null) { return right == null; } - if (left instanceof Pattern) { - return right instanceof Pattern ? 
((Pattern) left).pattern().equals(((Pattern) right).pattern()) : false; + if (left instanceof Pattern leftPattern) { + + if (!(right instanceof Pattern rightPattern)) { + return false; + } + + return leftPattern.pattern().equals(rightPattern.pattern()) // + && leftPattern.flags() == rightPattern.flags(); + } + + if (left instanceof Document leftDocument) { + + if (!(right instanceof Document rightDocument)) { + return false; + } + + Iterator> leftIterator = leftDocument.entrySet().iterator(); + Iterator> rightIterator = rightDocument.entrySet().iterator(); + + while (leftIterator.hasNext() && rightIterator.hasNext()) { + + Map.Entry leftEntry = leftIterator.next(); + Map.Entry rightEntry = rightIterator.next(); + + if (!isEqual(leftEntry.getKey(), rightEntry.getKey()) + || !isEqual(leftEntry.getValue(), rightEntry.getValue())) { + return false; + } + } + + return !leftIterator.hasNext() && !rightIterator.hasNext(); + } + + if (Collection.class.isAssignableFrom(left.getClass())) { + + if (!Collection.class.isAssignableFrom(right.getClass())) { + return false; + } + + Collection leftCollection = (Collection) left; + Collection rightCollection = (Collection) right; + Iterator leftIterator = leftCollection.iterator(); + Iterator rightIterator = rightCollection.iterator(); + + while (leftIterator.hasNext() && rightIterator.hasNext()) { + + if (!isEqual(leftIterator.next(), rightIterator.next())) { + return false; + } + } + + return !leftIterator.hasNext() && !rightIterator.hasNext(); } return ObjectUtils.nullSafeEquals(left, right); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -878,6 +1131,278 @@ public int hashCode() { private static boolean requiresGeoJsonFormat(Object value) { return value instanceof GeoJson - || (value instanceof GeoCommand && ((GeoCommand) value).getShape() instanceof GeoJson); + || (value instanceof GeoCommand geoCommand && geoCommand.getShape() instanceof GeoJson); + } + + /** + * MongoDB 
specific bitwise query + * operators like {@code $bitsAllClear, $bitsAllSet,...} for usage with {@link Criteria#bits()} and {@link Query}. + * + * @author Christoph Strobl + * @since 2.1 + * @see https://docs.mongodb.com/manual/reference/operator/query-bitwise/ + * @currentRead Beyond the Shadows - Brent Weeks + */ + public interface BitwiseCriteriaOperators { + + /** + * Creates a criterion using {@literal $bitsAllClear} matching documents where all given bit positions are clear + * (i.e. 0). + * + * @param numericBitmask non-negative numeric bitmask. + * @return target {@link Criteria}. + * @see MongoDB Query operator: + * $bitsAllClear + * @since 2.1 + */ + Criteria allClear(int numericBitmask); + + /** + * Creates a criterion using {@literal $bitsAllClear} matching documents where all given bit positions are clear + * (i.e. 0). + * + * @param bitmask string representation of a bitmask that will be converted to its base64 encoded {@link Binary} + * representation. Must not be {@literal null} nor empty. + * @return target {@link Criteria}. + * @throws IllegalArgumentException when bitmask is {@literal null} or empty. + * @see MongoDB Query operator: + * $bitsAllClear + * @since 2.1 + */ + Criteria allClear(String bitmask); + + /** + * Creates a criterion using {@literal $bitsAllClear} matching documents where all given bit positions are clear + * (i.e. 0). + * + * @param positions list of non-negative integer positions. Positions start at 0 from the least significant bit. + * Must not be {@literal null} nor contain {@literal null} elements. + * @return target {@link Criteria}. + * @throws IllegalArgumentException when positions is {@literal null} or contains {@literal null} elements. + * @see MongoDB Query operator: + * $bitsAllClear + * @since 2.1 + */ + Criteria allClear(List positions); + + /** + * Creates a criterion using {@literal $bitsAllSet} matching documents where all given bit positions are set (i.e. + * 1). 
+ * + * @param numericBitmask non-negative numeric bitmask. + * @return target {@link Criteria}. + * @see MongoDB Query operator: + * $bitsAllSet + * @since 2.1 + */ + Criteria allSet(int numericBitmask); + + /** + * Creates a criterion using {@literal $bitsAllSet} matching documents where all given bit positions are set (i.e. + * 1). + * + * @param bitmask string representation of a bitmask that will be converted to its base64 encoded {@link Binary} + * representation. Must not be {@literal null} nor empty. + * @return target {@link Criteria}. + * @throws IllegalArgumentException when bitmask is {@literal null} or empty. + * @see MongoDB Query operator: + * $bitsAllSet + * @since 2.1 + */ + Criteria allSet(String bitmask); + + /** + * Creates a criterion using {@literal $bitsAllSet} matching documents where all given bit positions are set (i.e. + * 1). + * + * @param positions list of non-negative integer positions. Positions start at 0 from the least significant bit. + * Must not be {@literal null} nor contain {@literal null} elements. + * @return target {@link Criteria}. + * @throws IllegalArgumentException when positions is {@literal null} or contains {@literal null} elements. + * @see MongoDB Query operator: + * $bitsAllSet + * @since 2.1 + */ + Criteria allSet(List positions); + + /** + * Creates a criterion using {@literal $bitsAllClear} matching documents where any given bit positions are clear + * (i.e. 0). + * + * @param numericBitmask non-negative numeric bitmask. + * @return target {@link Criteria}. + * @see MongoDB Query operator: + * $bitsAnyClear + * @since 2.1 + */ + Criteria anyClear(int numericBitmask); + + /** + * Creates a criterion using {@literal $bitsAllClear} matching documents where any given bit positions are clear + * (i.e. 0). + * + * @param bitmask string representation of a bitmask that will be converted to its base64 encoded {@link Binary} + * representation. Must not be {@literal null} nor empty. + * @return target {@link Criteria}. 
+ * @throws IllegalArgumentException when bitmask is {@literal null} or empty. + * @see MongoDB Query operator: + * $bitsAnyClear + * @since 2.1 + */ + Criteria anyClear(String bitmask); + + /** + * Creates a criterion using {@literal $bitsAllClear} matching documents where any given bit positions are clear + * (i.e. 0). + * + * @param positions list of non-negative integer positions. Positions start at 0 from the least significant bit. + * Must not be {@literal null} nor contain {@literal null} elements. + * @return target {@link Criteria}. + * @throws IllegalArgumentException when positions is {@literal null} or contains {@literal null} elements. + * @see MongoDB Query operator: + * $bitsAnyClear + * @since 2.1 + */ + Criteria anyClear(List positions); + + /** + * Creates a criterion using {@literal $bitsAllSet} matching documents where any given bit positions are set (i.e. + * 1). + * + * @param numericBitmask non-negative numeric bitmask. + * @return target {@link Criteria}. + * @see MongoDB Query operator: + * $bitsAnySet + * @since 2.1 + */ + Criteria anySet(int numericBitmask); + + /** + * Creates a criterion using {@literal $bitsAnySet} matching documents where any given bit positions are set (i.e. + * 1). + * + * @param bitmask string representation of a bitmask that will be converted to its base64 encoded {@link Binary} + * representation. Must not be {@literal null} nor empty. + * @return target {@link Criteria}. + * @throws IllegalArgumentException when bitmask is {@literal null} or empty. + * @see MongoDB Query operator: + * $bitsAnySet + * @since 2.1 + */ + Criteria anySet(String bitmask); + + /** + * Creates a criterion using {@literal $bitsAnySet} matching documents where any given bit positions are set (i.e. + * 1). + * + * @param positions list of non-negative integer positions. Positions start at 0 from the least significant bit. + * Must not be {@literal null} nor contain {@literal null} elements. + * @return target {@link Criteria}. 
+ * @throws IllegalArgumentException when positions is {@literal null} or contains {@literal null} elements. + * @see MongoDB Query operator: + * $bitsAnySet + * @since 2.1 + */ + Criteria anySet(List positions); + + } + + /** + * Default implementation of {@link BitwiseCriteriaOperators}. + * + * @author Christoph Strobl + * @currentRead Beyond the Shadows - Brent Weeks + */ + private static class BitwiseCriteriaOperatorsImpl implements BitwiseCriteriaOperators { + + private final Criteria target; + + BitwiseCriteriaOperatorsImpl(Criteria target) { + this.target = target; + } + + @Override + public Criteria allClear(int numericBitmask) { + return numericBitmask("$bitsAllClear", numericBitmask); + } + + @Override + public Criteria allClear(String bitmask) { + return stringBitmask("$bitsAllClear", bitmask); + } + + @Override + public Criteria allClear(List positions) { + return positions("$bitsAllClear", positions); + } + + @Override + public Criteria allSet(int numericBitmask) { + return numericBitmask("$bitsAllSet", numericBitmask); + } + + @Override + public Criteria allSet(String bitmask) { + return stringBitmask("$bitsAllSet", bitmask); + } + + @Override + public Criteria allSet(List positions) { + return positions("$bitsAllSet", positions); + } + + @Override + public Criteria anyClear(int numericBitmask) { + return numericBitmask("$bitsAnyClear", numericBitmask); + } + + @Override + public Criteria anyClear(String bitmask) { + return stringBitmask("$bitsAnyClear", bitmask); + } + + @Override + public Criteria anyClear(List positions) { + return positions("$bitsAnyClear", positions); + } + + @Override + public Criteria anySet(int numericBitmask) { + return numericBitmask("$bitsAnySet", numericBitmask); + } + + @Override + public Criteria anySet(String bitmask) { + return stringBitmask("$bitsAnySet", bitmask); + } + + @Override + public Criteria anySet(List positions) { + return positions("$bitsAnySet", positions); + } + + private Criteria positions(String 
operator, List positions) { + + Assert.notNull(positions, "Positions must not be null"); + Assert.noNullElements(positions.toArray(), "Positions must not contain null values"); + + target.criteria.put(operator, positions); + return target; + } + + private Criteria stringBitmask(String operator, String bitmask) { + + Assert.hasText(bitmask, "Bitmask must not be null"); + + target.criteria.put(operator, new Binary(Base64.getDecoder().decode(bitmask))); + return target; + } + + private Criteria numericBitmask(String operator, int bitmask) { + + target.criteria.put(operator, bitmask); + return target; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/CriteriaDefinition.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/CriteriaDefinition.java index 038e7dae62..c00b1d4b82 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/CriteriaDefinition.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/CriteriaDefinition.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java index 55dc016d09..3540a5a836 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,54 +15,209 @@ */ package org.springframework.data.mongodb.core.query; -import lombok.EqualsAndHashCode; - +import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import org.bson.Document; +import org.springframework.data.mongodb.MongoExpression; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; /** + * Field projection. 
+ * * @author Thomas Risberg * @author Oliver Gierke * @author Patryk Wasik * @author Christoph Strobl * @author Mark Paluch + * @author Owen Q + * @author Kirill Egorov */ -@EqualsAndHashCode public class Field { - private final Map criteria = new HashMap(); - private final Map slices = new HashMap(); - private final Map elemMatchs = new HashMap(); - private @Nullable String postionKey; + private final Map criteria = new HashMap<>(); + private final Map slices = new HashMap<>(); + private final Map elemMatches = new HashMap<>(); + private @Nullable String positionKey; private int positionValue; - public Field include(String key) { - criteria.put(key, Integer.valueOf(1)); + /** + * Include a single {@code field} to be returned by the query operation. + * + * @param field the document field name to be included. + * @return {@code this} field projection instance. + */ + public Field include(String field) { + + Assert.notNull(field, "Key must not be null"); + + criteria.put(field, 1); + return this; } - public Field exclude(String key) { - criteria.put(key, Integer.valueOf(0)); + /** + * Project a given {@link MongoExpression} to a {@link FieldProjectionExpression#as(String) field} included in the + * result. + * + *

          +	 *
          +	 * // { 'name' : { '$toUpper' : '$name' } }
          +	 *
          +	 * // native MongoDB expression
          +	 * .project(MongoExpression.expressionFromString("'$toUpper' : '$name'")).as("name");
          +	 *
          +	 * // Aggregation Framework expression
          +	 * .project(StringOperators.valueOf("name").toUpper()).as("name");
          +	 *
          +	 * // Aggregation Framework SpEL expression
          +	 * .project(AggregationSpELExpression.expressionOf("toUpper(name)")).as("name");
          +	 * 
          + * + * @param expression must not be {@literal null}. + * @return new instance of {@link FieldProjectionExpression}. Define the target field name through + * {@link FieldProjectionExpression#as(String) as(String)}. + * @since 3.2 + */ + public FieldProjectionExpression project(MongoExpression expression) { + return field -> Field.this.projectAs(expression, field); + } + + /** + * Project a given {@link MongoExpression} to a {@link FieldProjectionExpression#as(String) field} included in the + * result. + * + *
          +	 *
          +	 * // { 'name' : { '$toUpper' : '$name' } }
          +	 *
          +	 * // native MongoDB expression
          +	 * .projectAs(MongoExpression.expressionFromString("'$toUpper' : '$name'"), "name");
          +	 *
          +	 * // Aggregation Framework expression
          +	 * .projectAs(StringOperators.valueOf("name").toUpper(), "name");
          +	 *
          +	 * // Aggregation Framework SpEL expression
          +	 * .projectAs(AggregationSpELExpression.expressionOf("toUpper(name)"), "name");
          +	 * 
          + * + * @param expression must not be {@literal null}. + * @param field the field name used in the result. + * @return new instance of {@link FieldProjectionExpression}. + * @since 3.2 + */ + public Field projectAs(MongoExpression expression, String field) { + + criteria.put(field, expression); return this; } - public Field slice(String key, int size) { - slices.put(key, Integer.valueOf(size)); + /** + * Include one or more {@code fields} to be returned by the query operation. + * + * @param fields the document field names to be included. + * @return {@code this} field projection instance. + * @since 3.1 + */ + public Field include(String... fields) { + return include(Arrays.asList(fields)); + } + + /** + * Include one or more {@code fields} to be returned by the query operation. + * + * @param fields the document field names to be included. + * @return {@code this} field projection instance. + * @since 4.4 + */ + public Field include(Collection fields) { + + Assert.notNull(fields, "Keys must not be null"); + + fields.forEach(this::include); return this; } - public Field slice(String key, int offset, int size) { - slices.put(key, new Integer[] { Integer.valueOf(offset), Integer.valueOf(size) }); + /** + * Exclude a single {@code field} from being returned by the query operation. + * + * @param field the document field name to be excluded. + * @return {@code this} field projection instance. + */ + public Field exclude(String field) { + + Assert.notNull(field, "Key must not be null"); + + criteria.put(field, 0); + return this; } - public Field elemMatch(String key, Criteria elemMatchCriteria) { - elemMatchs.put(key, elemMatchCriteria); + /** + * Exclude one or more {@code fields} from being returned by the query operation. + * + * @param fields the document field names to be excluded. + * @return {@code this} field projection instance. + * @since 3.1 + */ + public Field exclude(String... 
fields) { + return exclude(Arrays.asList(fields)); + } + + /** + * Exclude one or more {@code fields} from being returned by the query operation. + * + * @param fields the document field names to be excluded. + * @return {@code this} field projection instance. + * @since 4.4 + */ + public Field exclude(Collection fields) { + + Assert.notNull(fields, "Keys must not be null"); + + fields.forEach(this::exclude); + return this; + } + + /** + * Project a {@code $slice} of the array {@code field} using the first {@code size} elements. + * + * @param field the document field name to project, must be an array field. + * @param size the number of elements to include. + * @return {@code this} field projection instance. + */ + public Field slice(String field, int size) { + + Assert.notNull(field, "Key must not be null"); + + slices.put(field, size); + + return this; + } + + /** + * Project a {@code $slice} of the array {@code field} using the first {@code size} elements starting at + * {@code offset}. + * + * @param field the document field name to project, must be an array field. + * @param offset the offset to start at. + * @param size the number of elements to include. + * @return {@code this} field projection instance. + */ + public Field slice(String field, int offset, int size) { + + slices.put(field, Arrays.asList(offset, size)); + return this; + } + + public Field elemMatch(String field, Criteria elemMatchCriteria) { + + elemMatches.put(field, elemMatchCriteria); return this; } @@ -72,13 +227,13 @@ public Field elemMatch(String key, Criteria elemMatchCriteria) { * * @param field query array field, must not be {@literal null} or empty. * @param value - * @return + * @return {@code this} field projection instance. 
*/ public Field position(String field, int value) { - Assert.hasText(field, "DocumentField must not be null or empty!"); + Assert.hasText(field, "DocumentField must not be null or empty"); - postionKey = field; + positionKey = field; positionValue = value; return this; @@ -86,21 +241,73 @@ public Field position(String field, int value) { public Document getFieldsObject() { - @SuppressWarnings({ "unchecked", "rawtypes" }) - Document document = new Document((Map) criteria); + Document document = new Document(criteria); for (Entry entry : slices.entrySet()) { document.put(entry.getKey(), new Document("$slice", entry.getValue())); } - for (Entry entry : elemMatchs.entrySet()) { + for (Entry entry : elemMatches.entrySet()) { document.put(entry.getKey(), new Document("$elemMatch", entry.getValue().getCriteriaObject())); } - if (postionKey != null) { - document.put(postionKey + ".$", positionValue); + if (positionKey != null) { + document.put(positionKey + ".$", positionValue); } return document; } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + Field field = (Field) o; + + if (positionValue != field.positionValue) { + return false; + } + if (!ObjectUtils.nullSafeEquals(criteria, field.criteria)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(slices, field.slices)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(elemMatches, field.elemMatches)) { + return false; + } + return ObjectUtils.nullSafeEquals(positionKey, field.positionKey); + } + + @Override + public int hashCode() { + + int result = ObjectUtils.nullSafeHashCode(criteria); + result = 31 * result + ObjectUtils.nullSafeHashCode(slices); + result = 31 * result + ObjectUtils.nullSafeHashCode(elemMatches); + result = 31 * result + ObjectUtils.nullSafeHashCode(positionKey); + result = 31 * result + positionValue; + return result; + } + + /** + * Intermediate builder part for projecting a 
{@link MongoExpression} to a result field. + * + * @since 3.2 + * @author Christoph Strobl + */ + public interface FieldProjectionExpression { + + /** + * Set the name to be used in the result and return a {@link Field}. + * + * @param name must not be {@literal null}. + * @return the calling instance {@link Field}. + */ + Field as(String name); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/GeoCommand.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/GeoCommand.java index 8bb2dab019..83417c7200 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/GeoCommand.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/GeoCommand.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,6 +22,7 @@ import org.springframework.data.geo.Polygon; import org.springframework.data.geo.Shape; import org.springframework.data.mongodb.core.geo.Sphere; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** @@ -43,7 +44,7 @@ public final class GeoCommand { */ public GeoCommand(Shape shape) { - Assert.notNull(shape, "Shape must not be null!"); + Assert.notNull(shape, "Shape must not be null"); this.shape = shape; this.command = getCommand(shape); @@ -67,11 +68,12 @@ public String getCommand() { * Returns the MongoDB command for the given {@link Shape}. * * @param shape must not be {@literal null}. 
- * @return + * @return never {@literal null}. + * @throws IllegalArgumentException for unknown {@link Shape}. */ private String getCommand(Shape shape) { - Assert.notNull(shape, "Shape must not be null!"); + Assert.notNull(shape, "Shape must not be null"); if (shape instanceof Box) { return "$box"; @@ -86,10 +88,6 @@ private String getCommand(Shape shape) { throw new IllegalArgumentException("Unknown shape: " + shape); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -101,23 +99,17 @@ public int hashCode() { return result; } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof GeoCommand)) { + if (!(obj instanceof GeoCommand other)) { return false; } - GeoCommand that = (GeoCommand) obj; - - return nullSafeEquals(this.command, that.command) && nullSafeEquals(this.shape, that.shape); + return nullSafeEquals(this.command, other.command) && nullSafeEquals(this.shape, other.shape); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java index a9b4ba4869..5757aa94a2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,13 @@ */ package org.springframework.data.mongodb.core.query; +import java.time.Duration; import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.Map.Entry; import java.util.Set; -import java.util.concurrent.TimeUnit; import org.springframework.lang.Nullable; import org.springframework.util.Assert; @@ -41,15 +41,46 @@ public class Meta { private enum MetaKey { MAX_TIME_MS("$maxTimeMS"), MAX_SCAN("$maxScan"), COMMENT("$comment"), SNAPSHOT("$snapshot"); - private String key; + private final String key; MetaKey(String key) { this.key = key; } } - private final Map values = new LinkedHashMap(2); - private final Set flags = new LinkedHashSet(); + private Map values = Collections.emptyMap(); + private Set flags = Collections.emptySet(); + private Integer cursorBatchSize; + private Boolean allowDiskUse; + + public Meta() {} + + /** + * Copy a {@link Meta} object. + * + * @since 2.2 + * @param source + */ + Meta(Meta source) { + + this.values = new LinkedHashMap<>(source.values); + this.flags = new LinkedHashSet<>(source.flags); + this.cursorBatchSize = source.cursorBatchSize; + this.allowDiskUse = source.allowDiskUse; + } + + /** + * Return whether the maximum time limit for processing operations is set. + * + * @return {@code true} if set; {@code false} otherwise. + * @since 4.0.6 + */ + public boolean hasMaxTime() { + + Long maxTimeMsec = getMaxTimeMsec(); + + return maxTimeMsec != null && maxTimeMsec > 0; + } /** * @return {@literal null} if not set. 
@@ -59,44 +90,86 @@ public Long getMaxTimeMsec() { return getValue(MetaKey.MAX_TIME_MS.key); } + /** + * Returns the required maximum time limit in milliseconds or throws {@link IllegalStateException} if the maximum time + * limit is not set. + * + * @return the maximum time limit in milliseconds for processing operations. + * @throws IllegalStateException if the maximum time limit is not set + * @see #hasMaxTime() + * @since 4.0.6 + */ + public Long getRequiredMaxTimeMsec() { + + Long maxTimeMsec = getMaxTimeMsec(); + + if (maxTimeMsec == null) { + throw new IllegalStateException("Maximum time limit in milliseconds not set"); + } + + return maxTimeMsec; + } + /** * Set the maximum time limit in milliseconds for processing operations. * * @param maxTimeMsec */ public void setMaxTimeMsec(long maxTimeMsec) { - setMaxTime(maxTimeMsec, TimeUnit.MILLISECONDS); + setMaxTime(Duration.ofMillis(maxTimeMsec)); } /** * Set the maximum time limit for processing operations. * - * @param timeout - * @param timeUnit + * @param timeout must not be {@literal null}. + * @since 2.1 + */ + public void setMaxTime(Duration timeout) { + + Assert.notNull(timeout, "Timeout must not be null"); + setValue(MetaKey.MAX_TIME_MS.key, timeout.toMillis()); + } + + /** + * Return whether the comment is set. + * + * @return {@code true} if set; {@code false} otherwise. + * @since 4.0.6 */ - public void setMaxTime(long timeout, @Nullable TimeUnit timeUnit) { - setValue(MetaKey.MAX_TIME_MS.key, (timeUnit != null ? timeUnit : TimeUnit.MILLISECONDS).toMillis(timeout)); + public boolean hasComment() { + return StringUtils.hasText(getComment()); } /** * @return {@literal null} if not set. */ @Nullable - public Long getMaxScan() { - return getValue(MetaKey.MAX_SCAN.key); + public String getComment() { + return getValue(MetaKey.COMMENT.key); } /** - * Only scan the specified number of documents. + * Returns the required comment or throws {@link IllegalStateException} if the comment is not set. 
* - * @param maxScan + * @return the comment. + * @throws IllegalStateException if the comment is not set + * @see #hasComment() + * @since 4.0.6 */ - public void setMaxScan(long maxScan) { - setValue(MetaKey.MAX_SCAN.key, maxScan); + public String getRequiredComment() { + + String comment = getComment(); + + if (comment == null) { + throw new IllegalStateException("Comment not set"); + } + + return comment; } /** - * Add a comment to the query. + * Add a comment to the query that is propagated to the profile log. * * @param comment */ @@ -106,30 +179,27 @@ public void setComment(String comment) { /** * @return {@literal null} if not set. + * @since 2.1 */ @Nullable - public String getComment() { - return getValue(MetaKey.COMMENT.key); + public Integer getCursorBatchSize() { + return cursorBatchSize; } /** - * Using snapshot prevents the cursor from returning a document more than once. + * Apply the batch size (number of documents to return in each response) for a query.
          + * Use {@literal 0 (zero)} for no limit. A negative limit closes the cursor after returning a single + * batch indicating to the server that the client will not ask for a subsequent one. * - * @param useSnapshot - */ - public void setSnapshot(boolean useSnapshot) { - setValue(MetaKey.SNAPSHOT.key, useSnapshot); - } - - /** - * @return {@literal null} if not set. + * @param cursorBatchSize The number of documents to return per batch. + * @since 2.1 */ - public boolean getSnapshot() { - return getValue(MetaKey.SNAPSHOT.key, false); + public void setCursorBatchSize(int cursorBatchSize) { + this.cursorBatchSize = cursorBatchSize; } /** - * Add {@link CursorOption} influencing behavior of the {@link com.mongodb.DBCursor}. + * Add {@link CursorOption} influencing behavior of the {@link com.mongodb.client.FindIterable}. * * @param option must not be {@literal null}. * @return @@ -137,7 +207,12 @@ public boolean getSnapshot() { */ public boolean addFlag(CursorOption option) { - Assert.notNull(option, "CursorOption must not be null!"); + Assert.notNull(option, "CursorOption must not be null"); + + if (this.flags == Collections.EMPTY_SET) { + this.flags = new LinkedHashSet<>(2); + } + return this.flags.add(option); } @@ -149,11 +224,36 @@ public Set getFlags() { return flags; } + /** + * When set to {@literal true}, aggregation stages can write data to disk. + * + * @return {@literal null} if not set. + * @since 3.0 + */ + @Nullable + public Boolean getAllowDiskUse() { + return allowDiskUse; + } + + /** + * Enables writing to temporary files for aggregation stages and queries. When set to {@literal true}, aggregation + * stages can write data to the {@code _tmp} subdirectory in the {@code dbPath} directory. + *

          + * Starting in MongoDB 4.2, the profiler log messages and diagnostic log messages includes a {@code usedDisk} + * indicator if any aggregation stage wrote data to temporary files due to memory restrictions. + * + * @param allowDiskUse use {@literal null} for server defaults. + * @since 3.0 + */ + public void setAllowDiskUse(@Nullable Boolean allowDiskUse) { + this.allowDiskUse = allowDiskUse; + } + /** * @return */ public boolean hasValues() { - return !this.values.isEmpty() || !this.flags.isEmpty(); + return !this.values.isEmpty() || !this.flags.isEmpty() || this.cursorBatchSize != null || this.allowDiskUse != null; } /** @@ -171,11 +271,15 @@ public Iterable> values() { * @param key must not be {@literal null} or empty. * @param value */ - private void setValue(String key, @Nullable Object value) { + void setValue(String key, @Nullable Object value) { + + Assert.hasText(key, "Meta key must not be 'null' or blank"); - Assert.hasText(key, "Meta key must not be 'null' or blank."); + if (values == Collections.EMPTY_MAP) { + values = new LinkedHashMap<>(2); + } - if (value == null || (value instanceof String && !StringUtils.hasText((String) value))) { + if (value == null || (value instanceof String stringValue && !StringUtils.hasText(stringValue))) { this.values.remove(key); } this.values.put(key, value); @@ -193,10 +297,6 @@ private T getValue(String key, T defaultValue) { return value != null ? 
value : defaultValue; } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -205,22 +305,17 @@ public int hashCode() { return hash; } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } - if (!(obj instanceof Meta)) { + if (!(obj instanceof Meta other)) { return false; } - Meta other = (Meta) obj; if (!ObjectUtils.nullSafeEquals(this.values, other.values)) { return false; } @@ -243,8 +338,12 @@ public enum CursorOption { */ EXHAUST, - /** Allows querying of a replica slave. */ - SLAVE_OK, + /** + * Allows querying of a replica. + * + * @since 3.0.2 + */ + SECONDARY_READS, /** * Sets the cursor to return partial data from a query against a sharded cluster in which some shards do not respond diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MetricConversion.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MetricConversion.java new file mode 100644 index 0000000000..571bbd275c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MetricConversion.java @@ -0,0 +1,183 @@ +/* + * Copyright 2016-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.data.mongodb.core.query; + +import java.math.BigDecimal; +import java.math.MathContext; +import java.math.RoundingMode; + +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.Metric; +import org.springframework.data.geo.Metrics; + +/** + * {@link Metric} and {@link Distance} conversions using the metric system. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.2 + */ +public class MetricConversion { + + private static final BigDecimal METERS_MULTIPLIER = new BigDecimal(Metrics.KILOMETERS.getMultiplier()) + .multiply(new BigDecimal(1000)); + + // to achieve a calculation that is accurate to 0.3 meters + private static final int PRECISION = 8; + + /** + * Return meters to {@code metric} multiplier. + * + * @param metric + * @return + */ + public static double getMetersToMetricMultiplier(Metric metric) { + + ConversionMultiplier conversionMultiplier = ConversionMultiplier.builder().from(METERS_MULTIPLIER).to(metric) + .build(); + return conversionMultiplier.multiplier().doubleValue(); + } + + /** + * Return {@code distance} in meters. + * + * @param distance + * @return + */ + public static double getDistanceInMeters(Distance distance) { + return new BigDecimal(distance.getValue()).multiply(getMetricToMetersMultiplier(distance.getMetric())) + .doubleValue(); + } + + /** + * Return {@code distance} in radians (on an earth like sphere). + * + * @param distance must not be {@literal null}. + * @return distance in radians. + * @since 3.4.4 + */ + public static double toRadians(Distance distance) { + return metersToRadians(getDistanceInMeters(distance)); + } + + /** + * Return {@code distance} in radians (on an earth like sphere). + * + * @param meters + * @return distance in radians. 
+ * @since 3.4.4 + */ + public static double metersToRadians(double meters) { + return BigDecimal.valueOf(meters).divide(METERS_MULTIPLIER, MathContext.DECIMAL64).doubleValue(); + } + + /** + * Return {@code metric} to meters multiplier. + * + * @param metric + * @return + */ + private static BigDecimal getMetricToMetersMultiplier(Metric metric) { + + ConversionMultiplier conversionMultiplier = ConversionMultiplier.builder().from(metric).to(METERS_MULTIPLIER) + .build(); + return conversionMultiplier.multiplier(); + } + + /** + * Provides a multiplier to convert between various metrics. Metrics must share the same base scale and provide a + * multiplier to convert between the base scale and its own metric. + * + * @author Mark Paluch + */ + private static class ConversionMultiplier { + + private final BigDecimal source; + private final BigDecimal target; + + ConversionMultiplier(Number source, Number target) { + + if (source instanceof BigDecimal bigDecimal) { + this.source = bigDecimal; + } else { + this.source = BigDecimal.valueOf(source.doubleValue()); + } + + if (target instanceof BigDecimal bigDecimal) { + this.target = bigDecimal; + } else { + this.target = BigDecimal.valueOf(target.doubleValue()); + } + } + + /** + * Returns the multiplier to convert a number from the {@code source} metric to the {@code target} metric. + * + * @return + */ + BigDecimal multiplier() { + return target.divide(source, PRECISION, RoundingMode.HALF_UP); + } + + /** + * Creates a new {@link ConversionMultiplierBuilder}. + * + * @return + */ + static ConversionMultiplierBuilder builder() { + return new ConversionMultiplierBuilder(); + } + + } + + /** + * Builder for {@link ConversionMultiplier}. 
+ * + * @author Mark Paluch + */ + private static class ConversionMultiplierBuilder { + + private Number from; + private Number to; + + ConversionMultiplierBuilder() {} + + ConversionMultiplierBuilder from(Number from) { + this.from = from; + return this; + } + + ConversionMultiplierBuilder from(Metric from) { + this.from = from.getMultiplier(); + return this; + } + + ConversionMultiplierBuilder to(Number to) { + this.to = to; + return this; + } + + ConversionMultiplierBuilder to(Metric to) { + this.to = to.getMultiplier(); + return this; + } + + ConversionMultiplier build() { + return new ConversionMultiplier(this.from, this.to); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MongoRegexCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MongoRegexCreator.java index 9b8f38439e..e26a61c61e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MongoRegexCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/MongoRegexCreator.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,6 +17,7 @@ import java.util.regex.Pattern; +import org.bson.BsonRegularExpression; import org.springframework.lang.Nullable; /** @@ -67,7 +68,7 @@ public enum MatchMode { */ REGEX, - LIKE; + LIKE } private static final Pattern PUNCTATION_PATTERN = Pattern.compile("\\p{Punct}"); @@ -88,18 +89,24 @@ public String toRegularExpression(@Nullable String source, @Nullable MatchMode m String regex = prepareAndEscapeStringBeforeApplyingLikeRegex(source, matcherType); - switch (matcherType) { - case STARTING_WITH: - return String.format("^%s", regex); - case ENDING_WITH: - return String.format("%s$", regex); - case CONTAINING: - return String.format(".*%s.*", regex); - case EXACT: - return String.format("^%s$", regex); - default: - return regex; - } + return switch (matcherType) { + case STARTING_WITH -> String.format("^%s", regex); + case ENDING_WITH -> String.format("%s$", regex); + case CONTAINING -> String.format(".*%s.*", regex); + case EXACT -> String.format("^%s$", regex); + default -> regex; + }; + } + + /** + * @param source + * @return + * @since 2.2.14 + * @deprecated since 4.1.1 + */ + @Deprecated(since = "4.1.1", forRemoval = true) + public Object toCaseInsensitiveMatch(Object source) { + return source instanceof String stringValue ? 
new BsonRegularExpression(Pattern.quote(stringValue), "i") : source; } private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, MatchMode matcherType) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/NearQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/NearQuery.java index 14ca2c529e..f0f3b0a4dc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/NearQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/NearQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,19 +24,159 @@ import org.springframework.data.geo.Metric; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.ReadConcernAware; +import org.springframework.data.mongodb.core.ReadPreferenceAware; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; + /** - * Builder class to build near-queries. + * Builder class to build near-queries.
          + * MongoDB {@code $geoNear} operator allows usage of a {@literal GeoJSON Point} or legacy coordinate pair. Though + * syntactically different, there's no difference between {@code near: [-73.99171, 40.738868]} and {@code near: { type: + * "Point", coordinates: [-73.99171, 40.738868] } } for the MongoDB server
          + *
          + * Please note that there is a huge difference in the distance calculation. Using the legacy format (for near) operates + * upon {@literal Radians} on an Earth like sphere, whereas the {@literal GeoJSON} format uses {@literal Meters}. The + * actual type within the document is of no concern at this point.
          + * To avoid a serious headache make sure to set the {@link Metric} to the desired unit of measure which ensures the + * distance to be calculated correctly.
          + *
          + * In other words:
          + * Assume you've got 5 Documents like the ones below
          + * + *

          + *     
          + * {
          + *     "_id" : ObjectId("5c10f3735d38908db52796a5"),
          + *     "name" : "Penn Station",
          + *     "location" : { "type" : "Point", "coordinates" : [  -73.99408, 40.75057 ] }
          + * }
          + * {
          + *     "_id" : ObjectId("5c10f3735d38908db52796a6"),
          + *     "name" : "10gen Office",
          + *     "location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] }
          + * }
          + * {
          + *     "_id" : ObjectId("5c10f3735d38908db52796a9"),
          + *     "name" : "City Bakery ",
          + *     "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
          + * }
          + * {
          + *     "_id" : ObjectId("5c10f3735d38908db52796aa"),
          + *     "name" : "Splash Bar",
          + *     "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
          + * }
          + * {
          + *     "_id" : ObjectId("5c10f3735d38908db52796ab"),
          + *     "name" : "Momofuku Milk Bar",
          + *     "location" : { "type" : "Point", "coordinates" : [ -73.985839, 40.731698 ] }
          + * }
          + *      
          + * 
          + * + * Fetching all Documents within a 400 Meter radius from {@code [-73.99171, 40.738868] } would look like this using + * {@literal GeoJSON}: + * + *
          + *     
          + * {
          + *     $geoNear: {
          + *         maxDistance: 400,
          + *         num: 10,
          + *         near: { type: "Point", coordinates: [-73.99171, 40.738868] },
          + *         spherical:true,
          + *         key: "location",
          + *         distanceField: "distance"
          + *     }
          + * }
          + *
          + *     
          + * 
          + * + * resulting in the following 3 Documents. + * + *
          + *     
          + * {
          + *     "_id" : ObjectId("5c10f3735d38908db52796a6"),
          + *     "name" : "10gen Office",
          + *     "location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] }
          + *     "distance" : 0.0 // Meters
          + * }
          + * {
          + *     "_id" : ObjectId("5c10f3735d38908db52796a9"),
          + *     "name" : "City Bakery ",
          + *     "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
          + *     "distance" : 69.3582262492474 // Meters
          + * }
          + * {
          + *     "_id" : ObjectId("5c10f3735d38908db52796aa"),
          + *     "name" : "Splash Bar",
          + *     "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
          + *     "distance" : 69.3582262492474 // Meters
          + * }
          + *     
          + * 
          + * + * Using legacy coordinate pairs one operates upon radians as discussed before. Assume we use {@link Metrics#KILOMETERS} + * when constructing the geoNear command. The {@link Metric} will make sure the distance multiplier is set correctly, so + * the command is rendered like + * + *
          + *     
          + * {
          + *     $geoNear: {
          + *         maxDistance: 0.0000627142377, // 400 Meters
          + *         distanceMultiplier: 6378.137,
          + *         num: 10,
          + *         near: [-73.99171, 40.738868],
          + *         spherical:true,
          + *         key: "location",
          + *         distanceField: "distance"
          + *     }
          + * }
          + *     
          + * 
          + * + * Please note the calculated distance now uses {@literal Kilometers} instead of {@literal Meters} as unit of measure, + * so we need to take it times 1000 to match up to {@literal Meters} as in the {@literal GeoJSON} variant.
          + * Still as we've been requesting the {@link Distance} in {@link Metrics#KILOMETERS} the {@link Distance#getValue()} + * reflects exactly this. + * + *
          + *     
          + * {
          + *     "_id" : ObjectId("5c10f3735d38908db52796a6"),
          + *     "name" : "10gen Office",
          + *     "location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] }
          + *     "distance" : 0.0 // Kilometers
          + * }
          + * {
          + *     "_id" : ObjectId("5c10f3735d38908db52796a9"),
          + *     "name" : "City Bakery ",
          + *     "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
          + *     "distance" : 0.0693586286032982 // Kilometers
          + * }
          + * {
          + *     "_id" : ObjectId("5c10f3735d38908db52796aa"),
          + *     "name" : "Splash Bar",
          + *     "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] }
          + *     "distance" : 0.0693586286032982 // Kilometers
          + * }
          + *     
          + * 
          * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch */ -public final class NearQuery { +public final class NearQuery implements ReadConcernAware, ReadPreferenceAware { private final Point point; private @Nullable Query query; @@ -44,8 +184,10 @@ public final class NearQuery { private @Nullable Distance minDistance; private Metric metric; private boolean spherical; - private @Nullable Long num; + private @Nullable Long limit; private @Nullable Long skip; + private @Nullable ReadConcern readConcern; + private @Nullable ReadPreference readPreference; /** * Creates a new {@link NearQuery}. @@ -55,8 +197,8 @@ public final class NearQuery { */ private NearQuery(Point point, Metric metric) { - Assert.notNull(point, "Point must not be null!"); - Assert.notNull(metric, "Metric must not be null!"); + Assert.notNull(point, "Point must not be null"); + Assert.notNull(metric, "Metric must not be null"); this.point = point; this.spherical = false; @@ -89,10 +231,14 @@ public static NearQuery near(double x, double y, Metric metric) { } /** - * Creates a new {@link NearQuery} starting at the given {@link Point}. + * Creates a new {@link NearQuery} starting at the given {@link Point}.
          + * NOTE: There is a difference in using {@link Point} versus {@link GeoJsonPoint}. {@link Point} + * values are rendered as coordinate pairs in the legacy format and operate upon radians, whereas the + * {@link GeoJsonPoint} uses according to its specification {@literal meters} as unit of measure. This may lead to + * different results when using a {@link Metrics#NEUTRAL neutral Metric}. * * @param point must not be {@literal null}. - * @return + * @return new instance of {@link NearQuery}. */ public static NearQuery near(Point point) { return near(point, Metrics.NEUTRAL); @@ -101,11 +247,15 @@ public static NearQuery near(Point point) { /** * Creates a {@link NearQuery} starting near the given {@link Point} using the given {@link Metric} to adapt given * values to further configuration. E.g. setting a {@link #maxDistance(double)} will be interpreted as a value of the - * initially set {@link Metric}. + * initially set {@link Metric}.
          + * NOTE: There is a difference in using {@link Point} versus {@link GeoJsonPoint}. {@link Point} + * values are rendered as coordinate pairs in the legacy format and operate upon radians, whereas the + * {@link GeoJsonPoint} uses according to its specification {@literal meters} as unit of measure. This may lead to + * different results when using a {@link Metrics#NEUTRAL neutral Metric}. * * @param point must not be {@literal null}. * @param metric must not be {@literal null}. - * @return + * @return new instance of {@link NearQuery}. */ public static NearQuery near(Point point, Metric metric) { return new NearQuery(point, metric); @@ -124,11 +274,12 @@ public Metric getMetric() { /** * Configures the maximum number of results to return. * - * @param num + * @param limit * @return + * @since 2.2 */ - public NearQuery num(long num) { - this.num = num; + public NearQuery limit(long limit) { + this.limit = limit; return this; } @@ -151,10 +302,10 @@ public NearQuery skip(long skip) { */ public NearQuery with(Pageable pageable) { - Assert.notNull(pageable, "Pageable must not be 'null'."); + Assert.notNull(pageable, "Pageable must not be 'null'"); if (pageable.isPaged()) { - this.num = pageable.getOffset() + pageable.getPageSize(); this.skip = pageable.getOffset(); + this.limit = (long) pageable.getPageSize(); } return this; } @@ -186,7 +337,7 @@ public NearQuery maxDistance(double maxDistance) { */ public NearQuery maxDistance(double maxDistance, Metric metric) { - Assert.notNull(metric, "Metric must not be null!"); + Assert.notNull(metric, "Metric must not be null"); return maxDistance(new Distance(maxDistance, metric)); } @@ -200,7 +351,7 @@ public NearQuery maxDistance(double maxDistance, Metric metric) { */ public NearQuery maxDistance(Distance distance) { - Assert.notNull(distance, "Distance must not be null!"); + Assert.notNull(distance, "Distance must not be null"); if (distance.getMetric() != Metrics.NEUTRAL) { this.spherical(true); @@ -243,7 +394,7 
@@ public NearQuery minDistance(double minDistance) { */ public NearQuery minDistance(double minDistance, Metric metric) { - Assert.notNull(metric, "Metric must not be null!"); + Assert.notNull(metric, "Metric must not be null"); return minDistance(new Distance(minDistance, metric)); } @@ -258,7 +409,7 @@ public NearQuery minDistance(double minDistance, Metric metric) { */ public NearQuery minDistance(Distance distance) { - Assert.notNull(distance, "Distance must not be null!"); + Assert.notNull(distance, "Distance must not be null"); if (distance.getMetric() != Metrics.NEUTRAL) { this.spherical(true); @@ -381,13 +532,13 @@ private NearQuery adaptMetric(Metric metric) { */ public NearQuery query(Query query) { - Assert.notNull(query, "Cannot apply 'null' query on NearQuery."); + Assert.notNull(query, "Cannot apply 'null' query on NearQuery"); this.query = query; this.skip = query.getSkip(); if (query.getLimit() != 0) { - this.num = (long) query.getLimit(); + this.limit = (long) query.getLimit(); } return this; } @@ -400,6 +551,85 @@ public Long getSkip() { return skip; } + /** + * Get the {@link Collation} to use along with the {@link #query(Query)}. + * + * @return the {@link Collation} if set. {@literal null} otherwise. + * @since 2.2 + */ + @Nullable + public Collation getCollation() { + return query != null ? query.getCollation().orElse(null) : null; + } + + /** + * Configures the query to use the given {@link ReadConcern} unless the underlying {@link #query(Query)} + * {@link Query#hasReadConcern() specifies} another one. + * + * @param readConcern must not be {@literal null}. + * @return this. 
+ * @since 4.1 + */ + public NearQuery withReadConcern(ReadConcern readConcern) { + + Assert.notNull(readConcern, "ReadConcern must not be null"); + this.readConcern = readConcern; + return this; + } + + /** + * Configures the query to use the given {@link ReadPreference} unless the underlying {@link #query(Query)} + * {@link Query#hasReadPreference() specifies} another one. + * + * @param readPreference must not be {@literal null}. + * @return this. + * @since 4.1 + */ + public NearQuery withReadPreference(ReadPreference readPreference) { + + Assert.notNull(readPreference, "ReadPreference must not be null"); + this.readPreference = readPreference; + return this; + } + + /** + * Get the {@link ReadConcern} to use. Will return the underlying {@link #query(Query) queries} + * {@link Query#getReadConcern() ReadConcern} if present or the one defined on the {@link NearQuery#readConcern} + * itself. + * + * @return can be {@literal null} if none set. + * @since 4.1 + * @see ReadConcernAware + */ + @Nullable + @Override + public ReadConcern getReadConcern() { + + if (query != null && query.hasReadConcern()) { + return query.getReadConcern(); + } + return readConcern; + } + + /** + * Get the {@link ReadPreference} to use. Will return the underlying {@link #query(Query) queries} + * {@link Query#getReadPreference() ReadPreference} if present or the one defined on the + * {@link NearQuery#readPreference} itself. + * + * @return can be {@literal null} if none set. + * @since 4.1 + * @see ReadPreferenceAware + */ + @Nullable + @Override + public ReadPreference getReadPreference() { + + if (query != null && query.hasReadPreference()) { + return query.getReadPreference(); + } + return readPreference; + } + /** * Returns the {@link Document} built by the {@link NearQuery}. 
* @@ -416,25 +646,46 @@ public Document toDocument() { } if (maxDistance != null) { - document.put("maxDistance", maxDistance.getNormalizedValue()); + document.put("maxDistance", getDistanceValueInRadiantsOrMeters(maxDistance)); } if (minDistance != null) { - document.put("minDistance", minDistance.getNormalizedValue()); + document.put("minDistance", getDistanceValueInRadiantsOrMeters(minDistance)); } if (metric != null) { - document.put("distanceMultiplier", metric.getMultiplier()); + document.put("distanceMultiplier", getDistanceMultiplier()); } - if (num != null) { - document.put("num", num); + if (limit != null) { + document.put("num", limit); } - document.put("near", Arrays.asList(point.getX(), point.getY())); + if (usesGeoJson()) { + document.put("near", point); + } else { + document.put("near", Arrays.asList(point.getX(), point.getY())); + } - document.put("spherical", spherical); + document.put("spherical", spherical ? spherical : usesGeoJson()); return document; } + + private double getDistanceMultiplier() { + return usesMetricSystem() ? MetricConversion.getMetersToMetricMultiplier(metric) : metric.getMultiplier(); + } + + private double getDistanceValueInRadiantsOrMeters(Distance distance) { + return usesMetricSystem() ? MetricConversion.getDistanceInMeters(distance) : distance.getNormalizedValue(); + } + + private boolean usesMetricSystem() { + return usesGeoJson(); + } + + private boolean usesGeoJson() { + return point instanceof GeoJsonPoint; + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java index af8a616137..31c6b9069f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. 
+ * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,24 +18,36 @@ import static org.springframework.data.mongodb.core.query.SerializationUtils.*; import static org.springframework.util.ObjectUtils.*; +import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; -import java.util.concurrent.TimeUnit; import org.bson.Document; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.Limit; +import org.springframework.data.domain.OffsetScrollPosition; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Order; import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; +import org.springframework.data.mongodb.core.ReadConcernAware; +import org.springframework.data.mongodb.core.ReadPreferenceAware; +import org.springframework.data.mongodb.core.query.Meta.CursorOption; +import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; + /** * MongoDB Query object representing criteria, projection, sorting and query hints. 
* @@ -44,28 +56,48 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Anton Barkan */ -public class Query { +public class Query implements ReadConcernAware, ReadPreferenceAware { private static final String RESTRICTED_TYPES_KEY = "_$RESTRICTED_TYPES"; - private final Set> restrictedTypes = new HashSet<>(); + private Set> restrictedTypes = Collections.emptySet(); private final Map criteria = new LinkedHashMap<>(); private @Nullable Field fieldSpec = null; private Sort sort = Sort.unsorted(); private long skip; - private int limit; + private Limit limit = Limit.unlimited(); + + private KeysetScrollPosition keysetScrollPosition; + private @Nullable ReadConcern readConcern; + private @Nullable ReadPreference readPreference; + private @Nullable String hint; private Meta meta = new Meta(); private Optional collation = Optional.empty(); + Query(Query query) { + this.restrictedTypes = query.restrictedTypes; + this.fieldSpec = query.fieldSpec; + this.sort = query.sort; + this.limit = query.limit; + this.skip = query.skip; + this.keysetScrollPosition = query.keysetScrollPosition; + this.readConcern = query.readConcern; + this.readPreference = query.readPreference; + this.hint = query.hint; + this.meta = query.meta; + this.collation = query.collation; + } + /** * Static factory method to create a {@link Query} using the provided {@link CriteriaDefinition}. * * @param criteriaDefinition must not be {@literal null}. - * @return + * @return new instance of {@link Query}. * @since 1.6 */ public static Query query(CriteriaDefinition criteriaDefinition) { @@ -88,11 +120,13 @@ public Query(CriteriaDefinition criteriaDefinition) { * Adds the given {@link CriteriaDefinition} to the current {@link Query}. * * @param criteriaDefinition must not be {@literal null}. - * @return + * @return this. 
* @since 1.6 */ public Query addCriteria(CriteriaDefinition criteriaDefinition) { + Assert.notNull(criteriaDefinition, "CriteriaDefinition must not be null"); + CriteriaDefinition existing = this.criteria.get(criteriaDefinition.getKey()); String key = criteriaDefinition.getKey(); @@ -100,8 +134,8 @@ public Query addCriteria(CriteriaDefinition criteriaDefinition) { this.criteria.put(key, criteriaDefinition); } else { throw new InvalidMongoDbApiUsageException( - String.format("Due to limitations of the com.mongodb.BasicDocument, you can't add a second '%s' criteria. " - + "Query already contains '%s'", key, serializeToJsonSafely(existing.getCriteriaObject()))); + String.format("Due to limitations of the com.mongodb.BasicDocument, you can't add a second '%s' criteria;" + + " Query already contains '%s'", key, serializeToJsonSafely(existing.getCriteriaObject()))); } return this; @@ -117,10 +151,11 @@ public Field fields() { } /** - * Set number of documents to skip before returning results. + * Set number of documents to skip before returning results. Use {@literal zero} or a {@literal negative} value to + * avoid skipping. * - * @param skip - * @return + * @param skip number of documents to skip. Use {@literal zero} or a {@literal negative} value to avoid skipping. + * @return this. */ public Query skip(long skip) { this.skip = skip; @@ -128,25 +163,116 @@ public Query skip(long skip) { } /** - * Limit the number of returned documents to {@code limit}. + * Limit the number of returned documents to {@code limit}. A {@literal zero} or {@literal negative} value is + * considered as unlimited. * - * @param limit - * @return + * @param limit number of documents to return. Use {@literal zero} or {@literal negative} for unlimited. + * @return this. */ public Query limit(int limit) { - this.limit = limit; + this.limit = limit > 0 ? Limit.of(limit) : Limit.unlimited(); return this; } /** - * Configures the query to use the given hint when being executed. 
+ * Limit the number of returned documents to {@link Limit}. * - * @param name must not be {@literal null} or empty. - * @return + * @param limit number of documents to return. + * @return this. + * @since 4.2 + */ + public Query limit(Limit limit) { + + Assert.notNull(limit, "Limit must not be null"); + + if (limit.isUnlimited()) { + this.limit = limit; + return this; + } + + // retain zero/negative semantics for unlimited. + return limit(limit.max()); + } + + /** + * Configures the query to use the given hint when being executed. The {@code hint} can either be an index name or a + * json {@link Document} representation. + * + * @param hint must not be {@literal null} or empty. + * @return this. + * @see Document#parse(String) + */ + public Query withHint(String hint) { + + Assert.hasText(hint, "Hint must not be empty or null"); + this.hint = hint; + return this; + } + + /** + * Configures the query to use the given {@link ReadConcern} when being executed. + * + * @param readConcern must not be {@literal null}. + * @return this. + * @since 3.1 + */ + public Query withReadConcern(ReadConcern readConcern) { + + Assert.notNull(readConcern, "ReadConcern must not be null"); + this.readConcern = readConcern; + return this; + } + + /** + * Configures the query to use the given {@link ReadPreference} when being executed. + * + * @param readPreference must not be {@literal null}. + * @return this. 
+ * @since 4.1 + */ + public Query withReadPreference(ReadPreference readPreference) { + + Assert.notNull(readPreference, "ReadPreference must not be null"); + this.readPreference = readPreference; + return this; + } + + @Override + public boolean hasReadConcern() { + return this.readConcern != null; + } + + @Override + public ReadConcern getReadConcern() { + return this.readConcern; + } + + @Override + public boolean hasReadPreference() { + return this.readPreference != null || getMeta().getFlags().contains(CursorOption.SECONDARY_READS); + } + + @Override + public ReadPreference getReadPreference() { + + if (readPreference == null) { + return getMeta().getFlags().contains(CursorOption.SECONDARY_READS) ? ReadPreference.primaryPreferred() : null; + } + + return this.readPreference; + } + + /** + * Configures the query to use the given {@link Document hint} when being executed. + * + * @param hint must not be {@literal null}. + * @return this. + * @since 2.2 */ - public Query withHint(String name) { - Assert.hasText(name, "Hint must not be empty or null!"); - this.hint = name; + public Query withHint(Document hint) { + + Assert.notNull(hint, "Hint must not be null"); + this.hint = hint.toJson(); return this; } @@ -154,30 +280,89 @@ public Query withHint(String name) { * Sets the given pagination information on the {@link Query} instance. Will transparently set {@code skip} and * {@code limit} as well as applying the {@link Sort} instance defined with the {@link Pageable}. * - * @param pageable - * @return + * @param pageable must not be {@literal null}. + * @return this. */ public Query with(Pageable pageable) { - if (pageable.isUnpaged()) { - return this; + if (pageable.isPaged()) { + this.limit = pageable.toLimit(); + this.skip = pageable.getOffset(); } - this.limit = pageable.getPageSize(); - this.skip = pageable.getOffset(); - return with(pageable.getSort()); } + /** + * Sets the given cursor position on the {@link Query} instance. 
Will transparently set {@code skip}. + * + * @param position must not be {@literal null}. + * @return this. + */ + public Query with(ScrollPosition position) { + + Assert.notNull(position, "ScrollPosition must not be null"); + + if (position instanceof OffsetScrollPosition offset) { + return with(offset); + } + + if (position instanceof KeysetScrollPosition keyset) { + return with(keyset); + } + + throw new IllegalArgumentException(String.format("ScrollPosition %s not supported", position)); + } + + /** + * Sets the given cursor position on the {@link Query} instance. Will transparently set {@code skip}. + * + * @param position must not be {@literal null}. + * @return this. + */ + public Query with(OffsetScrollPosition position) { + + Assert.notNull(position, "ScrollPosition must not be null"); + + this.skip = position.isInitial() ? 0 : position.getOffset() + 1; + this.keysetScrollPosition = null; + return this; + } + + /** + * Sets the given cursor position on the {@link Query} instance. Will transparently reset {@code skip}. + * + * @param position must not be {@literal null}. + * @return this. + */ + public Query with(KeysetScrollPosition position) { + + Assert.notNull(position, "ScrollPosition must not be null"); + + this.skip = 0; + this.keysetScrollPosition = position; + + return this; + } + + public boolean hasKeyset() { + return keysetScrollPosition != null; + } + + @Nullable + public KeysetScrollPosition getKeyset() { + return keysetScrollPosition; + } + /** * Adds a {@link Sort} to the {@link Query} instance. * - * @param sort - * @return + * @param sort must not be {@literal null}. + * @return this. 
*/ public Query with(Sort sort) { - Assert.notNull(sort, "Sort must not be null!"); + Assert.notNull(sort, "Sort must not be null"); if (sort.isUnsorted()) { return this; @@ -185,8 +370,8 @@ public Query with(Sort sort) { sort.stream().filter(Order::isIgnoreCase).findFirst().ifPresent(it -> { - throw new IllegalArgumentException(String.format("Given sort contained an Order for %s with ignore case! " - + "MongoDB does not support sorting ignoring case currently!", it.getProperty())); + throw new IllegalArgumentException(String.format("Given sort contained an Order for %s with ignore case;" + + " MongoDB does not support sorting ignoring case currently", it.getProperty())); }); this.sort = this.sort.and(sort); @@ -206,15 +391,22 @@ public Set> getRestrictedTypes() { * * @param type may not be {@literal null} * @param additionalTypes may not be {@literal null} - * @return + * @return this. */ public Query restrict(Class type, Class... additionalTypes) { - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); Assert.notNull(additionalTypes, "AdditionalTypes must not be null"); + if (restrictedTypes == Collections.EMPTY_SET) { + restrictedTypes = new HashSet<>(1 + additionalTypes.length); + } + restrictedTypes.add(type); - restrictedTypes.addAll(Arrays.asList(additionalTypes)); + + if (additionalTypes.length > 0) { + restrictedTypes.addAll(Arrays.asList(additionalTypes)); + } return this; } @@ -224,6 +416,17 @@ public Query restrict(Class type, Class... 
additionalTypes) { */ public Document getQueryObject() { + if (criteria.isEmpty() && restrictedTypes.isEmpty()) { + return BsonUtils.EMPTY_DOCUMENT; + } + + if (criteria.size() == 1 && restrictedTypes.isEmpty()) { + + for (CriteriaDefinition definition : criteria.values()) { + return definition.getCriteriaObject(); + } + } + Document document = new Document(); for (CriteriaDefinition definition : criteria.values()) { @@ -241,7 +444,7 @@ public Document getQueryObject() { * @return the field {@link Document}. */ public Document getFieldsObject() { - return this.fieldSpec == null ? new Document() : fieldSpec.getFieldsObject(); + return this.fieldSpec == null ? BsonUtils.EMPTY_DOCUMENT : fieldSpec.getFieldsObject(); } /** @@ -250,37 +453,59 @@ public Document getFieldsObject() { public Document getSortObject() { if (this.sort.isUnsorted()) { - return new Document(); + return BsonUtils.EMPTY_DOCUMENT; } Document document = new Document(); - this.sort.stream()// - .forEach(order -> document.put(order.getProperty(), order.isAscending() ? 1 : -1)); + this.sort.forEach(order -> document.put(order.getProperty(), order.isAscending() ? 1 : -1)); return document; } /** - * Get the number of documents to skip. + * Returns {@literal true} if the {@link Query} has a sort parameter. * - * @return + * @return {@literal true} if sorted. + * @see Sort#isSorted() + * @since 2.2 + */ + public boolean isSorted() { + return sort.isSorted(); + } + + /** + * Get the number of documents to skip. {@literal Zero} or a {@literal negative} value indicates no skip. + * + * @return number of documents to skip */ public long getSkip() { return this.skip; } /** - * Get the maximum number of documents to be return. + * Returns whether the query is {@link #limit(int) limited}. * - * @return + * @return {@code true} if the query is limited; {@code false} otherwise. 
+ * @since 4.1 + */ + public boolean isLimited() { + return this.limit.isLimited(); + } + + /** + * Get the maximum number of documents to be return. {@literal Zero} or a {@literal negative} value indicates no + * limit. + * + * @return number of documents to return. + * @see #isLimited() */ public int getLimit() { - return this.limit; + return limit.isUnlimited() ? 0 : this.limit.max(); } /** - * @return + * @return can be {@literal null}. */ @Nullable public String getHint() { @@ -289,7 +514,7 @@ public String getHint() { /** * @param maxTimeMsec - * @return + * @return this. * @see Meta#setMaxTimeMsec(long) * @since 1.6 */ @@ -300,55 +525,67 @@ public Query maxTimeMsec(long maxTimeMsec) { } /** - * @param timeout - * @param timeUnit - * @return - * @see Meta#setMaxTime(long, TimeUnit) - * @since 1.6 + * @param timeout must not be {@literal null}. + * @return this. + * @see Meta#setMaxTime(Duration) + * @since 2.1 */ - public Query maxTime(long timeout, TimeUnit timeUnit) { + public Query maxTime(Duration timeout) { - meta.setMaxTime(timeout, timeUnit); + meta.setMaxTime(timeout); return this; } /** - * @param maxScan - * @return - * @see Meta#setMaxScan(long) + * Add a comment to the query that is propagated to the profile log. + * + * @param comment must not be {@literal null}. + * @return this. + * @see Meta#setComment(String) * @since 1.6 */ - public Query maxScan(long maxScan) { + public Query comment(String comment) { - meta.setMaxScan(maxScan); + meta.setComment(comment); return this; } /** - * @param comment - * @return - * @see Meta#setComment(String) - * @since 1.6 + * Enables writing to temporary files for aggregation stages and queries. When set to {@literal true}, aggregation + * stages can write data to the {@code _tmp} subdirectory in the {@code dbPath} directory. + *

          + * Starting in MongoDB 4.2, the profiler log messages and diagnostic log messages includes a {@code usedDisk} + * indicator if any aggregation stage wrote data to temporary files due to memory restrictions. + * + * @param allowDiskUse + * @return this. + * @see Meta#setAllowDiskUse(Boolean) + * @since 3.2 */ - public Query comment(String comment) { + public Query allowDiskUse(boolean allowDiskUse) { - meta.setComment(comment); + meta.setAllowDiskUse(allowDiskUse); return this; } /** - * @return - * @see Meta#setSnapshot(boolean) - * @since 1.6 + * Set the number of documents to return in each response batch.
          + * Use {@literal 0 (zero)} for no limit. A negative limit closes the cursor after returning a single + * batch indicating to the server that the client will not ask for a subsequent one. + * + * @param batchSize The number of documents to return per batch. + * @return this. + * @see Meta#setCursorBatchSize(int) + * @since 2.1 */ - public Query useSnapshot() { + public Query cursorBatchSize(int batchSize) { - meta.setSnapshot(true); + meta.setCursorBatchSize(batchSize); return this; } /** - * @return + * @return this. * @see org.springframework.data.mongodb.core.query.Meta.CursorOption#NO_TIMEOUT * @since 1.10 */ @@ -359,7 +596,7 @@ public Query noCursorTimeout() { } /** - * @return + * @return this. * @see org.springframework.data.mongodb.core.query.Meta.CursorOption#EXHAUST * @since 1.10 */ @@ -370,18 +607,20 @@ public Query exhaust() { } /** - * @return - * @see org.springframework.data.mongodb.core.query.Meta.CursorOption#SLAVE_OK - * @since 1.10 + * Allows querying of a replica. + * + * @return this. + * @see org.springframework.data.mongodb.core.query.Meta.CursorOption#SECONDARY_READS + * @since 3.0.2 */ - public Query slaveOk() { + public Query allowSecondaryReads() { - meta.addFlag(Meta.CursorOption.SLAVE_OK); + meta.addFlag(Meta.CursorOption.SECONDARY_READS); return this; } /** - * @return + * @return this. * @see org.springframework.data.mongodb.core.query.Meta.CursorOption#PARTIAL * @since 1.10 */ @@ -405,7 +644,7 @@ public Meta getMeta() { */ public void setMeta(Meta meta) { - Assert.notNull(meta, "Query meta might be empty but must not be null."); + Assert.notNull(meta, "Query meta might be empty but must not be null"); this.meta = meta; } @@ -413,7 +652,7 @@ public void setMeta(Meta meta) { * Set the {@link Collation} applying language-specific rules for string comparison. * * @param collation can be {@literal null}. - * @return + * @return this. 
* @since 2.0 */ public Query collation(@Nullable Collation collation) { @@ -425,7 +664,7 @@ public Query collation(@Nullable Collation collation) { /** * Get the {@link Collation} defining language-specific rules for string comparison. * - * @return + * @return never {@literal null}. * @since 2.0 */ public Optional getCollation() { @@ -436,22 +675,68 @@ protected List getCriteria() { return new ArrayList<>(this.criteria.values()); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() + /** + * Create an independent copy of the given {@link Query}.
          + * The resulting {@link Query} will not be {@link Object#equals(Object) binary equal} to the given source but + * semantically equal in terms of creating the same result when executed. + * + * @param source The source {@link Query} to use a reference. Must not be {@literal null}. + * @return new {@link Query}. + * @since 2.2 */ + public static Query of(Query source) { + + Assert.notNull(source, "Source must not be null"); + + Document sourceFields = source.getFieldsObject(); + Document sourceSort = source.getSortObject(); + Document sourceQuery = source.getQueryObject(); + + Query target = new Query() { + + @Override + public Document getFieldsObject() { + return BsonUtils.merge(sourceFields, super.getFieldsObject()); + } + + @Override + public Document getSortObject() { + return BsonUtils.merge(sourceSort, super.getSortObject()); + } + + @Override + public Document getQueryObject() { + return BsonUtils.merge(sourceQuery, super.getQueryObject()); + } + + @Override + public boolean isSorted() { + return source.isSorted() || super.isSorted(); + } + }; + + target.skip = source.getSkip(); + + target.limit = source.isLimited() ? 
Limit.of(source.getLimit()) : Limit.unlimited(); + target.hint = source.getHint(); + target.collation = source.getCollation(); + target.restrictedTypes = new HashSet<>(source.getRestrictedTypes()); + + if (source.getMeta().hasValues()) { + target.setMeta(new Meta(source.getMeta())); + } + + return target; + } + @Override public String toString() { return String.format("Query: %s, Fields: %s, Sort: %s", serializeToJsonSafely(getQueryObject()), serializeToJsonSafely(getFieldsObject()), serializeToJsonSafely(getSortObject())); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -477,7 +762,7 @@ protected boolean querySettingsEquals(Query that) { boolean sortEqual = this.sort.equals(that.sort); boolean hintEqual = nullSafeEquals(this.hint, that.hint); boolean skipEqual = this.skip == that.skip; - boolean limitEqual = this.limit == that.limit; + boolean limitEqual = nullSafeEquals(this.limit, that.limit); boolean metaEqual = nullSafeEquals(this.meta, that.meta); boolean collationEqual = nullSafeEquals(this.collation.orElse(null), that.collation.orElse(null)); @@ -485,10 +770,6 @@ protected boolean querySettingsEquals(Query that) { && collationEqual; } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { @@ -499,7 +780,7 @@ public int hashCode() { result += 31 * nullSafeHashCode(sort); result += 31 * nullSafeHashCode(hint); result += 31 * skip; - result += 31 * limit; + result += 31 * limit.hashCode(); result += 31 * nullSafeHashCode(meta); result += 31 * nullSafeHashCode(collation.orElse(null)); @@ -518,4 +799,5 @@ public int hashCode() { public static boolean isRestrictedTypeKey(String key) { return RESTRICTED_TYPES_KEY.equals(key); } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/SerializationUtils.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/SerializationUtils.java index 702eb347e1..11e0f7fb24 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/SerializationUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/SerializationUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,7 @@ */ package org.springframework.data.mongodb.core.query; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; @@ -22,10 +23,10 @@ import java.util.Map; import org.bson.Document; + import org.springframework.core.convert.converter.Converter; import org.springframework.lang.Nullable; - -import com.mongodb.util.JSON; +import org.springframework.util.ObjectUtils; /** * Utility methods for JSON serialization. @@ -76,9 +77,8 @@ public static Map flattenMap(@Nullable Document source) { private static void toFlatMap(String currentPath, Object source, Map map) { - if (source instanceof Document) { + if (source instanceof Document document) { - Document document = (Document) source; Iterator> it = document.entrySet().iterator(); String pathPrefix = currentPath.isEmpty() ? "" : currentPath + '.'; @@ -118,19 +118,32 @@ public static String serializeToJsonSafely(@Nullable Object value) { } try { - return value instanceof Document ? 
((Document) value).toJson() : JSON.serialize(value); + String json = value instanceof Document document ? document.toJson() : serializeValue(value); + return json.replaceAll("\":", "\" :").replaceAll("\\{\"", "{ \""); } catch (Exception e) { - if (value instanceof Collection) { - return toString((Collection) value); - } else if (value instanceof Map) { - return toString((Map) value); + if (value instanceof Collection collection) { + return toString(collection); + } else if (value instanceof Map map) { + return toString(map); + } else if (ObjectUtils.isArray(value)) { + return toString(Arrays.asList(ObjectUtils.toObjectArray(value))); } else { - return String.format("{ \"$java\" : %s }", value.toString()); + return String.format("{ \"$java\" : %s }", value); } } } + public static String serializeValue(@Nullable Object value) { + + if (value == null) { + return "null"; + } + + String documentJson = new Document("toBeEncoded", value).toJson(); + return documentJson.substring(documentJson.indexOf(':') + 1, documentJson.length() - 1).trim(); + } + private static String toString(Map source) { return iterableToDelimitedString(source.entrySet(), "{ ", " }", entry -> String.format("\"%s\" : %s", entry.getKey(), serializeToJsonSafely(entry.getValue()))); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Term.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Term.java index 728fb38c01..bd6d8c3469 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Term.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Term.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,6 +16,7 @@ package org.springframework.data.mongodb.core.query; import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; /** * A {@link Term} defines one or multiple words {@link Type#WORD} or phrases {@link Type#PHRASE} to be used in the @@ -28,7 +29,7 @@ public class Term { public enum Type { - WORD, PHRASE; + WORD, PHRASE } private final Type type; @@ -90,6 +91,33 @@ public String getFormatted() { return negated ? negateRaw(formatted) : formatted; } + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) { + return true; + } + + if (!(o instanceof Term other)) { + return false; + } + + return ObjectUtils.nullSafeEquals(negated, other.negated) && ObjectUtils.nullSafeEquals(type, other.type) + && ObjectUtils.nullSafeEquals(raw, other.raw); + } + + @Override + public int hashCode() { + + int result = 17; + + result += ObjectUtils.nullSafeHashCode(type); + result += ObjectUtils.nullSafeHashCode(raw); + result += ObjectUtils.nullSafeHashCode(negated); + + return result; + } + @Override public String toString() { return getFormatted(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextCriteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextCriteria.java index 1302e4437f..e1a7d0c4d0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextCriteria.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextCriteria.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,6 +21,7 @@ import org.bson.Document; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; /** @@ -34,7 +35,7 @@ public class TextCriteria implements CriteriaDefinition { private final List terms; - private @Nullable String language; + private final @Nullable String language; private @Nullable Boolean caseSensitive; private @Nullable Boolean diacriticSensitive; @@ -51,7 +52,7 @@ public TextCriteria() { private TextCriteria(@Nullable String language) { this.language = language; - this.terms = new ArrayList(); + this.terms = new ArrayList<>(); } /** @@ -72,7 +73,7 @@ public static TextCriteria forDefaultLanguage() { */ public static TextCriteria forLanguage(String language) { - Assert.hasText(language, "Language must not be null or empty!"); + Assert.hasText(language, "Language must not be null or empty"); return new TextCriteria(language); } @@ -98,7 +99,7 @@ public TextCriteria matchingAny(String... 
words) { */ public TextCriteria matching(Term term) { - Assert.notNull(term, "Term to add must not be null."); + Assert.notNull(term, "Term to add must not be null"); this.terms.add(term); return this; @@ -194,19 +195,11 @@ public TextCriteria diacriticSensitive(boolean diacriticSensitive) { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.CriteriaDefinition#getKey() - */ @Override public String getKey() { return "$text"; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.CriteriaDefinition#getCriteriaObject() - */ @Override public Document getCriteriaObject() { @@ -231,9 +224,37 @@ public Document getCriteriaObject() { return new Document("$text", document); } + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) { + return true; + } + if (!(o instanceof TextCriteria that)) { + return false; + } + + return ObjectUtils.nullSafeEquals(terms, that.terms) && ObjectUtils.nullSafeEquals(language, that.language) + && ObjectUtils.nullSafeEquals(caseSensitive, that.caseSensitive) + && ObjectUtils.nullSafeEquals(diacriticSensitive, that.diacriticSensitive); + } + + @Override + public int hashCode() { + + int result = 17; + + result += ObjectUtils.nullSafeHashCode(terms); + result += ObjectUtils.nullSafeHashCode(language); + result += ObjectUtils.nullSafeHashCode(caseSensitive); + result += ObjectUtils.nullSafeHashCode(diacriticSensitive); + + return result; + } + private String join(Iterable terms) { - List result = new ArrayList(); + List result = new ArrayList<>(); for (Term term : terms) { if (term != null) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java index ed1e38c934..a6583299d6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,8 +16,10 @@ package org.springframework.data.mongodb.core.query; import java.util.Locale; +import java.util.Map.Entry; import org.bson.Document; +import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.lang.Nullable; /** @@ -35,6 +37,7 @@ public class TextQuery extends Query { private String scoreFieldName = DEFAULT_SCORE_FIELD_FIELDNAME; private boolean includeScore = false; private boolean sortByScore = false; + private int sortByScoreIndex = 0; /** * Creates new {@link TextQuery} using the the given {@code wordsAndPhrases} with {@link TextCriteria} @@ -65,17 +68,17 @@ public TextQuery(String wordsAndPhrases, @Nullable String language) { * For a full list of supported languages see the mongdodb reference manual for * Text Search Languages. * - * @param wordsAndPhrases - * @param locale + * @param wordsAndPhrases must not be {@literal null}. + * @param locale can be {@literal null}. */ public TextQuery(String wordsAndPhrases, @Nullable Locale locale) { - this(wordsAndPhrases, locale != null ? locale.getLanguage() : (String) null); + this(wordsAndPhrases, locale != null ? locale.getLanguage() : null); } /** * Creates new {@link TextQuery} for given {@link TextCriteria}. * - * @param criteria. + * @param criteria must not be {@literal null}. 
*/ public TextQuery(TextCriteria criteria) { super(criteria); @@ -84,8 +87,8 @@ public TextQuery(TextCriteria criteria) { /** * Creates new {@link TextQuery} searching for given {@link TextCriteria}. * - * @param criteria - * @return + * @param criteria must not be {@literal null}. + * @return new instance of {@link TextQuery}. */ public static TextQuery queryText(TextCriteria criteria) { return new TextQuery(criteria); @@ -95,10 +98,11 @@ public static TextQuery queryText(TextCriteria criteria) { * Add sorting by text score. Will also add text score to returned fields. * * @see TextQuery#includeScore() - * @return + * @return this. */ public TextQuery sortByScore() { + this.sortByScoreIndex = getSortObject().size(); this.includeScore(); this.sortByScore = true; return this; @@ -107,7 +111,7 @@ public TextQuery sortByScore() { /** * Add field {@literal score} holding the documents textScore to the returned fields. * - * @return + * @return this. */ public TextQuery includeScore() { @@ -118,8 +122,8 @@ public TextQuery includeScore() { /** * Include text search document score in returned fields using the given fieldname. * - * @param fieldname - * @return + * @param fieldname must not be {@literal null}. + * @return this. */ public TextQuery includeScore(String fieldname) { @@ -131,7 +135,7 @@ public TextQuery includeScore(String fieldname) { /** * Set the fieldname used for scoring. * - * @param fieldName + * @param fieldName must not be {@literal null}. */ public void setScoreFieldName(String fieldName) { this.scoreFieldName = fieldName; @@ -140,16 +144,12 @@ public void setScoreFieldName(String fieldName) { /** * Get the fieldname used for scoring * - * @return + * @return never {@literal null}. 
*/ public String getScoreFieldName() { return scoreFieldName; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Query#getFieldsObject() - */ @Override public Document getFieldsObject() { @@ -157,27 +157,59 @@ public Document getFieldsObject() { return super.getFieldsObject(); } - Document fields = super.getFieldsObject(); + Document fields = BsonUtils.asMutableDocument(super.getFieldsObject()); fields.put(getScoreFieldName(), META_TEXT_SCORE); return fields; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Query#getSortObject() - */ @Override public Document getSortObject() { - Document sort = new Document(); - if (this.sortByScore) { - sort.put(getScoreFieldName(), META_TEXT_SCORE); + + int sortByScoreIndex = this.sortByScoreIndex; + + return sortByScoreIndex != 0 + ? sortByScoreAtPosition(super.getSortObject(), sortByScoreIndex) + : sortByScoreAtPositionZero(); } + return super.getSortObject(); + } + + private Document sortByScoreAtPositionZero() { + + Document sort = new Document(); + + sort.put(getScoreFieldName(), META_TEXT_SCORE); sort.putAll(super.getSortObject()); return sort; } + + private Document sortByScoreAtPosition(Document source, int sortByScoreIndex) { + + Document target = new Document(); + int index = 0; + + for (Entry entry : source.entrySet()) { + if (index == sortByScoreIndex) { + target.put(getScoreFieldName(), META_TEXT_SCORE); + } + target.put(entry.getKey(), entry.getValue()); + index++; + } + + if (index == sortByScoreIndex) { + target.put(getScoreFieldName(), META_TEXT_SCORE); + } + + return target; + } + + @Override + public boolean isSorted() { + return super.isSorted() || sortByScore; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcher.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcher.java index 6001f42464..677575c9e4 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcher.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcher.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,11 @@ */ package org.springframework.data.mongodb.core.query; -import lombok.AccessLevel; -import lombok.EqualsAndHashCode; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - import java.util.Set; import org.springframework.data.domain.ExampleMatcher; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; /** * {@link ExampleMatcher} implementation for query by example (QBE). 
Unlike plain {@link ExampleMatcher} this untyped @@ -33,195 +30,122 @@ * @author Mark Paluch * @since 2.0 */ -@EqualsAndHashCode -@RequiredArgsConstructor(access = AccessLevel.PRIVATE) public class UntypedExampleMatcher implements ExampleMatcher { - private final @NonNull ExampleMatcher delegate; + private final ExampleMatcher delegate; + + private UntypedExampleMatcher(ExampleMatcher delegate) { + this.delegate = delegate; + } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#matching() - */ public static UntypedExampleMatcher matching() { return new UntypedExampleMatcher(ExampleMatcher.matching()); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#matchingAny() - */ public static UntypedExampleMatcher matchingAny() { return new UntypedExampleMatcher(ExampleMatcher.matchingAny()); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#matchingAll() - */ public static UntypedExampleMatcher matchingAll() { return new UntypedExampleMatcher(ExampleMatcher.matchingAll()); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#withIgnorePaths(java.lang.String...) - */ public UntypedExampleMatcher withIgnorePaths(String... 
ignoredPaths) { return new UntypedExampleMatcher(delegate.withIgnorePaths(ignoredPaths)); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#withStringMatcher(java.lang.String) - */ public UntypedExampleMatcher withStringMatcher(StringMatcher defaultStringMatcher) { return new UntypedExampleMatcher(delegate.withStringMatcher(defaultStringMatcher)); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#withIgnoreCase() - */ public UntypedExampleMatcher withIgnoreCase() { return new UntypedExampleMatcher(delegate.withIgnoreCase()); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#withIgnoreCase(boolean) - */ public UntypedExampleMatcher withIgnoreCase(boolean defaultIgnoreCase) { return new UntypedExampleMatcher(delegate.withIgnoreCase(defaultIgnoreCase)); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#withMatcher(java.lang.String, org.springframework.data.domain.ExampleMatcher.MatcherConfigurer) - */ public UntypedExampleMatcher withMatcher(String propertyPath, MatcherConfigurer matcherConfigurer) { return new UntypedExampleMatcher(delegate.withMatcher(propertyPath, matcherConfigurer)); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#withMatcher(java.lang.String, org.springframework.data.domain.ExampleMatcher.GenericPropertyMatcher) - */ public UntypedExampleMatcher withMatcher(String propertyPath, GenericPropertyMatcher genericPropertyMatcher) { return new UntypedExampleMatcher(delegate.withMatcher(propertyPath, genericPropertyMatcher)); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#withTransformer(java.lang.String, org.springframework.data.domain.ExampleMatcher.PropertyValueTransformer) - */ public UntypedExampleMatcher withTransformer(String propertyPath, PropertyValueTransformer propertyValueTransformer) { return new 
UntypedExampleMatcher(delegate.withTransformer(propertyPath, propertyValueTransformer)); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#withIgnoreCase(java.lang.String...) - */ public UntypedExampleMatcher withIgnoreCase(String... propertyPaths) { return new UntypedExampleMatcher(delegate.withIgnoreCase(propertyPaths)); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#withIncludeNullValues() - */ public UntypedExampleMatcher withIncludeNullValues() { return new UntypedExampleMatcher(delegate.withIncludeNullValues()); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#withIgnoreNullValues() - */ public UntypedExampleMatcher withIgnoreNullValues() { return new UntypedExampleMatcher(delegate.withIgnoreNullValues()); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#withNullHandler(org.springframework.data.domain.ExampleMatcher.NullHandler) - */ public UntypedExampleMatcher withNullHandler(NullHandler nullHandler) { return new UntypedExampleMatcher(delegate.withNullHandler(nullHandler)); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#getNullHandler() - */ public NullHandler getNullHandler() { return delegate.getNullHandler(); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#getDefaultStringMatcher() - */ public StringMatcher getDefaultStringMatcher() { return delegate.getDefaultStringMatcher(); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#isIgnoreCaseEnabled() - */ public boolean isIgnoreCaseEnabled() { return delegate.isIgnoreCaseEnabled(); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#isIgnoredPath() - */ public boolean isIgnoredPath(String path) { return delegate.isIgnoredPath(path); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#getIgnoredPaths() - */ public Set 
getIgnoredPaths() { return delegate.getIgnoredPaths(); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#getPropertySpecifiers() - */ public PropertySpecifiers getPropertySpecifiers() { return delegate.getPropertySpecifiers(); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#isAllMatching() - */ public boolean isAllMatching() { return delegate.isAllMatching(); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#isAnyMatching() - */ public boolean isAnyMatching() { return delegate.isAnyMatching(); } - /* - * (non-Javadoc) - * @see org.springframework.data.domain.ExampleMatcher#getMatchMode() - */ public MatchMode getMatchMode() { return delegate.getMatchMode(); } + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + UntypedExampleMatcher that = (UntypedExampleMatcher) o; + + return ObjectUtils.nullSafeEquals(delegate, that.delegate); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(delegate); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java index a96e4c9f69..32d98f5804 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,7 @@ */ package org.springframework.data.mongodb.core.query; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -48,24 +49,25 @@ * @author Mark Paluch * @author Pavel Vodrazka */ -public class Update { +public class Update implements UpdateDefinition { public enum Position { LAST, FIRST } private boolean isolated = false; - private Set keysToUpdate = new HashSet<>(); - private Map modifierOps = new LinkedHashMap<>(); - private Map pushCommandBuilders = new LinkedHashMap<>(1); + private final Set keysToUpdate = new HashSet<>(); + private final Map modifierOps = new LinkedHashMap<>(); + private Map pushCommandBuilders = Collections.emptyMap(); + private List arrayFilters = Collections.emptyList(); /** * Static factory method to create an Update using the provided key * - * @param key - * @return + * @param key the field to update. + * @return new instance of {@link Update}. */ - public static Update update(String key, Object value) { + public static Update update(String key, @Nullable Object value) { return new Update().set(key, value); } @@ -78,7 +80,7 @@ public static Update update(String key, Object value) { * * @param object the source {@link Document} to create the update from. * @param exclude the fields to exclude. - * @return + * @return new instance of {@link Update}. */ public static Update fromDocument(Document object, String... exclude) { @@ -93,8 +95,8 @@ public static Update fromDocument(Document object, String... 
exclude) { Object value = object.get(key); update.modifierOps.put(key, value); - if (isKeyword(key) && value instanceof Document) { - update.keysToUpdate.addAll(((Document) value).keySet()); + if (isKeyword(key) && value instanceof Document document) { + update.keysToUpdate.addAll(document.keySet()); } else { update.keysToUpdate.add(key); } @@ -106,12 +108,13 @@ public static Update fromDocument(Document object, String... exclude) { /** * Update using the {@literal $set} update modifier * - * @param key - * @param value - * @return + * @param key the field name. + * @param value can be {@literal null}. In this case the property remains in the db with a {@literal null} value. To + * remove it use {@link #unset(String)}. + * @return this. * @see MongoDB Update operator: $set */ - public Update set(String key, Object value) { + public Update set(String key, @Nullable Object value) { addMultiFieldOperation("$set", key, value); return this; } @@ -119,13 +122,13 @@ public Update set(String key, Object value) { /** * Update using the {@literal $setOnInsert} update modifier * - * @param key - * @param value - * @return + * @param key the field name. + * @param value can be {@literal null}. + * @return this. * @see MongoDB Update operator: * $setOnInsert */ - public Update setOnInsert(String key, Object value) { + public Update setOnInsert(String key, @Nullable Object value) { addMultiFieldOperation("$setOnInsert", key, value); return this; } @@ -133,8 +136,8 @@ public Update setOnInsert(String key, Object value) { /** * Update using the {@literal $unset} update modifier * - * @param key - * @return + * @param key the field name. + * @return this. * @see MongoDB Update operator: $unset */ public Update unset(String key) { @@ -145,9 +148,9 @@ public Update unset(String key) { /** * Update using the {@literal $inc} update modifier * - * @param key - * @param inc - * @return + * @param key the field name. + * @param inc must not be {@literal null}. + * @return this. 
* @see MongoDB Update operator: $inc */ public Update inc(String key, Number inc) { @@ -155,15 +158,20 @@ public Update inc(String key, Number inc) { return this; } + @Override + public void inc(String key) { + inc(key, 1L); + } + /** * Update using the {@literal $push} update modifier * - * @param key - * @param value - * @return + * @param key the field name. + * @param value can be {@literal null}. + * @return this. * @see MongoDB Update operator: $push */ - public Update push(String key, Object value) { + public Update push(String key, @Nullable Object value) { addMultiFieldOperation("$push", key, value); return this; } @@ -173,7 +181,7 @@ public Update push(String key, Object value) { * Allows creation of {@code $push} command for single or multiple (using {@code $each}) values as well as using * {@code $position}. * - * @param key + * @param key the field name. * @return {@link PushOperatorBuilder} for given key * @see MongoDB Update operator: $push * @see MongoDB Update operator: $each @@ -181,36 +189,22 @@ public Update push(String key, Object value) { public PushOperatorBuilder push(String key) { if (!pushCommandBuilders.containsKey(key)) { + + if (pushCommandBuilders == Collections.EMPTY_MAP) { + pushCommandBuilders = new LinkedHashMap<>(1); + } + pushCommandBuilders.put(key, new PushOperatorBuilder(key)); } return pushCommandBuilders.get(key); } - /** - * Update using the {@code $pushAll} update modifier.
          - * Note: In MongoDB 2.4 the usage of {@code $pushAll} has been deprecated in favor of {@code $push $each}. - * Important: As of MongoDB 3.6 {@code $pushAll} is not longer supported. Use {@code $push $each} instead. - * {@link #push(String)}) returns a builder that can be used to populate the {@code $each} object. - * - * @param key - * @param values - * @return - * @see MongoDB Update operator: - * $pushAll - * @deprecated as of MongoDB 2.4. Removed in MongoDB 3.6. Use {@link #push(String) $push $each} instead. - */ - @Deprecated - public Update pushAll(String key, Object[] values) { - addMultiFieldOperation("$pushAll", key, Arrays.asList(values)); - return this; - } - /** * Update using {@code $addToSet} modifier.
          * Allows creation of {@code $push} command for single or multiple (using {@code $each}) values * - * @param key - * @return + * @param key the field name. + * @return new instance of {@link AddToSetBuilder}. * @since 1.5 */ public AddToSetBuilder addToSet(String key) { @@ -220,13 +214,13 @@ public AddToSetBuilder addToSet(String key) { /** * Update using the {@literal $addToSet} update modifier * - * @param key - * @param value - * @return + * @param key the field name. + * @param value can be {@literal null}. + * @return this. * @see MongoDB Update operator: * $addToSet */ - public Update addToSet(String key, Object value) { + public Update addToSet(String key, @Nullable Object value) { addMultiFieldOperation("$addToSet", key, value); return this; } @@ -234,9 +228,9 @@ public Update addToSet(String key, Object value) { /** * Update using the {@literal $pop} update modifier * - * @param key - * @param pos - * @return + * @param key the field name. + * @param pos must not be {@literal null}. + * @return this. * @see MongoDB Update operator: $pop */ public Update pop(String key, Position pos) { @@ -247,12 +241,12 @@ public Update pop(String key, Position pos) { /** * Update using the {@literal $pull} update modifier * - * @param key - * @param value - * @return + * @param key the field name. + * @param value can be {@literal null}. + * @return this. * @see MongoDB Update operator: $pull */ - public Update pull(String key, Object value) { + public Update pull(String key, @Nullable Object value) { addMultiFieldOperation("$pull", key, value); return this; } @@ -260,9 +254,9 @@ public Update pull(String key, Object value) { /** * Update using the {@literal $pullAll} update modifier * - * @param key - * @param values - * @return + * @param key the field name. + * @param values must not be {@literal null}. + * @return this. 
* @see MongoDB Update operator: * $pullAll */ @@ -274,9 +268,9 @@ public Update pullAll(String key, Object[] values) { /** * Update using the {@literal $rename} update modifier * - * @param oldName - * @param newName - * @return + * @param oldName must not be {@literal null}. + * @param newName must not be {@literal null}. + * @return this. * @see MongoDB Update operator: * $rename */ @@ -288,8 +282,8 @@ public Update rename(String oldName, String newName) { /** * Update given key to current date using {@literal $currentDate} modifier. * - * @param key - * @return + * @param key the field name. + * @return this. * @since 1.6 * @see MongoDB Update operator: * $currentDate @@ -303,8 +297,8 @@ public Update currentDate(String key) { /** * Update given key to current date using {@literal $currentDate : { $type : "timestamp" }} modifier. * - * @param key - * @return + * @param key the field name. + * @return this. * @since 1.6 * @see MongoDB Update operator: * $currentDate @@ -320,13 +314,13 @@ public Update currentTimestamp(String key) { * * @param key must not be {@literal null}. * @param multiplier must not be {@literal null}. - * @return + * @return this. * @since 1.7 * @see MongoDB Update operator: $mul */ public Update multiply(String key, Number multiplier) { - Assert.notNull(multiplier, "Multiplier must not be null."); + Assert.notNull(multiplier, "Multiplier must not be null"); addMultiFieldOperation("$mul", key, multiplier.doubleValue()); return this; } @@ -336,14 +330,14 @@ public Update multiply(String key, Number multiplier) { * * @param key must not be {@literal null}. * @param value must not be {@literal null}. - * @return + * @return this. 
* @since 1.10 * @see Comparison/Sort Order * @see MongoDB Update operator: $max */ public Update max(String key, Object value) { - Assert.notNull(value, "Value for max operation must not be null."); + Assert.notNull(value, "Value for max operation must not be null"); addMultiFieldOperation("$max", key, value); return this; } @@ -353,14 +347,14 @@ public Update max(String key, Object value) { * * @param key must not be {@literal null}. * @param value must not be {@literal null}. - * @return + * @return this. * @since 1.10 * @see Comparison/Sort Order * @see MongoDB Update operator: $min */ public Update min(String key, Object value) { - Assert.notNull(value, "Value for min operation must not be null."); + Assert.notNull(value, "Value for min operation must not be null"); addMultiFieldOperation("$min", key, value); return this; } @@ -368,8 +362,8 @@ public Update min(String key, Object value) { /** * The operator supports bitwise {@code and}, bitwise {@code or}, and bitwise {@code xor} operations. * - * @param key - * @return + * @param key the field name. + * @return this. * @since 1.7 */ public BitwiseOperatorBuilder bitwise(String key) { @@ -379,9 +373,9 @@ public BitwiseOperatorBuilder bitwise(String key) { /** * Prevents a write operation that affects multiple documents from yielding to other reads or writes * once the first document is written.
          - * Use with {@link org.springframework.data.mongodb.core.MongoOperations#updateMulti(Query, Update, Class)}. + * Use with {@link org.springframework.data.mongodb.core.MongoOperations#updateMulti(Query, UpdateDefinition, Class)}. * - * @return never {@literal null}. + * @return this. * @since 2.0 */ public Update isolated() { @@ -391,9 +385,42 @@ public Update isolated() { } /** - * @return {@literal true} if update isolated is set. - * @since 2.0 + * Filter elements in an array that match the given criteria for update. {@link CriteriaDefinition} is passed directly + * to the driver without further type or field mapping. + * + * @param criteria must not be {@literal null}. + * @return this. + * @since 2.2 */ + public Update filterArray(CriteriaDefinition criteria) { + + if (arrayFilters == Collections.EMPTY_LIST) { + this.arrayFilters = new ArrayList<>(); + } + + this.arrayFilters.add(criteria::getCriteriaObject); + return this; + } + + /** + * Filter elements in an array that match the given criteria for update. {@code expression} is used directly with the + * driver without further type or field mapping. + * + * @param identifier the positional operator identifier filter criteria name. + * @param expression the positional operator filter expression. + * @return this. + * @since 2.2 + */ + public Update filterArray(String identifier, Object expression) { + + if (arrayFilters == Collections.EMPTY_LIST) { + this.arrayFilters = new ArrayList<>(); + } + + this.arrayFilters.add(() -> new Document(identifier, expression)); + return this; + } + public Boolean isIsolated() { return isolated; } @@ -402,39 +429,29 @@ public Document getUpdateObject() { return new Document(modifierOps); } - /** - * This method is not called anymore rather override {@link #addMultiFieldOperation(String, String, Object)}. - * - * @param operator - * @param key - * @param value - * @deprectaed Use {@link #addMultiFieldOperation(String, String, Object)} instead. 
- */ - @Deprecated - protected void addFieldOperation(String operator, String key, Object value) { - - Assert.hasText(key, "Key/Path for update must not be null or blank."); + public List getArrayFilters() { + return Collections.unmodifiableList(this.arrayFilters); + } - modifierOps.put(operator, new Document(key, value)); - this.keysToUpdate.add(key); + @Override + public boolean hasArrayFilters() { + return !this.arrayFilters.isEmpty(); } - protected void addMultiFieldOperation(String operator, String key, Object value) { + protected void addMultiFieldOperation(String operator, String key, @Nullable Object value) { - Assert.hasText(key, "Key/Path for update must not be null or blank."); + Assert.hasText(key, "Key/Path for update must not be null or blank"); Object existingValue = this.modifierOps.get(operator); Document keyValueMap; if (existingValue == null) { keyValueMap = new Document(); this.modifierOps.put(operator, keyValueMap); + } else if (existingValue instanceof Document document) { + keyValueMap = document; } else { - if (existingValue instanceof Document) { - keyValueMap = (Document) existingValue; - } else { - throw new InvalidDataAccessApiUsageException( - "Modifier Operations should be a LinkedHashMap but was " + existingValue.getClass()); - } + throw new InvalidDataAccessApiUsageException( + "Modifier Operations should be a LinkedHashMap but was " + existingValue.getClass()); } keyValueMap.put(key, value); @@ -444,8 +461,8 @@ protected void addMultiFieldOperation(String operator, String key, Object value) /** * Determine if a given {@code key} will be touched on execution. * - * @param key - * @return + * @param key the field name. + * @return {@literal true} if given field is updated. */ public boolean modifies(String key) { return this.keysToUpdate.contains(key); @@ -454,28 +471,20 @@ public boolean modifies(String key) { /** * Inspects given {@code key} for '$'. * - * @param key - * @return + * @param key the field name. 
+ * @return {@literal true} if given key is prefixed. */ private static boolean isKeyword(String key) { return StringUtils.startsWithIgnoreCase(key, "$"); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return Objects.hash(getUpdateObject(), isolated); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -493,10 +502,6 @@ public boolean equals(Object obj) { return Objects.equals(this.getUpdateObject(), that.getUpdateObject()); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { @@ -539,21 +544,13 @@ public boolean isEmpty() { return modifiers.isEmpty(); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return Objects.hashCode(modifiers); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -608,21 +605,13 @@ default String toJsonString() { */ private static abstract class AbstractModifier implements Modifier { - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return ObjectUtils.nullSafeHashCode(getKey()) + ObjectUtils.nullSafeHashCode(getValue()); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object that) { + public boolean equals(@Nullable Object that) { if (this == that) { return true; @@ -639,10 +628,6 @@ public boolean equals(Object that) { return Objects.deepEquals(getValue(), ((Modifier) that).getValue()); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return toJsonString(); @@ -669,26 +654,18 @@ private Object[] 
extractValues(Object[] values) { return values; } - if (values.length == 1 && values[0] instanceof Collection) { - return ((Collection) values[0]).toArray(); + if (values.length == 1 && values[0] instanceof Collection collection) { + return collection.toArray(); } return Arrays.copyOf(values, values.length); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Update.Modifier#getKey() - */ @Override public String getKey() { return "$each"; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Update.Modifier#getValue() - */ @Override public Object getValue() { return this.values; @@ -734,19 +711,11 @@ private static class Slice extends AbstractModifier { this.count = count; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Update.Modifier#getKey() - */ @Override public String getKey() { return "$slice"; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Update.Modifier#getValue() - */ @Override public Object getValue() { return this.count; @@ -771,7 +740,7 @@ private static class SortModifier extends AbstractModifier { */ SortModifier(Direction direction) { - Assert.notNull(direction, "Direction must not be null!"); + Assert.notNull(direction, "Direction must not be null"); this.sort = direction.isAscending() ? 1 : -1; } @@ -782,32 +751,24 @@ private static class SortModifier extends AbstractModifier { */ SortModifier(Sort sort) { - Assert.notNull(sort, "Sort must not be null!"); + Assert.notNull(sort, "Sort must not be null"); for (Order order : sort) { if (order.isIgnoreCase()) { - throw new IllegalArgumentException(String.format("Given sort contained an Order for %s with ignore case! 
" - + "MongoDB does not support sorting ignoring case currently!", order.getProperty())); + throw new IllegalArgumentException(String.format("Given sort contained an Order for %s with ignore case;" + + " MongoDB does not support sorting ignoring case currently", order.getProperty())); } } this.sort = sort; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Update.Modifier#getKey() - */ @Override public String getKey() { return "$sort"; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.query.Update.Modifier#getValue() - */ @Override public Object getValue() { return this.sort; @@ -870,7 +831,7 @@ public PushOperatorBuilder slice(int count) { */ public PushOperatorBuilder sort(Direction direction) { - Assert.notNull(direction, "Direction must not be null."); + Assert.notNull(direction, "Direction must not be null"); this.modifiers.addModifier(new SortModifier(direction)); return this; } @@ -885,7 +846,7 @@ public PushOperatorBuilder sort(Direction direction) { */ public PushOperatorBuilder sort(Sort sort) { - Assert.notNull(sort, "Sort must not be null."); + Assert.notNull(sort, "Sort must not be null"); this.modifiers.addModifier(new SortModifier(sort)); return this; } @@ -893,18 +854,14 @@ public PushOperatorBuilder sort(Sort sort) { /** * Forces values to be added at the given {@literal position}. * - * @param position needs to be greater than or equal to zero. + * @param position the position offset. As of MongoDB 3.6 use a negative value to indicate starting from the end, + * counting (but not including) the last element of the array. * @return never {@literal null}. 
* @since 1.7 */ public PushOperatorBuilder atPosition(int position) { - if (position < 0) { - throw new IllegalArgumentException("Position must be greater than or equal to zero."); - } - this.modifiers.addModifier(new PositionModifier(position)); - return this; } @@ -942,21 +899,13 @@ public Update value(Object value) { return Update.this.push(key, this.modifiers); } - /* - * (non-Javadoc) - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return Objects.hash(getOuterType(), key, modifiers); } - /* - * (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -997,8 +946,8 @@ public AddToSetBuilder(String key) { /** * Propagates {@code $each} to {@code $addToSet} * - * @param values - * @return + * @param values must not be {@literal null}. + * @return never {@literal null}. */ public Update each(Object... values) { return Update.this.addToSet(this.key, new Each(values)); @@ -1007,8 +956,8 @@ public Update each(Object... values) { /** * Propagates {@link #value(Object)} to {@code $addToSet} * - * @param values - * @return + * @param value + * @return never {@literal null}. 
*/ public Update value(Object value) { return Update.this.addToSet(this.key, value); @@ -1031,7 +980,7 @@ private enum BitwiseOperator { @Override public String toString() { return super.toString().toLowerCase(); - }; + } } /** @@ -1042,8 +991,8 @@ public String toString() { */ protected BitwiseOperatorBuilder(Update reference, String key) { - Assert.notNull(reference, "Reference must not be null!"); - Assert.notNull(key, "Key must not be null!"); + Assert.notNull(reference, "Reference must not be null"); + Assert.notNull(key, "Key must not be null"); this.reference = reference; this.key = key; @@ -1053,7 +1002,7 @@ protected BitwiseOperatorBuilder(Update reference, String key) { * Updates to the result of a bitwise and operation between the current value and the given one. * * @param value - * @return + * @return never {@literal null}. */ public Update and(long value) { @@ -1065,7 +1014,7 @@ public Update and(long value) { * Updates to the result of a bitwise or operation between the current value and the given one. * * @param value - * @return + * @return never {@literal null}. */ public Update or(long value) { @@ -1077,7 +1026,7 @@ public Update or(long value) { * Updates to the result of a bitwise xor operation between the current value and the given one. * * @param value - * @return + * @return never {@literal null}. */ public Update xor(long value) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/UpdateDefinition.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/UpdateDefinition.java new file mode 100644 index 0000000000..5aafffeb82 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/UpdateDefinition.java @@ -0,0 +1,91 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query; + +import java.util.List; + +import org.bson.Document; + +/** + * Interface fixing must have operations for {@literal updates} as implemented via {@link Update}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + */ +public interface UpdateDefinition { + + /** + * If {@literal true} prevents a write operation that affects multiple documents from yielding to + * other reads or writes once the first document is written.
          + * + * @return {@literal true} if update isolated is set. + */ + Boolean isIsolated(); + + /** + * @return the actual update in its native {@link Document} format. Never {@literal null}. + */ + Document getUpdateObject(); + + /** + * Check if a given {@literal key} is modified by applying the update. + * + * @param key must not be {@literal null}. + * @return {@literal true} if the actual {@link UpdateDefinition} attempts to modify the given {@literal key}. + */ + boolean modifies(String key); + + /** + * Increment the value of a given {@literal key} by {@code 1}. + * + * @param key must not be {@literal null}. + */ + void inc(String key); + + /** + * Get the specification which elements to modify in an array field. {@link ArrayFilter} are passed directly to the + * driver without further type or field mapping. + * + * @return never {@literal null}. + * @since 2.2 + */ + List getArrayFilters(); + + /** + * @return {@literal true} if {@link UpdateDefinition} contains {@link #getArrayFilters() array filters}. + * @since 2.2 + */ + default boolean hasArrayFilters() { + return !getArrayFilters().isEmpty(); + } + + /** + * A filter to specify which elements to modify in an array field. + * + * @since 2.2 + */ + interface ArrayFilter { + + /** + * Get the {@link Document} representation of the filter to apply. The returned {@link Document} is used directly + * with the driver without further type or field mapping. + * + * @return never {@literal null}. 
+ */ + Document asDocument(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java index 07aed09173..b59c20c6b6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,10 @@ */ package org.springframework.data.mongodb.core.schema; -import lombok.AllArgsConstructor; -import lombok.NonNull; - import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; /** * Value object representing a MongoDB-specific JSON schema which is the default {@link MongoJsonSchema} implementation. 
@@ -27,17 +27,44 @@ * @author Mark Paluch * @since 2.1 */ -@AllArgsConstructor class DefaultMongoJsonSchema implements MongoJsonSchema { - private final @NonNull JsonSchemaObject root; + private final JsonSchemaObject root; + + @Nullable // + private final Document encryptionMetadata; + + DefaultMongoJsonSchema(JsonSchemaObject root) { + this(root, null); + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.MongoJsonSchema#toDocument() + /** + * Create new instance of {@link DefaultMongoJsonSchema}. + * + * @param root the schema root element. + * @param encryptionMetadata can be {@literal null}. + * @since 3.3 */ + DefaultMongoJsonSchema(JsonSchemaObject root, @Nullable Document encryptionMetadata) { + + Assert.notNull(root, "Root schema object must not be null"); + + this.root = root; + this.encryptionMetadata = encryptionMetadata; + } + @Override - public Document toDocument() { - return new Document("$jsonSchema", root.toDocument()); + public Document schemaDocument() { + + Document schemaDocument = new Document(); + + // we want this to be the first element rendered, so it reads nice when printed to json + if (!CollectionUtils.isEmpty(encryptionMetadata)) { + schemaDocument.append("encryptMetadata", encryptionMetadata); + } + + schemaDocument.putAll(root.toDocument()); + + return schemaDocument; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java index ee1299543e..0407bac272 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,8 @@ */ package org.springframework.data.mongodb.core.schema; -import lombok.AllArgsConstructor; -import lombok.NonNull; - import org.bson.Document; +import org.springframework.util.Assert; /** * JSON schema backed by a {@link org.bson.Document} object. @@ -26,17 +24,18 @@ * @author Mark Paluch * @since 2.1 */ -@AllArgsConstructor class DocumentJsonSchema implements MongoJsonSchema { - private final @NonNull Document document; + private final Document document; + + DocumentJsonSchema(Document document) { + + Assert.notNull(document, "Document must not be null"); + this.document = document; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.MongoJsonSchema#toDocument() - */ @Override - public Document toDocument() { - return new Document("$jsonSchema", new Document(document)); + public Document schemaDocument() { + return new Document(document); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java index 2d38105a98..26dbd7dffb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,10 +18,13 @@ import java.util.Arrays; import java.util.Collection; import java.util.LinkedHashSet; +import java.util.List; import java.util.Set; +import java.util.UUID; import org.bson.Document; import org.springframework.data.domain.Range; +import org.springframework.data.mongodb.core.EncryptionAlgorithms; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ArrayJsonSchemaObject; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.BooleanJsonSchemaObject; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.DateJsonSchemaObject; @@ -30,7 +33,10 @@ import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ObjectJsonSchemaObject; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.StringJsonSchemaObject; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.TimestampJsonSchemaObject; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; /** * {@link JsonSchemaProperty} implementation. 
@@ -52,35 +58,23 @@ public class IdentifiableJsonSchemaProperty implemen */ IdentifiableJsonSchemaProperty(String identifier, T jsonSchemaObject) { - Assert.notNull(identifier, "Identifier must not be null!"); - Assert.notNull(jsonSchemaObject, "JsonSchemaObject must not be null!"); + Assert.notNull(identifier, "Identifier must not be null"); + Assert.notNull(jsonSchemaObject, "JsonSchemaObject must not be null"); this.identifier = identifier; this.jsonSchemaObjectDelegate = jsonSchemaObject; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.JsonSchemaProperty#getIdentifier() - */ @Override public String getIdentifier() { return identifier; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject#toDocument() - */ @Override public Document toDocument() { return new Document(identifier, jsonSchemaObjectDelegate.toDocument()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject#getTypes() - */ @Override public Set getTypes() { return jsonSchemaObjectDelegate.getTypes(); @@ -360,7 +354,6 @@ public static class ObjectJsonSchemaProperty extends IdentifiableJsonSchemaPrope /** * @param range must not be {@literal null}. * @return new instance of {@link ObjectJsonSchemaProperty}. 
- * @see ObjectJsonSchemaObject#propertiesCount */ public ObjectJsonSchemaProperty propertiesCount(Range range) { return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.propertiesCount(range)); @@ -519,6 +512,10 @@ public ObjectJsonSchemaProperty description(String description) { public ObjectJsonSchemaProperty generatedDescription() { return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription()); } + + public List getProperties() { + return jsonSchemaObjectDelegate.getProperties(); + } } /** @@ -953,7 +950,7 @@ public DateJsonSchemaProperty description(String description) { /** * @return new instance of {@link DateJsonSchemaProperty}. - * @see DateJsonSchemaProperty#generateDescription() + * @see DateJsonSchemaProperty#generatedDescription() */ public DateJsonSchemaProperty generatedDescription() { return new DateJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription()); @@ -983,10 +980,283 @@ public TimestampJsonSchemaProperty description(String description) { /** * @return new instance of {@link TimestampJsonSchemaProperty}. - * @see TimestampJsonSchemaProperty#generateDescription() + * @see TimestampJsonSchemaProperty#generatedDescription() */ public TimestampJsonSchemaProperty generatedDescription() { return new TimestampJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription()); } } + + /** + * Delegating {@link JsonSchemaProperty} implementation having a {@literal required} flag for evaluation during schema + * creation process. 
+ * + * @author Christoph Strobl + * @since 2.2 + */ + public static class RequiredJsonSchemaProperty implements JsonSchemaProperty { + + private final JsonSchemaProperty delegate; + private final boolean required; + + RequiredJsonSchemaProperty(JsonSchemaProperty delegate, boolean required) { + + this.delegate = delegate; + this.required = required; + } + + @Override + public String getIdentifier() { + return delegate.getIdentifier(); + } + + @Override + public Set getTypes() { + return delegate.getTypes(); + } + + @Override + public Document toDocument() { + return delegate.toDocument(); + } + + @Override + public boolean isRequired() { + return required; + } + } + + /** + * {@link JsonSchemaProperty} implementation for encrypted fields. + * + * @author Christoph Strobl + * @since 2.2 + */ + public static class EncryptedJsonSchemaProperty implements JsonSchemaProperty { + + private final JsonSchemaProperty targetProperty; + private final @Nullable String algorithm; + private final @Nullable Object keyId; + private final @Nullable List keyIds; + + /** + * Create new instance of {@link EncryptedJsonSchemaProperty} wrapping the given {@link JsonSchemaProperty target}. + * + * @param target must not be {@literal null}. + */ + public EncryptedJsonSchemaProperty(JsonSchemaProperty target) { + this(target, null, null, null); + } + + private EncryptedJsonSchemaProperty(JsonSchemaProperty target, @Nullable String algorithm, @Nullable Object keyId, + @Nullable List keyIds) { + + Assert.notNull(target, "Target must not be null"); + this.targetProperty = target; + this.algorithm = algorithm; + this.keyId = keyId; + this.keyIds = keyIds; + } + + /** + * Create new instance of {@link EncryptedJsonSchemaProperty} wrapping the given {@link JsonSchemaProperty target}. + * + * @param target must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. 
+ */ + public static EncryptedJsonSchemaProperty encrypted(JsonSchemaProperty target) { + return new EncryptedJsonSchemaProperty(target); + } + + /** + * Create new instance of {@link EncryptedJsonSchemaProperty} with {@literal Range} encryption, wrapping the given + * {@link JsonSchemaProperty target}. + * + * @param target must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. + * @since 4.5 + */ + public static EncryptedJsonSchemaProperty rangeEncrypted(JsonSchemaProperty target) { + return new EncryptedJsonSchemaProperty(target).algorithm(EncryptionAlgorithms.RANGE); + } + + /** + * Use {@literal AEAD_AES_256_CBC_HMAC_SHA_512-Random} algorithm. + * + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty aead_aes_256_cbc_hmac_sha_512_random() { + return algorithm(EncryptionAlgorithms.AEAD_AES_256_CBC_HMAC_SHA_512_Random); + } + + /** + * Use {@literal AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic} algorithm. + * + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty aead_aes_256_cbc_hmac_sha_512_deterministic() { + return algorithm(EncryptionAlgorithms.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic); + } + + /** + * Use the given algorithm identified via its name. + * + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty algorithm(String algorithm) { + return new EncryptedJsonSchemaProperty(targetProperty, algorithm, keyId, keyIds); + } + + /** + * @param keyId must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty keyId(String keyId) { + return new EncryptedJsonSchemaProperty(targetProperty, algorithm, keyId, null); + } + + /** + * @param keyId must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. 
+ * @since 4.5 + */ + public EncryptedJsonSchemaProperty keyId(Object keyId) { + return new EncryptedJsonSchemaProperty(targetProperty, algorithm, keyId, null); + } + + /** + * @param keyId must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty keys(UUID... keyId) { + return new EncryptedJsonSchemaProperty(targetProperty, algorithm, null, Arrays.asList(keyId)); + } + + /** + * @param keyId must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty keys(Object... keyId) { + return new EncryptedJsonSchemaProperty(targetProperty, algorithm, null, Arrays.asList(keyId)); + } + + @Override + public Document toDocument() { + + Document doc = targetProperty.toDocument(); + Document propertySpecification = doc.get(targetProperty.getIdentifier(), Document.class); + + Document enc = new Document(); + + if (!ObjectUtils.isEmpty(keyId)) { + enc.append("keyId", keyId); + } else if (!ObjectUtils.isEmpty(keyIds)) { + enc.append("keyId", keyIds); + } + + Type type = extractPropertyType(propertySpecification); + if (type != null) { + + propertySpecification.remove(type.representation()); + enc.append("bsonType", type.toBsonType().value()); // TODO: no samples with type -> is it bson type all the way? 
+ } + + if (StringUtils.hasText(algorithm)) { + enc.append("algorithm", algorithm); + } + + propertySpecification.append("encrypt", enc); + + return doc; + } + + @Override + public String getIdentifier() { + return targetProperty.getIdentifier(); + } + + @Override + public Set getTypes() { + return targetProperty.getTypes(); + } + + @Nullable + private Type extractPropertyType(Document source) { + + if (source.containsKey("type")) { + return Type.of(source.get("type", String.class)); + } + if (source.containsKey("bsonType")) { + return Type.of(source.get("bsonType", String.class)); + } + + return null; + } + + public Object getKeyId() { + if (keyId != null) { + return keyId; + } + if (keyIds != null && keyIds.size() == 1) { + return keyIds.iterator().next(); + } + return null; + } + } + + /** + * {@link JsonSchemaProperty} implementation typically wrapping an {@link EncryptedJsonSchemaProperty encrypted + * property} to mark it as queryable. + * + * @author Christoph Strobl + * @since 4.5 + */ + public static class QueryableJsonSchemaProperty implements JsonSchemaProperty { + + private final JsonSchemaProperty targetProperty; + private final QueryCharacteristics characteristics; + + public QueryableJsonSchemaProperty(JsonSchemaProperty target, QueryCharacteristics characteristics) { + this.targetProperty = target; + this.characteristics = characteristics; + } + + @Override + public Document toDocument() { + + Document doc = targetProperty.toDocument(); + Document propertySpecification = doc.get(targetProperty.getIdentifier(), Document.class); + + if (propertySpecification.containsKey("encrypt")) { + Document encrypt = propertySpecification.get("encrypt", Document.class); + List queries = characteristics.getCharacteristics().stream().map(QueryCharacteristic::toDocument) + .toList(); + encrypt.append("queries", queries); + } + + return doc; + } + + @Override + public String getIdentifier() { + return targetProperty.getIdentifier(); + } + + @Override + public Set 
getTypes() { + return targetProperty.getTypes(); + } + + boolean isEncrypted() { + return targetProperty instanceof EncryptedJsonSchemaProperty; + } + + public JsonSchemaProperty getTargetProperty() { + return targetProperty; + } + + public QueryCharacteristics getCharacteristics() { + return characteristics; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaObject.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaObject.java index 15164f1b5e..a84f361d37 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaObject.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaObject.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,21 @@ */ package org.springframework.data.mongodb.core.schema; -import lombok.RequiredArgsConstructor; - import java.math.BigDecimal; import java.util.Arrays; +import java.util.Collection; import java.util.Date; import java.util.HashSet; +import java.util.Map; import java.util.Set; import java.util.regex.Pattern; import org.bson.BsonTimestamp; import org.bson.Document; +import org.bson.types.BSONTimestamp; +import org.bson.types.Binary; +import org.bson.types.Code; +import org.bson.types.Decimal128; import org.bson.types.ObjectId; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ArrayJsonSchemaObject; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.BooleanJsonSchemaObject; @@ -37,11 +41,12 @@ import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.TimestampJsonSchemaObject; import org.springframework.lang.Nullable; import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; /** * Interface that can be implemented by objects that know how to serialize themselves to JSON schema using * {@link #toDocument()}. - *

          + *
          * This class also declares factory methods for type-specific {@link JsonSchemaObject schema objects} such as * {@link #string()} or {@link #object()}. For example: * @@ -177,7 +182,7 @@ static TypedJsonSchemaObject of(@Nullable Class type) { return of(Type.nullType()); } - if (type.isArray()) { + if (type.isArray() || ClassUtils.isAssignable(Collection.class, type)) { if (type.equals(byte[].class)) { return of(Type.binaryType()); @@ -186,6 +191,10 @@ static TypedJsonSchemaObject of(@Nullable Class type) { return of(Type.arrayType()); } + if (type.equals(Document.class) || ClassUtils.isAssignable(Map.class, type)) { + return of(Type.objectType()); + } + if (type.equals(Object.class)) { return of(Type.objectType()); } @@ -202,7 +211,19 @@ static TypedJsonSchemaObject of(@Nullable Class type) { return of(Type.dateType()); } - if (ClassUtils.isAssignable(BsonTimestamp.class, type)) { + if (ClassUtils.isAssignable(Binary.class, type)) { + return of(Type.binaryType()); + } + + if (ClassUtils.isAssignable(Code.class, type)) { + return of(Type.javascriptType()); + } + + if (ClassUtils.isAssignable(Decimal128.class, type)) { + return of(Type.bigDecimalType()); + } + + if (ClassUtils.isAssignable(BsonTimestamp.class, type) || ClassUtils.isAssignable(BSONTimestamp.class, type)) { return of(Type.timestampType()); } @@ -210,36 +231,41 @@ static TypedJsonSchemaObject of(@Nullable Class type) { return of(Type.regexType()); } - if (ClassUtils.isAssignable(Boolean.class, type)) { + if (ClassUtils.isAssignable(Enum.class, type)) { + return of(Type.stringType()); + } + + Class resolved = ClassUtils.resolvePrimitiveIfNecessary(type); + if (ClassUtils.isAssignable(Boolean.class, resolved)) { return of(Type.booleanType()); } - if (ClassUtils.isAssignable(Number.class, type)) { + if (ClassUtils.isAssignable(Number.class, resolved)) { - if (type.equals(Long.class)) { + if (resolved.equals(Long.class)) { return of(Type.longType()); } - if (type.equals(Float.class)) { + if 
(resolved.equals(Float.class)) { return of(Type.doubleType()); } - if (type.equals(Double.class)) { + if (resolved.equals(Double.class)) { return of(Type.doubleType()); } - if (type.equals(Integer.class)) { + if (resolved.equals(Integer.class)) { return of(Type.intType()); } - if (type.equals(BigDecimal.class)) { + if (resolved.equals(BigDecimal.class)) { return of(Type.bigDecimalType()); } return of(Type.numberType()); } - throw new IllegalArgumentException(String.format("No JSON schema type found for %s.", type)); + throw new IllegalArgumentException(String.format("No JSON schema type found for %s", type)); } /** @@ -401,6 +427,23 @@ static Type jsonTypeOf(String name) { return new JsonType(name); } + /** + * Create a {@link Type} with its default {@link Type#representation() representation} via the name. + * + * @param name must not be {@literal null}. + * @return the matching type instance. + * @since 2.2 + */ + static Type of(String name) { + + Type type = jsonTypeOf(name); + if (jsonTypes().contains(type)) { + return type; + } + + return bsonTypeOf(name); + } + /** * @return all known JSON types. */ @@ -429,60 +472,107 @@ static Set bsonTypes() { */ Object value(); + /** + * Get the {@literal bsonType} representation of the given type. + * + * @return never {@literal null}. 
+ * @since 2.2 + */ + default Type toBsonType() { + + if (representation().equals("bsonType")) { + return this; + } + + if (value().equals(Type.booleanType().value())) { + return bsonTypeOf("bool"); + } + if (value().equals(Type.numberType().value())) { + return bsonTypeOf("long"); + } + + return bsonTypeOf((String) value()); + } + /** * @author Christpoh Strobl * @since 2.1 */ - @RequiredArgsConstructor class JsonType implements Type { private final String name; - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type#representation() - */ + public JsonType(String name) { + this.name = name; + } + @Override public String representation() { return "type"; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type#value() - */ @Override public String value() { return name; } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + JsonType jsonType = (JsonType) o; + + return ObjectUtils.nullSafeEquals(name, jsonType.name); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(name); + } } /** * @author Christpoh Strobl * @since 2.1 */ - @RequiredArgsConstructor class BsonType implements Type { private final String name; - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type#representation() - */ + BsonType(String name) { + this.name = name; + } + @Override public String representation() { return "bsonType"; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type#value() - */ @Override public String value() { return name; } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + BsonType bsonType = (BsonType) o; + + return ObjectUtils.nullSafeEquals(name, 
bsonType.name); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(name); + } } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java index 44fb3c7dcc..a854c6184a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,20 +15,24 @@ */ package org.springframework.data.mongodb.core.schema; -import lombok.AccessLevel; -import lombok.RequiredArgsConstructor; +import java.util.Collection; +import java.util.List; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ArrayJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.BooleanJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.DateJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.NullJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.NumericJsonSchemaProperty; import 
org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.QueryableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.RequiredJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.StringJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.TimestampJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.UntypedJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.NumericJsonSchemaObject; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ObjectJsonSchemaObject; +import org.springframework.lang.Nullable; /** * A {@literal property} or {@literal patternProperty} within a {@link JsonSchemaObject} of {@code type : 'object'}. @@ -47,6 +51,14 @@ public interface JsonSchemaProperty extends JsonSchemaObject { */ String getIdentifier(); + /** + * @return {@literal false} by default. + * @since 2.2 + */ + default boolean isRequired() { + return false; + } + /** * Creates a new {@link UntypedJsonSchemaProperty} with given {@literal identifier} without {@code type}. * @@ -58,6 +70,29 @@ static UntypedJsonSchemaProperty untyped(String identifier) { return new UntypedJsonSchemaProperty(identifier, JsonSchemaObject.untyped()); } + /** + * Turns the given target property into an {@link EncryptedJsonSchemaProperty ecrypted} one. + * + * @param property must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. 
+ * @since 2.2 + */ + static EncryptedJsonSchemaProperty encrypted(JsonSchemaProperty property) { + return EncryptedJsonSchemaProperty.encrypted(property); + } + + /** + * Turns the given target property into a {@link QueryableJsonSchemaProperty queryable} one, eg. for {@literal range} + * encrypted properties. + * + * @param property the queryable property. Must not be {@literal null}. + * @param queries predefined query characteristics. + * @since 4.5 + */ + static QueryableJsonSchemaProperty queryable(JsonSchemaProperty property, List queries) { + return new QueryableJsonSchemaProperty(property, new QueryCharacteristics(queries)); + } + /** * Creates a new {@link StringJsonSchemaProperty} with given {@literal identifier} of {@code type : 'string'}. * @@ -80,6 +115,18 @@ static ObjectJsonSchemaProperty object(String identifier) { return new ObjectJsonSchemaProperty(identifier, JsonSchemaObject.object()); } + /** + * Creates a new {@link JsonSchemaProperty} with given {@literal identifier} of {@code bsonType : 'objectId'}. + * + * @param identifier the {@literal property} name or {@literal patternProperty} regex. Must not be {@literal null} nor + * {@literal empty}. + * @return new instance of {@link JsonSchemaProperty}. + * @since 2.2 + */ + static JsonSchemaProperty objectId(String identifier) { + return JsonSchemaProperty.named(identifier).ofType(Type.objectIdType()); + } + /** * Creates a new {@link NumericJsonSchemaProperty} with given {@literal identifier} of {@code type : 'number'}. * @@ -194,36 +241,72 @@ static TimestampJsonSchemaProperty timestamp(String identifier) { /** * Obtain a builder to create a {@link JsonSchemaProperty}. * - * @param identifier - * @return + * @param identifier must not be {@literal null}. + * @return new instance of {@link JsonSchemaPropertyBuilder}. 
*/ static JsonSchemaPropertyBuilder named(String identifier) { return new JsonSchemaPropertyBuilder(identifier); } + /** + * Turns the given {@link JsonSchemaProperty} into a required on. + * + * @param property must not be {@literal null}. + * @return new instance of {@link JsonSchemaProperty}. + * @since 2.2 + */ + static JsonSchemaProperty required(JsonSchemaProperty property) { + return new RequiredJsonSchemaProperty(property, true); + } + + /** + * Merges multiple {@link JsonSchemaProperty} with potentially different attributes into one. + * + * @param properties must not be {@literal null}. + * @return new instance of {@link JsonSchemaProperty}. + * @since 3.4 + */ + static JsonSchemaProperty merged(Collection properties) { + return new MergedJsonSchemaProperty(properties); + } + /** * Builder for {@link IdentifiableJsonSchemaProperty}. */ - @RequiredArgsConstructor(access = AccessLevel.PACKAGE) class JsonSchemaPropertyBuilder { private final String identifier; + JsonSchemaPropertyBuilder(String identifier) { + this.identifier = identifier; + } + /** * Configure a {@link Type} for the property. * * @param type must not be {@literal null}. - * @return + * @return new instance of {@link IdentifiableJsonSchemaProperty}. */ public IdentifiableJsonSchemaProperty ofType(Type type) { return new IdentifiableJsonSchemaProperty<>(identifier, TypedJsonSchemaObject.of(type)); } + /** + * Configure the {@link Type} for the property by deriving it from the given {@link Class type}. + * + * @param type must not be {@literal null}. + * @return new instance of {@link IdentifiableJsonSchemaProperty}. + * @since 2.2 + */ + public IdentifiableJsonSchemaProperty ofType(@Nullable Class type) { + return new IdentifiableJsonSchemaProperty<>(identifier, JsonSchemaObject.of(type)); + } + /** * Configure a {@link TypedJsonSchemaObject} for the property. * * @param schemaObject must not be {@literal null}. - * @return + * @return new instance of {@link IdentifiableJsonSchemaProperty}. 
*/ public IdentifiableJsonSchemaProperty with(TypedJsonSchemaObject schemaObject) { return new IdentifiableJsonSchemaProperty<>(identifier, schemaObject); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchema.java new file mode 100644 index 0000000000..a6fc3ab8bd --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchema.java @@ -0,0 +1,68 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; +import java.util.stream.Collectors; + +import org.bson.Document; + +/** + * {@link MongoJsonSchema} implementation that is capable of merging properties from different schemas into a single + * one. 
+ * + * @author Christoph Strobl + * @since 3.4 + */ +class MergedJsonSchema implements MongoJsonSchema { + + private final List schemaList; + private final BiFunction, Map, Document> mergeFunction; + + MergedJsonSchema(List schemaList, ConflictResolutionFunction conflictResolutionFunction) { + this(schemaList, new TypeUnifyingMergeFunction(conflictResolutionFunction)); + } + + MergedJsonSchema(List schemaList, + BiFunction, Map, Document> mergeFunction) { + + this.schemaList = new ArrayList<>(schemaList); + this.mergeFunction = mergeFunction; + } + + @Override + public MongoJsonSchema mergeWith(Collection sources) { + + schemaList.addAll(sources); + return this; + } + + @Override + public Document schemaDocument() { + + Document targetSchema = new Document(); + for (MongoJsonSchema schema : schemaList) { + targetSchema = mergeFunction.apply(targetSchema, schema.schemaDocument()); + } + + return targetSchema; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchemaProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchemaProperty.java new file mode 100644 index 0000000000..856ab772ee --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchemaProperty.java @@ -0,0 +1,77 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.schema; + +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; + +import org.bson.Document; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction; + +/** + * {@link JsonSchemaProperty} implementation that is capable of combining multiple properties with different values into + * a single one. + * + * @author Christoph Strobl + * @since 3.4 + */ +class MergedJsonSchemaProperty implements JsonSchemaProperty { + + private final Iterable properties; + private final BiFunction, Map, Document> mergeFunction; + + MergedJsonSchemaProperty(Iterable properties) { + this(properties, (k, a, b) -> { + throw new IllegalStateException( + String.format("Error resolving conflict for '%s'; No conflict resolution function defined", k)); + }); + } + + MergedJsonSchemaProperty(Iterable properties, + ConflictResolutionFunction conflictResolutionFunction) { + this(properties, new TypeUnifyingMergeFunction(conflictResolutionFunction)); + } + + MergedJsonSchemaProperty(Iterable properties, + BiFunction, Map, Document> mergeFunction) { + + this.properties = properties; + this.mergeFunction = mergeFunction; + } + + @Override + public Set getTypes() { + return Collections.emptySet(); + } + + @Override + public Document toDocument() { + + Document document = new Document(); + + for (JsonSchemaProperty property : properties) { + document = mergeFunction.apply(document, property.toDocument()); + } + return document; + } + + @Override + public String getIdentifier() { + return properties.iterator().next().getIdentifier(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java index 86778bc143..f64218cc56 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,17 @@ */ package org.springframework.data.mongodb.core.schema; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; +import java.util.List; +import java.util.Map; import java.util.Set; import org.bson.Document; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ObjectJsonSchemaObject; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; /** * Interface defining MongoDB-specific JSON schema object. New objects can be built with {@link #builder()}, for @@ -62,19 +68,31 @@ public interface MongoJsonSchema { /** - * Create the {@link Document} containing the specified {@code $jsonSchema}.
          + * Create the {@code $jsonSchema} {@link Document} containing the specified {@link #schemaDocument()}.
          * Property and field names need to be mapped to the domain type ones by running the {@link Document} through a * {@link org.springframework.data.mongodb.core.convert.JsonSchemaMapper} to apply field name customization. * * @return never {@literal null}. */ - Document toDocument(); + default Document toDocument() { + return new Document("$jsonSchema", schemaDocument()); + } + + /** + * Create the {@link Document} defining the schema.
          + * Property and field names need to be mapped to the domain type property by running the {@link Document} through a + * {@link org.springframework.data.mongodb.core.convert.JsonSchemaMapper} to apply field name customization. + * + * @return never {@literal null}. + * @since 3.3 + */ + Document schemaDocument(); /** * Create a new {@link MongoJsonSchema} for a given root object. * * @param root must not be {@literal null}. - * @return + * @return new instance of {@link MongoJsonSchema}. */ static MongoJsonSchema of(JsonSchemaObject root) { return new DefaultMongoJsonSchema(root); @@ -84,12 +102,78 @@ static MongoJsonSchema of(JsonSchemaObject root) { * Create a new {@link MongoJsonSchema} for a given root {@link Document} containing the schema definition. * * @param document must not be {@literal null}. - * @return + * @return new instance of {@link MongoJsonSchema}. */ static MongoJsonSchema of(Document document) { return new DocumentJsonSchema(document); } + /** + * Create a new {@link MongoJsonSchema} merging properties from the given sources. + * + * @param sources must not be {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + static MongoJsonSchema merge(MongoJsonSchema... sources) { + return merge((path, left, right) -> { + throw new IllegalStateException(String.format("Cannot merge schema for path '%s' holding values '%s' and '%s'", + path.dotPath(), left, right)); + }, sources); + } + + /** + * Create a new {@link MongoJsonSchema} merging properties from the given sources. + * + * @param sources must not be {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + static MongoJsonSchema merge(ConflictResolutionFunction mergeFunction, MongoJsonSchema... sources) { + return new MergedJsonSchema(Arrays.asList(sources), mergeFunction); + } + + /** + * Create a new {@link MongoJsonSchema} merging properties from the given sources. 
+ * + * @param sources must not be {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + default MongoJsonSchema mergeWith(MongoJsonSchema... sources) { + return mergeWith(Arrays.asList(sources)); + } + + /** + * Create a new {@link MongoJsonSchema} merging properties from the given sources. + * + * @param sources must not be {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + default MongoJsonSchema mergeWith(Collection sources) { + return mergeWith(sources, (path, left, right) -> { + throw new IllegalStateException(String.format("Cannot merge schema for path '%s' holding values '%s' and '%s'", + path.dotPath(), left, right)); + }); + } + + /** + * Create a new {@link MongoJsonSchema} merging properties from the given sources. + * + * @param sources must not be {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + default MongoJsonSchema mergeWith(Collection sources, + ConflictResolutionFunction conflictResolutionFunction) { + + List schemaList = new ArrayList<>(sources.size() + 1); + schemaList.add(this); + schemaList.addAll(new ArrayList<>(sources)); + return new MergedJsonSchema(schemaList, conflictResolutionFunction); + } + /** * Obtain a new {@link MongoJsonSchemaBuilder} to fluently define the schema. * @@ -99,6 +183,125 @@ static MongoJsonSchemaBuilder builder() { return new MongoJsonSchemaBuilder(); } + /** + * A resolution function that is called on conflicting paths when trying to merge properties with different values + * into a single value. + * + * @author Christoph Strobl + * @since 3.4 + */ + @FunctionalInterface + interface ConflictResolutionFunction { + + /** + * Resolve the conflict for two values under the same {@code path}. + * + * @param path the {@link Path} leading to the conflict. + * @param left can be {@literal null}. + * @param right can be {@literal null}. + * @return never {@literal null}. 
+ */ + Resolution resolveConflict(Path path, @Nullable Object left, @Nullable Object right); + + /** + * @author Christoph Strobl + * @since 3.4 + */ + interface Path { + + /** + * @return the name of the currently processed element + */ + String currentElement(); + + /** + * @return the path leading to the currently processed element in dot {@literal '.'} notation. + */ + String dotPath(); + } + + /** + * The result after processing a conflict when merging schemas. May indicate to {@link #SKIP skip} the entry + * entirely. + * + * @author Christoph Strobl + * @since 3.4 + */ + interface Resolution extends Map.Entry { + + @Override + default Object setValue(Object value) { + throw new IllegalStateException("Cannot set value result; Maybe you missed to override the method"); + } + + /** + * Resolution + */ + Resolution SKIP = new Resolution() { + + @Override + public String getKey() { + throw new IllegalStateException("No key for skipped result"); + } + + @Override + public Object getValue() { + throw new IllegalStateException("No value for skipped result"); + } + + @Override + public Object setValue(Object value) { + throw new IllegalStateException("Cannot set value on skipped result"); + } + }; + + /** + * Obtain a {@link Resolution} that will skip the entry and proceed computation. + * + * @return never {@literal null}. + */ + static Resolution skip() { + return SKIP; + } + + /** + * Construct a resolution for a {@link Path} using the given {@code value}. + * + * @param path the conflicting path. + * @param value the value to apply. + * @return + */ + static Resolution ofValue(Path path, Object value) { + + Assert.notNull(path, "Path must not be null"); + + return ofValue(path.currentElement(), value); + } + + /** + * Construct a resolution from a {@code key} and {@code value}. + * + * @param key name of the path segment, typically {@link Path#currentElement()} + * @param value the value to apply. 
+ * @return + */ + static Resolution ofValue(String key, Object value) { + + return new Resolution() { + @Override + public String getKey() { + return key; + } + + @Override + public Object getValue() { + return value; + } + }; + } + } + } + /** * {@link MongoJsonSchemaBuilder} provides a fluent API for defining a {@link MongoJsonSchema}. * @@ -108,6 +311,9 @@ class MongoJsonSchemaBuilder { private ObjectJsonSchemaObject root; + @Nullable // + private Document encryptionMetadata; + MongoJsonSchemaBuilder() { root = new ObjectJsonSchemaObject(); } @@ -135,7 +341,7 @@ public MongoJsonSchemaBuilder maxProperties(int count) { } /** - * @param properties + * @param properties must not be {@literal null}. * @return {@code this} {@link MongoJsonSchemaBuilder}. * @see ObjectJsonSchemaObject#required(String...) */ @@ -157,7 +363,7 @@ public MongoJsonSchemaBuilder additionalProperties(boolean additionalPropertiesA } /** - * @param schema + * @param schema must not be {@literal null}. * @return {@code this} {@link MongoJsonSchemaBuilder}. * @see ObjectJsonSchemaObject#additionalProperties(ObjectJsonSchemaObject) */ @@ -168,7 +374,7 @@ public MongoJsonSchemaBuilder additionalProperties(ObjectJsonSchemaObject schema } /** - * @param properties + * @param properties must not be {@literal null}. * @return {@code this} {@link MongoJsonSchemaBuilder}. * @see ObjectJsonSchemaObject#properties(JsonSchemaProperty...) */ @@ -179,7 +385,7 @@ public MongoJsonSchemaBuilder properties(JsonSchemaProperty... properties) { } /** - * @param properties + * @param properties must not be {@literal null}. * @return {@code this} {@link MongoJsonSchemaBuilder}. * @see ObjectJsonSchemaObject#patternProperties(JsonSchemaProperty...) */ @@ -190,7 +396,7 @@ public MongoJsonSchemaBuilder patternProperties(JsonSchemaProperty... properties } /** - * @param property + * @param property must not be {@literal null}. * @return {@code this} {@link MongoJsonSchemaBuilder}. 
* @see ObjectJsonSchemaObject#property(JsonSchemaProperty) */ @@ -201,7 +407,7 @@ public MongoJsonSchemaBuilder property(JsonSchemaProperty property) { } /** - * @param possibleValues + * @param possibleValues must not be {@literal null}. * @return {@code this} {@link MongoJsonSchemaBuilder}. * @see ObjectJsonSchemaObject#possibleValues(Collection) */ @@ -212,7 +418,7 @@ public MongoJsonSchemaBuilder possibleValues(Set possibleValues) { } /** - * @param allOf + * @param allOf must not be {@literal null}. * @return {@code this} {@link MongoJsonSchemaBuilder}. * @see UntypedJsonSchemaObject#allOf(Collection) */ @@ -223,7 +429,7 @@ public MongoJsonSchemaBuilder allOf(Set allOf) { } /** - * @param anyOf + * @param anyOf must not be {@literal null}. * @return {@code this} {@link MongoJsonSchemaBuilder}. * @see UntypedJsonSchemaObject#anyOf(Collection) */ @@ -234,7 +440,7 @@ public MongoJsonSchemaBuilder anyOf(Set anyOf) { } /** - * @param oneOf + * @param oneOf must not be {@literal null}. * @return {@code this} {@link MongoJsonSchemaBuilder}. * @see UntypedJsonSchemaObject#oneOf(Collection) */ @@ -245,7 +451,7 @@ public MongoJsonSchemaBuilder oneOf(Set oneOf) { } /** - * @param notMatch + * @param notMatch must not be {@literal null}. * @return {@code this} {@link MongoJsonSchemaBuilder}. * @see UntypedJsonSchemaObject#notMatch(JsonSchemaObject) */ @@ -256,7 +462,7 @@ public MongoJsonSchemaBuilder notMatch(JsonSchemaObject notMatch) { } /** - * @param description + * @param description must not be {@literal null}. * @return {@code this} {@link MongoJsonSchemaBuilder}. * @see UntypedJsonSchemaObject#description(String) */ @@ -266,13 +472,23 @@ public MongoJsonSchemaBuilder description(String description) { return this; } + /** + * Define the {@literal encryptMetadata} element of the schema. + * + * @param encryptionMetadata can be {@literal null}. 
+ * @since 3.3 + */ + public void encryptionMetadata(@Nullable Document encryptionMetadata) { + this.encryptionMetadata = encryptionMetadata; + } + /** * Obtain the {@link MongoJsonSchema}. * * @return new instance of {@link MongoJsonSchema}. */ public MongoJsonSchema build() { - return MongoJsonSchema.of(root); + return new DefaultMongoJsonSchema(root, encryptionMetadata); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristic.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristic.java new file mode 100644 index 0000000000..8604ba9d6c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristic.java @@ -0,0 +1,40 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import org.bson.Document; + +/** + * Defines the specific character of a query that can be executed. Mainly used to define the characteristic of queryable + * encrypted fields. + * + * @author Christoph Strobl + * @since 4.5 + */ +public interface QueryCharacteristic { + + /** + * @return the query type, eg. {@literal range}. + */ + String queryType(); + + /** + * @return the raw {@link Document} representation of the instance. 
+ */ + default Document toDocument() { + return new Document("queryType", queryType()); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristics.java new file mode 100644 index 0000000000..4ec775c5e7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristics.java @@ -0,0 +1,263 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +import org.bson.BsonNull; +import org.bson.Document; +import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; +import org.springframework.lang.Nullable; + +/** + * Encapsulation of individual {@link QueryCharacteristic query characteristics} used to define queries that can be + * executed when using queryable encryption. 
+ * + * @author Christoph Strobl + * @since 4.5 + */ +public class QueryCharacteristics implements Iterable { + + /** + * instance indicating none + */ + private static final QueryCharacteristics NONE = new QueryCharacteristics(Collections.emptyList()); + + private final List characteristics; + + QueryCharacteristics(List characteristics) { + this.characteristics = characteristics; + } + + /** + * @return marker instance indicating no characteristics have been defined. + */ + public static QueryCharacteristics none() { + return NONE; + } + + /** + * Create new {@link QueryCharacteristics} from given list of {@link QueryCharacteristic characteristics}. + * + * @param characteristics must not be {@literal null}. + * @return new instance of {@link QueryCharacteristics}. + */ + public static QueryCharacteristics of(List characteristics) { + return new QueryCharacteristics(List.copyOf(characteristics)); + } + + /** + * Create new {@link QueryCharacteristics} from given {@link QueryCharacteristic characteristics}. + * + * @param characteristics must not be {@literal null}. + * @return new instance of {@link QueryCharacteristics}. + */ + public static QueryCharacteristics of(QueryCharacteristic... characteristics) { + return new QueryCharacteristics(Arrays.asList(characteristics)); + } + + /** + * @return the list of {@link QueryCharacteristic characteristics}. + */ + public List getCharacteristics() { + return characteristics; + } + + @Override + public Iterator iterator() { + return this.characteristics.iterator(); + } + + /** + * Create a new {@link RangeQuery range query characteristic} used to define range queries against an encrypted field. + * + * @param targeted field type + * @return new instance of {@link RangeQuery}. + */ + public static RangeQuery range() { + return new RangeQuery<>(); + } + + /** + * Create a new {@link EqualityQuery equality query characteristic} used to define equality queries against an + * encrypted field. 
+ * + * @param <T> targeted field type + * @return new instance of {@link EqualityQuery}. + */ + public static <T> EqualityQuery<T> equality() { + return new EqualityQuery<>(null); + } + + /** + * {@link QueryCharacteristic} for equality comparison. + * + * @param <T> + * @since 4.5 + */ + public static class EqualityQuery<T> implements QueryCharacteristic { + + private final @Nullable Long contention; + + /** + * Create new instance of {@link EqualityQuery}. + * + * @param contention can be {@literal null}. + */ + public EqualityQuery(@Nullable Long contention) { + this.contention = contention; + } + + /** + * @param contention concurrent counter partition factor. + * @return new instance of {@link EqualityQuery}. + */ + public EqualityQuery<T> contention(long contention) { + return new EqualityQuery<>(contention); + } + + @Override + public String queryType() { + return "equality"; + } + + @Override + public Document toDocument() { + return QueryCharacteristic.super.toDocument().append("contention", contention); + } + } + + /** + * {@link QueryCharacteristic} for range comparison. + * + * @param <T> + * @since 4.5 + */ + public static class RangeQuery<T> implements QueryCharacteristic { + + private final @Nullable Range<T> valueRange; + private final @Nullable Integer trimFactor; + private final @Nullable Long sparsity; + private final @Nullable Long precision; + private final @Nullable Long contention; + + private RangeQuery() { + this(Range.unbounded(), null, null, null, null); + } + + /** + * Create new instance of {@link RangeQuery}. 
+ * + * @param valueRange + * @param trimFactor + * @param sparsity + * @param contention + */ + public RangeQuery(@Nullable Range<T> valueRange, @Nullable Integer trimFactor, @Nullable Long sparsity, + @Nullable Long precision, @Nullable Long contention) { + this.valueRange = valueRange; + this.trimFactor = trimFactor; + this.sparsity = sparsity; + this.precision = precision; + this.contention = contention; + } + + /** + * @param lower the lower value range boundary for the queryable field. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery<T> min(T lower) { + + Range<T> range = Range.of(Bound.inclusive(lower), + valueRange != null ? valueRange.getUpperBound() : Bound.unbounded()); + return new RangeQuery<>(range, trimFactor, sparsity, precision, contention); + } + + /** + * @param upper the upper value range boundary for the queryable field. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery<T> max(T upper) { + + Range<T> range = Range.of(valueRange != null ? valueRange.getLowerBound() : Bound.unbounded(), + Bound.inclusive(upper)); + return new RangeQuery<>(range, trimFactor, sparsity, precision, contention); + } + + /** + * @param trimFactor value to control the throughput of concurrent inserts and updates. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery<T> trimFactor(int trimFactor) { + return new RangeQuery<>(valueRange, trimFactor, sparsity, precision, contention); + } + + /** + * @param sparsity value to control the value density within the index. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery<T> sparsity(long sparsity) { + return new RangeQuery<>(valueRange, trimFactor, sparsity, precision, contention); + } + + /** + * @param contention concurrent counter partition factor. + * @return new instance of {@link RangeQuery}. 
+ */ + public RangeQuery<T> contention(long contention) { + return new RangeQuery<>(valueRange, trimFactor, sparsity, precision, contention); + } + + /** + * @param precision digits considered comparing floating point numbers. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery<T> precision(long precision) { + return new RangeQuery<>(valueRange, trimFactor, sparsity, precision, contention); + } + + @Override + public String queryType() { + return "range"; + } + + @Override + @SuppressWarnings("unchecked") + public Document toDocument() { + + Document target = QueryCharacteristic.super.toDocument(); + if (contention != null) { + target.append("contention", contention); + } + if (trimFactor != null) { + target.append("trimFactor", trimFactor); + } + if (valueRange != null) { + target.append("min", valueRange.getLowerBound().getValue().orElse((T) BsonNull.VALUE)).append("max", + valueRange.getUpperBound().getValue().orElse((T) BsonNull.VALUE)); + } + if (sparsity != null) { + target.append("sparsity", sparsity); + } + + return target; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypeUnifyingMergeFunction.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypeUnifyingMergeFunction.java new file mode 100644 index 0000000000..95f116619f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypeUnifyingMergeFunction.java @@ -0,0 +1,172 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.BiFunction; + +import org.bson.Document; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction.Path; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction.Resolution; +import org.springframework.lang.Nullable; +import org.springframework.util.CollectionUtils; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * Merge function considering BSON type hints. Conflicts are resolved through a {@link ConflictResolutionFunction}. 
+ * + * @author Christoph Strobl + * @since 3.4 + */ +class TypeUnifyingMergeFunction implements BiFunction<Map<String, Object>, Map<String, Object>, Document> { + + private final ConflictResolutionFunction conflictResolutionFunction; + + public TypeUnifyingMergeFunction(ConflictResolutionFunction conflictResolutionFunction) { + this.conflictResolutionFunction = conflictResolutionFunction; + } + + @Override + public Document apply(Map<String, Object> left, Map<String, Object> right) { + return merge(SimplePath.root(), left, right); + } + + @SuppressWarnings("unchecked") + Document merge(SimplePath path, Map<String, Object> left, Map<String, Object> right) { + + Document target = new Document(left); + + for (String key : right.keySet()) { + + SimplePath currentPath = path.append(key); + if (isTypeKey(key)) { + + Object unifiedExistingType = getUnifiedExistingType(key, target); + + if (unifiedExistingType != null) { + if (!ObjectUtils.nullSafeEquals(unifiedExistingType, right.get(key))) { + resolveConflict(currentPath, left, right, target); + } + continue; + } + } + + if (!target.containsKey(key)) { + target.put(key, right.get(key)); + continue; + } + + Object existingEntry = target.get(key); + Object newEntry = right.get(key); + if (existingEntry instanceof Map && newEntry instanceof Map) { + target.put(key, merge(currentPath, (Map<String, Object>) existingEntry, (Map<String, Object>) newEntry)); + } else if (!ObjectUtils.nullSafeEquals(existingEntry, newEntry)) { + resolveConflict(currentPath, left, right, target); + } + } + + return target; + } + + private void resolveConflict(Path path, Map<String, Object> left, Map<String, Object> right, Document target) { + applyConflictResolution(path, target, conflictResolutionFunction.resolveConflict(path, left, right)); + } + + private void applyConflictResolution(Path path, Document target, Resolution resolution) { + + if (Resolution.SKIP.equals(resolution) || resolution.getValue() == null) { + target.remove(path.currentElement()); + return; + } + + if (isTypeKey(resolution.getKey())) { + target.put(getTypeKeyToUse(resolution.getKey(), target), resolution.getValue()); + } else { + 
target.put(resolution.getKey(), resolution.getValue()); + } + } + + private static boolean isTypeKey(String key) { + return "bsonType".equals(key) || "type".equals(key); + } + + private static String getTypeKeyToUse(String key, Document source) { + + if ("bsonType".equals(key) && source.containsKey("type")) { + return "type"; + } + if ("type".equals(key) && source.containsKey("bsonType")) { + return "bsonType"; + } + return key; + } + + @Nullable + private static Object getUnifiedExistingType(String key, Document source) { + return source.get(getTypeKeyToUse(key, source)); + } + + /** + * Trivial {@link List} based {@link Path} implementation. + * + * @author Christoph Strobl + * @since 3.4 + */ + static class SimplePath implements Path { + + private final List<String> path; + + SimplePath(List<String> path) { + this.path = path; + } + + static SimplePath root() { + return new SimplePath(Collections.emptyList()); + } + + static SimplePath of(List<String> path, String next) { + + List<String> fullPath = new ArrayList<>(path.size() + 1); + fullPath.addAll(path); + fullPath.add(next); + return new SimplePath(fullPath); + } + + public SimplePath append(String next) { + return of(this.path, next); + } + + @Override + public String currentElement() { + return CollectionUtils.lastElement(path); + } + + @Override + public String dotPath() { + return StringUtils.collectionToDelimitedString(path, "."); + } + + @Override + public String toString() { + return dotPath(); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java index ebf57187a4..abf8b0b8a2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the 
original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,7 +20,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Optional; @@ -41,6 +40,7 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Michał Kurcius * @since 2.1 */ public class TypedJsonSchemaObject extends UntypedJsonSchemaObject { @@ -69,7 +69,7 @@ public class TypedJsonSchemaObject extends UntypedJsonSchemaObject { super(restrictions, description, generateDescription); - Assert.notNull(types, "Types must not be null! Please consider using 'Collections.emptySet()'."); + Assert.notNull(types, "Types must not be null Please consider using 'Collections.emptySet()'"); this.types = types; } @@ -82,16 +82,12 @@ public class TypedJsonSchemaObject extends UntypedJsonSchemaObject { */ public static TypedJsonSchemaObject of(Type... 
types) { - Assert.notNull(types, "Types must not be null!"); - Assert.noNullElements(types, "Types must not contain null!"); + Assert.notNull(types, "Types must not be null"); + Assert.noNullElements(types, "Types must not contain null"); return new TypedJsonSchemaObject(new LinkedHashSet<>(Arrays.asList(types)), null, false, Restrictions.empty()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject#getTypes() - */ @Override public Set getTypes() { return types; @@ -111,7 +107,6 @@ public TypedJsonSchemaObject description(String description) { /** * Auto generate the {@literal description} if not explicitly set. * - * @param description must not be {@literal null}. * @return new instance of {@link TypedJsonSchemaObject}. */ @Override @@ -167,7 +162,7 @@ public TypedJsonSchemaObject oneOf(Collection oneOf) { /** * The field value must not match the specified schemas. * - * @param oneOf must not be {@literal null}. + * @param notMatch must not be {@literal null}. * @return new instance of {@link TypedJsonSchemaObject}. 
*/ @Override @@ -374,79 +369,52 @@ public ObjectJsonSchemaObject property(JsonSchemaProperty property) { return properties(property); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.UntypedJsonSchemaObject#possibleValues(java.util.Collection) - */ @Override public ObjectJsonSchemaObject possibleValues(Collection possibleValues) { return newInstance(description, generateDescription, restrictions.possibleValues(possibleValues)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.UntypedJsonSchemaObject#allOf(java.util.Collection) - */ @Override public ObjectJsonSchemaObject allOf(Collection allOf) { return newInstance(description, generateDescription, restrictions.allOf(allOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.UntypedJsonSchemaObject#anyOf(java.util.Collection) - */ @Override public ObjectJsonSchemaObject anyOf(Collection anyOf) { return newInstance(description, generateDescription, restrictions.anyOf(anyOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.UntypedJsonSchemaObject#oneOf(java.util.Collection) - */ @Override public ObjectJsonSchemaObject oneOf(Collection oneOf) { return newInstance(description, generateDescription, restrictions.oneOf(oneOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.UntypedJsonSchemaObject#notMatch(org.springframework.data.mongodb.core.schema.JsonSchemaObject) - */ @Override public ObjectJsonSchemaObject notMatch(JsonSchemaObject notMatch) { return newInstance(description, generateDescription, restrictions.notMatch(notMatch)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.UntypedJsonSchemaObject#description(java.lang.String) - */ @Override public ObjectJsonSchemaObject description(String description) { return newInstance(description, generateDescription, restrictions); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.schema.UntypedJsonSchemaObject#generatedDescription() - */ @Override public ObjectJsonSchemaObject generatedDescription() { return newInstance(description, true, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject#toDocument() - */ + public List getProperties() { + return properties; + } + @Override public Document toDocument() { Document doc = new Document(super.toDocument()); - if (!CollectionUtils.isEmpty(requiredProperties)) { - doc.append("required", requiredProperties); + Collection allRequiredProperties = requiredProperties(); + if (!CollectionUtils.isEmpty(allRequiredProperties)) { + doc.append("required", new ArrayList<>(allRequiredProperties)); } if (propertiesCount != null) { @@ -465,12 +433,21 @@ public Document toDocument() { if (additionalProperties != null) { - doc.append("additionalProperties", additionalProperties instanceof JsonSchemaObject - ? ((JsonSchemaObject) additionalProperties).toDocument() : additionalProperties); + doc.append("additionalProperties", + additionalProperties instanceof JsonSchemaObject schemaObject ? 
schemaObject.toDocument() + : additionalProperties); } return doc; } + private Collection requiredProperties() { + + Set target = new LinkedHashSet<>(); + target.addAll(requiredProperties); + properties.stream().filter(JsonSchemaProperty::isRequired).forEach(it -> target.add(it.getIdentifier())); + return target; + } + private ObjectJsonSchemaObject newInstance(@Nullable String description, boolean generateDescription, Restrictions restrictions) { @@ -492,10 +469,6 @@ private Document reduceToDocument(Collection source) { .collect(Document::new, Document::putAll, (target, propertyDocument) -> {}); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generateDescription() - */ @Override protected String generateDescription() { @@ -514,8 +487,8 @@ protected String generateDescription() { StringUtils.collectionToDelimitedString(requiredProperties, ", ")); } } - if (additionalProperties instanceof Boolean) { - description += (((Boolean) additionalProperties) ? " " : " not ") + "allowing additional properties"; + if (additionalProperties instanceof Boolean booleanValue) { + description += (booleanValue ? " " : " not ") + "allowing additional properties"; } if (!CollectionUtils.isEmpty(properties)) { @@ -543,8 +516,7 @@ protected String generateDescription() { */ public static class NumericJsonSchemaObject extends TypedJsonSchemaObject { - private static final Set NUMERIC_TYPES = new HashSet<>( - Arrays.asList(Type.doubleType(), Type.intType(), Type.longType(), Type.numberType(), Type.bigDecimalType())); + private static final Set NUMERIC_TYPES = Set.of(Type.doubleType(), Type.intType(), Type.longType(), Type.numberType(), Type.bigDecimalType()); @Nullable Number multipleOf; @Nullable Range range; @@ -573,9 +545,9 @@ private NumericJsonSchemaObject(Set types, @Nullable String description, b * @param value must not be {@literal null}. * @return must not be {@literal null}. 
*/ - NumericJsonSchemaObject multipleOf(Number value) { + public NumericJsonSchemaObject multipleOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); NumericJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); newInstance.multipleOf = value; @@ -591,7 +563,7 @@ NumericJsonSchemaObject multipleOf(Number value) { */ public NumericJsonSchemaObject within(Range range) { - Assert.notNull(range, "Range must not be null!"); + Assert.notNull(range, "Range must not be null"); NumericJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); newInstance.range = range; @@ -608,7 +580,7 @@ public NumericJsonSchemaObject within(Range range) { @SuppressWarnings("unchecked") public NumericJsonSchemaObject gt(Number min) { - Assert.notNull(min, "Min must not be null!"); + Assert.notNull(min, "Min must not be null"); Bound upper = this.range != null ? this.range.getUpperBound() : Bound.unbounded(); return within(Range.of(createBound(min, false), upper)); @@ -623,7 +595,7 @@ public NumericJsonSchemaObject gt(Number min) { @SuppressWarnings("unchecked") public NumericJsonSchemaObject gte(Number min) { - Assert.notNull(min, "Min must not be null!"); + Assert.notNull(min, "Min must not be null"); Bound upper = this.range != null ? this.range.getUpperBound() : Bound.unbounded(); return within(Range.of(createBound(min, true), upper)); @@ -638,7 +610,7 @@ public NumericJsonSchemaObject gte(Number min) { @SuppressWarnings("unchecked") public NumericJsonSchemaObject lt(Number max) { - Assert.notNull(max, "Max must not be null!"); + Assert.notNull(max, "Max must not be null"); Bound lower = this.range != null ? this.range.getLowerBound() : Bound.unbounded(); return within(Range.of(lower, createBound(max, false))); @@ -651,81 +623,49 @@ public NumericJsonSchemaObject lt(Number max) { * @return new instance of {@link NumericJsonSchemaObject}. 
*/ @SuppressWarnings("unchecked") - NumericJsonSchemaObject lte(Number max) { + public NumericJsonSchemaObject lte(Number max) { - Assert.notNull(max, "Max must not be null!"); + Assert.notNull(max, "Max must not be null"); Bound lower = this.range != null ? this.range.getLowerBound() : Bound.unbounded(); return within(Range.of(lower, createBound(max, true))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#possibleValues(java.util.Collection) - */ @Override public NumericJsonSchemaObject possibleValues(Collection possibleValues) { return newInstance(description, generateDescription, restrictions.possibleValues(possibleValues)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#allOf(java.util.Collection) - */ @Override public NumericJsonSchemaObject allOf(Collection allOf) { return newInstance(description, generateDescription, restrictions.allOf(allOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#anyOf(java.util.Collection) - */ @Override public NumericJsonSchemaObject anyOf(Collection anyOf) { return newInstance(description, generateDescription, restrictions.anyOf(anyOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#oneOf(java.util.Collection) - */ @Override public NumericJsonSchemaObject oneOf(Collection oneOf) { return newInstance(description, generateDescription, restrictions.oneOf(oneOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#notMatch(org.springframework.data.mongodb.core.schema.JsonSchemaObject) - */ @Override public NumericJsonSchemaObject notMatch(JsonSchemaObject notMatch) { return newInstance(description, generateDescription, restrictions.notMatch(notMatch)); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#description(java.lang.String) - */ @Override public NumericJsonSchemaObject description(String description) { return newInstance(description, generateDescription, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generatedDescription() - */ @Override public NumericJsonSchemaObject generatedDescription() { return newInstance(description, true, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject#toDocument() - */ @Override public Document toDocument() { @@ -772,39 +712,35 @@ private NumericJsonSchemaObject newInstance(@Nullable String description, boolea private static Bound createBound(Number number, boolean inclusive) { - if (number instanceof Long) { - return inclusive ? Bound.inclusive((Long) number) : Bound.exclusive((Long) number); + if (number instanceof Long longValue) { + return inclusive ? Bound.inclusive(longValue) : Bound.exclusive(longValue); } - if (number instanceof Double) { - return inclusive ? Bound.inclusive((Double) number) : Bound.exclusive((Double) number); + if (number instanceof Double doubleValue) { + return inclusive ? Bound.inclusive(doubleValue) : Bound.exclusive(doubleValue); } - if (number instanceof Float) { - return inclusive ? Bound.inclusive((Float) number) : Bound.exclusive((Float) number); + if (number instanceof Float floatValue) { + return inclusive ? Bound.inclusive(floatValue) : Bound.exclusive(floatValue); } - if (number instanceof Integer) { - return inclusive ? Bound.inclusive((Integer) number) : Bound.exclusive((Integer) number); + if (number instanceof Integer integerValue) { + return inclusive ? Bound.inclusive(integerValue) : Bound.exclusive(integerValue); } - if (number instanceof BigDecimal) { - return inclusive ? 
Bound.inclusive((BigDecimal) number) : Bound.exclusive((BigDecimal) number); + if (number instanceof BigDecimal bigDecimalValue) { + return inclusive ? Bound.inclusive(bigDecimalValue) : Bound.exclusive(bigDecimalValue); } - throw new IllegalArgumentException("Unsupported numeric value."); + throw new IllegalArgumentException("Unsupported numeric value"); } private static Set validateTypes(Set types) { types.forEach(type -> { Assert.isTrue(NUMERIC_TYPES.contains(type), - () -> String.format("%s is not a valid numeric type. Expected one of %s.", type, NUMERIC_TYPES)); + () -> String.format("%s is not a valid numeric type; Expected one of %s", type, NUMERIC_TYPES)); }); return types; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generateDescription() - */ @Override protected String generateDescription() { @@ -851,7 +787,7 @@ private StringJsonSchemaObject(@Nullable String description, boolean generateDes */ public StringJsonSchemaObject length(Range range) { - Assert.notNull(range, "Range must not be null!"); + Assert.notNull(range, "Range must not be null"); StringJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); newInstance.length = range; @@ -891,7 +827,7 @@ public StringJsonSchemaObject maxLength(int length) { */ public StringJsonSchemaObject matching(String pattern) { - Assert.notNull(pattern, "Pattern must not be null!"); + Assert.notNull(pattern, "Pattern must not be null"); StringJsonSchemaObject newInstance = newInstance(description, generateDescription, restrictions); newInstance.pattern = pattern; @@ -899,73 +835,41 @@ public StringJsonSchemaObject matching(String pattern) { return newInstance; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#possibleValues(java.util.Collection) - */ @Override public StringJsonSchemaObject possibleValues(Collection possibleValues) { return newInstance(description, 
generateDescription, restrictions.possibleValues(possibleValues)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#allOf(java.util.Collection) - */ @Override public StringJsonSchemaObject allOf(Collection allOf) { return newInstance(description, generateDescription, restrictions.allOf(allOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#anyOf(java.util.Collection) - */ @Override public StringJsonSchemaObject anyOf(Collection anyOf) { return newInstance(description, generateDescription, restrictions.anyOf(anyOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#oneOf(java.util.Collection) - */ @Override public StringJsonSchemaObject oneOf(Collection oneOf) { return newInstance(description, generateDescription, restrictions.oneOf(oneOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#notMatch(org.springframework.data.mongodb.core.schema.JsonSchemaObject) - */ @Override public StringJsonSchemaObject notMatch(JsonSchemaObject notMatch) { return newInstance(description, generateDescription, restrictions.notMatch(notMatch)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#description(java.lang.String) - */ @Override public StringJsonSchemaObject description(String description) { return newInstance(description, generateDescription, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generatedDescription() - */ @Override public StringJsonSchemaObject generatedDescription() { return newInstance(description, true, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject#toDocument() - */ @Override public Document toDocument() { @@ -977,7 +881,7 @@ public Document toDocument() { 
length.getUpperBound().getValue().ifPresent(it -> doc.append("maxLength", it)); } - if (!StringUtils.isEmpty(pattern)) { + if (StringUtils.hasText(pattern)) { doc.append("pattern", pattern); } @@ -995,10 +899,6 @@ private StringJsonSchemaObject newInstance(@Nullable String description, boolean return newInstance; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generateDescription() - */ @Override protected String generateDescription() { @@ -1121,80 +1021,48 @@ public ArrayJsonSchemaObject additionalItems(boolean additionalItemsAllowed) { return newInstance; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#possibleValues(java.util.Collection) - */ @Override public ArrayJsonSchemaObject possibleValues(Collection possibleValues) { return newInstance(description, generateDescription, restrictions.possibleValues(possibleValues)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#allOf(java.util.Collection) - */ @Override public ArrayJsonSchemaObject allOf(Collection allOf) { return newInstance(description, generateDescription, restrictions.allOf(allOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#anyOf(java.util.Collection) - */ @Override public ArrayJsonSchemaObject anyOf(Collection anyOf) { return newInstance(description, generateDescription, restrictions.anyOf(anyOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#oneOf(java.util.Collection) - */ @Override public ArrayJsonSchemaObject oneOf(Collection oneOf) { return newInstance(description, generateDescription, restrictions.oneOf(oneOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#notMatch(org.springframework.data.mongodb.core.schema.JsonSchemaObject) - */ @Override public ArrayJsonSchemaObject 
notMatch(JsonSchemaObject notMatch) { return newInstance(description, generateDescription, restrictions.notMatch(notMatch)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#description(java.lang.String) - */ @Override public ArrayJsonSchemaObject description(String description) { return newInstance(description, generateDescription, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generatedDescription() - */ @Override public ArrayJsonSchemaObject generatedDescription() { return newInstance(description, true, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#toDocument() - */ @Override public Document toDocument() { Document doc = new Document(super.toDocument()); if (!CollectionUtils.isEmpty(items)) { - doc.append("items", items.size() == 1 ? items.iterator().next() + doc.append("items", items.size() == 1 ? 
items.iterator().next().toDocument() : items.stream().map(JsonSchemaObject::toDocument).collect(Collectors.toList())); } @@ -1228,10 +1096,6 @@ private ArrayJsonSchemaObject newInstance(@Nullable String description, boolean return newInstance; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generateDescription() - */ @Override protected String generateDescription() { @@ -1282,76 +1146,44 @@ private BooleanJsonSchemaObject(@Nullable String description, boolean generateDe super(Type.booleanType(), description, generateDescription, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#possibleValues(java.util.Collection) - */ @Override public BooleanJsonSchemaObject possibleValues(Collection possibleValues) { return new BooleanJsonSchemaObject(description, generateDescription, restrictions.possibleValues(possibleValues)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#allOf(java.util.Collection) - */ @Override public BooleanJsonSchemaObject allOf(Collection allOf) { return new BooleanJsonSchemaObject(description, generateDescription, restrictions.allOf(allOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#anyOf(java.util.Collection) - */ @Override public BooleanJsonSchemaObject anyOf(Collection anyOf) { return new BooleanJsonSchemaObject(description, generateDescription, restrictions.anyOf(anyOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#oneOf(java.util.Collection) - */ @Override public BooleanJsonSchemaObject oneOf(Collection oneOf) { return new BooleanJsonSchemaObject(description, generateDescription, restrictions.oneOf(oneOf)); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#notMatch(org.springframework.data.mongodb.core.schema.JsonSchemaObject) - */ @Override public BooleanJsonSchemaObject notMatch(JsonSchemaObject notMatch) { return new BooleanJsonSchemaObject(description, generateDescription, restrictions.notMatch(notMatch)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#description(java.lang.String) - */ @Override public BooleanJsonSchemaObject description(String description) { return new BooleanJsonSchemaObject(description, generateDescription, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generatedDescription() - */ @Override public BooleanJsonSchemaObject generatedDescription() { return new BooleanJsonSchemaObject(description, true, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generateDescription() - */ @Override protected String generateDescription() { - return "Must be a boolean."; + return "Must be a boolean"; } } @@ -1375,76 +1207,44 @@ private NullJsonSchemaObject(@Nullable String description, boolean generateDescr super(Type.nullType(), description, generateDescription, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#possibleValues(java.util.Collection) - */ @Override public NullJsonSchemaObject possibleValues(Collection possibleValues) { return new NullJsonSchemaObject(description, generateDescription, restrictions.possibleValues(possibleValues)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#allOf(java.util.Collection) - */ @Override public NullJsonSchemaObject allOf(Collection allOf) { return new NullJsonSchemaObject(description, generateDescription, restrictions.allOf(allOf)); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#anyOf(java.util.Collection) - */ @Override public NullJsonSchemaObject anyOf(Collection anyOf) { return new NullJsonSchemaObject(description, generateDescription, restrictions.anyOf(anyOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#oneOf(java.util.Collection) - */ @Override public NullJsonSchemaObject oneOf(Collection oneOf) { return new NullJsonSchemaObject(description, generateDescription, restrictions.oneOf(oneOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#notMatch(org.springframework.data.mongodb.core.schema.JsonSchemaObject) - */ @Override public NullJsonSchemaObject notMatch(JsonSchemaObject notMatch) { return new NullJsonSchemaObject(description, generateDescription, restrictions.notMatch(notMatch)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#description(java.lang.String) - */ @Override public NullJsonSchemaObject description(String description) { return new NullJsonSchemaObject(description, generateDescription, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generatedDescription() - */ @Override public NullJsonSchemaObject generatedDescription() { return new NullJsonSchemaObject(description, true, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generateDescription() - */ @Override protected String generateDescription() { - return "Must be null."; + return "Must be null"; } } @@ -1467,76 +1267,44 @@ private DateJsonSchemaObject(@Nullable String description, boolean generateDescr super(Type.dateType(), description, generateDescription, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#possibleValues(java.util.Collection) 
- */ @Override public DateJsonSchemaObject possibleValues(Collection possibleValues) { return new DateJsonSchemaObject(description, generateDescription, restrictions.possibleValues(possibleValues)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#allOf(java.util.Collection) - */ @Override public DateJsonSchemaObject allOf(Collection allOf) { return new DateJsonSchemaObject(description, generateDescription, restrictions.allOf(allOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#anyOf(java.util.Collection) - */ @Override public DateJsonSchemaObject anyOf(Collection anyOf) { return new DateJsonSchemaObject(description, generateDescription, restrictions.anyOf(anyOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#oneOf(java.util.Collection) - */ @Override public DateJsonSchemaObject oneOf(Collection oneOf) { return new DateJsonSchemaObject(description, generateDescription, restrictions.oneOf(oneOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#notMatch(org.springframework.data.mongodb.core.schema.JsonSchemaObject) - */ @Override public DateJsonSchemaObject notMatch(JsonSchemaObject notMatch) { return new DateJsonSchemaObject(description, generateDescription, restrictions.notMatch(notMatch)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#description(java.lang.String) - */ @Override public DateJsonSchemaObject description(String description) { return new DateJsonSchemaObject(description, generateDescription, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generatedDescription() - */ @Override public DateJsonSchemaObject generatedDescription() { return new DateJsonSchemaObject(description, true, restrictions); } - /* - * 
(non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generateDescription() - */ @Override protected String generateDescription() { - return "Must be a date."; + return "Must be a date"; } } @@ -1559,77 +1327,45 @@ private TimestampJsonSchemaObject(@Nullable String description, boolean generate super(Type.timestampType(), description, generateDescription, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#possibleValues(java.util.Collection) - */ @Override public TimestampJsonSchemaObject possibleValues(Collection possibleValues) { return new TimestampJsonSchemaObject(description, generateDescription, restrictions.possibleValues(possibleValues)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#allOf(java.util.Collection) - */ @Override public TimestampJsonSchemaObject allOf(Collection allOf) { return new TimestampJsonSchemaObject(description, generateDescription, restrictions.allOf(allOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#anyOf(java.util.Collection) - */ @Override public TimestampJsonSchemaObject anyOf(Collection anyOf) { return new TimestampJsonSchemaObject(description, generateDescription, restrictions.anyOf(anyOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#oneOf(java.util.Collection) - */ @Override public TimestampJsonSchemaObject oneOf(Collection oneOf) { return new TimestampJsonSchemaObject(description, generateDescription, restrictions.oneOf(oneOf)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#notMatch(org.springframework.data.mongodb.core.schema.JsonSchemaObject) - */ @Override public TimestampJsonSchemaObject notMatch(JsonSchemaObject notMatch) { return new TimestampJsonSchemaObject(description, generateDescription, 
restrictions.notMatch(notMatch)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#description(java.lang.String) - */ @Override public TimestampJsonSchemaObject description(String description) { return new TimestampJsonSchemaObject(description, generateDescription, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generatedDescription() - */ @Override public TimestampJsonSchemaObject generatedDescription() { return new TimestampJsonSchemaObject(description, true, restrictions); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject#generateDescription() - */ @Override protected String generateDescription() { - return "Must be a timestamp."; + return "Must be a timestamp"; } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/UntypedJsonSchemaObject.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/UntypedJsonSchemaObject.java index 124bac65fa..54ca29e0e3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/UntypedJsonSchemaObject.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/UntypedJsonSchemaObject.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,6 @@ */ package org.springframework.data.mongodb.core.schema; -import lombok.AccessLevel; -import lombok.RequiredArgsConstructor; - import java.util.Collection; import java.util.Collections; import java.util.List; @@ -61,10 +58,6 @@ public static UntypedJsonSchemaObject newInstance() { return new UntypedJsonSchemaObject(null, null, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject#getTypes() - */ @Override public Set getTypes() { return Collections.emptySet(); @@ -132,7 +125,7 @@ public UntypedJsonSchemaObject oneOf(Collection oneOf) { /** * The field value must not match the specified schemas. * - * @param oneOf must not be {@literal null}. + * @param notMatch must not be {@literal null}. * @return new instance of {@link TypedJsonSchemaObject}. */ public UntypedJsonSchemaObject notMatch(JsonSchemaObject notMatch) { @@ -182,7 +175,6 @@ protected String generateDescription() { * @author Christoph Strobl * @since 2.1 */ - @RequiredArgsConstructor(access = AccessLevel.PACKAGE) static class Restrictions { private final Collection possibleValues; @@ -191,6 +183,16 @@ static class Restrictions { private final Collection oneOf; private final @Nullable JsonSchemaObject notMatch; + Restrictions(Collection possibleValues, Collection allOf, + Collection anyOf, Collection oneOf, JsonSchemaObject notMatch) { + + this.possibleValues = possibleValues; + this.allOf = allOf; + this.anyOf = anyOf; + this.oneOf = oneOf; + this.notMatch = notMatch; + } + /** * @return new empty {@link Restrictions}. 
*/ @@ -206,7 +208,7 @@ static Restrictions empty() { */ Restrictions possibleValues(Collection possibleValues) { - Assert.notNull(possibleValues, "PossibleValues must not be null!"); + Assert.notNull(possibleValues, "PossibleValues must not be null"); return new Restrictions(possibleValues, allOf, anyOf, oneOf, notMatch); } @@ -216,7 +218,7 @@ Restrictions possibleValues(Collection possibleValues) { */ Restrictions allOf(Collection allOf) { - Assert.notNull(allOf, "AllOf must not be null!"); + Assert.notNull(allOf, "AllOf must not be null"); return new Restrictions(possibleValues, allOf, anyOf, oneOf, notMatch); } @@ -226,7 +228,7 @@ Restrictions allOf(Collection allOf) { */ Restrictions anyOf(Collection anyOf) { - Assert.notNull(anyOf, "AnyOf must not be null!"); + Assert.notNull(anyOf, "AnyOf must not be null"); return new Restrictions(possibleValues, allOf, anyOf, oneOf, notMatch); } @@ -236,7 +238,7 @@ Restrictions anyOf(Collection anyOf) { */ Restrictions oneOf(Collection oneOf) { - Assert.notNull(oneOf, "OneOf must not be null!"); + Assert.notNull(oneOf, "OneOf must not be null"); return new Restrictions(possibleValues, allOf, anyOf, oneOf, notMatch); } @@ -246,7 +248,7 @@ Restrictions oneOf(Collection oneOf) { */ Restrictions notMatch(JsonSchemaObject notMatch) { - Assert.notNull(notMatch, "NotMatch must not be null!"); + Assert.notNull(notMatch, "NotMatch must not be null"); return new Restrictions(possibleValues, allOf, anyOf, oneOf, notMatch); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/ExecutableMongoScript.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/ExecutableMongoScript.java index f1b888233b..d443d4bcea 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/ExecutableMongoScript.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/ExecutableMongoScript.java @@ -1,11 +1,11 @@ /* - * 
Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,7 +23,9 @@ * @author Christoph Strobl * @author Oliver Gierke * @since 1.7 + * @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0. */ +@Deprecated public class ExecutableMongoScript { private final String code; @@ -35,7 +37,7 @@ public class ExecutableMongoScript { */ public ExecutableMongoScript(String code) { - Assert.hasText(code, "Code must not be null or empty!"); + Assert.hasText(code, "Code must not be null or empty"); this.code = code; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/NamedMongoScript.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/NamedMongoScript.java index 4d6c826955..c344a07cae 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/NamedMongoScript.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/script/NamedMongoScript.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,19 +20,21 @@ /** * An {@link ExecutableMongoScript} assigned to a name that allows calling the function by its {@literal name} once it - * has been saved to the {@link com.mongodb.DB} instance. + * has been saved to the {@link com.mongodb.client.MongoDatabase} instance. * * @author Christoph Strobl * @author Oliver Gierke * @since 1.7 + * @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0. */ +@Deprecated public class NamedMongoScript { private final @Id String name; private final ExecutableMongoScript script; /** - * Creates new {@link NamedMongoScript} that can be saved to the {@link com.mongodb.DB} instance. + * Creates new {@link NamedMongoScript} that can be saved to the {@link com.mongodb.client.MongoDatabase} instance. * * @param name must not be {@literal null} or empty. * @param rawScript the {@link String} representation of the {@literal JavaScript} function. 
Must not be @@ -50,8 +52,8 @@ public NamedMongoScript(String name, String rawScript) { */ public NamedMongoScript(String name, ExecutableMongoScript script) { - Assert.hasText(name, "Name must not be null or empty!"); - Assert.notNull(script, "ExecutableMongoScript must not be null!"); + Assert.hasText(name, "Name must not be null or empty"); + Assert.notNull(script, "ExecutableMongoScript must not be null"); this.name = name; this.script = script; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionNode.java index 522d687e51..b4550ee8de 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionNode.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -49,8 +49,8 @@ public class ExpressionNode implements Iterable { */ protected ExpressionNode(SpelNode node, ExpressionState state) { - Assert.notNull(node, "SpelNode must not be null!"); - Assert.notNull(state, "ExpressionState must not be null!"); + Assert.notNull(node, "SpelNode must not be null"); + Assert.notNull(state, "ExpressionState must not be null"); this.node = node; this.state = state; @@ -67,20 +67,20 @@ protected ExpressionNode(SpelNode node, ExpressionState state) { */ public static ExpressionNode from(SpelNode node, ExpressionState state) { - if (node instanceof Operator) { - return new OperatorNode((Operator) node, state); + if (node instanceof Operator operator) { + return new OperatorNode(operator, state); } - if (node instanceof MethodReference) { - return new MethodReferenceNode((MethodReference) node, state); + if (node instanceof MethodReference methodReference) { + return new MethodReferenceNode(methodReference, state); } - if (node instanceof Literal) { - return new LiteralNode((Literal) node, state); + if (node instanceof Literal literal) { + return new LiteralNode(literal, state); } - if (node instanceof OperatorNot) { - return new NotOperatorNode((OperatorNot) node, state); + if (node instanceof OperatorNot operatorNot) { + return new NotOperatorNode(operatorNot, state); } return new ExpressionNode(node, state); @@ -103,7 +103,7 @@ public String getName() { */ public boolean isOfType(Class type) { - Assert.notNull(type, "Type must not be empty!"); + Assert.notNull(type, "Type must not be empty"); return type.isAssignableFrom(node.getClass()); } @@ -114,7 +114,7 @@ public boolean isOfType(Class type) { * @return */ boolean isOfSameTypeAs(@Nullable ExpressionNode 
node) { - return node == null ? false : this.node.getClass().equals(node.node.getClass()); + return node != null && this.node.getClass().equals(node.node.getClass()); } /** @@ -172,7 +172,7 @@ public boolean hasChildren() { */ public ExpressionNode getChild(int index) { - Assert.isTrue(index >= 0, "Index must be greater or equal to zero!"); + Assert.isTrue(index >= 0, "Index must be greater or equal to zero"); return from(node.getChild(index), state); } @@ -184,7 +184,7 @@ public ExpressionNode getChild(int index) { */ public boolean hasfirstChildNotOfType(Class type) { - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); return hasChildren() && !node.getChild(0).getClass().equals(type); } @@ -198,10 +198,6 @@ protected ExpressionNode from(SpelNode node) { return from(node, state); } - /* - * (non-Javadoc) - * @see java.lang.Iterable#iterator() - */ @Override public Iterator iterator() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformationContextSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformationContextSupport.java index 8f39180cf2..8869f51e09 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformationContextSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformationContextSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -46,7 +46,7 @@ public class ExpressionTransformationContextSupport { public ExpressionTransformationContextSupport(T currentNode, @Nullable ExpressionNode parentNode, @Nullable Document previousOperationObject) { - Assert.notNull(currentNode, "currentNode must not be null!"); + Assert.notNull(currentNode, "currentNode must not be null"); this.currentNode = currentNode; this.parentNode = parentNode; @@ -112,7 +112,7 @@ public boolean parentIsSameOperation() { */ public Document addToPreviousOperation(Object value) { - Assert.state(previousOperationObject != null, "No previous operation available!"); + Assert.state(previousOperationObject != null, "No previous operation available"); extractArgumentListFrom(previousOperationObject).add(value); return previousOperationObject; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java index f962c06fb1..512f753042 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +16,7 @@ package org.springframework.data.mongodb.core.spel; /** - * SPI interface to implement components that can transfrom an {@link ExpressionTransformationContextSupport} into an + * SPI interface to implement components that can transform an {@link ExpressionTransformationContextSupport} into an * object. * * @author Oliver Gierke diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/LiteralNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/LiteralNode.java index 1c05de8374..030ef0d055 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/LiteralNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/LiteralNode.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,6 @@ */ package org.springframework.data.mongodb.core.spel; -import java.util.Collections; -import java.util.HashSet; import java.util.Set; import org.springframework.expression.spel.ExpressionState; @@ -39,23 +37,10 @@ */ public class LiteralNode extends ExpressionNode { - private static final Set> SUPPORTED_LITERAL_TYPES; + private static final Set> SUPPORTED_LITERAL_TYPES = Set.of(BooleanLiteral.class, FloatLiteral.class, + IntLiteral.class, LongLiteral.class, NullLiteral.class, RealLiteral.class, StringLiteral.class); private final Literal literal; - static { - - Set> supportedTypes = new HashSet>(7, 1); - supportedTypes.add(BooleanLiteral.class); - supportedTypes.add(FloatLiteral.class); - supportedTypes.add(IntLiteral.class); - supportedTypes.add(LongLiteral.class); - supportedTypes.add(NullLiteral.class); - supportedTypes.add(RealLiteral.class); - supportedTypes.add(StringLiteral.class); - - SUPPORTED_LITERAL_TYPES = Collections.unmodifiableSet(supportedTypes); - } - /** * Creates a new {@link LiteralNode} from the given {@link Literal} and {@link ExpressionState}. 
* @@ -75,18 +60,13 @@ public class LiteralNode extends ExpressionNode { */ public boolean isUnaryMinus(@Nullable ExpressionNode parent) { - if (!(parent instanceof OperatorNode)) { + if (!(parent instanceof OperatorNode operatorNode)) { return false; } - OperatorNode operator = (OperatorNode) parent; - return operator.isUnaryMinus(); + return operatorNode.isUnaryMinus(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.spel.ExpressionNode#isLiteral() - */ @Override public boolean isLiteral() { return SUPPORTED_LITERAL_TYPES.contains(literal.getClass()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index ceadea7c55..5f1b0c4309 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -35,6 +35,7 @@ * @author Sebastien Gerard * @author Christoph Strobl * @author Mark Paluch + * @author Julia Lee */ public class MethodReferenceNode extends ExpressionNode { @@ -45,122 +46,218 @@ public class MethodReferenceNode extends ExpressionNode { Map map = new HashMap(); // BOOLEAN OPERATORS - map.put("and", arrayArgumentAggregationMethodReference().forOperator("$and")); - map.put("or", arrayArgumentAggregationMethodReference().forOperator("$or")); - map.put("not", arrayArgumentAggregationMethodReference().forOperator("$not")); + map.put("and", arrayArgRef().forOperator("$and")); + map.put("or", arrayArgRef().forOperator("$or")); + map.put("not", arrayArgRef().forOperator("$not")); // SET OPERATORS - map.put("setEquals", arrayArgumentAggregationMethodReference().forOperator("$setEquals")); - map.put("setIntersection", arrayArgumentAggregationMethodReference().forOperator("$setIntersection")); - map.put("setUnion", arrayArgumentAggregationMethodReference().forOperator("$setUnion")); - map.put("setDifference", arrayArgumentAggregationMethodReference().forOperator("$setDifference")); + map.put("setEquals", arrayArgRef().forOperator("$setEquals")); + map.put("setIntersection", arrayArgRef().forOperator("$setIntersection")); + map.put("setUnion", arrayArgRef().forOperator("$setUnion")); + map.put("setDifference", arrayArgRef().forOperator("$setDifference")); // 2nd. 
- map.put("setIsSubset", arrayArgumentAggregationMethodReference().forOperator("$setIsSubset")); - map.put("anyElementTrue", arrayArgumentAggregationMethodReference().forOperator("$anyElementTrue")); - map.put("allElementsTrue", arrayArgumentAggregationMethodReference().forOperator("$allElementsTrue")); + map.put("setIsSubset", arrayArgRef().forOperator("$setIsSubset")); + map.put("anyElementTrue", arrayArgRef().forOperator("$anyElementTrue")); + map.put("allElementsTrue", arrayArgRef().forOperator("$allElementsTrue")); // COMPARISON OPERATORS - map.put("cmp", arrayArgumentAggregationMethodReference().forOperator("$cmp")); - map.put("eq", arrayArgumentAggregationMethodReference().forOperator("$eq")); - map.put("gt", arrayArgumentAggregationMethodReference().forOperator("$gt")); - map.put("gte", arrayArgumentAggregationMethodReference().forOperator("$gte")); - map.put("lt", arrayArgumentAggregationMethodReference().forOperator("$lt")); - map.put("lte", arrayArgumentAggregationMethodReference().forOperator("$lte")); - map.put("ne", arrayArgumentAggregationMethodReference().forOperator("$ne")); + map.put("cmp", arrayArgRef().forOperator("$cmp")); + map.put("eq", arrayArgRef().forOperator("$eq")); + map.put("gt", arrayArgRef().forOperator("$gt")); + map.put("gte", arrayArgRef().forOperator("$gte")); + map.put("lt", arrayArgRef().forOperator("$lt")); + map.put("lte", arrayArgRef().forOperator("$lte")); + map.put("ne", arrayArgRef().forOperator("$ne")); + + // DOCUMENT OPERATORS + map.put("rank", emptyRef().forOperator("$rank")); + map.put("denseRank", emptyRef().forOperator("$denseRank")); + map.put("documentNumber", emptyRef().forOperator("$documentNumber")); + map.put("shift", mapArgRef().forOperator("$shift").mappingParametersTo("output", "by", "default")); // ARITHMETIC OPERATORS - map.put("abs", singleArgumentAggregationMethodReference().forOperator("$abs")); - map.put("add", arrayArgumentAggregationMethodReference().forOperator("$add")); - map.put("ceil", 
singleArgumentAggregationMethodReference().forOperator("$ceil")); - map.put("divide", arrayArgumentAggregationMethodReference().forOperator("$divide")); - map.put("exp", singleArgumentAggregationMethodReference().forOperator("$exp")); - map.put("floor", singleArgumentAggregationMethodReference().forOperator("$floor")); - map.put("ln", singleArgumentAggregationMethodReference().forOperator("$ln")); - map.put("log", arrayArgumentAggregationMethodReference().forOperator("$log")); - map.put("log10", singleArgumentAggregationMethodReference().forOperator("$log10")); - map.put("mod", arrayArgumentAggregationMethodReference().forOperator("$mod")); - map.put("multiply", arrayArgumentAggregationMethodReference().forOperator("$multiply")); - map.put("pow", arrayArgumentAggregationMethodReference().forOperator("$pow")); - map.put("sqrt", singleArgumentAggregationMethodReference().forOperator("$sqrt")); - map.put("subtract", arrayArgumentAggregationMethodReference().forOperator("$subtract")); - map.put("trunc", singleArgumentAggregationMethodReference().forOperator("$trunc")); + map.put("abs", singleArgRef().forOperator("$abs")); + map.put("add", arrayArgRef().forOperator("$add")); + map.put("ceil", singleArgRef().forOperator("$ceil")); + map.put("divide", arrayArgRef().forOperator("$divide")); + map.put("exp", singleArgRef().forOperator("$exp")); + map.put("floor", singleArgRef().forOperator("$floor")); + map.put("ln", singleArgRef().forOperator("$ln")); + map.put("log", arrayArgRef().forOperator("$log")); + map.put("log10", singleArgRef().forOperator("$log10")); + map.put("mod", arrayArgRef().forOperator("$mod")); + map.put("multiply", arrayArgRef().forOperator("$multiply")); + map.put("pow", arrayArgRef().forOperator("$pow")); + map.put("sqrt", singleArgRef().forOperator("$sqrt")); + map.put("subtract", arrayArgRef().forOperator("$subtract")); + map.put("trunc", singleArgRef().forOperator("$trunc")); + map.put("round", arrayArgRef().forOperator("$round")); + 
map.put("derivative", mapArgRef().forOperator("$derivative").mappingParametersTo("input", "unit")); + map.put("integral", mapArgRef().forOperator("$integral").mappingParametersTo("input", "unit")); + map.put("sin", singleArgRef().forOperator("$sin")); + map.put("sinh", singleArgRef().forOperator("$sinh")); + map.put("asin", singleArgRef().forOperator("$asin")); + map.put("asinh", singleArgRef().forOperator("$asinh")); + map.put("cos", singleArgRef().forOperator("$cos")); + map.put("cosh", singleArgRef().forOperator("$cosh")); + map.put("acos", singleArgRef().forOperator("$acos")); + map.put("acosh", singleArgRef().forOperator("$acosh")); + map.put("tan", singleArgRef().forOperator("$tan")); + map.put("tanh", singleArgRef().forOperator("$tanh")); + map.put("rand", emptyRef().forOperator("$rand")); + map.put("atan", singleArgRef().forOperator("$atan")); + map.put("atan2", arrayArgRef().forOperator("$atan2")); + map.put("atanh", singleArgRef().forOperator("$atanh")); // STRING OPERATORS - map.put("concat", arrayArgumentAggregationMethodReference().forOperator("$concat")); - map.put("strcasecmp", arrayArgumentAggregationMethodReference().forOperator("$strcasecmp")); - map.put("substr", arrayArgumentAggregationMethodReference().forOperator("$substr")); - map.put("toLower", singleArgumentAggregationMethodReference().forOperator("$toLower")); - map.put("toUpper", singleArgumentAggregationMethodReference().forOperator("$toUpper")); - map.put("strcasecmp", arrayArgumentAggregationMethodReference().forOperator("$strcasecmp")); - map.put("indexOfBytes", arrayArgumentAggregationMethodReference().forOperator("$indexOfBytes")); - map.put("indexOfCP", arrayArgumentAggregationMethodReference().forOperator("$indexOfCP")); - map.put("split", arrayArgumentAggregationMethodReference().forOperator("$split")); - map.put("strLenBytes", singleArgumentAggregationMethodReference().forOperator("$strLenBytes")); - map.put("strLenCP", 
singleArgumentAggregationMethodReference().forOperator("$strLenCP")); - map.put("substrCP", arrayArgumentAggregationMethodReference().forOperator("$substrCP")); + map.put("concat", arrayArgRef().forOperator("$concat")); + map.put("strcasecmp", arrayArgRef().forOperator("$strcasecmp")); + map.put("substr", arrayArgRef().forOperator("$substr")); + map.put("toLower", singleArgRef().forOperator("$toLower")); + map.put("toUpper", singleArgRef().forOperator("$toUpper")); + map.put("indexOfBytes", arrayArgRef().forOperator("$indexOfBytes")); + map.put("indexOfCP", arrayArgRef().forOperator("$indexOfCP")); + map.put("split", arrayArgRef().forOperator("$split")); + map.put("strLenBytes", singleArgRef().forOperator("$strLenBytes")); + map.put("strLenCP", singleArgRef().forOperator("$strLenCP")); + map.put("substrCP", arrayArgRef().forOperator("$substrCP")); + map.put("trim", mapArgRef().forOperator("$trim").mappingParametersTo("input", "chars")); + map.put("ltrim", mapArgRef().forOperator("$ltrim").mappingParametersTo("input", "chars")); + map.put("rtrim", mapArgRef().forOperator("$rtrim").mappingParametersTo("input", "chars")); + map.put("regexFind", mapArgRef().forOperator("$regexFind").mappingParametersTo("input", "regex", "options")); + map.put("regexFindAll", mapArgRef().forOperator("$regexFindAll").mappingParametersTo("input", "regex", "options")); + map.put("regexMatch", mapArgRef().forOperator("$regexMatch").mappingParametersTo("input", "regex", "options")); + map.put("replaceOne", mapArgRef().forOperator("$replaceOne").mappingParametersTo("input", "find", "replacement")); + map.put("replaceAll", mapArgRef().forOperator("$replaceAll").mappingParametersTo("input", "find", "replacement")); // TEXT SEARCH OPERATORS - map.put("meta", singleArgumentAggregationMethodReference().forOperator("$meta")); + map.put("meta", singleArgRef().forOperator("$meta")); // ARRAY OPERATORS - map.put("arrayElemAt", arrayArgumentAggregationMethodReference().forOperator("$arrayElemAt")); - 
map.put("concatArrays", arrayArgumentAggregationMethodReference().forOperator("$concatArrays")); - map.put("filter", mapArgumentAggregationMethodReference().forOperator("$filter") // + map.put("arrayElemAt", arrayArgRef().forOperator("$arrayElemAt")); + map.put("concatArrays", arrayArgRef().forOperator("$concatArrays")); + map.put("filter", mapArgRef().forOperator("$filter") // .mappingParametersTo("input", "as", "cond")); - map.put("isArray", singleArgumentAggregationMethodReference().forOperator("$isArray")); - map.put("size", singleArgumentAggregationMethodReference().forOperator("$size")); - map.put("slice", arrayArgumentAggregationMethodReference().forOperator("$slice")); - map.put("reverseArray", singleArgumentAggregationMethodReference().forOperator("$reverseArray")); - map.put("reduce", mapArgumentAggregationMethodReference().forOperator("$reduce").mappingParametersTo("input", - "initialValue", "in")); - map.put("zip", mapArgumentAggregationMethodReference().forOperator("$zip").mappingParametersTo("inputs", - "useLongestLength", "defaults")); - map.put("in", arrayArgumentAggregationMethodReference().forOperator("$in")); + map.put("first", singleArgRef().forOperator("$first")); + map.put("isArray", singleArgRef().forOperator("$isArray")); + map.put("last", singleArgRef().forOperator("$last")); + map.put("size", singleArgRef().forOperator("$size")); + map.put("slice", arrayArgRef().forOperator("$slice")); + map.put("sortArray", mapArgRef().forOperator("$sortArray").mappingParametersTo("input", "sortBy")); + map.put("reverseArray", singleArgRef().forOperator("$reverseArray")); + map.put("reduce", mapArgRef().forOperator("$reduce").mappingParametersTo("input", "initialValue", "in")); + map.put("zip", mapArgRef().forOperator("$zip").mappingParametersTo("inputs", "useLongestLength", "defaults")); + map.put("in", arrayArgRef().forOperator("$in")); + map.put("arrayToObject", singleArgRef().forOperator("$arrayToObject")); + map.put("indexOfArray", 
arrayArgRef().forOperator("$indexOfArray")); + map.put("range", arrayArgRef().forOperator("$range")); // VARIABLE OPERATORS - map.put("map", mapArgumentAggregationMethodReference().forOperator("$map") // + map.put("map", mapArgRef().forOperator("$map") // .mappingParametersTo("input", "as", "in")); - map.put("let", mapArgumentAggregationMethodReference().forOperator("$let").mappingParametersTo("vars", "in")); + map.put("let", mapArgRef().forOperator("$let").mappingParametersTo("vars", "in")); // LITERAL OPERATORS - map.put("literal", singleArgumentAggregationMethodReference().forOperator("$literal")); + map.put("literal", singleArgRef().forOperator("$literal")); // DATE OPERATORS - map.put("dayOfYear", singleArgumentAggregationMethodReference().forOperator("$dayOfYear")); - map.put("dayOfMonth", singleArgumentAggregationMethodReference().forOperator("$dayOfMonth")); - map.put("dayOfWeek", singleArgumentAggregationMethodReference().forOperator("$dayOfWeek")); - map.put("year", singleArgumentAggregationMethodReference().forOperator("$year")); - map.put("month", singleArgumentAggregationMethodReference().forOperator("$month")); - map.put("week", singleArgumentAggregationMethodReference().forOperator("$week")); - map.put("hour", singleArgumentAggregationMethodReference().forOperator("$hour")); - map.put("minute", singleArgumentAggregationMethodReference().forOperator("$minute")); - map.put("second", singleArgumentAggregationMethodReference().forOperator("$second")); - map.put("millisecond", singleArgumentAggregationMethodReference().forOperator("$millisecond")); - map.put("dateToString", mapArgumentAggregationMethodReference().forOperator("$dateToString") // + map.put("dateAdd", + mapArgRef().forOperator("$dateAdd").mappingParametersTo("startDate", "unit", "amount", "timezone")); + map.put("dateSubtract", + mapArgRef().forOperator("$dateSubtract").mappingParametersTo("startDate", "unit", "amount", "timezone")); + map.put("dateDiff", 
mapArgRef().forOperator("$dateDiff").mappingParametersTo("startDate", "endDate", "unit", + "timezone", "startOfWeek")); + map.put("dateTrunc", mapArgRef().forOperator("$dateTrunc").mappingParametersTo("date", "unit", "binSize", + "startOfWeek", "timezone")); + map.put("dayOfYear", singleArgRef().forOperator("$dayOfYear")); + map.put("dayOfMonth", singleArgRef().forOperator("$dayOfMonth")); + map.put("dayOfWeek", singleArgRef().forOperator("$dayOfWeek")); + map.put("year", singleArgRef().forOperator("$year")); + map.put("month", singleArgRef().forOperator("$month")); + map.put("week", singleArgRef().forOperator("$week")); + map.put("hour", singleArgRef().forOperator("$hour")); + map.put("minute", singleArgRef().forOperator("$minute")); + map.put("second", singleArgRef().forOperator("$second")); + map.put("millisecond", singleArgRef().forOperator("$millisecond")); + map.put("dateToString", mapArgRef().forOperator("$dateToString") // .mappingParametersTo("format", "date")); - map.put("isoDayOfWeek", singleArgumentAggregationMethodReference().forOperator("$isoDayOfWeek")); - map.put("isoWeek", singleArgumentAggregationMethodReference().forOperator("$isoWeek")); - map.put("isoWeekYear", singleArgumentAggregationMethodReference().forOperator("$isoWeekYear")); + map.put("dateFromString", mapArgRef().forOperator("$dateFromString") // + .mappingParametersTo("dateString", "format", "timezone", "onError", "onNull")); + map.put("dateFromParts", mapArgRef().forOperator("$dateFromParts").mappingParametersTo("year", "month", "day", + "hour", "minute", "second", "millisecond", "timezone")); + map.put("isoDateFromParts", mapArgRef().forOperator("$dateFromParts").mappingParametersTo("isoWeekYear", "isoWeek", + "isoDayOfWeek", "hour", "minute", "second", "millisecond", "timezone")); + map.put("dateToParts", mapArgRef().forOperator("$dateToParts") // + .mappingParametersTo("date", "timezone", "iso8601")); + map.put("isoDayOfWeek", singleArgRef().forOperator("$isoDayOfWeek")); + 
map.put("isoWeek", singleArgRef().forOperator("$isoWeek")); + map.put("isoWeekYear", singleArgRef().forOperator("$isoWeekYear")); + map.put("tsIncrement", singleArgRef().forOperator("$tsIncrement")); + map.put("tsSecond", singleArgRef().forOperator("$tsSecond")); // CONDITIONAL OPERATORS - map.put("cond", mapArgumentAggregationMethodReference().forOperator("$cond") // + map.put("cond", mapArgRef().forOperator("$cond") // .mappingParametersTo("if", "then", "else")); - map.put("ifNull", arrayArgumentAggregationMethodReference().forOperator("$ifNull")); + map.put("ifNull", arrayArgRef().forOperator("$ifNull")); // GROUP OPERATORS - map.put("sum", arrayArgumentAggregationMethodReference().forOperator("$sum")); - map.put("avg", arrayArgumentAggregationMethodReference().forOperator("$avg")); - map.put("first", singleArgumentAggregationMethodReference().forOperator("$first")); - map.put("last", singleArgumentAggregationMethodReference().forOperator("$last")); - map.put("max", arrayArgumentAggregationMethodReference().forOperator("$max")); - map.put("min", arrayArgumentAggregationMethodReference().forOperator("$min")); - map.put("push", singleArgumentAggregationMethodReference().forOperator("$push")); - map.put("addToSet", singleArgumentAggregationMethodReference().forOperator("$addToSet")); - map.put("stdDevPop", arrayArgumentAggregationMethodReference().forOperator("$stdDevPop")); - map.put("stdDevSamp", arrayArgumentAggregationMethodReference().forOperator("$stdDevSamp")); + map.put("sum", arrayArgRef().forOperator("$sum")); + map.put("avg", arrayArgRef().forOperator("$avg")); + map.put("first", singleArgRef().forOperator("$first")); + map.put("last", singleArgRef().forOperator("$last")); + map.put("max", arrayArgRef().forOperator("$max")); + map.put("min", arrayArgRef().forOperator("$min")); + map.put("push", singleArgRef().forOperator("$push")); + map.put("addToSet", singleArgRef().forOperator("$addToSet")); + map.put("stdDevPop", 
arrayArgRef().forOperator("$stdDevPop")); + map.put("stdDevSamp", arrayArgRef().forOperator("$stdDevSamp")); + map.put("covariancePop", arrayArgRef().forOperator("$covariancePop")); + map.put("covarianceSamp", arrayArgRef().forOperator("$covarianceSamp")); + map.put("bottom", mapArgRef().forOperator("$bottom") // + .mappingParametersTo("output", "sortBy")); + map.put("bottomN", mapArgRef().forOperator("$bottomN") // + .mappingParametersTo("n", "output", "sortBy")); + map.put("firstN", mapArgRef().forOperator("$firstN") // + .mappingParametersTo("n", "input")); + map.put("lastN", mapArgRef().forOperator("$lastN") // + .mappingParametersTo("n", "input")); + map.put("top", mapArgRef().forOperator("$top") // + .mappingParametersTo("output", "sortBy")); + map.put("topN", mapArgRef().forOperator("$topN") // + .mappingParametersTo("n", "output", "sortBy")); + map.put("maxN", mapArgRef().forOperator("$maxN") // + .mappingParametersTo("n", "input")); + map.put("minN", mapArgRef().forOperator("$minN") // + .mappingParametersTo("n", "input")); + map.put("percentile", mapArgRef().forOperator("$percentile") // + .mappingParametersTo("input", "p", "method")); + map.put("median", mapArgRef().forOperator("$median") // + .mappingParametersTo("input", "method")); // TYPE OPERATORS - map.put("type", singleArgumentAggregationMethodReference().forOperator("$type")); + map.put("type", singleArgRef().forOperator("$type")); + + // OBJECT OPERATORS + map.put("objectToArray", singleArgRef().forOperator("$objectToArray")); + map.put("mergeObjects", arrayArgRef().forOperator("$mergeObjects")); + map.put("getField", mapArgRef().forOperator("$getField").mappingParametersTo("field", "input")); + map.put("setField", mapArgRef().forOperator("$setField").mappingParametersTo("field", "value", "input")); + + // CONVERT OPERATORS + map.put("convert", mapArgRef().forOperator("$convert") // + .mappingParametersTo("input", "to", "onError", "onNull")); + map.put("toBool", 
singleArgRef().forOperator("$toBool")); + map.put("toDate", singleArgRef().forOperator("$toDate")); + map.put("toDecimal", singleArgRef().forOperator("$toDecimal")); + map.put("toDouble", singleArgRef().forOperator("$toDouble")); + map.put("toInt", singleArgRef().forOperator("$toInt")); + map.put("toLong", singleArgRef().forOperator("$toLong")); + map.put("toObjectId", singleArgRef().forOperator("$toObjectId")); + map.put("toString", singleArgRef().forOperator("$toString")); + map.put("degreesToRadians", singleArgRef().forOperator("$degreesToRadians")); + + // expression operators + map.put("locf", singleArgRef().forOperator("$locf")); FUNCTIONS = Collections.unmodifiableMap(map); } @@ -169,19 +266,6 @@ public class MethodReferenceNode extends ExpressionNode { super(reference, state); } - /** - * Returns the name of the method. - * - * @Deprecated since 1.10. Please use {@link #getMethodReference()}. - */ - @Nullable - @Deprecated - public String getMethodName() { - - AggregationMethodReference methodReference = getMethodReference(); - return methodReference != null ? methodReference.getMongoOperator() : null; - } - /** * Return the {@link AggregationMethodReference}. * @@ -255,7 +339,7 @@ public String[] getArgumentMap() { * * @return never {@literal null}. */ - static AggregationMethodReference singleArgumentAggregationMethodReference() { + static AggregationMethodReference singleArgRef() { return new AggregationMethodReference(null, ArgumentType.SINGLE, null); } @@ -264,7 +348,7 @@ static AggregationMethodReference singleArgumentAggregationMethodReference() { * * @return never {@literal null}. */ - static AggregationMethodReference arrayArgumentAggregationMethodReference() { + static AggregationMethodReference arrayArgRef() { return new AggregationMethodReference(null, ArgumentType.ARRAY, null); } @@ -273,10 +357,20 @@ static AggregationMethodReference arrayArgumentAggregationMethodReference() { * * @return never {@literal null}. 
*/ - static AggregationMethodReference mapArgumentAggregationMethodReference() { + static AggregationMethodReference mapArgRef() { return new AggregationMethodReference(null, ArgumentType.MAP, null); } + /** + * Create a new {@link AggregationMethodReference} for a {@link ArgumentType#EMPTY_DOCUMENT} argument. + * + * @return never {@literal null}. + * @since 3.3 + */ + static AggregationMethodReference emptyRef() { + return new AggregationMethodReference(null, ArgumentType.EMPTY_DOCUMENT, null); + } + /** * Create a new {@link AggregationMethodReference} for a given {@literal aggregationExpressionOperator} reusing * previously set arguments. @@ -301,7 +395,7 @@ AggregationMethodReference forOperator(String aggregationExpressionOperator) { AggregationMethodReference mappingParametersTo(String... aggregationExpressionProperties) { Assert.isTrue(ObjectUtils.nullSafeEquals(argumentType, ArgumentType.MAP), - "Parameter mapping can only be applied to AggregationMethodReference with MAPPED ArgumentType."); + "Parameter mapping can only be applied to AggregationMethodReference with MAPPED ArgumentType"); return new AggregationMethodReference(mongoOperator, argumentType, aggregationExpressionProperties); } @@ -312,7 +406,7 @@ AggregationMethodReference mappingParametersTo(String... aggregationExpressionPr * @since 1.10 */ public enum ArgumentType { - SINGLE, ARRAY, MAP + SINGLE, ARRAY, MAP, EMPTY_DOCUMENT } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/NotOperatorNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/NotOperatorNode.java index 1809307f51..ea0608225f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/NotOperatorNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/NotOperatorNode.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. 
+ * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +16,6 @@ package org.springframework.data.mongodb.core.spel; import org.springframework.expression.spel.ExpressionState; -import org.springframework.expression.spel.SpelNode; import org.springframework.expression.spel.ast.OperatorNot; /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/OperatorNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/OperatorNode.java index 866fe30021..7d242e777e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/OperatorNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/OperatorNode.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,6 @@ */ package org.springframework.data.mongodb.core.spel; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -34,47 +31,15 @@ */ public class OperatorNode extends ExpressionNode { - private static final Map OPERATORS; - private static final Set SUPPORTED_MATH_OPERATORS; - - static { - - Map map = new HashMap(14, 1); - - map.put("+", "$add"); - map.put("-", "$subtract"); - map.put("*", "$multiply"); - map.put("/", "$divide"); - map.put("%", "$mod"); - map.put("^", "$pow"); - map.put("==", "$eq"); - map.put("!=", "$ne"); - map.put(">", "$gt"); - map.put(">=", "$gte"); - map.put("<", "$lt"); - map.put("<=", "$lte"); - - map.put("and", "$and"); - map.put("or", "$or"); - - OPERATORS = Collections.unmodifiableMap(map); - - Set set = new HashSet(12, 1); - set.add(OpMinus.class); - set.add(OpPlus.class); - set.add(OpMultiply.class); - set.add(OpDivide.class); - set.add(OpModulus.class); - set.add(OperatorPower.class); - set.add(OpNE.class); - set.add(OpEQ.class); - set.add(OpGT.class); - set.add(OpGE.class); - set.add(OpLT.class); - set.add(OpLE.class); - - SUPPORTED_MATH_OPERATORS = Collections.unmodifiableSet(set); - } + private static final Map OPERATORS = Map.ofEntries(Map.entry("+", "$add"), + Map.entry("-", "$subtract"), Map.entry("*", "$multiply"), Map.entry("/", "$divide"), Map.entry("%", "$mod"), + Map.entry("^", "$pow"), Map.entry("==", "$eq"), Map.entry("!=", "$ne"), Map.entry(">", "$gt"), + Map.entry(">=", "$gte"), Map.entry("<", "$lt"), Map.entry("<=", "$lte"), Map.entry("and", "$and"), + Map.entry("or", "$or")); + + private static final Set SUPPORTED_MATH_OPERATORS = Set.of(OpMinus.class, 
OpPlus.class, OpMultiply.class, + OpDivide.class, OpModulus.class, OperatorPower.class, OpNE.class, OpEQ.class, OpGT.class, OpGE.class, OpLT.class, + OpLE.class); private final Operator operator; @@ -89,19 +54,11 @@ public class OperatorNode extends ExpressionNode { this.operator = node; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.spel.ExpressionNode#isMathematicalOperation() - */ @Override public boolean isMathematicalOperation() { return SUPPORTED_MATH_OPERATORS.contains(operator.getClass()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.spel.ExpressionNode#isConjunctionOperator() - */ @Override public boolean isLogicalOperator() { return operator instanceof OpOr || operator instanceof OpAnd; @@ -125,7 +82,7 @@ public String getMongoOperator() { if (!OPERATORS.containsKey(operator.getOperatorName())) { throw new IllegalArgumentException(String.format( - "Unknown operator name. Cannot translate %s into its MongoDB aggregation function representation.", + "Unknown operator name; Cannot translate %s into its MongoDB aggregation function representation", operator.getOperatorName())); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java new file mode 100644 index 0000000000..c923cbb884 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java @@ -0,0 +1,45 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.timeseries; + +/** + * {@link GranularityDefinition Granularities} available for Time Series data. + * + * @author Christoph Strobl + * @since 3.3 + */ +public enum Granularity implements GranularityDefinition { + + /** + * Server default value to indicate no explicit value should be sent. + */ + DEFAULT, + + /** + * High frequency ingestion. + */ + SECONDS, + + /** + * Medium frequency ingestion. + */ + MINUTES, + + /** + * Low frequency ingestion. + */ + HOURS +} diff --git a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/DocumentBacked.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/GranularityDefinition.java similarity index 57% rename from spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/DocumentBacked.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/GranularityDefinition.java index 30c1513a49..0e714470db 100644 --- a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/DocumentBacked.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/GranularityDefinition.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2021-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,16 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.mongodb.crossstore; - -import org.springframework.data.crossstore.ChangeSetBacked; +package org.springframework.data.mongodb.core.timeseries; /** - * @author Thomas Risberg - * @author Oliver Gierke - * @deprecated will be removed without replacement. + * The Granularity of time series data that is closest to the time span between incoming measurements. + * + * @author Christoph Strobl + * @since 3.3 */ -@Deprecated -public interface DocumentBacked extends ChangeSetBacked { +public interface GranularityDefinition { + String name(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/CriteriaValidator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/CriteriaValidator.java index 2600e0ead1..779ed4ec9f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/CriteriaValidator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/CriteriaValidator.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,13 @@ */ package org.springframework.data.mongodb.core.validation; -import lombok.AccessLevel; -import lombok.EqualsAndHashCode; -import lombok.RequiredArgsConstructor; - import org.bson.Document; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.SerializationUtils; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; /** * {@link Validator} implementation based on {@link CriteriaDefinition query expressions}. @@ -34,12 +32,14 @@ * @see Criteria * @see Schema Validation */ -@RequiredArgsConstructor(access = AccessLevel.PRIVATE) -@EqualsAndHashCode class CriteriaValidator implements Validator { private final CriteriaDefinition criteria; + private CriteriaValidator(CriteriaDefinition criteria) { + this.criteria = criteria; + } + /** * Creates a new {@link Validator} object, which is basically setup of query operators, based on a * {@link CriteriaDefinition} instance. 
@@ -50,26 +50,35 @@ class CriteriaValidator implements Validator { */ static CriteriaValidator of(CriteriaDefinition criteria) { - Assert.notNull(criteria, "Criteria must not be null!"); + Assert.notNull(criteria, "Criteria must not be null"); return new CriteriaValidator(criteria); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.validation.Validator#toDocument() - */ @Override public Document toDocument() { return criteria.getCriteriaObject(); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return SerializationUtils.serializeToJsonSafely(toDocument()); } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + CriteriaValidator that = (CriteriaValidator) o; + + return ObjectUtils.nullSafeEquals(criteria, that.criteria); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(criteria); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/DocumentValidator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/DocumentValidator.java index 11e6505d74..5e27b99ad6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/DocumentValidator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/DocumentValidator.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,11 @@ */ package org.springframework.data.mongodb.core.validation; -import lombok.AccessLevel; -import lombok.EqualsAndHashCode; -import lombok.RequiredArgsConstructor; - import org.bson.Document; import org.springframework.data.mongodb.core.query.SerializationUtils; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; /** * Most trivial {@link Validator} implementation using plain {@link Document} to describe the desired document structure @@ -32,12 +30,14 @@ * @since 2.1 * @see Schema Validation */ -@RequiredArgsConstructor(access = AccessLevel.PRIVATE) -@EqualsAndHashCode class DocumentValidator implements Validator { private final Document validatorObject; + private DocumentValidator(Document validatorObject) { + this.validatorObject = validatorObject; + } + /** * Create new {@link DocumentValidator} defining validation rules via a plain {@link Document}. 
* @@ -46,26 +46,36 @@ class DocumentValidator implements Validator { */ static DocumentValidator of(Document validatorObject) { - Assert.notNull(validatorObject, "ValidatorObject must not be null!"); + Assert.notNull(validatorObject, "ValidatorObject must not be null"); return new DocumentValidator(new Document(validatorObject)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.validation.Validator#toDocument() - */ @Override public Document toDocument() { return new Document(validatorObject); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return SerializationUtils.serializeToJsonSafely(validatorObject); } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + DocumentValidator that = (DocumentValidator) o; + + return ObjectUtils.nullSafeEquals(validatorObject, that.validatorObject); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(validatorObject); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/JsonSchemaValidator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/JsonSchemaValidator.java index 8b9826692a..61ef8c5b4f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/JsonSchemaValidator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/JsonSchemaValidator.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,12 @@ */ package org.springframework.data.mongodb.core.validation; -import lombok.AccessLevel; -import lombok.EqualsAndHashCode; -import lombok.RequiredArgsConstructor; - import org.bson.Document; import org.springframework.data.mongodb.core.query.SerializationUtils; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; /** * {@link Validator} implementation based on {@link MongoJsonSchema JSON Schema}. @@ -32,12 +30,14 @@ * @since 2.1 * @see Schema Validation */ -@RequiredArgsConstructor(access = AccessLevel.PRIVATE) -@EqualsAndHashCode class JsonSchemaValidator implements Validator { private final MongoJsonSchema schema; + private JsonSchemaValidator(MongoJsonSchema schema) { + this.schema = schema; + } + /** * Create new {@link JsonSchemaValidator} defining validation rules via {@link MongoJsonSchema}. 
* @@ -46,26 +46,36 @@ class JsonSchemaValidator implements Validator { */ static JsonSchemaValidator of(MongoJsonSchema schema) { - Assert.notNull(schema, "Schema must not be null!"); + Assert.notNull(schema, "Schema must not be null"); return new JsonSchemaValidator(schema); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.validation.Validator#toDocument() - */ @Override public Document toDocument() { return schema.toDocument(); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return SerializationUtils.serializeToJsonSafely(toDocument()); } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + JsonSchemaValidator that = (JsonSchemaValidator) o; + + return ObjectUtils.nullSafeEquals(schema, that.schema); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(schema); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/Validator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/Validator.java index 47ec6ff6f9..9261642c70 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/Validator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/validation/Validator.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -48,7 +48,7 @@ public interface Validator { */ static Validator document(Document validationRules) { - Assert.notNull(validationRules, "ValidationRules must not be null!"); + Assert.notNull(validationRules, "ValidationRules must not be null"); return DocumentValidator.of(validationRules); } @@ -61,7 +61,7 @@ static Validator document(Document validationRules) { */ static Validator schema(MongoJsonSchema schema) { - Assert.notNull(schema, "Schema must not be null!"); + Assert.notNull(schema, "Schema must not be null"); return JsonSchemaValidator.of(schema); } @@ -75,7 +75,7 @@ static Validator schema(MongoJsonSchema schema) { */ static Validator criteria(CriteriaDefinition criteria) { - Assert.notNull(criteria, "Criteria must not be null!"); + Assert.notNull(criteria, "Criteria must not be null"); return CriteriaValidator.of(criteria); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/AntPath.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/AntPath.java index a1ce0994ec..8f61be8659 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/AntPath.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/AntPath.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -40,7 +40,7 @@ class AntPath { */ public AntPath(String path) { - Assert.notNull(path, "Path must not be null!"); + Assert.notNull(path, "Path must not be null"); this.path = path; } @@ -98,10 +98,6 @@ private static String quote(String s, int start, int end) { return Pattern.quote(s.substring(start, end)); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return path; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsCriteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsCriteria.java index bd5cff2f78..54010a7c65 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsCriteria.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsCriteria.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -29,7 +29,7 @@ public class GridFsCriteria extends Criteria { /** * Creates a new {@link GridFsCriteria} for the given key. * - * @param key + * @param key must not be {@literal null}. 
*/ public GridFsCriteria(String key) { super(key); @@ -38,7 +38,7 @@ public GridFsCriteria(String key) { /** * Creates a {@link GridFsCriteria} for restrictions on the file's metadata. * - * @return + * @return new instance of {@link GridFsCriteria}. */ public static GridFsCriteria whereMetaData() { return new GridFsCriteria("metadata"); @@ -47,8 +47,8 @@ public static GridFsCriteria whereMetaData() { /** * Creates a {@link GridFsCriteria} for restrictions on a single file's metadata item. * - * @param metadataKey - * @return + * @param metadataKey can be {@literal null}. + * @return new instance of {@link GridFsCriteria}. */ public static GridFsCriteria whereMetaData(@Nullable String metadataKey) { @@ -59,7 +59,7 @@ public static GridFsCriteria whereMetaData(@Nullable String metadataKey) { /** * Creates a {@link GridFsCriteria} for restrictions on the file's name. * - * @return + * @return new instance of {@link GridFsCriteria}. */ public static GridFsCriteria whereFilename() { return new GridFsCriteria("filename"); @@ -68,7 +68,7 @@ public static GridFsCriteria whereFilename() { /** * Creates a {@link GridFsCriteria} for restrictions on the file's content type. * - * @return + * @return new instance of {@link GridFsCriteria}. */ public static GridFsCriteria whereContentType() { return new GridFsCriteria("metadata.".concat(GridFsResource.CONTENT_TYPE_FIELD)); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsObject.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsObject.java new file mode 100644 index 0000000000..f73c0c943f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsObject.java @@ -0,0 +1,158 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.gridfs; + +import org.bson.Document; +import org.springframework.lang.Nullable; + +import com.mongodb.client.gridfs.model.GridFSFile; + +/** + * A common interface when dealing with GridFs items using Spring Data. + * + * @author Christoph Strobl + * @since 3.0 + */ +public interface GridFsObject<ID, CONTENT> { + + /** + * The {@link GridFSFile#getId()} value converted into its simple java type. <br />
          + * A {@link org.bson.BsonString} will be converted to plain {@link String}. + * + * @return can be {@literal null} depending on the implementation. + */ + @Nullable + ID getFileId(); + + /** + * The filename. + * + * @return + */ + String getFilename(); + + /** + * The actual file content. + * + * @return + * @throws IllegalStateException if the content cannot be obtained. + */ + CONTENT getContent(); + + /** + * Additional information like file metadata (eg. contentType). + * + * @return never {@literal null}. + */ + Options getOptions(); + + /** + * Additional, context relevant information. + * + * @author Christoph Strobl + */ + class Options { + + private final Document metadata; + private final int chunkSize; + + private Options(Document metadata, int chunkSize) { + + this.metadata = metadata; + this.chunkSize = chunkSize; + } + + /** + * Static factory to create empty options. + * + * @return new instance of {@link Options}. + */ + public static Options none() { + return new Options(new Document(), -1); + } + + /** + * Static factory method to create {@link Options} with given content type. + * + * @param contentType + * @return new instance of {@link Options}. + */ + public static Options typed(String contentType) { + return new Options(new Document("_contentType", contentType), -1); + } + + /** + * Static factory method to create {@link Options} by extracting information from the given {@link GridFSFile}. + * + * @param gridFSFile can be {@literal null}, returns {@link #none()} in that case. + * @return new instance of {@link Options}. + */ + public static Options from(@Nullable GridFSFile gridFSFile) { + return gridFSFile != null ? new Options(gridFSFile.getMetadata(), gridFSFile.getChunkSize()) : none(); + } + + /** + * Set the associated content type. + * + * @param contentType must not be {@literal null}. + * @return new instance of {@link Options}. 
+ */ + public Options contentType(String contentType) { + + Options target = new Options(new Document(metadata), chunkSize); + target.metadata.put("_contentType", contentType); + return target; + } + + /** + * @param metadata + * @return new instance of {@link Options}. + */ + public Options metadata(Document metadata) { + return new Options(metadata, chunkSize); + } + + /** + * @param chunkSize the file chunk size to use. + * @return new instance of {@link Options}. + */ + public Options chunkSize(int chunkSize) { + return new Options(metadata, chunkSize); + } + + /** + * @return never {@literal null}. + */ + public Document getMetadata() { + return metadata; + } + + /** + * @return the chunk size to use. + */ + public int getChunkSize() { + return chunkSize; + } + + /** + * @return {@literal null} if not set. + */ + @Nullable + String getContentType() { + return (String) metadata.get("_contentType"); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperations.java index 2f767e1490..bf5a1d86e3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,7 +22,10 @@ import org.springframework.core.io.support.ResourcePatternResolver; import org.springframework.data.domain.Sort; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.gridfs.GridFsUpload.GridFsUploadBuilder; import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; import com.mongodb.client.gridfs.GridFSFindIterable; @@ -34,6 +37,7 @@ * @author Thomas Darimont * @author Martin Baumgartner * @author Christoph Strobl + * @author Hartmut Lang */ public interface GridFsOperations extends ResourcePatternResolver { @@ -44,7 +48,9 @@ public interface GridFsOperations extends ResourcePatternResolver { * @param filename must not be {@literal null} or empty. * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. */ - ObjectId store(InputStream content, String filename); + default ObjectId store(InputStream content, String filename) { + return store(content, filename, null, null); + } /** * Stores the given content into a file with the given name. @@ -53,7 +59,9 @@ public interface GridFsOperations extends ResourcePatternResolver { * @param metadata can be {@literal null}. * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. */ - ObjectId store(InputStream content, @Nullable Object metadata); + default ObjectId store(InputStream content, @Nullable Object metadata) { + return store(content, null, metadata); + } /** * Stores the given content into a file with the given name. 
@@ -62,7 +70,9 @@ public interface GridFsOperations extends ResourcePatternResolver { * @param metadata can be {@literal null}. * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. */ - ObjectId store(InputStream content, @Nullable Document metadata); + default ObjectId store(InputStream content, @Nullable Document metadata) { + return store(content, null, metadata); + } /** * Stores the given content into a file with the given name and content type. @@ -72,7 +82,9 @@ public interface GridFsOperations extends ResourcePatternResolver { * @param contentType can be {@literal null}. * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. */ - ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType); + default ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType) { + return store(content, filename, contentType, null); + } /** * Stores the given content into a file with the given name using the given metadata. The metadata object will be @@ -83,7 +95,9 @@ public interface GridFsOperations extends ResourcePatternResolver { * @param metadata can be {@literal null}. * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. */ - ObjectId store(InputStream content, @Nullable String filename, @Nullable Object metadata); + default ObjectId store(InputStream content, @Nullable String filename, @Nullable Object metadata) { + return store(content, filename, null, metadata); + } /** * Stores the given content into a file with the given name and content type using the given metadata. The metadata @@ -106,19 +120,48 @@ ObjectId store(InputStream content, @Nullable String filename, @Nullable String * @param metadata can be {@literal null}. * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. 
*/ - ObjectId store(InputStream content, @Nullable String filename, @Nullable Document metadata); + default ObjectId store(InputStream content, @Nullable String filename, @Nullable Document metadata) { + return store(content, filename, null, metadata); + } /** * Stores the given content into a file with the given name and content type using the given metadata. * * @param content must not be {@literal null}. * @param filename must not be {@literal null} or empty. - * @param contentType can be {@literal null}. + * @param contentType can be {@literal null}. If not empty, may override content type within {@literal metadata}. * @param metadata can be {@literal null}. * @return the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just created. */ - ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType, - @Nullable Document metadata); + default ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType, + @Nullable Document metadata) { + + GridFsUploadBuilder uploadBuilder = GridFsUpload.fromStream(content); + if (StringUtils.hasText(filename)) { + uploadBuilder.filename(filename); + } + if (!ObjectUtils.isEmpty(metadata)) { + uploadBuilder.metadata(metadata); + } + if (StringUtils.hasText(contentType)) { + uploadBuilder.contentType(contentType); + } + + return store(uploadBuilder.build()); + } + + /** + * Stores the given {@link GridFsObject}, likely a {@link GridFsUpload}, into into a file with given + * {@link GridFsObject#getFilename() name}. If the {@link GridFsObject#getFileId()} is set, the file will be stored + * with that id, otherwise the server auto creates a new id.
+ * + * @param upload the {@link GridFsObject} (most likely a {@link GridFsUpload}) to be stored. + * @param <T> id type of the underlying {@link com.mongodb.client.gridfs.model.GridFSFile} + * @return the id of the stored file. Either an auto created value or {@link GridFsObject#getFileId()}, but never + * {@literal null}. + * @since 3.0 + */ + <T> T store(GridFsObject<T, ? extends InputStream> upload); /** * Returns all files matching the given query. Note, that currently {@link Sort} criterias defined at the @@ -136,7 +179,7 @@ ObjectId store(InputStream content, @Nullable String filename, @Nullable String * case no file matches. * * @param query must not be {@literal null}. - * @return + * @return can be {@literal null}. */ @Nullable com.mongodb.client.gridfs.model.GridFSFile findOne(Query query); @@ -152,16 +195,26 @@ ObjectId store(InputStream content, @Nullable String filename, @Nullable String * Returns the {@link GridFsResource} with the given file name. * * @param filename must not be {@literal null}. - * @return the resource if it exists or {@literal null}. + * @return the resource. Use {@link org.springframework.core.io.Resource#exists()} to check if the returned + * {@link GridFsResource} is actually present. * @see ResourcePatternResolver#getResource(String) */ GridFsResource getResource(String filename); + /** + * Returns the {@link GridFsResource} for a {@link com.mongodb.client.gridfs.model.GridFSFile}. + * + * @param file must not be {@literal null}. + * @return the resource for the file. + * @since 2.1 + */ + GridFsResource getResource(com.mongodb.client.gridfs.model.GridFSFile file); + /** * Returns all {@link GridFsResource}s matching the given file name pattern. * * @param filenamePattern must not be {@literal null}. - * @return + * @return an empty array if none found.
* @see ResourcePatternResolver#getResources(String) */ GridFsResource[] getResources(String filenamePattern); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperationsSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperationsSupport.java new file mode 100644 index 0000000000..b3d3771f3c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsOperationsSupport.java @@ -0,0 +1,104 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.gridfs; + +import java.util.Optional; + +import org.bson.Document; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.client.gridfs.model.GridFSUploadOptions; + +/** + * Base class offering common tasks like query mapping and {@link GridFSUploadOptions} computation to be shared across + * imperative and reactive implementations. + * + * @author Christoph Strobl + * @since 2.2 + */ +class GridFsOperationsSupport { + + private final QueryMapper queryMapper; + private final MongoConverter converter; + + /** + * @param converter must not be {@literal null}. 
+ */ + GridFsOperationsSupport(MongoConverter converter) { + + Assert.notNull(converter, "MongoConverter must not be null"); + + this.converter = converter; + this.queryMapper = new QueryMapper(converter); + } + + /** + * @param query pass the given query though a {@link QueryMapper} to apply type conversion. + * @return never {@literal null}. + */ + protected Document getMappedQuery(Document query) { + return queryMapper.getMappedObject(query, Optional.empty()); + } + + /** + * Compute the {@link GridFSUploadOptions} to be used from the given {@literal contentType} and {@literal metadata} + * {@link Document}. + * + * @param contentType can be {@literal null}. + * @param metadata can be {@literal null} + * @return never {@literal null}. + */ + protected GridFSUploadOptions computeUploadOptionsFor(@Nullable String contentType, @Nullable Document metadata) { + + Document targetMetadata = new Document(); + + if (StringUtils.hasText(contentType)) { + targetMetadata.put(GridFsResource.CONTENT_TYPE_FIELD, contentType); + } + + if (metadata != null) { + targetMetadata.putAll(metadata); + } + + GridFSUploadOptions options = new GridFSUploadOptions(); + options.metadata(targetMetadata); + + return options; + } + + /** + * Convert a given {@literal value} into a {@link Document}. + * + * @param value can be {@literal null}. + * @return an empty {@link Document} if the source value is {@literal null}. 
+ */ + protected Document toDocument(@Nullable Object value) { + + if (value instanceof Document document) { + return document; + } + + Document document = new Document(); + if (value != null) { + converter.write(value, document); + } + return document; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsResource.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsResource.java index d96e34f450..0873432977 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsResource.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsResource.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,13 +16,16 @@ package org.springframework.data.mongodb.gridfs; import java.io.ByteArrayInputStream; +import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.Optional; import org.springframework.core.io.InputStreamResource; import org.springframework.core.io.Resource; -import org.springframework.data.util.Optionals; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; import com.mongodb.MongoGridFSException; import com.mongodb.client.gridfs.model.GridFSFile; @@ -35,11 +38,27 @@ * @author Hartmut Lang * @author Mark Paluch */ -public class GridFsResource extends InputStreamResource { +public class 
GridFsResource extends InputStreamResource implements GridFsObject<Object, InputStream> { static final String CONTENT_TYPE_FIELD = "_contentType"; + private static final ByteArrayInputStream EMPTY_INPUT_STREAM = new ByteArrayInputStream(new byte[0]); - private final GridFSFile file; + private final @Nullable GridFSFile file; + private final String filename; + + /** + * Creates a new, absent {@link GridFsResource}. + * + * @param filename filename of the absent resource. + * @since 2.1 + */ + private GridFsResource(String filename) { + + super(EMPTY_INPUT_STREAM, String.format("GridFs resource [%s]", filename)); + + this.file = null; + this.filename = filename; + } /** * Creates a new {@link GridFsResource} from the given {@link GridFSFile}. @@ -58,44 +77,89 @@ public GridFsResource(GridFSFile file) { */ public GridFsResource(GridFSFile file, InputStream inputStream) { - super(inputStream); + super(inputStream, String.format("GridFs resource [%s]", file.getFilename())); + this.file = file; + this.filename = file.getFilename(); } - /* - * (non-Javadoc) - * @see org.springframework.core.io.AbstractResource#contentLength() + /** + * Obtain an absent {@link GridFsResource}. + * + * @param filename filename of the absent resource, must not be {@literal null}. + * @return never {@literal null}.
+ * @since 2.1 */ + public static GridFsResource absent(String filename) { + + Assert.notNull(filename, "Filename must not be null"); + + return new GridFsResource(filename); + } + + @Override + public InputStream getInputStream() throws IOException, IllegalStateException { + + verifyExists(); + return super.getInputStream(); + } + @Override public long contentLength() throws IOException { - return file.getLength(); + + verifyExists(); + return getGridFSFile().getLength(); } - /* - * (non-Javadoc) - * @see org.springframework.core.io.AbstractResource#getFilename() - */ @Override public String getFilename() throws IllegalStateException { - return file.getFilename(); + return this.filename; + } + + @Override + public boolean exists() { + return this.file != null; } - /* - * (non-Javadoc) - * @see org.springframework.core.io.AbstractResource#lastModified() - */ @Override public long lastModified() throws IOException { - return file.getUploadDate().getTime(); + + verifyExists(); + return getGridFSFile().getUploadDate().getTime(); + } + + @Override + public String getDescription() { + return String.format("GridFs resource [%s]", this.getFilename()); } /** * Returns the {@link Resource}'s id. * * @return never {@literal null}. + * @throws IllegalStateException if the file does not {@link #exists()}. */ public Object getId() { - return file.getId(); + + Assert.state(exists(), () -> String.format("%s does not exist.", getDescription())); + + return getGridFSFile().getId(); + } + + @Override + public Object getFileId() { + + Assert.state(exists(), () -> String.format("%s does not exist.", getDescription())); + return BsonUtils.toJavaType(getGridFSFile().getId()); + } + + /** + * @return the underlying {@link GridFSFile}. Can be {@literal null} if absent. + * @since 2.2 + */ + @Nullable + public GridFSFile getGridFSFile() { + return this.file; } /** @@ -103,15 +167,36 @@ public Object getId() { * * @return never {@literal null}. 
* @throws com.mongodb.MongoGridFSException in case no content type declared on {@link GridFSFile#getMetadata()} nor - * provided via {@link GridFSFile#getContentType()}. + * provided via {@link GridFSFile}. + * @throws IllegalStateException if the file does not {@link #exists()}. */ - @SuppressWarnings("deprecation") public String getContentType() { - return Optionals - .firstNonEmpty( - () -> Optional.ofNullable(file.getMetadata()).map(it -> it.get(CONTENT_TYPE_FIELD, String.class)), - () -> Optional.ofNullable(file.getContentType())) + Assert.state(exists(), () -> String.format("%s does not exist.", getDescription())); + + return Optional.ofNullable(getGridFSFile().getMetadata()).map(it -> it.get(CONTENT_TYPE_FIELD, String.class)) .orElseThrow(() -> new MongoGridFSException("No contentType data for this GridFS file")); } + + @Override + public InputStream getContent() { + + try { + return getInputStream(); + } catch (IOException e) { + throw new IllegalStateException("Failed to obtain input stream for " + filename, e); + } + } + + @Override + public Options getOptions() { + return Options.from(getGridFSFile()); + } + + private void verifyExists() throws FileNotFoundException { + + if (!exists()) { + throw new FileNotFoundException(String.format("%s does not exist.", getDescription())); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsTemplate.java index faf1f48b3d..8187c7dbc3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsTemplate.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,15 +22,16 @@ import java.util.ArrayList; import java.util.List; import java.util.Optional; +import java.util.function.Supplier; -import org.bson.BsonObjectId; import org.bson.Document; import org.bson.types.ObjectId; import org.springframework.core.io.support.ResourcePatternResolver; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.convert.MongoConverter; -import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.util.Lazy; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -51,190 +52,142 @@ * @author Martin Baumgartner * @author Christoph Strobl * @author Mark Paluch + * @author Hartmut Lang + * @author Niklas Helge Hanft + * @author Denis Zavedeev */ -public class GridFsTemplate implements GridFsOperations, ResourcePatternResolver { +public class GridFsTemplate extends GridFsOperationsSupport implements GridFsOperations, ResourcePatternResolver { - private final MongoDbFactory dbFactory; - - private final String bucket; - private final MongoConverter converter; - private final QueryMapper queryMapper; + private final Supplier bucketSupplier; /** - * Creates a new {@link GridFsTemplate} using the given {@link MongoDbFactory} and {@link MongoConverter}. 
+ * Creates a new {@link GridFsTemplate} using the given {@link MongoDatabaseFactory} and {@link MongoConverter}. + *

          + * Note that the {@link GridFSBucket} is obtained only once from {@link MongoDatabaseFactory#getMongoDatabase() + * MongoDatabase}. Use {@link #GridFsTemplate(MongoConverter, Supplier)} if you want to use different buckets from the + * same Template instance. * * @param dbFactory must not be {@literal null}. * @param converter must not be {@literal null}. */ - public GridFsTemplate(MongoDbFactory dbFactory, MongoConverter converter) { + public GridFsTemplate(MongoDatabaseFactory dbFactory, MongoConverter converter) { this(dbFactory, converter, null); } /** - * Creates a new {@link GridFsTemplate} using the given {@link MongoDbFactory} and {@link MongoConverter}. + * Creates a new {@link GridFsTemplate} using the given {@link MongoDatabaseFactory} and {@link MongoConverter}. + *

          + * Note that the {@link GridFSBucket} is obtained only once from {@link MongoDatabaseFactory#getMongoDatabase() + * MongoDatabase}. Use {@link #GridFsTemplate(MongoConverter, Supplier)} if you want to use different buckets from the + * same Template instance. * * @param dbFactory must not be {@literal null}. * @param converter must not be {@literal null}. - * @param bucket + * @param bucket can be {@literal null}. */ - public GridFsTemplate(MongoDbFactory dbFactory, MongoConverter converter, String bucket) { - - Assert.notNull(dbFactory, "MongoDbFactory must not be null!"); - Assert.notNull(converter, "MongoConverter must not be null!"); - - this.dbFactory = dbFactory; - this.converter = converter; - this.bucket = bucket; - - this.queryMapper = new QueryMapper(converter); + public GridFsTemplate(MongoDatabaseFactory dbFactory, MongoConverter converter, @Nullable String bucket) { + this(converter, Lazy.of(() -> getGridFs(dbFactory, bucket))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String) + /** + * Creates a new {@link GridFsTemplate} using the given {@link MongoConverter} and {@link Supplier} providing the + * required {@link GridFSBucket}. + * + * @param converter must not be {@literal null}. + * @param gridFSBucket must not be {@literal null}. 
+ * @since 4.2 */ - public ObjectId store(InputStream content, String filename) { - return store(content, filename, (Object) null); - } + public GridFsTemplate(MongoConverter converter, Supplier gridFSBucket) { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.Object) - */ - @Override - public ObjectId store(InputStream content, @Nullable Object metadata) { - return store(content, null, metadata); - } + super(converter); - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, com.mongodb.Document) - */ - @Override - public ObjectId store(InputStream content, @Nullable Document metadata) { - return store(content, null, metadata); - } + Assert.notNull(gridFSBucket, "GridFSBucket supplier must not be null"); - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, java.lang.String) - */ - public ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType) { - return store(content, filename, contentType, (Object) null); + this.bucketSupplier = gridFSBucket; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, java.lang.Object) - */ - public ObjectId store(InputStream content, @Nullable String filename, @Nullable Object metadata) { - return store(content, filename, null, metadata); + @Override + public ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType, + @Nullable Object metadata) { + return store(content, filename, contentType, toDocument(metadata)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, java.lang.String, java.lang.Object) - */ - public ObjectId store(InputStream content, @Nullable String filename, @Nullable 
String contentType, @Nullable Object metadata) { + @Override + @SuppressWarnings("unchecked") + public T store(GridFsObject upload) { - Document document = null; + GridFSUploadOptions uploadOptions = computeUploadOptionsFor(upload.getOptions().getContentType(), + upload.getOptions().getMetadata()); - if (metadata != null) { - document = new Document(); - converter.write(metadata, document); + if (upload.getOptions().getChunkSize() > 0) { + uploadOptions.chunkSizeBytes(upload.getOptions().getChunkSize()); } - return store(content, filename, contentType, document); - } + if (upload.getFileId() == null) { + return (T) getGridFs().uploadFromStream(upload.getFilename(), upload.getContent(), uploadOptions); + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, com.mongodb.Document) - */ - public ObjectId store(InputStream content, @Nullable String filename, @Nullable Document metadata) { - return this.store(content, filename, null, metadata); + getGridFs().uploadFromStream(BsonUtils.simpleToBsonValue(upload.getFileId()), upload.getFilename(), + upload.getContent(), uploadOptions); + return upload.getFileId(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#store(java.io.InputStream, java.lang.String, com.mongodb.Document) - */ - public ObjectId store(InputStream content, @Nullable String filename, @Nullable String contentType, @Nullable Document metadata) { + @Override + public GridFSFindIterable find(Query query) { - Assert.notNull(content, "InputStream must not be null!"); + Assert.notNull(query, "Query must not be null"); - GridFSUploadOptions options = new GridFSUploadOptions(); + Document queryObject = getMappedQuery(query.getQueryObject()); + Document sortObject = getMappedQuery(query.getSortObject()); - Document mData = new Document(); + GridFSFindIterable iterable = getGridFs().find(queryObject).sort(sortObject); - if 
(StringUtils.hasText(contentType)) { - mData.put(GridFsResource.CONTENT_TYPE_FIELD, contentType); + if (query.getSkip() > 0) { + iterable = iterable.skip(Math.toIntExact(query.getSkip())); } - if (metadata != null) { - mData.putAll(metadata); + if (query.getLimit() > 0) { + iterable = iterable.limit(query.getLimit()); } - options.metadata(mData); - - return getGridFs().uploadFromStream(filename, content, options); + return iterable; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#find(com.mongodb.Document) - */ - public GridFSFindIterable find(Query query) { - - Assert.notNull(query, "Query must not be null!"); - - Document queryObject = getMappedQuery(query.getQueryObject()); - Document sortObject = getMappedQuery(query.getSortObject()); - - return getGridFs().find(queryObject).sort(sortObject); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#findOne(com.mongodb.Document) - */ + @Override public GridFSFile findOne(Query query) { return find(query).first(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.gridfs.GridFsOperations#delete(org.springframework.data.mongodb.core.query.Query) - */ + @Override public void delete(Query query) { - for (GridFSFile x : find(query)) { - getGridFs().delete(((BsonObjectId) x.getId()).getValue()); + for (GridFSFile gridFSFile : find(query)) { + getGridFs().delete(gridFSFile.getId()); } } - /* - * (non-Javadoc) - * @see org.springframework.core.io.ResourceLoader#getClassLoader() - */ + @Override public ClassLoader getClassLoader() { - return dbFactory.getClass().getClassLoader(); + return null; } - /* - * (non-Javadoc) - * @see org.springframework.core.io.ResourceLoader#getResource(java.lang.String) - */ + @Override public GridFsResource getResource(String location) { - GridFSFile file = findOne(query(whereFilename().is(location))); - return file != null ? 
new GridFsResource(file, getGridFs().openDownloadStream(location)) : null; + return Optional.ofNullable(findOne(query(whereFilename().is(location)))) // + .map(this::getResource) // + .orElseGet(() -> GridFsResource.absent(location)); } - /* - * (non-Javadoc) - * @see org.springframework.core.io.support.ResourcePatternResolver#getResources(java.lang.String) - */ + @Override + public GridFsResource getResource(GridFSFile file) { + + Assert.notNull(file, "GridFSFile must not be null"); + + return new GridFsResource(file, getGridFs().openDownloadStream(file.getId())); + } + + @Override public GridFsResource[] getResources(String locationPattern) { if (!StringUtils.hasText(locationPattern)) { @@ -246,25 +199,27 @@ public GridFsResource[] getResources(String locationPattern) { if (path.isPattern()) { GridFSFindIterable files = find(query(whereFilename().regex(path.toRegex()))); - List resources = new ArrayList(); + List resources = new ArrayList<>(); for (GridFSFile file : files) { - resources.add(new GridFsResource(file, getGridFs().openDownloadStream(file.getFilename()))); + resources.add(getResource(file)); } - return resources.toArray(new GridFsResource[resources.size()]); + return resources.toArray(new GridFsResource[0]); } return new GridFsResource[] { getResource(locationPattern) }; } - private Document getMappedQuery(Document query) { - return queryMapper.getMappedObject(query, Optional.empty()); + private GridFSBucket getGridFs() { + return this.bucketSupplier.get(); } - private GridFSBucket getGridFs() { + private static GridFSBucket getGridFs(MongoDatabaseFactory dbFactory, @Nullable String bucket) { + + Assert.notNull(dbFactory, "MongoDatabaseFactory must not be null"); - MongoDatabase db = dbFactory.getDb(); + MongoDatabase db = dbFactory.getMongoDatabase(); return bucket == null ? 
GridFSBuckets.create(db) : GridFSBuckets.create(db, bucket); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsUpload.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsUpload.java new file mode 100644 index 0000000000..9f8d9a47d2 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/GridFsUpload.java @@ -0,0 +1,232 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.gridfs; + +import java.io.InputStream; +import java.util.function.Supplier; + +import org.bson.Document; +import org.bson.types.ObjectId; + +import org.springframework.data.util.Lazy; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.client.gridfs.model.GridFSFile; + +/** + * Upload descriptor for a GridFS file upload. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +public class GridFsUpload implements GridFsObject { + + private final @Nullable ID id; + private final Lazy dataStream; + private final String filename; + private final Options options; + + private GridFsUpload(@Nullable ID id, Lazy dataStream, String filename, Options options) { + + Assert.notNull(dataStream, "Data Stream must not be null"); + Assert.notNull(filename, "Filename must not be null"); + Assert.notNull(options, "Options must not be null"); + + this.id = id; + this.dataStream = dataStream; + this.filename = filename; + this.options = options; + } + + /** + * The {@link GridFSFile#getId()} value converted into its simple java type.
          + * A {@link org.bson.BsonString} will be converted to plain {@link String}. + * + * @return can be {@literal null}. + * @see org.springframework.data.mongodb.gridfs.GridFsObject#getFileId() + */ + @Override + @Nullable + public ID getFileId() { + return id; + } + + @Override + public String getFilename() { + return filename; + } + + @Override + public InputStream getContent() { + return dataStream.orElse(InputStream.nullInputStream()); + } + + @Override + public Options getOptions() { + return options; + } + + /** + * Create a new instance of {@link GridFsUpload} for the given {@link InputStream}. + * + * @param stream must not be {@literal null}. + * @return new instance of {@link GridFsUpload}. + */ + public static GridFsUploadBuilder fromStream(InputStream stream) { + return new GridFsUploadBuilder().content(stream); + } + + /** + * Builder to create {@link GridFsUpload} in a fluent way. + * + * @param the target id type. + */ + public static class GridFsUploadBuilder { + + private Object id; + private Lazy dataStream; + private String filename; + private Options options = Options.none(); + + private GridFsUploadBuilder() {} + + /** + * Define the content of the file to upload. + * + * @param stream the upload content. + * @return this. + */ + public GridFsUploadBuilder content(InputStream stream) { + + Assert.notNull(stream, "InputStream must not be null"); + + return content(() -> stream); + } + + /** + * Define the content of the file to upload. + * + * @param stream the upload content. + * @return this. + */ + public GridFsUploadBuilder content(Supplier stream) { + + Assert.notNull(stream, "InputStream Supplier must not be null"); + + this.dataStream = Lazy.of(stream); + return this; + } + + /** + * Set the id to use. + * + * @param id the id to save the content to. + * @param + * @return this. + */ + public GridFsUploadBuilder id(T1 id) { + + this.id = id; + return (GridFsUploadBuilder) this; + } + + /** + * Set the filename. 
+ * + * @param filename the filename to use. + * @return this. + */ + public GridFsUploadBuilder filename(String filename) { + + this.filename = filename; + return this; + } + + /** + * Set additional file information. + * + * @param options must not be {@literal null}. + * @return this. + */ + public GridFsUploadBuilder options(Options options) { + + Assert.notNull(options, "Options must not be null"); + + this.options = options; + return this; + } + + /** + * Set the file metadata. + * + * @param metadata must not be {@literal null}. + * @return this. + */ + public GridFsUploadBuilder metadata(Document metadata) { + + this.options = this.options.metadata(metadata); + return this; + } + + /** + * Set the upload chunk size in bytes. + * + * @param chunkSize use negative number for default. + * @return this. + */ + public GridFsUploadBuilder chunkSize(int chunkSize) { + + this.options = this.options.chunkSize(chunkSize); + return this; + } + + /** + * Set id, filename, metadata and chunk size from given file. + * + * @param gridFSFile must not be {@literal null}. + * @return this. + */ + public GridFsUploadBuilder gridFsFile(GridFSFile gridFSFile) { + + Assert.notNull(gridFSFile, "GridFSFile must not be null"); + + this.id = gridFSFile.getId(); + this.filename = gridFSFile.getFilename(); + this.options = this.options.metadata(gridFSFile.getMetadata()); + this.options = this.options.chunkSize(gridFSFile.getChunkSize()); + + return this; + } + + /** + * Set the content type. + * + * @param contentType must not be {@literal null}. + * @return this. 
+ */ + public GridFsUploadBuilder contentType(String contentType) { + + this.options = this.options.contentType(contentType); + return this; + } + + public GridFsUpload build() { + return new GridFsUpload(id, dataStream, filename, options); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsOperations.java new file mode 100644 index 0000000000..9ee47e0bb9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsOperations.java @@ -0,0 +1,236 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.gridfs; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.reactivestreams.Publisher; +import org.springframework.core.io.buffer.DataBuffer; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.gridfs.ReactiveGridFsUpload.ReactiveGridFsUploadBuilder; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.client.gridfs.model.GridFSFile; + +/** + * Collection of operations to store and read files from MongoDB GridFS using reactive infrastructure. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.2 + */ +public interface ReactiveGridFsOperations { + + /** + * Stores the given content into a file with the given name. + * + * @param content must not be {@literal null}. + * @param filename must not be {@literal null} or empty. + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. + */ + default Mono store(Publisher content, String filename) { + return store(content, filename, (Object) null); + } + + /** + * Stores the given content into a file applying the given metadata. + * + * @param content must not be {@literal null}. + * @param metadata can be {@literal null}. + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. + */ + default Mono store(Publisher content, @Nullable Object metadata) { + return store(content, null, metadata); + } + + /** + * Stores the given content into a file applying the given metadata. + * + * @param content must not be {@literal null}. + * @param metadata can be {@literal null}. 
+ * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. + */ + default Mono store(Publisher content, @Nullable Document metadata) { + return store(content, null, metadata); + } + + /** + * Stores the given content into a file with the given name and content type. + * + * @param content must not be {@literal null}. + * @param filename must not be {@literal null} or empty. + * @param contentType can be {@literal null}. + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. + */ + default Mono store(Publisher content, @Nullable String filename, @Nullable String contentType) { + return store(content, filename, contentType, (Object) null); + } + + /** + * Stores the given content into a file with the given name using the given metadata. The metadata object will be + * marshalled before writing. + * + * @param content must not be {@literal null}. + * @param filename can be {@literal null} or empty. + * @param metadata can be {@literal null}. + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. + */ + default Mono store(Publisher content, @Nullable String filename, @Nullable Object metadata) { + return store(content, filename, null, metadata); + } + + /** + * Stores the given content into a file with the given name and content type using the given metadata. The metadata + * object will be marshalled before writing. + * + * @param content must not be {@literal null}. + * @param filename must not be {@literal null} or empty. + * @param contentType can be {@literal null}. + * @param metadata can be {@literal null} + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. 
+ */ + Mono store(Publisher content, @Nullable String filename, @Nullable String contentType, + @Nullable Object metadata); + + /** + * Stores the given content into a file with the given name using the given metadata. + * + * @param content must not be {@literal null}. + * @param filename must not be {@literal null} or empty. + * @param metadata can be {@literal null}. + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. + */ + default Mono store(Publisher content, @Nullable String filename, @Nullable Document metadata) { + return store(content, filename, null, metadata); + } + + /** + * Stores the given content into a file with the given name and content type using the given metadata. + * + * @param content must not be {@literal null}. + * @param filename must not be {@literal null} or empty. + * @param contentType can be {@literal null}. If not empty, may override content type within {@literal metadata}. + * @param metadata can be {@literal null}. + * @return a {@link Mono} emitting the {@link ObjectId} of the {@link com.mongodb.client.gridfs.model.GridFSFile} just + * created. + */ + default Mono store(Publisher content, @Nullable String filename, @Nullable String contentType, + @Nullable Document metadata) { + + ReactiveGridFsUploadBuilder uploadBuilder = ReactiveGridFsUpload.fromPublisher(content); + + if (StringUtils.hasText(filename)) { + uploadBuilder.filename(filename); + } + if (!ObjectUtils.isEmpty(metadata)) { + uploadBuilder.metadata(metadata); + } + if (StringUtils.hasText(contentType)) { + uploadBuilder.contentType(contentType); + } + + return store(uploadBuilder.build()); + } + + /** + * Stores the given {@link GridFsObject}, likely a {@link GridFsUpload}, into into a file with given + * {@link GridFsObject#getFilename() name}. If the {@link GridFsObject#getFileId()} is set, the file will be stored + * with that id, otherwise the server auto creates a new id.
          + * + * @param upload the {@link GridFsObject} (most likely a {@link GridFsUpload}) to be stored. + * @param id type of the underlying {@link com.mongodb.client.gridfs.model.GridFSFile} + * @return {@link Mono} emitting the id of the stored file which is either an auto created value or + * {@link GridFsObject#getFileId()}. + * @since 3.0 + */ + Mono store(GridFsObject> upload); + + /** + * Returns a {@link Flux} emitting all files matching the given query.
          + * NOTE: Currently {@link Sort} criteria defined at the {@link Query} will not be regarded as MongoDB + * does not support ordering for GridFS file access. + * + * @see MongoDB Jira: JAVA-431 + * @param query must not be {@literal null}. + * @return {@link Flux#empty()} if no mach found. + */ + Flux find(Query query); + + /** + * Returns a {@link Mono} emitting a single {@link com.mongodb.client.gridfs.model.GridFSFile} matching the given + * query or {@link Mono#empty()} in case no file matches.
          + * NOTE: If more than one file matches the given query the resulting {@link Mono} emits an error. If + * you want to obtain the first found file use {@link #findFirst(Query)}. + * + * @param query must not be {@literal null}. + * @return {@link Mono#empty()} if not match found. + */ + Mono findOne(Query query); + + /** + * Returns a {@link Mono} emitting the frist {@link com.mongodb.client.gridfs.model.GridFSFile} matching the given + * query or {@link Mono#empty()} in case no file matches. + * + * @param query must not be {@literal null}. + * @return {@link Mono#empty()} if not match found. + */ + Mono findFirst(Query query); + + /** + * Deletes all files matching the given {@link Query}. + * + * @param query must not be {@literal null}. + * @return a {@link Mono} signalling operation completion. + */ + Mono delete(Query query); + + /** + * Returns a {@link Mono} emitting the {@link ReactiveGridFsResource} with the given file name. + * + * @param filename must not be {@literal null}. + * @return {@link Mono#empty()} if no match found. + */ + Mono getResource(String filename); + + /** + * Returns a {@link Mono} emitting the {@link ReactiveGridFsResource} for a {@link GridFSFile}. + * + * @param file must not be {@literal null}. + * @return {@link Mono#empty()} if no match found. + */ + Mono getResource(GridFSFile file); + + /** + * Returns a {@link Flux} emitting all {@link ReactiveGridFsResource}s matching the given file name pattern. + * + * @param filenamePattern must not be {@literal null}. + * @return {@link Flux#empty()} if no match found. 
+ */ + Flux getResources(String filenamePattern); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsResource.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsResource.java new file mode 100644 index 0000000000..aec7cadef1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsResource.java @@ -0,0 +1,221 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.gridfs; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.io.InputStream; +import java.util.concurrent.atomic.AtomicBoolean; + +import org.bson.BsonValue; +import org.reactivestreams.Publisher; +import org.springframework.core.io.Resource; +import org.springframework.core.io.buffer.DataBuffer; +import org.springframework.core.io.buffer.DataBufferFactory; +import org.springframework.core.io.buffer.DataBufferUtils; +import org.springframework.core.io.buffer.DefaultDataBufferFactory; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.client.gridfs.model.GridFSFile; +import com.mongodb.reactivestreams.client.gridfs.GridFSDownloadPublisher; + +/** + * Reactive {@link GridFSFile} based {@link Resource} implementation. Note that the {@link #getDownloadStream() content} + * can be consumed only once. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.2 + */ +public class ReactiveGridFsResource implements GridFsObject> { + + private final AtomicBoolean consumed = new AtomicBoolean(false); + + private final @Nullable Object id; + private final Options options; + private final String filename; + private final @Nullable GridFSDownloadPublisher downloadPublisher; + private final DataBufferFactory dataBufferFactory; + + /** + * Creates a new, absent {@link ReactiveGridFsResource}. + * + * @param filename filename of the absent resource. + * @param downloadPublisher + */ + public ReactiveGridFsResource(String filename, @Nullable GridFSDownloadPublisher downloadPublisher) { + this(null, filename, Options.none(), downloadPublisher); + } + + /** + * Creates a new, absent {@link ReactiveGridFsResource}. + * + * @param id + * @param filename filename of the absent resource. 
+ * @param options + * @param downloadPublisher + * @since 3.0 + */ + public ReactiveGridFsResource(@Nullable Object id, String filename, Options options, + @Nullable GridFSDownloadPublisher downloadPublisher) { + this(id, filename, options, downloadPublisher, new DefaultDataBufferFactory()); + } + + ReactiveGridFsResource(GridFSFile file, @Nullable GridFSDownloadPublisher downloadPublisher, DataBufferFactory dataBufferFactory) { + this(file.getId(), file.getFilename(), Options.from(file), downloadPublisher, dataBufferFactory); + } + + /** + * Creates a new, absent {@link ReactiveGridFsResource}. + * + * @param id + * @param filename filename of the absent resource. + * @param options + * @param downloadPublisher + * @param dataBufferFactory + * @since 3.0 + */ + ReactiveGridFsResource(@Nullable Object id, String filename, Options options, + @Nullable GridFSDownloadPublisher downloadPublisher, DataBufferFactory dataBufferFactory) { + + this.id = id; + this.filename = filename; + this.options = options; + this.downloadPublisher = downloadPublisher; + this.dataBufferFactory = dataBufferFactory; + } + + /** + * Obtain an absent {@link ReactiveGridFsResource}. + * + * @param filename filename of the absent resource, must not be {@literal null}. + * @return never {@literal null}. + * @since 2.1 + */ + public static ReactiveGridFsResource absent(String filename) { + + Assert.notNull(filename, "Filename must not be null"); + return new ReactiveGridFsResource(filename, null); + } + + @Override + public Object getFileId() { + return id instanceof BsonValue bsonValue ? BsonUtils.toJavaType(bsonValue) : id; + } + + /** + * @see org.springframework.core.io.AbstractResource#getFilename() + */ + public String getFilename() throws IllegalStateException { + return this.filename; + } + + /** + * @return the underlying {@link GridFSFile}. Can be {@literal null} if absent. + * @since 2.2 + */ + public Mono getGridFSFile() { + return downloadPublisher != null ? 
Mono.from(downloadPublisher.getGridFSFile()) : Mono.empty(); + } + + /** + * Obtain the data as {@link InputStream}.
          + * NOTE: Buffers data in memory. Use {@link #getDownloadStream()} for large files. + * + * @throws IllegalStateException if the underlying {@link Publisher} has already been consumed. + * @see org.springframework.core.io.InputStreamResource#getInputStream() + * @see #getDownloadStream() + * @see DataBufferUtils#join(Publisher) + * @since 3.0 + */ + public Mono getInputStream() throws IllegalStateException { + + return getDownloadStream() // + .transform(DataBufferUtils::join) // + .as(Mono::from) // + .map(DataBuffer::asInputStream); + } + + /** + * Obtain the download stream emitting chunks of data as they come in.
          + * + * @return {@link Flux#empty()} if the file does not exist. + * @throws IllegalStateException if the underlying {@link Publisher} has already been consumed. + * @see org.springframework.core.io.InputStreamResource#getInputStream() + * @see #getDownloadStream() + * @see DataBufferUtils#join(Publisher) + * @since 3.0 + */ + public Flux getDownloadStream() { + + if (downloadPublisher == null) { + return Flux.empty(); + } + + return createDownloadStream(downloadPublisher); + } + + @Override + public Flux getContent() { + return getDownloadStream(); + } + + @Override + public Options getOptions() { + return options; + } + + /** + * Obtain the download stream emitting chunks of data with given {@code chunkSize} as they come in. + * + * @param chunkSize the preferred number of bytes per emitted {@link DataBuffer}. + * @return {@link Flux#empty()} if the file does not exist. + * @throws IllegalStateException if the underlying {@link Publisher} has already been consumed. + * @see org.springframework.core.io.InputStreamResource#getInputStream() + * @see #getDownloadStream() + * @see DataBufferUtils#join(Publisher) + * @since 3.0 + */ + public Flux getDownloadStream(int chunkSize) { + + if (downloadPublisher == null) { + return Flux.empty(); + } + + return createDownloadStream(downloadPublisher.bufferSizeBytes(chunkSize)); + } + + private Flux createDownloadStream(GridFSDownloadPublisher publisher) { + + return Flux.from(publisher) // + .map(dataBufferFactory::wrap) // + .doOnSubscribe(it -> this.verifyStreamStillAvailable()); + } + + public boolean exists() { + return downloadPublisher != null; + } + + private void verifyStreamStillAvailable() { + + if (!consumed.compareAndSet(false, true)) { + throw new IllegalStateException("Stream already consumed."); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplate.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplate.java new file mode 100644 index 0000000000..305e55aee4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplate.java @@ -0,0 +1,377 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.gridfs; + +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.gridfs.GridFsCriteria.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.nio.ByteBuffer; + +import org.bson.BsonValue; +import org.bson.Document; +import org.bson.types.ObjectId; +import org.reactivestreams.Publisher; +import org.springframework.core.io.buffer.DataBuffer; +import org.springframework.core.io.buffer.DataBufferFactory; +import org.springframework.core.io.buffer.DefaultDataBufferFactory; +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.SerializationUtils; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.util.Lazy; +import 
org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.client.gridfs.model.GridFSFile; +import com.mongodb.client.gridfs.model.GridFSUploadOptions; +import com.mongodb.reactivestreams.client.gridfs.GridFSBucket; +import com.mongodb.reactivestreams.client.gridfs.GridFSBuckets; +import com.mongodb.reactivestreams.client.gridfs.GridFSFindPublisher; +import com.mongodb.reactivestreams.client.gridfs.GridFSUploadPublisher; + +/** + * {@link ReactiveGridFsOperations} implementation to store content into MongoDB GridFS. Uses by default + * {@link DefaultDataBufferFactory} to create {@link DataBuffer buffers}. + * + * @author Mark Paluch + * @author Nick Stolwijk + * @author Denis Zavedeev + * @author Christoph Strobl + * @author Mathieu Ouellet + * @since 2.2 + */ +public class ReactiveGridFsTemplate extends GridFsOperationsSupport implements ReactiveGridFsOperations { + + private final DataBufferFactory dataBufferFactory; + private final Mono bucketSupplier; + + /** + * Creates a new {@link ReactiveGridFsTemplate} using the given {@link ReactiveMongoDatabaseFactory} and + * {@link MongoConverter}. + *

          + * Note that the {@link GridFSBucket} is obtained only once from + * {@link ReactiveMongoDatabaseFactory#getMongoDatabase() MongoDatabase}. Use + * {@link #ReactiveGridFsTemplate(MongoConverter, Mono, DataBufferFactory)} if you want to use different buckets from + * the same Template instance. + * + * @param dbFactory must not be {@literal null}. + * @param converter must not be {@literal null}. + */ + public ReactiveGridFsTemplate(ReactiveMongoDatabaseFactory dbFactory, MongoConverter converter) { + this(dbFactory, converter, null); + } + + /** + * Creates a new {@link ReactiveGridFsTemplate} using the given {@link ReactiveMongoDatabaseFactory} and + * {@link MongoConverter}. + *

          + * Note that the {@link GridFSBucket} is obtained only once from + * {@link ReactiveMongoDatabaseFactory#getMongoDatabase() MongoDatabase}. Use + * {@link #ReactiveGridFsTemplate(MongoConverter, Mono, DataBufferFactory)} if you want to use different buckets from + * the same Template instance. + * + * @param dbFactory must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param bucket can be {@literal null}. + */ + public ReactiveGridFsTemplate(ReactiveMongoDatabaseFactory dbFactory, MongoConverter converter, + @Nullable String bucket) { + this(new DefaultDataBufferFactory(), dbFactory, converter, bucket); + } + + /** + * Creates a new {@link ReactiveGridFsTemplate} using the given {@link DataBufferFactory}, + * {@link ReactiveMongoDatabaseFactory} and {@link MongoConverter}. + *

          + * Note that the {@link GridFSBucket} is obtained only once from + * {@link ReactiveMongoDatabaseFactory#getMongoDatabase() MongoDatabase}. Use + * {@link #ReactiveGridFsTemplate(MongoConverter, Mono, DataBufferFactory)} if you want to use different buckets from + * the same Template instance. + * + * @param dataBufferFactory must not be {@literal null}. + * @param dbFactory must not be {@literal null}. + * @param converter must not be {@literal null}. + * @param bucket can be {@literal null}. + */ + public ReactiveGridFsTemplate(DataBufferFactory dataBufferFactory, ReactiveMongoDatabaseFactory dbFactory, + MongoConverter converter, @Nullable String bucket) { + this(converter, Mono.defer(Lazy.of(() -> doGetBucket(dbFactory, bucket))), dataBufferFactory); + } + + /** + * Creates a new {@link ReactiveGridFsTemplate} using the given {@link MongoConverter}, {@link Mono} emitting a + * {@link ReactiveMongoDatabaseFactory} and {@link DataBufferFactory}. + * + * @param converter must not be {@literal null}. + * @param gridFSBucket must not be {@literal null}. + * @param dataBufferFactory must not be {@literal null}. 
+ * @since 4.2 + */ + public ReactiveGridFsTemplate(MongoConverter converter, Mono gridFSBucket, + DataBufferFactory dataBufferFactory) { + + super(converter); + + Assert.notNull(gridFSBucket, "GridFSBucket Mono must not be null"); + Assert.notNull(dataBufferFactory, "DataBufferFactory must not be null"); + + this.bucketSupplier = gridFSBucket; + this.dataBufferFactory = dataBufferFactory; + } + + @Override + public Mono store(Publisher content, @Nullable String filename, @Nullable String contentType, + @Nullable Object metadata) { + return store(content, filename, contentType, toDocument(metadata)); + } + + @Override + @SuppressWarnings("unchecked") + public Mono store(GridFsObject> upload) { + + GridFSUploadOptions uploadOptions = computeUploadOptionsFor(upload.getOptions().getContentType(), + upload.getOptions().getMetadata()); + + if (upload.getOptions().getChunkSize() > 0) { + uploadOptions.chunkSizeBytes(upload.getOptions().getChunkSize()); + } + + String filename = upload.getFilename(); + Flux source = Flux.from(upload.getContent()).map(DataBuffer::toByteBuffer); + T fileId = upload.getFileId(); + + if (fileId == null) { + return (Mono) createMono(new AutoIdCreatingUploadCallback(filename, source, uploadOptions)); + } + + UploadCallback callback = new UploadCallback(BsonUtils.simpleToBsonValue(fileId), filename, source, uploadOptions); + return createMono(callback).thenReturn(fileId); + } + + @Override + public Flux find(Query query) { + + Document queryObject = getMappedQuery(query.getQueryObject()); + Document sortObject = getMappedQuery(query.getSortObject()); + + return createFlux(new FindCallback(query, queryObject, sortObject)); + } + + @Override + public Mono findOne(Query query) { + + Document queryObject = getMappedQuery(query.getQueryObject()); + Document sortObject = getMappedQuery(query.getSortObject()); + + return createFlux(new FindLimitCallback(query, queryObject, sortObject, 2)) // + .collectList() // + .handle((files, sink) -> { + + if 
(files.size() == 1) { + sink.next(files.get(0)); + return; + } + + if (files.size() > 1) { + sink.error(new IncorrectResultSizeDataAccessException( + "Query " + SerializationUtils.serializeToJsonSafely(query) + " returned non unique result.", 1)); + } + }); + } + + @Override + public Mono findFirst(Query query) { + + Document queryObject = getMappedQuery(query.getQueryObject()); + Document sortObject = getMappedQuery(query.getSortObject()); + + return createFlux(new FindLimitCallback(query, queryObject, sortObject, 1)).next(); + } + + @Override + public Mono delete(Query query) { + return find(query).flatMap(it -> createMono(new DeleteCallback(it.getId()))).then(); + } + + @Override + public Mono getResource(String location) { + + Assert.notNull(location, "Filename must not be null"); + + return findOne(query(whereFilename().is(location))).flatMap(this::getResource) + .defaultIfEmpty(ReactiveGridFsResource.absent(location)); + } + + @Override + public Mono getResource(GridFSFile file) { + + Assert.notNull(file, "GridFSFile must not be null"); + + return doGetBucket() + .map(it -> new ReactiveGridFsResource(file, it.downloadToPublisher(file.getId()), dataBufferFactory)); + } + + @Override + public Flux getResources(String locationPattern) { + + if (!StringUtils.hasText(locationPattern)) { + return Flux.empty(); + } + + AntPath path = new AntPath(locationPattern); + + if (path.isPattern()) { + + Flux files = find(query(whereFilename().regex(path.toRegex()))); + return files.flatMap(this::getResource); + } + + return getResource(locationPattern).flux(); + } + + /** + * Create a reusable Mono for a {@link ReactiveBucketCallback}. It's up to the developer to choose to obtain a new + * {@link Flux} or to reuse the {@link Flux}. + * + * @param callback must not be {@literal null} + * @return a {@link Mono} wrapping the {@link ReactiveBucketCallback}. 
+ */ + public Mono createMono(ReactiveBucketCallback callback) { + + Assert.notNull(callback, "ReactiveBucketCallback must not be null"); + + return doGetBucket().flatMap(bucket -> Mono.from(callback.doInBucket(bucket))); + } + + /** + * Create a reusable Flux for a {@link ReactiveBucketCallback}. It's up to the developer to choose to obtain a new + * {@link Flux} or to reuse the {@link Flux}. + * + * @param callback must not be {@literal null} + * @return a {@link Flux} wrapping the {@link ReactiveBucketCallback}. + */ + public Flux createFlux(ReactiveBucketCallback callback) { + + Assert.notNull(callback, "ReactiveBucketCallback must not be null"); + + return doGetBucket().flatMapMany(callback::doInBucket); + } + + protected Mono doGetBucket() { + return bucketSupplier; + } + + private static Mono doGetBucket(ReactiveMongoDatabaseFactory dbFactory, @Nullable String bucket) { + + Assert.notNull(dbFactory, "ReactiveMongoDatabaseFactory must not be null"); + + return dbFactory.getMongoDatabase() + .map(db -> bucket == null ? 
GridFSBuckets.create(db) : GridFSBuckets.create(db, bucket)); + } + + /** + * @param + * @author Mathieu Ouellet + * @since 3.0 + */ + interface ReactiveBucketCallback { + Publisher doInBucket(GridFSBucket bucket); + } + + private static class FindCallback implements ReactiveBucketCallback { + + private final Query query; + private final Document queryObject; + private final Document sortObject; + + public FindCallback(Query query, Document queryObject, Document sortObject) { + + this.query = query; + this.queryObject = queryObject; + this.sortObject = sortObject; + } + + @Override + public GridFSFindPublisher doInBucket(GridFSBucket bucket) { + + GridFSFindPublisher findPublisher = bucket.find(queryObject).sort(sortObject); + + if (query.getLimit() > 0) { + findPublisher = findPublisher.limit(query.getLimit()); + } + + if (query.getSkip() > 0) { + findPublisher = findPublisher.skip(Math.toIntExact(query.getSkip())); + } + + Integer cursorBatchSize = query.getMeta().getCursorBatchSize(); + if (cursorBatchSize != null) { + findPublisher = findPublisher.batchSize(cursorBatchSize); + } + + return findPublisher; + } + } + + private static class FindLimitCallback extends FindCallback { + + private final int limit; + + public FindLimitCallback(Query query, Document queryObject, Document sortObject, int limit) { + + super(query, queryObject, sortObject); + this.limit = limit; + } + + @Override + public GridFSFindPublisher doInBucket(GridFSBucket bucket) { + return super.doInBucket(bucket).limit(limit); + } + } + + private record UploadCallback(BsonValue fileId, String filename, Publisher source, + GridFSUploadOptions uploadOptions) implements ReactiveBucketCallback { + + @Override + public GridFSUploadPublisher doInBucket(GridFSBucket bucket) { + return bucket.uploadFromPublisher(fileId, filename, source, uploadOptions); + } + } + + private record AutoIdCreatingUploadCallback(String filename, Publisher source, + GridFSUploadOptions uploadOptions) implements 
ReactiveBucketCallback { + + @Override + public GridFSUploadPublisher doInBucket(GridFSBucket bucket) { + return bucket.uploadFromPublisher(filename, source, uploadOptions); + } + } + + private record DeleteCallback(BsonValue id) implements ReactiveBucketCallback { + + @Override + public Publisher doInBucket(GridFSBucket bucket) { + return bucket.delete(id); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsUpload.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsUpload.java new file mode 100644 index 0000000000..2f16c3b06e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsUpload.java @@ -0,0 +1,213 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.gridfs; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.reactivestreams.Publisher; +import org.springframework.core.io.buffer.DataBuffer; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.client.gridfs.model.GridFSFile; + +/** + * Upload descriptor for a GridFS file upload. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +public class ReactiveGridFsUpload implements GridFsObject> { + + private final @Nullable ID id; + private final Publisher dataStream; + private final String filename; + private final Options options; + + private ReactiveGridFsUpload(@Nullable ID id, Publisher dataStream, String filename, Options options) { + + Assert.notNull(dataStream, "Data Stream must not be null"); + Assert.notNull(filename, "Filename must not be null"); + Assert.notNull(options, "Options must not be null"); + + this.id = id; + this.dataStream = dataStream; + this.filename = filename; + this.options = options; + } + + /** + * The {@link GridFSFile#getId()} value converted into its simple java type.
          + * A {@link org.bson.BsonString} will be converted to plain {@link String}. + * + * @return can be {@literal null}. + * @see org.springframework.data.mongodb.gridfs.GridFsObject#getFileId() + */ + @Override + @Nullable + public ID getFileId() { + return id; + } + + @Override + public String getFilename() { + return filename; + } + + @Override + public Publisher getContent() { + return dataStream; + } + + @Override + public Options getOptions() { + return options; + } + + /** + * Create a new instance of {@link ReactiveGridFsUpload} for the given {@link Publisher}. + * + * @param source must not be {@literal null}. + * @return new instance of {@link GridFsUpload}. + */ + public static ReactiveGridFsUploadBuilder fromPublisher(Publisher source) { + return new ReactiveGridFsUploadBuilder().content(source); + } + + /** + * Builder to create {@link ReactiveGridFsUpload} in a fluent way. + * + * @param the target id type. + */ + public static class ReactiveGridFsUploadBuilder { + + private @Nullable Object id; + private Publisher dataStream; + private String filename; + private Options options = Options.none(); + + private ReactiveGridFsUploadBuilder() {} + + /** + * Define the content of the file to upload. + * + * @param source the upload content. + * @return this. + */ + public ReactiveGridFsUploadBuilder content(Publisher source) { + this.dataStream = source; + return this; + } + + /** + * Set the id to use. + * + * @param id the id to save the content to. + * @param + * @return this. + */ + public ReactiveGridFsUploadBuilder id(T1 id) { + + this.id = id; + return (ReactiveGridFsUploadBuilder) this; + } + + /** + * Set the filename. + * + * @param filename the filename to use. + * @return this. + */ + public ReactiveGridFsUploadBuilder filename(String filename) { + + this.filename = filename; + return this; + } + + /** + * Set additional file information. + * + * @param options must not be {@literal null}. + * @return this. 
+ */ + public ReactiveGridFsUploadBuilder options(Options options) { + + Assert.notNull(options, "Options must not be null"); + + this.options = options; + return this; + } + + /** + * Set the file metadata. + * + * @param metadata must not be {@literal null}. + * @return + */ + public ReactiveGridFsUploadBuilder metadata(Document metadata) { + + this.options = this.options.metadata(metadata); + return this; + } + + /** + * Set the upload chunk size in bytes. + * + * @param chunkSize use negative number for default. + * @return + */ + public ReactiveGridFsUploadBuilder chunkSize(int chunkSize) { + + this.options = this.options.chunkSize(chunkSize); + return this; + } + + /** + * Set id, filename, metadata and chunk size from given file. + * + * @param gridFSFile must not be {@literal null}. + * @return this. + */ + public ReactiveGridFsUploadBuilder gridFsFile(GridFSFile gridFSFile) { + + Assert.notNull(gridFSFile, "GridFSFile must not be null"); + + this.id = gridFSFile.getId(); + this.filename = gridFSFile.getFilename(); + this.options = this.options.metadata(gridFSFile.getMetadata()); + this.options = this.options.chunkSize(gridFSFile.getChunkSize()); + + return this; + } + + /** + * Set the content type. + * + * @param contentType must not be {@literal null}. + * @return this. 
+ */ + public ReactiveGridFsUploadBuilder contentType(String contentType) { + + this.options = this.options.contentType(contentType); + return this; + } + + public ReactiveGridFsUpload build() { + return new ReactiveGridFsUpload(id, dataStream, filename, options); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AbstractMonitor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AbstractMonitor.java index 7adc3fbe48..5ffe37a4a7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AbstractMonitor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AbstractMonitor.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,15 @@ */ package org.springframework.data.mongodb.monitor; +import java.util.List; +import java.util.stream.Collectors; + import org.bson.Document; -import com.mongodb.MongoClient; +import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoDatabase; +import com.mongodb.connection.ServerDescription; /** * Base class to encapsulate common configuration settings when connecting to a database @@ -26,11 +31,17 @@ * @author Mark Pollack * @author Oliver Gierke * @author Christoph Strobl + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) public abstract class AbstractMonitor { private final MongoClient mongoClient; + /** + * @param 
mongoClient must not be {@literal null}. + * @since 2.2 + */ protected AbstractMonitor(MongoClient mongoClient) { this.mongoClient = mongoClient; } @@ -46,4 +57,10 @@ public MongoDatabase getDb(String databaseName) { protected MongoClient getMongoClient() { return mongoClient; } + + protected List hosts() { + + return mongoClient.getClusterDescription().getServerDescriptions().stream().map(ServerDescription::getAddress) + .collect(Collectors.toList()); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AssertMetrics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AssertMetrics.java index 7e68298102..15666fa4d0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AssertMetrics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/AssertMetrics.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,23 +15,27 @@ */ package org.springframework.data.mongodb.monitor; -import com.mongodb.MongoClient; import org.bson.Document; import org.springframework.jmx.export.annotation.ManagedMetric; import org.springframework.jmx.export.annotation.ManagedResource; import org.springframework.jmx.support.MetricType; -import com.mongodb.DBObject; -import com.mongodb.Mongo; +import com.mongodb.client.MongoClient; /** * JMX Metrics for assertions * * @author Mark Pollack + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) @ManagedResource(description = "Assertion Metrics") public class AssertMetrics extends AbstractMonitor { + /** + * @param mongoClient must not be {@literal null}. + * @since 2.2 + */ public AssertMetrics(MongoClient mongoClient) { super(mongoClient); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BackgroundFlushingMetrics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BackgroundFlushingMetrics.java index 9a7bdb8c4d..2ceb75a4f8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BackgroundFlushingMetrics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BackgroundFlushingMetrics.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,16 +22,22 @@ import org.springframework.jmx.export.annotation.ManagedResource; import org.springframework.jmx.support.MetricType; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * JMX Metrics for Background Flushing * * @author Mark Pollack + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) @ManagedResource(description = "Background Flushing Metrics") public class BackgroundFlushingMetrics extends AbstractMonitor { + /** + * @param mongoClient must not be {@literal null}. + * @since 2.2 + */ public BackgroundFlushingMetrics(MongoClient mongoClient) { super(mongoClient); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BtreeIndexCounters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BtreeIndexCounters.java index 5787488a09..671d017e05 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BtreeIndexCounters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/BtreeIndexCounters.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,16 +20,22 @@ import org.springframework.jmx.export.annotation.ManagedResource; import org.springframework.jmx.support.MetricType; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * JMX Metrics for B-tree index counters * * @author Mark Pollack + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) @ManagedResource(description = "Btree Metrics") public class BtreeIndexCounters extends AbstractMonitor { + /** + * @param mongoClient must not be {@literal null}. + * @since 2.2 + */ public BtreeIndexCounters(MongoClient mongoClient) { super(mongoClient); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ConnectionMetrics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ConnectionMetrics.java index e31bd63a27..0d0eb84b35 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ConnectionMetrics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ConnectionMetrics.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,16 +20,22 @@ import org.springframework.jmx.export.annotation.ManagedResource; import org.springframework.jmx.support.MetricType; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * JMX Metrics for Connections * * @author Mark Pollack + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) @ManagedResource(description = "Connection metrics") public class ConnectionMetrics extends AbstractMonitor { + /** + * @param mongoClient must not be {@literal null}. + * @since 2.2 + */ public ConnectionMetrics(MongoClient mongoClient) { super(mongoClient); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/GlobalLockMetrics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/GlobalLockMetrics.java index 2cb78a56d0..6997f5fba8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/GlobalLockMetrics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/GlobalLockMetrics.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,16 +21,22 @@ import org.springframework.jmx.support.MetricType; import com.mongodb.DBObject; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * JMX Metrics for Global Locks * * @author Mark Pollack + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) @ManagedResource(description = "Global Lock Metrics") public class GlobalLockMetrics extends AbstractMonitor { + /** + * @param mongoClient must not be {@literal null}. + * @since 2.2 + */ public GlobalLockMetrics(MongoClient mongoClient) { super(mongoClient); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/MemoryMetrics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/MemoryMetrics.java index 2db2b6f831..4dbdebb26f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/MemoryMetrics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/MemoryMetrics.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,16 +20,22 @@ import org.springframework.jmx.export.annotation.ManagedResource; import org.springframework.jmx.support.MetricType; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * JMX Metrics for Memory * * @author Mark Pollack + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) @ManagedResource(description = "Memory Metrics") public class MemoryMetrics extends AbstractMonitor { + /** + * @param mongoClient + * @since 2.2 + */ public MemoryMetrics(MongoClient mongoClient) { super(mongoClient); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/OperationCounters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/OperationCounters.java index ac92fb4f66..1624501490 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/OperationCounters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/OperationCounters.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,17 +19,24 @@ import org.springframework.jmx.export.annotation.ManagedMetric; import org.springframework.jmx.export.annotation.ManagedResource; import org.springframework.jmx.support.MetricType; +import org.springframework.util.NumberUtils; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * JMX Metrics for Operation counters * * @author Mark Pollack + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) @ManagedResource(description = "Operation Counters") public class OperationCounters extends AbstractMonitor { + /** + * @param mongoClient + * @since 2.2 + */ public OperationCounters(MongoClient mongoClient) { super(mongoClient); } @@ -66,6 +73,6 @@ public int getCommandCount() { private int getOpCounter(String key) { Document opCounters = (Document) getServerStatus().get("opcounters"); - return (Integer) opCounters.get(key); + return NumberUtils.convertNumberToTargetClass((Number) opCounters.get(key), Integer.class); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ServerInfo.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ServerInfo.java index a6e576287f..3aedf3f29f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ServerInfo.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/ServerInfo.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,8 +21,9 @@ import org.springframework.jmx.export.annotation.ManagedOperation; import org.springframework.jmx.export.annotation.ManagedResource; import org.springframework.jmx.support.MetricType; +import org.springframework.util.StringUtils; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * Expose basic server information via JMX @@ -30,11 +31,17 @@ * @author Mark Pollack * @author Thomas Darimont * @author Christoph Strobl + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) @ManagedResource(description = "Server Information") public class ServerInfo extends AbstractMonitor { - public ServerInfo(MongoClient mongoClient) { + /** + * @param mongoClient + * @since 2.2 + */ + protected ServerInfo(MongoClient mongoClient) { super(mongoClient); } @@ -51,7 +58,7 @@ public String getHostName() throws UnknownHostException { * UnknownHostException is not necessary anymore, but clients could have * called this method in a try..catch(UnknownHostException) already */ - return getMongoClient().getAddress().getHost(); + return StringUtils.collectionToDelimitedString(hosts(), ","); } @ManagedMetric(displayName = "Uptime Estimate") diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/package-info.java index 0d495584a9..1e1c221b64 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/monitor/package-info.java @@ -1,6 +1,7 @@ /** * MongoDB specific JMX monitoring support. 
*/ +@Deprecated(since = "4.5", forRemoval = true) @org.springframework.lang.NonNullApi package org.springframework.data.mongodb.monitor; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/ContextProviderFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/ContextProviderFactory.java new file mode 100644 index 0000000000..4b37225b5d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/ContextProviderFactory.java @@ -0,0 +1,136 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.observability; + +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.observation.contextpropagation.ObservationThreadLocalAccessor; +import reactor.core.CoreSubscriber; + +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; + +import org.reactivestreams.Subscriber; +import org.springframework.data.util.ReactiveWrappers; +import org.springframework.data.util.ReactiveWrappers.ReactiveLibrary; +import org.springframework.util.ClassUtils; + +import com.mongodb.ContextProvider; +import com.mongodb.RequestContext; +import com.mongodb.client.SynchronousContextProvider; +import com.mongodb.reactivestreams.client.ReactiveContextProvider; + +/** + * Factory to create a {@link ContextProvider} to propagate the request context across tasks. Requires either + * {@link SynchronousContextProvider} or {@link ReactiveContextProvider} to be present. + * + * @author Mark Paluch + * @since 3.0 + */ +public class ContextProviderFactory { + + private static final boolean SYNCHRONOUS_PRESENT = ClassUtils + .isPresent("com.mongodb.client.SynchronousContextProvider", ContextProviderFactory.class.getClassLoader()); + + private static final boolean REACTIVE_PRESENT = ClassUtils.isPresent( + "com.mongodb.reactivestreams.client.ReactiveContextProvider", ContextProviderFactory.class.getClassLoader()) + && ReactiveWrappers.isAvailable(ReactiveLibrary.PROJECT_REACTOR); + + /** + * Create a {@link ContextProvider} given {@link ObservationRegistry}. The factory method attempts to create a + * {@link ContextProvider} that is capable to propagate request contexts across imperative or reactive usage, + * depending on their class path presence. + * + * @param observationRegistry must not be {@literal null}. 
+ * @return + */ + public static ContextProvider create(ObservationRegistry observationRegistry) { + + if (SYNCHRONOUS_PRESENT && REACTIVE_PRESENT) { + return new CompositeContextProvider(observationRegistry); + } + + if (SYNCHRONOUS_PRESENT) { + return new DefaultSynchronousContextProvider(observationRegistry); + } + + if (REACTIVE_PRESENT) { + return DefaultReactiveContextProvider.INSTANCE; + } + + throw new IllegalStateException( + "Cannot create ContextProvider. Neither SynchronousContextProvider nor ReactiveContextProvider is on the class path."); + } + + record DefaultSynchronousContextProvider( + ObservationRegistry observationRegistry) implements SynchronousContextProvider { + + @Override + public RequestContext getContext() { + + MapRequestContext requestContext = new MapRequestContext(); + + Observation currentObservation = observationRegistry.getCurrentObservation(); + if (currentObservation != null) { + requestContext.put(ObservationThreadLocalAccessor.KEY, currentObservation); + } + + return requestContext; + } + + } + + enum DefaultReactiveContextProvider implements ReactiveContextProvider { + + INSTANCE; + + @Override + public RequestContext getContext(Subscriber subscriber) { + + if (subscriber instanceof CoreSubscriber cs) { + + Map map = cs.currentContext().stream() + .collect(Collectors.toConcurrentMap(Entry::getKey, Entry::getValue)); + + return new MapRequestContext(map); + } + + return new MapRequestContext(); + } + } + + record CompositeContextProvider(DefaultSynchronousContextProvider synchronousContextProvider) + implements + SynchronousContextProvider, + ReactiveContextProvider { + + CompositeContextProvider(ObservationRegistry observationRegistry) { + this(new DefaultSynchronousContextProvider(observationRegistry)); + } + + @Override + public RequestContext getContext() { + return synchronousContextProvider.getContext(); + } + + @Override + public RequestContext getContext(Subscriber subscriber) { + return 
DefaultReactiveContextProvider.INSTANCE.getContext(subscriber); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/DefaultMongoHandlerObservationConvention.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/DefaultMongoHandlerObservationConvention.java new file mode 100644 index 0000000000..b823ce223b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/DefaultMongoHandlerObservationConvention.java @@ -0,0 +1,121 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import io.micrometer.common.KeyValues; + +import java.net.InetSocketAddress; + +import org.springframework.data.mongodb.observability.MongoObservation.LowCardinalityCommandKeyNames; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ConnectionString; +import com.mongodb.ServerAddress; +import com.mongodb.connection.ConnectionDescription; +import com.mongodb.connection.ConnectionId; +import com.mongodb.event.CommandStartedEvent; + +/** + * Default {@link MongoHandlerObservationConvention} implementation. 
+ * + * @author Greg Turnquist + * @author Mark Paluch + * @since 4.0 + */ +class DefaultMongoHandlerObservationConvention implements MongoHandlerObservationConvention { + + @Override + public KeyValues getLowCardinalityKeyValues(MongoHandlerContext context) { + + KeyValues keyValues = KeyValues.of(LowCardinalityCommandKeyNames.DB_SYSTEM.withValue("mongodb"), + LowCardinalityCommandKeyNames.MONGODB_COMMAND.withValue(context.getCommandName())); + + ConnectionString connectionString = context.getConnectionString(); + if (connectionString != null) { + + keyValues = keyValues + .and(LowCardinalityCommandKeyNames.DB_CONNECTION_STRING.withValue(connectionString.getConnectionString())); + + String user = connectionString.getUsername(); + + if (!ObjectUtils.isEmpty(user)) { + keyValues = keyValues.and(LowCardinalityCommandKeyNames.DB_USER.withValue(user)); + } + + } + + if (!ObjectUtils.isEmpty(context.getDatabaseName())) { + keyValues = keyValues.and(LowCardinalityCommandKeyNames.DB_NAME.withValue(context.getDatabaseName())); + } + + if (!ObjectUtils.isEmpty(context.getCollectionName())) { + keyValues = keyValues + .and(LowCardinalityCommandKeyNames.MONGODB_COLLECTION.withValue(context.getCollectionName())); + } + + ConnectionDescription connectionDescription = context.getCommandStartedEvent().getConnectionDescription(); + + if (connectionDescription != null) { + + ServerAddress serverAddress = connectionDescription.getServerAddress(); + + if (serverAddress != null) { + + keyValues = keyValues.and(LowCardinalityCommandKeyNames.NET_TRANSPORT.withValue("IP.TCP"), + LowCardinalityCommandKeyNames.NET_PEER_NAME.withValue(serverAddress.getHost()), + LowCardinalityCommandKeyNames.NET_PEER_PORT.withValue("" + serverAddress.getPort())); + + InetSocketAddress socketAddress = MongoCompatibilityAdapter.serverAddressAdapter(serverAddress) + .getSocketAddress(); + + if (socketAddress != null) { + + keyValues = keyValues.and( + 
LowCardinalityCommandKeyNames.NET_SOCK_PEER_ADDR.withValue(socketAddress.getHostName()), + LowCardinalityCommandKeyNames.NET_SOCK_PEER_PORT.withValue("" + socketAddress.getPort())); + } + } + + ConnectionId connectionId = connectionDescription.getConnectionId(); + if (connectionId != null) { + keyValues = keyValues.and(LowCardinalityCommandKeyNames.MONGODB_CLUSTER_ID + .withValue(connectionId.getServerId().getClusterId().getValue())); + } + } + + return keyValues; + } + + @Override + public KeyValues getHighCardinalityKeyValues(MongoHandlerContext context) { + return KeyValues.empty(); + } + + @Override + public String getContextualName(MongoHandlerContext context) { + + String collectionName = context.getCollectionName(); + CommandStartedEvent commandStartedEvent = context.getCommandStartedEvent(); + + if (ObjectUtils.isEmpty(collectionName)) { + return commandStartedEvent.getCommandName(); + } + + return collectionName + "." + commandStartedEvent.getCommandName(); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MapRequestContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MapRequestContext.java new file mode 100644 index 0000000000..854e1481fc --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MapRequestContext.java @@ -0,0 +1,77 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Stream; + +import com.mongodb.RequestContext; + +/** + * A {@link Map}-based {@link RequestContext}. + * + * @author Marcin Grzejszczak + * @author Greg Turnquist + * @since 4.0.0 + */ +class MapRequestContext implements RequestContext { + + private final Map map; + + public MapRequestContext() { + this(new HashMap<>()); + } + + public MapRequestContext(Map context) { + this.map = context; + } + + @Override + public T get(Object key) { + return (T) map.get(key); + } + + @Override + public boolean hasKey(Object key) { + return map.containsKey(key); + } + + @Override + public boolean isEmpty() { + return map.isEmpty(); + } + + @Override + public void put(Object key, Object value) { + map.put(key, value); + } + + @Override + public void delete(Object key) { + map.remove(key); + } + + @Override + public int size() { + return map.size(); + } + + @Override + public Stream> stream() { + return map.entrySet().stream(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoHandlerContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoHandlerContext.java new file mode 100644 index 0000000000..cc58aac56e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoHandlerContext.java @@ -0,0 +1,150 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import io.micrometer.observation.Observation; +import io.micrometer.observation.transport.Kind; +import io.micrometer.observation.transport.SenderContext; + +import java.util.Arrays; +import java.util.LinkedHashSet; +import java.util.Set; + +import org.bson.BsonDocument; +import org.bson.BsonValue; + +import org.springframework.lang.Nullable; + +import com.mongodb.ConnectionString; +import com.mongodb.RequestContext; +import com.mongodb.event.CommandFailedEvent; +import com.mongodb.event.CommandStartedEvent; +import com.mongodb.event.CommandSucceededEvent; + +/** + * A {@link Observation.Context} that contains MongoDB events. 
+ * + * @author Marcin Grzejszczak + * @author Greg Turnquist + * @author Mark Paluch + * @since 4.0 + */ +public class MongoHandlerContext extends SenderContext { + + /** + * @see https://docs.mongodb.com/manual/reference/command for + * the command reference + */ + private static final Set COMMANDS_WITH_COLLECTION_NAME = new LinkedHashSet<>( + Arrays.asList("aggregate", "count", "distinct", "mapReduce", "geoSearch", "delete", "find", "findAndModify", + "insert", "update", "collMod", "compact", "convertToCapped", "create", "createIndexes", "drop", "dropIndexes", + "killCursors", "listIndexes", "reIndex")); + + private final @Nullable ConnectionString connectionString; + private final CommandStartedEvent commandStartedEvent; + private final RequestContext requestContext; + private final String collectionName; + + private CommandSucceededEvent commandSucceededEvent; + private CommandFailedEvent commandFailedEvent; + + public MongoHandlerContext(@Nullable ConnectionString connectionString, CommandStartedEvent commandStartedEvent, + RequestContext requestContext) { + + super((carrier, key, value) -> {}, Kind.CLIENT); + this.connectionString = connectionString; + this.commandStartedEvent = commandStartedEvent; + this.requestContext = requestContext; + this.collectionName = getCollectionName(commandStartedEvent); + } + + public CommandStartedEvent getCommandStartedEvent() { + return this.commandStartedEvent; + } + + public RequestContext getRequestContext() { + return this.requestContext; + } + + public String getDatabaseName() { + return commandStartedEvent.getDatabaseName(); + } + + public String getCollectionName() { + return this.collectionName; + } + + public String getCommandName() { + return commandStartedEvent.getCommandName(); + } + + @Nullable + public ConnectionString getConnectionString() { + return connectionString; + } + + void setCommandSucceededEvent(CommandSucceededEvent commandSucceededEvent) { + this.commandSucceededEvent = commandSucceededEvent; + } 
+ + void setCommandFailedEvent(CommandFailedEvent commandFailedEvent) { + this.commandFailedEvent = commandFailedEvent; + } + + /** + * Transform the command name into a collection name; + * + * @param event the {@link CommandStartedEvent} + * @return the name of the collection based on the command + */ + @Nullable + private static String getCollectionName(CommandStartedEvent event) { + + String commandName = event.getCommandName(); + BsonDocument command = event.getCommand(); + + if (COMMANDS_WITH_COLLECTION_NAME.contains(commandName)) { + + String collectionName = getNonEmptyBsonString(command.get(commandName)); + + if (collectionName != null) { + return collectionName; + } + } + + // Some other commands, like getMore, have a field like {"collection": collectionName}. + return command == null ? "" : getNonEmptyBsonString(command.get("collection")); + } + + /** + * Utility method to convert {@link BsonValue} into a plain string. + * + * @return trimmed string from {@code bsonValue} or null if the trimmed string was empty or the value wasn't a string + */ + @Nullable + private static String getNonEmptyBsonString(@Nullable BsonValue bsonValue) { + + if (bsonValue == null || !bsonValue.isString()) { + return null; + } + + String stringValue = bsonValue.asString().getValue().trim(); + + return stringValue.isEmpty() ? null : stringValue; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoHandlerObservationConvention.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoHandlerObservationConvention.java new file mode 100644 index 0000000000..7d1100c582 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoHandlerObservationConvention.java @@ -0,0 +1,33 @@ +/* + * Copyright 2022-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationConvention; + +/** + * {@link ObservationConvention} for {@link MongoHandlerContext}. + * + * @author Greg Turnquist + * @since 4 + */ +public interface MongoHandlerObservationConvention extends ObservationConvention { + + @Override + default boolean supportsContext(Observation.Context context) { + return context instanceof MongoHandlerContext; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoObservation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoObservation.java new file mode 100644 index 0000000000..9dfc292521 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoObservation.java @@ -0,0 +1,178 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import io.micrometer.common.docs.KeyName; +import io.micrometer.observation.docs.ObservationDocumentation; + +/** + * A MongoDB-based {@link io.micrometer.observation.Observation}. + * + * @author Marcin Grzejszczak + * @author Greg Turnquist + * @since 4.0 + */ +enum MongoObservation implements ObservationDocumentation { + + /** + * Timer created around a MongoDB command execution. + */ + MONGODB_COMMAND_OBSERVATION { + + @Override + public String getName() { + return "spring.data.mongodb.command"; + } + + @Override + public KeyName[] getLowCardinalityKeyNames() { + return LowCardinalityCommandKeyNames.values(); + } + + @Override + public KeyName[] getHighCardinalityKeyNames() { + return new KeyName[0]; + } + + }; + + /** + * Enums related to low cardinality key names for MongoDB commands. + */ + enum LowCardinalityCommandKeyNames implements KeyName { + + /** + * MongoDB database system. + */ + DB_SYSTEM { + @Override + public String asString() { + return "db.system"; + } + }, + + /** + * MongoDB connection string. + */ + DB_CONNECTION_STRING { + @Override + public String asString() { + return "db.connection_string"; + } + }, + + /** + * Network transport. + */ + NET_TRANSPORT { + @Override + public String asString() { + return "net.transport"; + } + }, + + /** + * Name of the database host. + */ + NET_PEER_NAME { + @Override + public String asString() { + return "net.peer.name"; + } + }, + + /** + * Logical remote port number. 
+ */ + NET_PEER_PORT { + @Override + public String asString() { + return "net.peer.port"; + } + }, + + /** + * Mongo peer address. + */ + NET_SOCK_PEER_ADDR { + @Override + public String asString() { + return "net.sock.peer.addr"; + } + }, + + /** + * Mongo peer port. + */ + NET_SOCK_PEER_PORT { + @Override + public String asString() { + return "net.sock.peer.port"; + } + }, + + /** + * MongoDB user. + */ + DB_USER { + @Override + public String asString() { + return "db.user"; + } + }, + + /** + * MongoDB database name. + */ + DB_NAME { + @Override + public String asString() { + return "db.name"; + } + }, + + /** + * MongoDB collection name. + */ + MONGODB_COLLECTION { + @Override + public String asString() { + return "db.mongodb.collection"; + } + }, + + /** + * MongoDB cluster identifier. + */ + MONGODB_CLUSTER_ID { + @Override + public String asString() { + return "spring.data.mongodb.cluster_id"; + } + }, + + /** + * MongoDB command value. + */ + MONGODB_COMMAND { + @Override + public String asString() { + return "db.operation"; + } + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoObservationCommandListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoObservationCommandListener.java new file mode 100644 index 0000000000..9360a95de2 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/MongoObservationCommandListener.java @@ -0,0 +1,219 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.observation.contextpropagation.ObservationThreadLocalAccessor; + +import java.util.function.BiConsumer; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.ConnectionString; +import com.mongodb.RequestContext; +import com.mongodb.event.CommandFailedEvent; +import com.mongodb.event.CommandListener; +import com.mongodb.event.CommandStartedEvent; +import com.mongodb.event.CommandSucceededEvent; + +/** + * Implement MongoDB's {@link CommandListener} using Micrometer's {@link Observation} API. + * + * @author OpenZipkin Brave Authors + * @author Marcin Grzejszczak + * @author Greg Turnquist + * @author François Kha + * @since 4.0 + */ +public class MongoObservationCommandListener implements CommandListener { + + private static final Log log = LogFactory.getLog(MongoObservationCommandListener.class); + + private final ObservationRegistry observationRegistry; + private final @Nullable ConnectionString connectionString; + + private final MongoHandlerObservationConvention observationConvention; + + /** + * Create a new {@link MongoObservationCommandListener} to record {@link Observation}s. 
+ * + * @param observationRegistry must not be {@literal null} + */ + public MongoObservationCommandListener(ObservationRegistry observationRegistry) { + this(observationRegistry, null); + } + + /** + * Create a new {@link MongoObservationCommandListener} to record {@link Observation}s. This constructor attaches the + * {@link ConnectionString} to every {@link Observation}. + * + * @param observationRegistry must not be {@literal null} + * @param connectionString can be {@literal null} + */ + public MongoObservationCommandListener(ObservationRegistry observationRegistry, + @Nullable ConnectionString connectionString) { + this(observationRegistry, connectionString, new DefaultMongoHandlerObservationConvention()); + } + + /** + * Create a new {@link MongoObservationCommandListener} to record {@link Observation}s. This constructor attaches the + * {@link ConnectionString} to every {@link Observation} and uses the given {@link MongoHandlerObservationConvention}. + * + * @param observationRegistry must not be {@literal null} + * @param connectionString can be {@literal null} + * @param observationConvention must not be {@literal null} + * @since 4.3 + */ + public MongoObservationCommandListener(ObservationRegistry observationRegistry, + @Nullable ConnectionString connectionString, MongoHandlerObservationConvention observationConvention) { + + Assert.notNull(observationRegistry, "ObservationRegistry must not be null"); + Assert.notNull(observationConvention, "ObservationConvention must not be null"); + + this.observationRegistry = observationRegistry; + this.connectionString = connectionString; + this.observationConvention = observationConvention; + } + + @Override + public void commandStarted(CommandStartedEvent event) { + + if (log.isDebugEnabled()) { + log.debug("Instrumenting the command started event"); + } + + String databaseName = event.getDatabaseName(); + + if ("admin".equals(databaseName)) { + return; // don't instrument commands like "endSessions" + } + + 
RequestContext requestContext = event.getRequestContext(); + + if (requestContext == null) { + return; + } + + Observation parent = observationFromContext(requestContext); + + if (log.isDebugEnabled()) { + log.debug("Found the following observation passed from the mongo context [" + parent + "]"); + } + + MongoHandlerContext observationContext = new MongoHandlerContext(connectionString, event, requestContext); + observationContext.setRemoteServiceName("mongo"); + + Observation observation = MongoObservation.MONGODB_COMMAND_OBSERVATION + .observation(this.observationRegistry, () -> observationContext) // + .observationConvention(this.observationConvention); + + if (parent != null) { + observation.parentObservation(parent); + } + + observation.start(); + + requestContext.put(ObservationThreadLocalAccessor.KEY, observation); + + if (log.isDebugEnabled()) { + log.debug( + "Created a child observation [" + observation + "] for Mongo instrumentation and put it in Mongo context"); + } + } + + @Override + public void commandSucceeded(CommandSucceededEvent event) { + + doInObservation(event.getRequestContext(), (observation, context) -> { + + context.setCommandSucceededEvent(event); + + if (log.isDebugEnabled()) { + log.debug("Command succeeded - will stop observation [" + observation + "]"); + } + + observation.stop(); + }); + } + + @Override + public void commandFailed(CommandFailedEvent event) { + + doInObservation(event.getRequestContext(), (observation, context) -> { + + context.setCommandFailedEvent(event); + + if (log.isDebugEnabled()) { + log.debug("Command failed - will stop observation [" + observation + "]"); + } + + observation.error(event.getThrowable()); + observation.stop(); + }); + } + + /** + * Performs the given action for the {@link Observation} and {@link MongoHandlerContext} if there is an ongoing Mongo + * Observation. Exceptions thrown by the action are relayed to the caller. + * + * @param requestContext the context to extract the Observation from. 
+ * @param action the action to invoke. + */ + private void doInObservation(@Nullable RequestContext requestContext, + BiConsumer action) { + + if (requestContext == null) { + return; + } + + Observation observation = requestContext.getOrDefault(ObservationThreadLocalAccessor.KEY, null); + if (observation == null || !(observation.getContext() instanceof MongoHandlerContext context)) { + return; + } + + action.accept(observation, context); + } + + /** + * Extract the {@link Observation} from MongoDB's {@link RequestContext}. + * + * @param context + * @return + */ + @Nullable + private static Observation observationFromContext(RequestContext context) { + + Observation observation = context.getOrDefault(ObservationThreadLocalAccessor.KEY, null); + + if (observation != null) { + + if (log.isDebugEnabled()) { + log.debug("Found a observation in Mongo context [" + observation + "]"); + } + return observation; + } + + if (log.isDebugEnabled()) { + log.debug("No observation was found - will not create any child observations"); + } + + return null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/package-info.java new file mode 100644 index 0000000000..d240e12f9e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/observability/package-info.java @@ -0,0 +1,5 @@ +/** + * Infrastructure to provide driver observability using Micrometer. 
+ */ +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.observability; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Aggregation.java new file mode 100644 index 0000000000..871f89d041 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Aggregation.java @@ -0,0 +1,148 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.annotation.QueryAnnotation; +import org.springframework.data.mongodb.core.annotation.Collation; + +/** + * The {@link Aggregation} annotation can be used to annotate a {@link org.springframework.data.repository.Repository} + * query method so that it runs the {@link Aggregation#pipeline()} on invocation. + *

          + * Pipeline stages are mapped against the {@link org.springframework.data.repository.Repository} domain type to consider + * {@link org.springframework.data.mongodb.core.mapping.Field field} mappings and may contain simple placeholders + * {@code ?0} as well as {@link org.springframework.expression.spel.standard.SpelExpression SpelExpressions}. + *

          + * Query method {@link org.springframework.data.domain.Sort} and {@link org.springframework.data.domain.Pageable} + * arguments are applied at the end of the pipeline or can be defined manually as part of it. + * + * @author Christoph Strobl + * @since 2.2 + */ +@Collation +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +@QueryAnnotation +@ReadPreference +public @interface Aggregation { + + /** + * Alias for {@link #pipeline()}. Defines the aggregation pipeline to apply. + * + * @return an empty array by default. + * @see #pipeline() + */ + @AliasFor("pipeline") + String[] value() default {}; + + /** + * Defines the aggregation pipeline to apply. + * + *

          +	 *
          +	 * // aggregation resulting in collection with single value
          +	 * @Aggregation("{ '$project': { '_id' : '$lastname' } }")
          +	 * List<String> findAllLastnames();
          +	 *
          +	 * // aggregation with parameter replacement
          +	 * @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }")
          +	 * List<PersonAggregate> groupByLastnameAnd(String property);
          +	 *
          +	 * // aggregation with sort in pipeline
          +	 * @Aggregation(pipeline = {"{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }", "{ '$sort' : { 'lastname' : -1 } }"})
          +	 * List<PersonAggregate> groupByLastnameAnd(String property);
          +	 *
          +	 * // Sort parameter is used for sorting results
          +	 * @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }")
          +	 * List<PersonAggregate> groupByLastnameAnd(String property, Sort sort);
          +	 *
          +	 * // Pageable parameter used for sort, skip and limit
          +	 * @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }")
          +	 * List<PersonAggregate> groupByLastnameAnd(String property, Pageable page);
          +	 *
          +	 * // Single value result aggregation.
          +	 * @Aggregation("{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }")
          +	 * Long sumAge();
          +	 *
          +	 * // Single value wrapped in container object
+	 * @Aggregation("{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }")
          +	 * SumAge sumAgeAndReturnAggregationResultWrapperWithConcreteType();
          +	 *
          +	 * // Raw aggregation result
+	 * @Aggregation("{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }")
+	 * AggregationResults<org.bson.Document> sumAgeAndReturnAggregationResultWrapper();
          +	 * 
          + * + * @return an empty array by default. + */ + @AliasFor("value") + String[] pipeline() default {}; + + /** + * Defines the collation to apply when executing the aggregation. + * + *
          +	 * // Fixed value
          +	 * @Aggregation(pipeline = "...", collation = "en_US")
          +	 * List<Entry> findAllByFixedCollation();
          +	 *
          +	 * // Fixed value as Document
          +	 * @Aggregation(pipeline = "...", collation = "{ 'locale' :  'en_US' }")
          +	 * List<Entry> findAllByFixedJsonCollation();
          +	 *
          +	 * // Dynamic value as String
          +	 * @Aggregation(pipeline = "...", collation = "?0")
          +	 * List<Entry> findAllByDynamicCollation(String collation);
          +	 *
          +	 * // Dynamic value as Document
          +	 * @Aggregation(pipeline = "...", collation = "{ 'locale' :  ?0 }")
          +	 * List<Entry> findAllByDynamicJsonCollation(String collation);
          +	 *
          +	 * // SpEL expression
          +	 * @Aggregation(pipeline = "...", collation = "?#{[0]}")
          +	 * List<Entry> findAllByDynamicSpElCollation(String collation);
          +	 * 
          + * + * @return an empty {@link String} by default. + */ + @AliasFor(annotation = Collation.class, attribute = "value") + String collation() default ""; + + /** + * The mode of the read preference to use. This attribute ({@code @Aggregation(pipeline = { ... }, readPreference = + * "secondary")}) is an alias for: + * + *
+	 * @Aggregation(pipeline = { ... })
          +	 * @ReadPreference("secondary")
          +	 * List<PersonAggregate> groupByLastnameAnd(String property);
          +	 * 
          + * + * @return the index name. + * @since 4.2 + * @see ReadPreference#value() + */ + @AliasFor(annotation = ReadPreference.class, attribute = "value") + String readPreference() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/CountQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/CountQuery.java index 6d4cb94ea7..c8f6cf4d0d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/CountQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/CountQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -41,7 +41,7 @@ * Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the * method name then. Alias for {@link Query#value}. * - * @return + * @return an empty String by default. 
*/ @AliasFor(annotation = Query.class) String value() default ""; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/DeleteQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/DeleteQuery.java index a31590a486..b1d620046f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/DeleteQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/DeleteQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -41,7 +41,7 @@ * Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the * method name then. Alias for {@link Query#value}. * - * @return + * @return empty {@link String} by default. */ @AliasFor(annotation = Query.class) String value() default ""; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ExistsQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ExistsQuery.java index 197bd49c36..0d057340a5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ExistsQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ExistsQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -40,7 +40,7 @@ * Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the * method name then. Alias for {@link Query#value}. * - * @return + * @return empty {@link String} by default. */ @AliasFor(annotation = Query.class) String value() default ""; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Hint.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Hint.java new file mode 100644 index 0000000000..50db722b15 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Hint.java @@ -0,0 +1,54 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * Annotation to declare index hints for repository query, update and aggregate operations. The index is specified by + * its name. + * + * @author Christoph Strobl + * @since 4.1 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +public @interface Hint { + + /** + * The name of the index to use. In case of an {@literal aggregation} the index is evaluated against the initial + * collection or view. + * + * @return the index name. + */ + String value() default ""; + + /** + * The name of the index to use. In case of an {@literal aggregation} the index is evaluated against the initial + * collection or view. + * + * @return the index name. + */ + @AliasFor("value") + String indexName() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java index c350151b95..37109426f9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,7 +21,6 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.core.annotation.AliasFor; import org.springframework.data.annotation.QueryAnnotation; /** @@ -38,32 +37,28 @@ /** * Set the maximum time limit in milliseconds for processing operations. * - * @return + * @return {@literal -1} by default. * @since 1.10 */ long maxExecutionTimeMs() default -1; /** - * Only scan the specified number of documents. + * Sets the number of documents to return per batch.
          + * Use {@literal 0 (zero)} for no limit. A negative limit closes the cursor after returning a single + * batch indicating to the server that the client will not ask for a subsequent one. * - * @return + * @return {@literal 0 (zero)} by default. + * @since 2.1 */ - long maxScanDocuments() default -1; + int cursorBatchSize() default 0; /** * Add a comment to the query. * - * @return + * @return empty {@link String} by default. */ String comment() default ""; - /** - * Using snapshot prevents the cursor from returning a document more than once. - * - * @return - */ - boolean snapshot() default false; - /** * Set {@link org.springframework.data.mongodb.core.query.Meta.CursorOption} to be used when executing query. * @@ -72,4 +67,13 @@ */ org.springframework.data.mongodb.core.query.Meta.CursorOption[] flags() default {}; + /** + * When set to {@literal true}, aggregation stages can write data to disk. + * + * @return {@literal false} by default. + * @since 3.0 + * @see Aggregation + */ + boolean allowDiskUse() default false; + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/MongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/MongoRepository.java index d7c89777ea..5a80e90cd2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/MongoRepository.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/MongoRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,8 +19,9 @@ import org.springframework.data.domain.Example; import org.springframework.data.domain.Sort; +import org.springframework.data.repository.ListCrudRepository; +import org.springframework.data.repository.ListPagingAndSortingRepository; import org.springframework.data.repository.NoRepositoryBean; -import org.springframework.data.repository.PagingAndSortingRepository; import org.springframework.data.repository.query.QueryByExampleExecutor; /** @@ -30,30 +31,11 @@ * @author Christoph Strobl * @author Thomas Darimont * @author Mark Paluch + * @author Khaled Baklouti */ @NoRepositoryBean -public interface MongoRepository extends PagingAndSortingRepository, QueryByExampleExecutor { - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#saveAll(java.lang.Iterable) - */ - @Override - List saveAll(Iterable entites); - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#findAll() - */ - @Override - List findAll(); - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.PagingAndSortingRepository#findAll(org.springframework.data.domain.Sort) - */ - @Override - List findAll(Sort sort); +public interface MongoRepository + extends ListCrudRepository, ListPagingAndSortingRepository, QueryByExampleExecutor { /** * Inserts the given entity. Assumes the instance to be new to be able to apply insertion optimizations. Use the @@ -68,7 +50,7 @@ public interface MongoRepository extends PagingAndSortingRepository extends PagingAndSortingRepository List insert(Iterable entities); - /* - * (non-Javadoc) + /** + * Returns all entities matching the given {@link Example}. 
In case no match could be found an empty {@link List} is + * returned.
          + * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when + * sticking with the default type key ({@code _class}), the query has restrictions such as + * _class : { $in : [com.acme.Person] }.
          + * To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with + * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}. + * * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example) */ @Override List findAll(Example example); - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort) + /** + * Returns all entities matching the given {@link Example} applying the given {@link Sort}. In case no match could be + * found an empty {@link List} is returned.
          + * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when + * sticking with the default type key ({@code _class}), the query has restrictions such as + * _class : { $in : [com.acme.Person] }.
          + * To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with + * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}. + * + * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example, + * org.springframework.data.domain.Sort) */ @Override List findAll(Example example, Sort sort); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Near.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Near.java index 3e013069e7..f0da9965a3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Near.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Near.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Query.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Query.java index c1d1424ce9..fa15ff5af0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Query.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Query.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,7 +21,9 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.springframework.core.annotation.AliasFor; import org.springframework.data.annotation.QueryAnnotation; +import org.springframework.data.mongodb.core.annotation.Collation; /** * Annotation to declare finder queries directly on repository methods. Both attributes allow using a placeholder @@ -31,18 +33,22 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Jorge Rodríguez */ +@Collation @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) @Documented @QueryAnnotation +@Hint +@ReadPreference public @interface Query { /** * Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the * method name then. * - * @return + * @return empty {@link String} by default. */ String value() default ""; @@ -50,7 +56,7 @@ * Defines the fields that should be returned for the given query. Note that only these fields will make it into the * domain object returned. * - * @return + * @return empty {@link String} by default. */ String fields() default ""; @@ -58,7 +64,7 @@ * Returns whether the query defined should be executed as count projection. * * @since 1.3 - * @return + * @return {@literal false} by default. */ boolean count() default false; @@ -66,7 +72,7 @@ * Returns whether the query defined should be executed as exists projection. * * @since 1.10 - * @return + * @return {@literal false} by default. 
*/ boolean exists() default false; @@ -74,7 +80,88 @@ * Returns whether the query should delete matching documents. * * @since 1.5 - * @return + * @return {@literal false} by default. */ boolean delete() default false; + + /** + * Defines a default sort order for the given query. NOTE: The so set defaults can be altered / + * overwritten using an explicit {@link org.springframework.data.domain.Sort} argument of the query method. + * + *
          +	 * 
          +	 *
          +	 * 		@Query(sort = "{ age : -1 }") // order by age descending
          +	 * 		List<Person> findByFirstname(String firstname);
          +	 * 
          +	 * 
          + * + * @return empty {@link String} by default. + * @since 2.1 + */ + String sort() default ""; + + /** + * Defines the collation to apply when executing the query. + * + *
          +	 * // Fixed value
          +	 * @Query(collation = "en_US")
          +	 * List<Entry> findAllByFixedCollation();
          +	 *
          +	 * // Fixed value as Document
          +	 * @Query(collation = "{ 'locale' :  'en_US' }")
          +	 * List<Entry> findAllByFixedJsonCollation();
          +	 *
          +	 * // Dynamic value as String
          +	 * @Query(collation = "?0")
          +	 * List<Entry> findAllByDynamicCollation(String collation);
          +	 *
          +	 * // Dynamic value as Document
          +	 * @Query(collation = "{ 'locale' :  ?0 }")
          +	 * List<Entry> findAllByDynamicJsonCollation(String collation);
          +	 *
          +	 * // SpEL expression
          +	 * @Query(collation = "?#{[0]}")
          +	 * List<Entry> findAllByDynamicSpElCollation(String collation);
          +	 * 
          + * + * @return an empty {@link String} by default. + * @since 2.2 + */ + @AliasFor(annotation = Collation.class, attribute = "value") + String collation() default ""; + + /** + * The name of the index to use. {@code @Query(value = "...", hint = "lastname-idx")} can be used as shortcut for: + * + *
          +	 * @Query(...)
          +	 * @Hint("lastname-idx")
          +	 * List<User> findAllByLastname(String collation);
          +	 * 
          + * + * @return the index name. + * @since 4.1 + * @see Hint#indexName() + */ + @AliasFor(annotation = Hint.class, attribute = "indexName") + String hint() default ""; + + /** + * The mode of the read preference to use. This attribute + * ({@code @Query(value = "...", readPreference = "secondary")}) is an alias for: + * + *
          +	 * @Query(...)
          +	 * @ReadPreference("secondary")
          +	 * List<User> findAllByLastname(String lastname);
          +	 * 
          + * + * @return the index name. + * @since 4.2 + * @see ReadPreference#value() + */ + @AliasFor(annotation = ReadPreference.class, attribute = "value") + String readPreference() default ""; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReactiveMongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReactiveMongoRepository.java index eb6c6adcd2..f8a2b34d11 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReactiveMongoRepository.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReactiveMongoRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,6 +23,7 @@ import org.springframework.data.domain.Sort; import org.springframework.data.repository.NoRepositoryBean; import org.springframework.data.repository.query.ReactiveQueryByExampleExecutor; +import org.springframework.data.repository.reactive.ReactiveCrudRepository; import org.springframework.data.repository.reactive.ReactiveSortingRepository; /** @@ -32,7 +33,8 @@ * @since 2.0 */ @NoRepositoryBean -public interface ReactiveMongoRepository extends ReactiveSortingRepository, ReactiveQueryByExampleExecutor { +public interface ReactiveMongoRepository + extends ReactiveCrudRepository, ReactiveSortingRepository, ReactiveQueryByExampleExecutor { /** * Inserts the given entity. 
Assumes the instance to be new to be able to apply insertion optimizations. Use the @@ -64,16 +66,33 @@ public interface ReactiveMongoRepository extends ReactiveSortingRepositor */ Flux insert(Publisher entities); - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example) + /** + * Returns all entities matching the given {@link Example}. In case no match could be found an empty {@link Flux} is + * returned.
          + * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when + * sticking with the default type key ({@code _class}), the query has restrictions such as + * _class : { $in : [com.acme.Person] }.
          + * To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with + * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}. + * + * @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#findAll(org.springframework.data.domain.Example) */ + @Override Flux findAll(Example example); - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort) + /** + * Returns all entities matching the given {@link Example} applying the given {@link Sort}. In case no match could be + * found an empty {@link Flux} is returned.
          + * By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when + * sticking with the default type key ({@code _class}), the query has restrictions such as + * _class : { $in : [com.acme.Person] }.
          + * To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with + * {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}. + * + * @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#findAll(org.springframework.data.domain.Example, + * org.springframework.data.domain.Sort) */ + @Override Flux findAll(Example example, Sort sort); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReadPreference.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReadPreference.java new file mode 100644 index 0000000000..ddb4a67d1c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/ReadPreference.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation to declare read preference for repository and query. 
+ * + * @author Jorge Rodríguez + * @author Christoph Strobl + * @since 4.2 + * @see com.mongodb.ReadPreference + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +public @interface ReadPreference { + + /** + * Configure the read preference mode. + * + * @return read preference mode. + */ + String value() default ""; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Tailable.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Tailable.java index 779ce00aab..7bd22059c5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Tailable.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Tailable.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Update.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Update.java new file mode 100644 index 0000000000..9bc62aa258 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Update.java @@ -0,0 +1,67 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * Annotation to declare update operators directly on repository methods. Both attributes allow using a placeholder + * notation of {@code ?0}, {@code ?1} and so on. The update will be applied to documents matching the either method name + * derived or annotated query, but not to any custom implementation methods. + * + * @author Christoph Strobl + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE }) +@Documented +public @interface Update { + + /** + * Takes a MongoDB JSON string to define the actual update to be executed. + * + * @return the MongoDB JSON string representation of the update. Empty string by default. + * @see #update() + */ + @AliasFor("update") + String value() default ""; + + /** + * Takes a MongoDB JSON string to define the actual update to be executed. + * + * @return the MongoDB JSON string representation of the update. Empty string by default. + * @see https://docs.mongodb.com/manual/tutorial/update-documents/ + */ + @AliasFor("value") + String update() default ""; + + /** + * Takes a MongoDB JSON string representation of an aggregation pipeline to define the update stages to be executed. + *

          + * This allows to e.g. define update statement that can evaluate conditionals based on a field value, etc. + * + * @return the MongoDB JSON string representation of the update pipeline. Empty array by default. + * @see https://docs.mongodb.com/manual/tutorial/update-documents-with-aggregation-pipeline + */ + String[] pipeline() default {}; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/AotMongoRepositoryPostProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/AotMongoRepositoryPostProcessor.java new file mode 100644 index 0000000000..d49726f724 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/AotMongoRepositoryPostProcessor.java @@ -0,0 +1,52 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.aot; + +import org.springframework.aot.generate.GenerationContext; +import org.springframework.data.mongodb.aot.LazyLoadingProxyAotProcessor; +import org.springframework.data.mongodb.aot.MongoAotPredicates; +import org.springframework.data.repository.config.AotRepositoryContext; +import org.springframework.data.repository.config.RepositoryRegistrationAotProcessor; +import org.springframework.data.util.TypeContributor; +import org.springframework.data.util.TypeUtils; + +/** + * @author Christoph Strobl + */ +public class AotMongoRepositoryPostProcessor extends RepositoryRegistrationAotProcessor { + + private final LazyLoadingProxyAotProcessor lazyLoadingProxyAotProcessor = new LazyLoadingProxyAotProcessor(); + + @Override + protected void contribute(AotRepositoryContext repositoryContext, GenerationContext generationContext) { + // do some custom type registration here + super.contribute(repositoryContext, generationContext); + + repositoryContext.getResolvedTypes().stream().filter(MongoAotPredicates.IS_SIMPLE_TYPE.negate()).forEach(type -> { + TypeContributor.contribute(type, it -> true, generationContext); + lazyLoadingProxyAotProcessor.registerLazyLoadingProxyIfNeeded(type, generationContext); + }); + } + + @Override + protected void contributeType(Class type, GenerationContext generationContext) { + + if (TypeUtils.type(type).isPartOf("org.springframework.data.mongodb", "com.mongodb")) { + return; + } + super.contributeType(type, generationContext); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/RepositoryRuntimeHints.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/RepositoryRuntimeHints.java new file mode 100644 index 0000000000..b1ba6ea3f0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/RepositoryRuntimeHints.java @@ -0,0 +1,96 @@ +/* + * Copyright 
2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.aot; + +import static org.springframework.data.mongodb.aot.MongoAotPredicates.*; + +import java.util.List; + +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.RuntimeHintsRegistrar; +import org.springframework.aot.hint.TypeReference; +import org.springframework.data.mongodb.aot.MongoAotPredicates; +import org.springframework.data.mongodb.repository.support.CrudMethodMetadata; +import org.springframework.data.mongodb.repository.support.QuerydslMongoPredicateExecutor; +import org.springframework.data.mongodb.repository.support.ReactiveQuerydslMongoPredicateExecutor; +import org.springframework.data.querydsl.QuerydslUtils; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +/** + * @author Christoph Strobl + * @since 4.0 + */ +class RepositoryRuntimeHints implements RuntimeHintsRegistrar { + + @Override + public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + hints.reflection().registerTypes( + List.of(TypeReference.of("org.springframework.data.mongodb.repository.support.SimpleMongoRepository")), + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + + if (isAopPresent(classLoader)) { + + 
// required for pushing ReadPreference,... into the default repository implementation + hints.proxies().registerJdkProxy(CrudMethodMetadata.class, // + org.springframework.aop.SpringProxy.class, // + org.springframework.aop.framework.Advised.class, // + org.springframework.core.DecoratingProxy.class); + } + + if (isReactorPresent()) { + + hints.reflection().registerTypes( + List.of( + TypeReference.of("org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository")), + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + } + + if (QuerydslUtils.QUERY_DSL_PRESENT) { + registerQuerydslHints(hints, classLoader); + } + } + + /** + * Register hints for Querydsl integration. + * + * @param hints must not be {@literal null}. + * @param classLoader can be {@literal null}. + * @since 4.0.2 + */ + private static void registerQuerydslHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + if (isReactorPresent()) { + hints.reflection().registerType(ReactiveQuerydslMongoPredicateExecutor.class, + MemberCategory.INVOKE_PUBLIC_METHODS, MemberCategory.INVOKE_DECLARED_CONSTRUCTORS); + + } + + if (MongoAotPredicates.isSyncClientPresent(classLoader)) { + hints.reflection().registerType(QuerydslMongoPredicateExecutor.class, MemberCategory.INVOKE_PUBLIC_METHODS, + MemberCategory.INVOKE_DECLARED_CONSTRUCTORS); + } + } + + private static boolean isAopPresent(@Nullable ClassLoader classLoader) { + return ClassUtils.isPresent("org.springframework.aop.Pointcut", classLoader); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/package-info.java new file mode 100644 index 0000000000..9016519d9b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/aot/package-info.java @@ -0,0 +1,5 @@ +/** + * 
Ahead-Of-Time processors for MongoDB repositories. + */ +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.repository.aot; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryBean.java index bd92baf901..0b2515af52 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,9 +19,9 @@ import java.util.Optional; import java.util.Set; -import javax.enterprise.context.spi.CreationalContext; -import javax.enterprise.inject.spi.Bean; -import javax.enterprise.inject.spi.BeanManager; +import jakarta.enterprise.context.spi.CreationalContext; +import jakarta.enterprise.inject.spi.Bean; +import jakarta.enterprise.inject.spi.BeanManager; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.repository.support.MongoRepositoryFactory; @@ -54,20 +54,15 @@ public MongoRepositoryBean(Bean operations, Set qua super(qualifiers, repositoryType, beanManager, detector); - Assert.notNull(operations, "MongoOperations bean must not be null!"); + Assert.notNull(operations, "MongoOperations bean must not be null"); this.operations = 
operations; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.cdi.CdiRepositoryBean#create(javax.enterprise.context.spi.CreationalContext, java.lang.Class) - */ @Override - protected T create(CreationalContext creationalContext, Class repositoryType, Optional customImplementation) { + protected T create(CreationalContext creationalContext, Class repositoryType) { MongoOperations mongoOperations = getDependencyInstance(operations, MongoOperations.class); - MongoRepositoryFactory factory = new MongoRepositoryFactory(mongoOperations); - return customImplementation.isPresent() ? factory.getRepository(repositoryType, customImplementation.get()) : factory.getRepository(repositoryType); + return create(() -> new MongoRepositoryFactory(mongoOperations), repositoryType); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryExtension.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryExtension.java index b095e38e60..c74e9c3f2f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryExtension.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/cdi/MongoRepositoryExtension.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,15 +24,16 @@ import java.util.Optional; import java.util.Set; -import javax.enterprise.event.Observes; -import javax.enterprise.inject.UnsatisfiedResolutionException; -import javax.enterprise.inject.spi.AfterBeanDiscovery; -import javax.enterprise.inject.spi.Bean; -import javax.enterprise.inject.spi.BeanManager; -import javax.enterprise.inject.spi.ProcessBean; +import jakarta.enterprise.event.Observes; +import jakarta.enterprise.inject.UnsatisfiedResolutionException; +import jakarta.enterprise.inject.spi.AfterBeanDiscovery; +import jakarta.enterprise.inject.spi.Bean; +import jakarta.enterprise.inject.spi.BeanManager; +import jakarta.enterprise.inject.spi.ProcessBean; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.repository.cdi.CdiRepositoryBean; import org.springframework.data.repository.cdi.CdiRepositoryExtensionSupport; @@ -45,7 +46,7 @@ */ public class MongoRepositoryExtension extends CdiRepositoryExtensionSupport { - private static final Logger LOG = LoggerFactory.getLogger(MongoRepositoryExtension.class); + private static final Log LOG = LogFactory.getLog(MongoRepositoryExtension.class); private final Map, Bean> mongoOperations = new HashMap, Bean>(); @@ -66,7 +67,7 @@ void processBean(@Observes ProcessBean processBean) { } // Store the EntityManager bean using its qualifiers. 
- mongoOperations.put(new HashSet(bean.getQualifiers()), (Bean) bean); + mongoOperations.put(new HashSet<>(bean.getQualifiers()), (Bean) bean); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableMongoRepositories.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableMongoRepositories.java index 767472cbc4..b6f693e16d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableMongoRepositories.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableMongoRepositories.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,6 +23,7 @@ import java.lang.annotation.Target; import org.springframework.beans.factory.FactoryBean; +import org.springframework.beans.factory.support.BeanNameGenerator; import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Import; import org.springframework.data.mongodb.core.MongoTemplate; @@ -47,7 +48,8 @@ /** * Alias for the {@link #basePackages()} attribute. Allows for more concise annotation declarations e.g.: - * {@code @EnableMongoRepositories("org.my.pkg")} instead of {@code @EnableMongoRepositories(basePackages="org.my.pkg")}. + * {@code @EnableMongoRepositories("org.my.pkg")} instead of + * {@code @EnableMongoRepositories(basePackages="org.my.pkg")}. 
*/ String[] value() default {}; @@ -80,7 +82,7 @@ * for a repository named {@code PersonRepository} the corresponding implementation class will be looked up scanning * for {@code PersonRepositoryImpl}. * - * @return + * @return {@literal Impl} by default. */ String repositoryImplementationPostfix() default "Impl"; @@ -88,7 +90,7 @@ * Configures the location of where to find the Spring Data named queries properties file. Will default to * {@code META-INFO/mongo-named-queries.properties}. * - * @return + * @return empty {@link String} by default. */ String namedQueriesLocation() default ""; @@ -96,7 +98,7 @@ * Returns the key of the {@link QueryLookupStrategy} to be used for lookup queries for query methods. Defaults to * {@link Key#CREATE_IF_NOT_FOUND}. * - * @return + * @return {@link Key#CREATE_IF_NOT_FOUND} by default. */ Key queryLookupStrategy() default Key.CREATE_IF_NOT_FOUND; @@ -104,35 +106,44 @@ * Returns the {@link FactoryBean} class to be used for each repository instance. Defaults to * {@link MongoRepositoryFactoryBean}. * - * @return + * @return {@link MongoRepositoryFactoryBean} by default. */ Class repositoryFactoryBeanClass() default MongoRepositoryFactoryBean.class; /** * Configure the repository base class to be used to create repository proxies for this particular configuration. * - * @return + * @return {@link DefaultRepositoryBaseClass} by default. * @since 1.8 */ Class repositoryBaseClass() default DefaultRepositoryBaseClass.class; + /** + * Configure a specific {@link BeanNameGenerator} to be used when creating the repository beans. + * @return the {@link BeanNameGenerator} to be used or the base {@link BeanNameGenerator} interface to indicate context default. + * @since 4.4 + */ + Class nameGenerator() default BeanNameGenerator.class; + /** * Configures the name of the {@link MongoTemplate} bean to be used with the repositories detected. * - * @return + * @return {@literal mongoTemplate} by default. 
*/ String mongoTemplateRef() default "mongoTemplate"; /** * Whether to automatically create indexes for query methods defined in the repository interface. * - * @return + * @return {@literal false} by default. */ boolean createIndexesForQueryMethods() default false; /** * Configures whether nested repository-interfaces (e.g. defined as inner classes) should be discovered by the * repositories infrastructure. + * + * @return {@literal false} by default. */ boolean considerNestedRepositories() default false; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableReactiveMongoRepositories.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableReactiveMongoRepositories.java index 1c8403b4c6..9973bc7bdf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableReactiveMongoRepositories.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/EnableReactiveMongoRepositories.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,6 +24,7 @@ import java.lang.annotation.Target; import org.springframework.beans.factory.FactoryBean; +import org.springframework.beans.factory.support.BeanNameGenerator; import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Import; import org.springframework.data.mongodb.core.MongoTemplate; @@ -84,7 +85,7 @@ * for a repository named {@code PersonRepository} the corresponding implementation class will be looked up scanning * for {@code PersonRepositoryImpl}. * - * @return + * @return {@literal Impl} by default. */ String repositoryImplementationPostfix() default "Impl"; @@ -92,7 +93,7 @@ * Configures the location of where to find the Spring Data named queries properties file. Will default to * {@code META-INF/mongo-named-queries.properties}. * - * @return + * @return empty {@link String} by default. */ String namedQueriesLocation() default ""; @@ -100,7 +101,7 @@ * Returns the key of the {@link QueryLookupStrategy} to be used for lookup queries for query methods. Defaults to * {@link Key#CREATE_IF_NOT_FOUND}. * - * @return + * @return {@link Key#CREATE_IF_NOT_FOUND} by default. */ Key queryLookupStrategy() default Key.CREATE_IF_NOT_FOUND; @@ -108,34 +109,43 @@ * Returns the {@link FactoryBean} class to be used for each repository instance. Defaults to * {@link MongoRepositoryFactoryBean}. * - * @return + * @return {@link ReactiveMongoRepositoryFactoryBean} by default. */ Class repositoryFactoryBeanClass() default ReactiveMongoRepositoryFactoryBean.class; /** * Configure the repository base class to be used to create repository proxies for this particular configuration. 
* - * @return + * @return {@link DefaultRepositoryBaseClass} by default. */ Class repositoryBaseClass() default DefaultRepositoryBaseClass.class; + /** + * Configure a specific {@link BeanNameGenerator} to be used when creating the repository beans. + * @return the {@link BeanNameGenerator} to be used or the base {@link BeanNameGenerator} interface to indicate context default. + * @since 4.4 + */ + Class nameGenerator() default BeanNameGenerator.class; + /** * Configures the name of the {@link MongoTemplate} bean to be used with the repositories detected. * - * @return + * @return {@literal reactiveMongoTemplate} by default. */ String reactiveMongoTemplateRef() default "reactiveMongoTemplate"; /** * Whether to automatically create indexes for query methods defined in the repository interface. * - * @return + * @return {@literal false} by default. */ boolean createIndexesForQueryMethods() default false; /** * Configures whether nested repository-interfaces (e.g. defined as inner classes) should be discovered by the * repositories infrastructure. + * + * @return {@literal false} by default. */ boolean considerNestedRepositories() default false; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrar.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrar.java index e2a6e8d311..508ca16f50 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrar.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrar.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -28,19 +28,11 @@ */ class MongoRepositoriesRegistrar extends RepositoryBeanDefinitionRegistrarSupport { - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getAnnotation() - */ @Override protected Class getAnnotation() { return EnableMongoRepositories.class; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getExtension() - */ @Override protected RepositoryConfigurationExtension getExtension() { return new MongoRepositoryConfigurationExtension(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigNamespaceHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigNamespaceHandler.java index 53ffc48b4a..2d852a0e07 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigNamespaceHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigNamespaceHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -27,10 +27,6 @@ */ public class MongoRepositoryConfigNamespaceHandler extends MongoNamespaceHandler { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.config.MongoNamespaceHandler#init() - */ @Override public void init() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtension.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtension.java index 8a75aeeff1..9db7be0069 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtension.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtension.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,27 +19,23 @@ import java.util.Collection; import java.util.Collections; -import org.springframework.beans.factory.support.AbstractBeanDefinition; +import org.springframework.beans.factory.aot.BeanRegistrationAotProcessor; import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.BeanDefinitionRegistry; -import org.springframework.beans.factory.support.RootBeanDefinition; import org.springframework.core.annotation.AnnotationAttributes; import org.springframework.data.config.ParsingUtils; -import org.springframework.data.mongodb.config.BeanNames; +import org.springframework.data.mongodb.repository.aot.AotMongoRepositoryPostProcessor; import org.springframework.data.mongodb.core.mapping.Document; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.repository.MongoRepository; import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean; import org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource; -import org.springframework.data.repository.config.RepositoryConfigurationExtension; import org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport; -import org.springframework.data.repository.config.RepositoryConfigurationSource; import org.springframework.data.repository.config.XmlRepositoryConfigurationSource; import org.springframework.data.repository.core.RepositoryMetadata; + import org.w3c.dom.Element; /** - * {@link RepositoryConfigurationExtension} for MongoDB. + * {@link org.springframework.data.repository.config.RepositoryConfigurationExtension} for MongoDB. 
* * @author Oliver Gierke * @author Mark Paluch @@ -49,54 +45,40 @@ public class MongoRepositoryConfigurationExtension extends RepositoryConfigurati private static final String MONGO_TEMPLATE_REF = "mongo-template-ref"; private static final String CREATE_QUERY_INDEXES = "create-query-indexes"; - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getModuleName() - */ @Override public String getModuleName() { return "MongoDB"; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getModulePrefix() - */ @Override - protected String getModulePrefix() { + public String getModulePrefix() { return "mongo"; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtension#getRepositoryFactoryBeanClassName() - */ public String getRepositoryFactoryBeanClassName() { return MongoRepositoryFactoryBean.class.getName(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getIdentifyingAnnotations() - */ @Override - protected Collection> getIdentifyingAnnotations() { + public String getModuleIdentifier() { + return getModulePrefix(); + } + + @Override + public Collection> getIdentifyingAnnotations() { return Collections.singleton(Document.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getIdentifyingTypes() - */ + @Override + public Class getRepositoryAotProcessor() { + return AotMongoRepositoryPostProcessor.class; + } + @Override protected Collection> getIdentifyingTypes() { return Collections.singleton(MongoRepository.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, 
org.springframework.data.repository.config.XmlRepositoryConfigurationSource) - */ @Override public void postProcess(BeanDefinitionBuilder builder, XmlRepositoryConfigurationSource config) { @@ -106,10 +88,6 @@ public void postProcess(BeanDefinitionBuilder builder, XmlRepositoryConfiguratio ParsingUtils.setPropertyValue(builder, element, CREATE_QUERY_INDEXES, "createIndexesForQueryMethods"); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource) - */ @Override public void postProcess(BeanDefinitionBuilder builder, AnnotationRepositoryConfigurationSource config) { @@ -119,29 +97,6 @@ public void postProcess(BeanDefinitionBuilder builder, AnnotationRepositoryConfi builder.addPropertyValue("createIndexesForQueryMethods", attributes.getBoolean("createIndexesForQueryMethods")); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#registerBeansForRoot(org.springframework.beans.factory.support.BeanDefinitionRegistry, org.springframework.data.repository.config.RepositoryConfigurationSource) - */ - @Override - public void registerBeansForRoot(BeanDefinitionRegistry registry, RepositoryConfigurationSource configurationSource) { - - super.registerBeansForRoot(registry, configurationSource); - - if (!registry.containsBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME)) { - - RootBeanDefinition definition = new RootBeanDefinition(MongoMappingContext.class); - definition.setRole(AbstractBeanDefinition.ROLE_INFRASTRUCTURE); - definition.setSource(configurationSource.getSource()); - - registry.registerBeanDefinition(BeanNames.MAPPING_CONTEXT_BEAN_NAME, definition); - } - } - - /* - * (non-Javadoc) - * @see 
org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#useRepositoryConfiguration(org.springframework.data.repository.core.RepositoryMetadata) - */ @Override protected boolean useRepositoryConfiguration(RepositoryMetadata metadata) { return !metadata.isReactiveRepository(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrar.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrar.java index 995d81afa2..2c8384be93 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrar.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrar.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -30,19 +30,11 @@ */ class ReactiveMongoRepositoriesRegistrar extends RepositoryBeanDefinitionRegistrarSupport { - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getAnnotation() - */ @Override protected Class getAnnotation() { return EnableReactiveMongoRepositories.class; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryBeanDefinitionRegistrarSupport#getExtension() - */ @Override protected RepositoryConfigurationExtension getExtension() { return new ReactiveMongoRepositoryConfigurationExtension(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtension.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtension.java index a4c7b75dd9..817cc397c2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtension.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtension.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -42,36 +42,20 @@ public class ReactiveMongoRepositoryConfigurationExtension extends MongoReposito private static final String MONGO_TEMPLATE_REF = "reactive-mongo-template-ref"; private static final String CREATE_QUERY_INDEXES = "create-query-indexes"; - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getModuleName() - */ @Override public String getModuleName() { return "Reactive MongoDB"; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtension#getRepositoryFactoryClassName() - */ public String getRepositoryFactoryClassName() { return ReactiveMongoRepositoryFactoryBean.class.getName(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#getIdentifyingTypes() - */ @Override protected Collection> getIdentifyingTypes() { return Collections.singleton(ReactiveMongoRepository.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.XmlRepositoryConfigurationSource) - */ @Override public void postProcess(BeanDefinitionBuilder builder, XmlRepositoryConfigurationSource config) { @@ -81,10 +65,6 @@ public void postProcess(BeanDefinitionBuilder builder, XmlRepositoryConfiguratio ParsingUtils.setPropertyValue(builder, element, CREATE_QUERY_INDEXES, "createIndexesForQueryMethods"); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#postProcess(org.springframework.beans.factory.support.BeanDefinitionBuilder, org.springframework.data.repository.config.AnnotationRepositoryConfigurationSource) - */ @Override public void postProcess(BeanDefinitionBuilder builder, AnnotationRepositoryConfigurationSource config) { @@ -94,10 +74,6 @@ public void postProcess(BeanDefinitionBuilder builder, AnnotationRepositoryConfi builder.addPropertyValue("createIndexesForQueryMethods", attributes.getBoolean("createIndexesForQueryMethods")); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.config.RepositoryConfigurationExtensionSupport#useRepositoryConfiguration(org.springframework.data.repository.core.RepositoryMetadata) - */ @Override protected boolean useRepositoryConfiguration(RepositoryMetadata metadata) { return metadata.isReactiveRepository(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractMongoQuery.java index 27c7066887..4d0d604a27 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractMongoQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractMongoQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,20 +15,54 @@ */ package org.springframework.data.mongodb.repository.query; +import java.util.ArrayList; +import java.util.List; + +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; + +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ValueEvaluationContextProvider; +import org.springframework.data.expression.ValueExpression; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mapping.model.SpELExpressionEvaluator; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery; import org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind; +import org.springframework.data.mongodb.core.ExecutableUpdateOperation.ExecutableUpdate; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.aggregation.AggregationOperation; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.query.BasicUpdate; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.Update; import org.springframework.data.mongodb.repository.query.MongoQueryExecution.DeleteExecution; import org.springframework.data.mongodb.repository.query.MongoQueryExecution.GeoNearExecution; import 
org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagedExecution; import org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagingGeoNearExecution; import org.springframework.data.mongodb.repository.query.MongoQueryExecution.SlicedExecution; +import org.springframework.data.mongodb.repository.query.MongoQueryExecution.UpdateExecution; +import org.springframework.data.mongodb.util.json.ParameterBindingContext; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; import org.springframework.data.repository.query.ParameterAccessor; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; import org.springframework.data.repository.query.RepositoryQuery; import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.data.util.Lazy; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.ExpressionParser; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.client.MongoDatabase; /** * Base class for {@link RepositoryQuery} implementations for Mongo. 
@@ -37,23 +71,36 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Jorge Rodríguez */ public abstract class AbstractMongoQuery implements RepositoryQuery { private final MongoQueryMethod method; private final MongoOperations operations; private final ExecutableFind executableFind; + private final ExecutableUpdate executableUpdate; + private final Lazy codec = Lazy + .of(() -> new ParameterBindingDocumentCodec(getCodecRegistry())); + private final ValueExpressionDelegate valueExpressionDelegate; + private final ValueEvaluationContextProvider valueEvaluationContextProvider; /** * Creates a new {@link AbstractMongoQuery} from the given {@link MongoQueryMethod} and {@link MongoOperations}. * * @param method must not be {@literal null}. * @param operations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. + * @deprecated use the constructor version with {@link ValueExpressionDelegate} */ - public AbstractMongoQuery(MongoQueryMethod method, MongoOperations operations) { + @Deprecated(since = "4.4.0") + public AbstractMongoQuery(MongoQueryMethod method, MongoOperations operations, ExpressionParser expressionParser, + QueryMethodEvaluationContextProvider evaluationContextProvider) { - Assert.notNull(operations, "MongoOperations must not be null!"); - Assert.notNull(method, "MongoQueryMethod must not be null!"); + Assert.notNull(operations, "MongoOperations must not be null"); + Assert.notNull(method, "MongoQueryMethod must not be null"); + Assert.notNull(expressionParser, "SpelExpressionParser must not be null"); + Assert.notNull(evaluationContextProvider, "QueryMethodEvaluationContextProvider must not be null"); this.method = method; this.operations = operations; @@ -62,46 +109,112 @@ public AbstractMongoQuery(MongoQueryMethod method, MongoOperations operations) { Class type = metadata.getCollectionEntity().getType(); 
this.executableFind = operations.query(type); + this.executableUpdate = operations.update(type); + this.valueExpressionDelegate = new ValueExpressionDelegate(new QueryMethodValueEvaluationContextAccessor(new StandardEnvironment(), evaluationContextProvider.getEvaluationContextProvider()), ValueExpressionParser.create(() -> expressionParser)); + this.valueEvaluationContextProvider = valueExpressionDelegate.createValueContextProvider(method.getParameters()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.RepositoryQuery#getQueryMethod() + /** + * Creates a new {@link AbstractMongoQuery} from the given {@link MongoQueryMethod} and {@link MongoOperations}. + * + * @param method must not be {@literal null}. + * @param operations must not be {@literal null}. + * @param delegate must not be {@literal null} + * @since 4.4.0 */ + public AbstractMongoQuery(MongoQueryMethod method, MongoOperations operations, ValueExpressionDelegate delegate) { + + Assert.notNull(operations, "MongoOperations must not be null"); + Assert.notNull(method, "MongoQueryMethod must not be null"); + + this.method = method; + this.operations = operations; + + MongoEntityMetadata metadata = method.getEntityInformation(); + Class type = metadata.getCollectionEntity().getType(); + + this.executableFind = operations.query(type); + this.executableUpdate = operations.update(type); + this.valueExpressionDelegate = delegate; + this.valueEvaluationContextProvider = delegate.createValueContextProvider(method.getParameters()); + } + + @Override public MongoQueryMethod getQueryMethod() { return method; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.RepositoryQuery#execute(java.lang.Object[]) - */ @Override public Object execute(Object[] parameters) { ConvertingParameterAccessor accessor = new ConvertingParameterAccessor(operations.getConverter(), new MongoParametersParameterAccessor(method, parameters)); - Query query = createQuery(accessor); - - 
applyQueryMetaAttributesWhenPresent(query); ResultProcessor processor = method.getResultProcessor().withDynamicProjection(accessor); Class typeToRead = processor.getReturnedType().getTypeToRead(); + return processor.processResult(doExecute(method, processor, accessor, typeToRead)); + } + + /** + * Execute the {@link RepositoryQuery} of the given method with the parameters provided by the + * {@link ConvertingParameterAccessor accessor} + * + * @param method the {@link MongoQueryMethod} invoked. Never {@literal null}. + * @param processor {@link ResultProcessor} for post procession. Never {@literal null}. + * @param accessor for providing invocation arguments. Never {@literal null}. + * @param typeToRead the desired component target type. Can be {@literal null}. + */ + @Nullable + protected Object doExecute(MongoQueryMethod method, ResultProcessor processor, ConvertingParameterAccessor accessor, + @Nullable Class typeToRead) { + + Query query = createQuery(accessor); + + applyQueryMetaAttributesWhenPresent(query); + query = applyAnnotatedDefaultSortIfPresent(query); + query = applyAnnotatedCollationIfPresent(query, accessor); + query = applyHintIfPresent(query); + query = applyAnnotatedReadPreferenceIfPresent(query); + FindWithQuery find = typeToRead == null // ? executableFind // : executableFind.as(typeToRead); - MongoQueryExecution execution = getExecution(accessor, find); + return getExecution(accessor, find).execute(query); + } + + /** + * If present apply the {@link com.mongodb.ReadPreference} from the {@link org.springframework.data.mongodb.repository.ReadPreference} annotation. + * + * @param query must not be {@literal null}. + * @return never {@literal null}. 
+ * @since 4.2 + */ + private Query applyAnnotatedReadPreferenceIfPresent(Query query) { + + if (!method.hasAnnotatedReadPreference()) { + return query; + } - return processor.processResult(execution.execute(query)); + return query.withReadPreference(com.mongodb.ReadPreference.valueOf(method.getAnnotatedReadPreference())); } private MongoQueryExecution getExecution(ConvertingParameterAccessor accessor, FindWithQuery operation) { if (isDeleteQuery()) { return new DeleteExecution(operations, method); - } else if (method.isGeoNearQuery() && method.isPageQuery()) { + } + + if (method.isModifyingQuery()) { + if (isLimiting()) { + throw new IllegalStateException( + String.format("Update method must not be limiting; Offending method: %s", method)); + } + return new UpdateExecution(executableUpdate, method, () -> createUpdate(accessor), accessor); + } + + if (method.isGeoNearQuery() && method.isPageQuery()) { return new PagingGeoNearExecution(operation, method, accessor, this); } else if (method.isGeoNearQuery()) { return new GeoNearExecution(operation, method, accessor); @@ -110,7 +223,10 @@ private MongoQueryExecution getExecution(ConvertingParameterAccessor accessor, F } else if (method.isStreamQuery()) { return q -> operation.matching(q).stream(); } else if (method.isCollectionQuery()) { - return q -> operation.matching(q.with(accessor.getPageable())).all(); + return q -> operation.matching(q.with(accessor.getPageable()).with(accessor.getSort())).all(); + } else if (method.isScrollQuery()) { + return q -> operation.matching(q.with(accessor.getPageable()).with(accessor.getSort())) + .scroll(accessor.getScrollPosition()); } else if (method.isPageQuery()) { return new PagedExecution(operation, accessor.getPageable()); } else if (isCountQuery()) { @@ -119,7 +235,6 @@ private MongoQueryExecution getExecution(ConvertingParameterAccessor accessor, F return q -> operation.matching(q).exists(); } else { return q -> { - TerminatingFind find = operation.matching(q); return 
isLimiting() ? find.firstValue() : find.oneValue(); }; @@ -135,6 +250,54 @@ Query applyQueryMetaAttributesWhenPresent(Query query) { return query; } + /** + * Add a default sort derived from {@link org.springframework.data.mongodb.repository.Query#sort()} to the given + * {@link Query} if present. + * + * @param query the {@link Query} to potentially apply the sort to. + * @return the query with potential default sort applied. + * @since 2.1 + */ + Query applyAnnotatedDefaultSortIfPresent(Query query) { + + if (!method.hasAnnotatedSort()) { + return query; + } + + return QueryUtils.decorateSort(query, Document.parse(method.getAnnotatedSort())); + } + + /** + * If present apply a {@link org.springframework.data.mongodb.core.query.Collation} derived from the + * {@link org.springframework.data.repository.query.QueryMethod} the given {@link Query}. + * + * @param query must not be {@literal null}. + * @param accessor the {@link ParameterAccessor} used to obtain parameter placeholder replacement values. + * @return + * @since 2.2 + */ + Query applyAnnotatedCollationIfPresent(Query query, ConvertingParameterAccessor accessor) { + + return QueryUtils.applyCollation(query, method.hasAnnotatedCollation() ? method.getAnnotatedCollation() : null, + accessor, getExpressionEvaluatorFor(accessor)); + } + + /** + * If present apply the hint from the {@link org.springframework.data.mongodb.repository.Hint} annotation. + * + * @param query must not be {@literal null}. + * @return never {@literal null}. + * @since 4.1 + */ + Query applyHintIfPresent(Query query) { + + if (!method.hasAnnotatedHint()) { + return query; + } + + return query.withHint(method.getAnnotatedHint()); + } + /** * Creates a {@link Query} instance using the given {@link ConvertingParameterAccessor}. 
Will delegate to * {@link #createQuery(ConvertingParameterAccessor)} by default but allows customization of the count query to be @@ -147,6 +310,126 @@ protected Query createCountQuery(ConvertingParameterAccessor accessor) { return applyQueryMetaAttributesWhenPresent(createQuery(accessor)); } + /** + * Retrieves the {@link UpdateDefinition update} from the given + * {@link org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getUpdate() accessor} or creates + * one via by parsing the annotated statement extracted from {@link Update}. + * + * @param accessor never {@literal null}. + * @return the computed {@link UpdateDefinition}. + * @throws IllegalStateException if no update could be found. + * @since 3.4 + */ + protected UpdateDefinition createUpdate(ConvertingParameterAccessor accessor) { + + if (accessor.getUpdate() != null) { + return accessor.getUpdate(); + } + + if (method.hasAnnotatedUpdate()) { + + Update updateSource = method.getUpdateSource(); + if (StringUtils.hasText(updateSource.update())) { + return new BasicUpdate(bindParameters(updateSource.update(), accessor)); + } + if (!ObjectUtils.isEmpty(updateSource.pipeline())) { + return AggregationUpdate.from(parseAggregationPipeline(updateSource.pipeline(), accessor)); + } + } + + throw new IllegalStateException(String.format("No Update provided for method %s.", method)); + } + + /** + * Parse the given aggregation pipeline stages applying values to placeholders to compute the actual list of + * {@link AggregationOperation operations}. + * + * @param sourcePipeline must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @return the parsed aggregation pipeline. 
+ * @since 3.4 + */ + protected List parseAggregationPipeline(String[] sourcePipeline, + ConvertingParameterAccessor accessor) { + + List stages = new ArrayList<>(sourcePipeline.length); + for (String source : sourcePipeline) { + stages.add(computePipelineStage(source, accessor)); + } + return stages; + } + + private AggregationOperation computePipelineStage(String source, ConvertingParameterAccessor accessor) { + return new StringAggregationOperation(source, getQueryMethod().getDomainClass(), + (it) -> bindParameters(it, accessor)); + } + + protected Document decode(String source, ParameterBindingContext bindingContext) { + return getParameterBindingCodec().decode(source, bindingContext); + } + + private Document bindParameters(String source, ConvertingParameterAccessor accessor) { + return decode(source, prepareBindingContext(source, accessor)); + } + + /** + * Create the {@link ParameterBindingContext binding context} used for SpEL evaluation. + * + * @param source the JSON source. + * @param accessor value provider for parameter binding. + * @return never {@literal null}. + * @since 3.4 + */ + protected ParameterBindingContext prepareBindingContext(String source, ConvertingParameterAccessor accessor) { + + ValueExpressionEvaluator evaluator = getExpressionEvaluatorFor(accessor); + return new ParameterBindingContext(accessor::getBindableValue, evaluator); + } + + /** + * Obtain the {@link ParameterBindingDocumentCodec} used for parsing JSON expressions. + * + * @return never {@literal null}. + * @since 3.4 + */ + protected ParameterBindingDocumentCodec getParameterBindingCodec() { + return codec.get(); + } + + /** + * Obtain a the {@link EvaluationContext} suitable to evaluate expressions backed by the given dependencies. + * + * @param dependencies must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @return the {@link SpELExpressionEvaluator}. 
+ * @since 2.4 + */ + protected SpELExpressionEvaluator getSpELExpressionEvaluatorFor(ExpressionDependencies dependencies, + ConvertingParameterAccessor accessor) { + + return new DefaultSpELExpressionEvaluator(new SpelExpressionParser(), valueEvaluationContextProvider.getEvaluationContext(accessor.getValues(), dependencies).getEvaluationContext()); + } + + /** + * Obtain a {@link ValueExpressionEvaluator} suitable to evaluate expressions. + * + * @param accessor must not be {@literal null}. + * @return the {@link ValueExpressionEvaluator}. + * @since 4.4.0 + */ + protected ValueExpressionEvaluator getExpressionEvaluatorFor(MongoParameterAccessor accessor) { + return new ValueExpressionDelegateValueExpressionEvaluator(valueExpressionDelegate, (ValueExpression expression) -> + valueEvaluationContextProvider.getEvaluationContext(accessor.getValues(), expression.getExpressionDependencies())); + } + + /** + * @return the {@link CodecRegistry} used. + * @since 2.4 + */ + protected CodecRegistry getCodecRegistry() { + return operations.execute(MongoDatabase::getCodecRegistry); + } + /** * Creates a {@link Query} instance using the given {@link ParameterAccessor} * @@ -185,4 +468,5 @@ protected Query createCountQuery(ConvertingParameterAccessor accessor) { * @since 2.0.4 */ protected abstract boolean isLimiting(); + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQuery.java index 2cd617dadb..a5754a4e46 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,32 +17,66 @@ import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; +import reactor.util.function.Tuple2; +import java.util.ArrayList; +import java.util.List; + +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; import org.reactivestreams.Publisher; + import org.springframework.core.convert.converter.Converter; -import org.springframework.data.convert.EntityInstantiators; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ReactiveValueEvaluationContextProvider; +import org.springframework.data.expression.ValueEvaluationContext; +import org.springframework.data.expression.ValueEvaluationContextProvider; +import org.springframework.data.expression.ValueExpression; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mapping.model.EntityInstantiators; +import org.springframework.data.mapping.model.SpELExpressionEvaluator; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithProjection; import org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithQuery; import org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind; import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveUpdateOperation.ReactiveUpdate; +import 
org.springframework.data.mongodb.core.aggregation.AggregationOperation; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.query.BasicUpdate; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.Update; import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.DeleteExecution; import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.GeoNearExecution; import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.ResultProcessingConverter; import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.ResultProcessingExecution; -import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.TailExecution; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.UpdateExecution; +import org.springframework.data.mongodb.util.json.ParameterBindingContext; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; import org.springframework.data.repository.query.ParameterAccessor; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; import org.springframework.data.repository.query.RepositoryQuery; import org.springframework.data.repository.query.ResultProcessor; -import org.springframework.data.repository.query.ReturnedType; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.expression.ExpressionParser; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; 
+import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.MongoClientSettings; /** * Base class for reactive {@link RepositoryQuery} implementations for MongoDB. * * @author Mark Paluch * @author Christoph Strobl + * @author Jorge Rodríguez * @since 2.0 */ public abstract class AbstractReactiveMongoQuery implements RepositoryQuery { @@ -51,6 +85,9 @@ public abstract class AbstractReactiveMongoQuery implements RepositoryQuery { private final ReactiveMongoOperations operations; private final EntityInstantiators instantiators; private final FindWithProjection findOperationWithProjection; + private final ReactiveUpdate updateOps; + private final ValueExpressionDelegate valueExpressionDelegate; + private final ReactiveValueEvaluationContextProvider valueEvaluationContextProvider; /** * Creates a new {@link AbstractReactiveMongoQuery} from the given {@link MongoQueryMethod} and @@ -58,82 +95,148 @@ public abstract class AbstractReactiveMongoQuery implements RepositoryQuery { * * @param method must not be {@literal null}. * @param operations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. 
+ * @deprecated use the constructor version with {@link ValueExpressionDelegate} */ - public AbstractReactiveMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations operations) { + @Deprecated(since = "4.4.0") + public AbstractReactiveMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations operations, + ExpressionParser expressionParser, ReactiveQueryMethodEvaluationContextProvider evaluationContextProvider) { - Assert.notNull(method, "MongoQueryMethod must not be null!"); - Assert.notNull(operations, "ReactiveMongoOperations must not be null!"); + Assert.notNull(method, "MongoQueryMethod must not be null"); + Assert.notNull(operations, "ReactiveMongoOperations must not be null"); + Assert.notNull(expressionParser, "SpelExpressionParser must not be null"); + Assert.notNull(evaluationContextProvider, "ReactiveEvaluationContextExtension must not be null"); this.method = method; this.operations = operations; this.instantiators = new EntityInstantiators(); + this.valueExpressionDelegate = new ValueExpressionDelegate( + new QueryMethodValueEvaluationContextAccessor(new StandardEnvironment(), + evaluationContextProvider.getEvaluationContextProvider()), + ValueExpressionParser.create(() -> expressionParser)); MongoEntityMetadata metadata = method.getEntityInformation(); Class type = metadata.getCollectionEntity().getType(); this.findOperationWithProjection = operations.query(type); + this.updateOps = operations.update(type); + ValueEvaluationContextProvider valueContextProvider = valueExpressionDelegate + .createValueContextProvider(method.getParameters()); + Assert.isInstanceOf(ReactiveValueEvaluationContextProvider.class, valueContextProvider, + "ValueEvaluationContextProvider must be reactive"); + this.valueEvaluationContextProvider = (ReactiveValueEvaluationContextProvider) valueContextProvider; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.RepositoryQuery#getQueryMethod() + /** + * Creates a new {@link 
AbstractReactiveMongoQuery} from the given {@link MongoQueryMethod} and + * {@link MongoOperations}. + * + * @param method must not be {@literal null}. + * @param operations must not be {@literal null}. + * @param delegate must not be {@literal null}. + * @since 4.4.0 */ + public AbstractReactiveMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations operations, + ValueExpressionDelegate delegate) { + + Assert.notNull(method, "MongoQueryMethod must not be null"); + Assert.notNull(operations, "ReactiveMongoOperations must not be null"); + Assert.notNull(delegate, "ValueExpressionDelegate must not be null"); + + this.method = method; + this.operations = operations; + this.instantiators = new EntityInstantiators(); + this.valueExpressionDelegate = delegate; + + MongoEntityMetadata metadata = method.getEntityInformation(); + Class type = metadata.getCollectionEntity().getType(); + + this.findOperationWithProjection = operations.query(type); + this.updateOps = operations.update(type); + ValueEvaluationContextProvider valueContextProvider = valueExpressionDelegate + .createValueContextProvider(method.getParameters()); + Assert.isInstanceOf(ReactiveValueEvaluationContextProvider.class, valueContextProvider, + "ValueEvaluationContextProvider must be reactive"); + this.valueEvaluationContextProvider = (ReactiveValueEvaluationContextProvider) valueContextProvider; + } + + @Override public MongoQueryMethod getQueryMethod() { return method; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.RepositoryQuery#execute(java.lang.Object[]) - */ - public Object execute(Object[] parameters) { + @Override + public Publisher execute(Object[] parameters) { return method.hasReactiveWrapperParameter() ? 
executeDeferred(parameters) : execute(new MongoParametersParameterAccessor(method, parameters)); } @SuppressWarnings("unchecked") - private Object executeDeferred(Object[] parameters) { + private Publisher executeDeferred(Object[] parameters) { ReactiveMongoParameterAccessor parameterAccessor = new ReactiveMongoParameterAccessor(method, parameters); - if (getQueryMethod().isCollectionQuery()) { - return Flux.defer(() -> (Publisher) execute(parameterAccessor)); - } - - return Mono.defer(() -> (Mono) execute(parameterAccessor)); + return parameterAccessor.resolveParameters().flatMapMany(this::execute); } - private Object execute(MongoParameterAccessor parameterAccessor) { - - Query query = createQuery(new ConvertingParameterAccessor(operations.getConverter(), parameterAccessor)); + private Publisher execute(MongoParameterAccessor parameterAccessor) { - applyQueryMetaAttributesWhenPresent(query); + ConvertingParameterAccessor accessor = new ConvertingParameterAccessor(operations.getConverter(), + parameterAccessor); - ResultProcessor processor = method.getResultProcessor().withDynamicProjection(parameterAccessor); + ResultProcessor processor = method.getResultProcessor().withDynamicProjection(accessor); Class typeToRead = processor.getReturnedType().getTypeToRead(); - FindWithQuery find = typeToRead == null // - ? findOperationWithProjection // - : findOperationWithProjection.as(typeToRead); + return doExecute(method, processor, accessor, typeToRead); + } + + /** + * Execute the {@link RepositoryQuery} of the given method with the parameters provided by the + * {@link ConvertingParameterAccessor accessor} + * + * @param method the {@link ReactiveMongoQueryMethod} invoked. Never {@literal null}. + * @param processor {@link ResultProcessor} for post procession. Never {@literal null}. + * @param accessor for providing invocation arguments. Never {@literal null}. + * @param typeToRead the desired component target type. Can be {@literal null}. 
+ */ + protected Publisher doExecute(ReactiveMongoQueryMethod method, ResultProcessor processor, + ConvertingParameterAccessor accessor, @Nullable Class typeToRead) { + + return createQuery(accessor).flatMapMany(it -> { + + Query query = it; + applyQueryMetaAttributesWhenPresent(query); + query = applyAnnotatedDefaultSortIfPresent(query); + query = applyAnnotatedCollationIfPresent(query, accessor); + query = applyHintIfPresent(query); + query = applyAnnotatedReadPreferenceIfPresent(query); + + FindWithQuery find = typeToRead == null // + ? findOperationWithProjection // + : findOperationWithProjection.as(typeToRead); - String collection = method.getEntityInformation().getCollectionName(); + String collection = method.getEntityInformation().getCollectionName(); - ReactiveMongoQueryExecution execution = getExecution(query, parameterAccessor, - new ResultProcessingConverter(processor, operations, instantiators), find); + ReactiveMongoQueryExecution execution = getExecution(accessor, + getResultProcessing(processor), find); + return execution.execute(query, processor.getReturnedType().getDomainType(), collection); + }); + } - return execution.execute(query, processor.getReturnedType().getDomainType(), collection); + ResultProcessingConverter getResultProcessing(ResultProcessor processor) { + return new ResultProcessingConverter(processor, operations, instantiators); } /** * Returns the execution instance to use. * - * @param query must not be {@literal null}. * @param accessor must not be {@literal null}. * @param resultProcessing must not be {@literal null}. 
* @return */ - private ReactiveMongoQueryExecution getExecution(Query query, MongoParameterAccessor accessor, + private ReactiveMongoQueryExecution getExecution(MongoParameterAccessor accessor, Converter resultProcessing, FindWithQuery operation) { return new ResultProcessingExecution(getExecutionToWrap(accessor, operation), resultProcessing); } @@ -142,14 +245,27 @@ private ReactiveMongoQueryExecution getExecutionToWrap(MongoParameterAccessor ac if (isDeleteQuery()) { return new DeleteExecution(operations, method); + } else if (method.isModifyingQuery()) { + + if (isLimiting()) { + throw new IllegalStateException( + String.format("Update method must not be limiting; Offending method: %s", method)); + } + + return new UpdateExecution(updateOps, method, accessor, createUpdate(accessor)); } else if (method.isGeoNearQuery()) { return new GeoNearExecution(operations, accessor, method.getReturnType()); } else if (isTailable(method)) { - return new TailExecution(operations, accessor.getPageable()); + return (q, t, c) -> operation.matching(q.with(accessor.getPageable())).tail(); } else if (method.isCollectionQuery()) { return (q, t, c) -> operation.matching(q.with(accessor.getPageable())).all(); + } else if (method.isScrollQuery()) { + return (q, t, c) -> operation.matching(q.with(accessor.getPageable()).with(accessor.getSort())) + .scroll(accessor.getScrollPosition()); } else if (isCountQuery()) { return (q, t, c) -> operation.matching(q).count(); + } else if (isExistsQuery()) { + return (q, t, c) -> operation.matching(q).exists(); } else { return (q, t, c) -> { @@ -177,6 +293,71 @@ Query applyQueryMetaAttributesWhenPresent(Query query) { return query; } + /** + * Add a default sort derived from {@link org.springframework.data.mongodb.repository.Query#sort()} to the given + * {@link Query} if present. + * + * @param query the {@link Query} to potentially apply the sort to. + * @return the query with potential default sort applied. 
+ * @since 2.1 + */ + Query applyAnnotatedDefaultSortIfPresent(Query query) { + + if (!method.hasAnnotatedSort()) { + return query; + } + + return QueryUtils.decorateSort(query, Document.parse(method.getAnnotatedSort())); + } + + /** + * If present apply a {@link org.springframework.data.mongodb.core.query.Collation} derived from the + * {@link org.springframework.data.repository.query.QueryMethod} the given {@link Query}. + * + * @param query must not be {@literal null}. + * @param accessor the {@link ParameterAccessor} used to obtain parameter placeholder replacement values. + * @return + * @since 2.2 + */ + Query applyAnnotatedCollationIfPresent(Query query, ConvertingParameterAccessor accessor) { + + return QueryUtils.applyCollation(query, method.hasAnnotatedCollation() ? method.getAnnotatedCollation() : null, + accessor, getValueExpressionEvaluator(accessor)); + } + + /** + * If present apply the hint from the {@link org.springframework.data.mongodb.repository.Hint} annotation. + * + * @param query must not be {@literal null}. + * @return never {@literal null}. + * @since 4.1 + */ + Query applyHintIfPresent(Query query) { + + if (!method.hasAnnotatedHint()) { + return query; + } + + return query.withHint(method.getAnnotatedHint()); + } + + /** + * If present apply the {@link com.mongodb.ReadPreference} from the + * {@link org.springframework.data.mongodb.repository.ReadPreference} annotation. + * + * @param query must not be {@literal null}. + * @return never {@literal null}. + * @since 4.2 + */ + private Query applyAnnotatedReadPreferenceIfPresent(Query query) { + + if (!method.hasAnnotatedReadPreference()) { + return query; + } + + return query.withReadPreference(com.mongodb.ReadPreference.valueOf(method.getAnnotatedReadPreference())); + } + /** * Creates a {@link Query} instance using the given {@link ConvertingParameterAccessor}. 
Will delegate to * {@link #createQuery(ConvertingParameterAccessor)} by default but allows customization of the count query to be @@ -185,8 +366,165 @@ Query applyQueryMetaAttributesWhenPresent(Query query) { * @param accessor must not be {@literal null}. * @return */ - protected Query createCountQuery(ConvertingParameterAccessor accessor) { - return applyQueryMetaAttributesWhenPresent(createQuery(accessor)); + protected Mono createCountQuery(ConvertingParameterAccessor accessor) { + return createQuery(accessor).map(this::applyQueryMetaAttributesWhenPresent); + } + + /** + * Retrieves the {@link UpdateDefinition update} from the given + * {@link org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getUpdate() accessor} or creates + * one via by parsing the annotated statement extracted from {@link Update}. + * + * @param accessor never {@literal null}. + * @return the computed {@link UpdateDefinition}. + * @throws IllegalStateException if no update could be found. + * @since 3.4 + */ + protected Mono createUpdate(MongoParameterAccessor accessor) { + + if (accessor.getUpdate() != null) { + return Mono.just(accessor.getUpdate()); + } + + if (method.hasAnnotatedUpdate()) { + Update updateSource = method.getUpdateSource(); + if (StringUtils.hasText(updateSource.update())) { + + String updateJson = updateSource.update(); + return getParameterBindingCodec() // + .flatMap(codec -> expressionEvaluator(updateJson, accessor, codec) // + .map(evaluator -> decode(evaluator, updateJson, accessor, codec))) // + .map(BasicUpdate::fromDocument); + } + if (!ObjectUtils.isEmpty(updateSource.pipeline())) { + return parseAggregationPipeline(updateSource.pipeline(), accessor).map(AggregationUpdate::from); + } + } + + throw new IllegalStateException(String.format("No Update provided for method %s.", method)); + } + + /** + * Parse the given aggregation pipeline stages applying values to placeholders to compute the actual list of + * {@link AggregationOperation 
operations}. + * + * @param pipeline must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @return the parsed aggregation pipeline. + * @since 3.4 + */ + protected Mono> parseAggregationPipeline(String[] pipeline, + MongoParameterAccessor accessor) { + + return getCodecRegistry().map(ParameterBindingDocumentCodec::new).flatMap(codec -> { + + List> stages = new ArrayList<>(pipeline.length); + for (String source : pipeline) { + stages.add(computePipelineStage(source, accessor, codec)); + } + return Flux.concat(stages).collectList(); + }); + } + + private Mono computePipelineStage(String source, MongoParameterAccessor accessor, + ParameterBindingDocumentCodec codec) { + + return expressionEvaluator(source, accessor, codec).map(evaluator -> new StringAggregationOperation(source, + AbstractReactiveMongoQuery.this.getQueryMethod().getDomainClass(), + bsonString -> AbstractReactiveMongoQuery.this.decode(evaluator, bsonString, accessor, codec))); + } + + private Mono> expressionEvaluator(String source, + MongoParameterAccessor accessor, ParameterBindingDocumentCodec codec) { + + ExpressionDependencies dependencies = codec.captureExpressionDependencies(source, accessor::getBindableValue, + valueExpressionDelegate.getValueExpressionParser()); + return getValueExpressionEvaluatorLater(dependencies, accessor).zipWith(Mono.just(codec)); + } + + private Document decode(Tuple2 expressionEvaluator, + String source, MongoParameterAccessor accessor, ParameterBindingDocumentCodec codec) { + + ParameterBindingContext bindingContext = new ParameterBindingContext(accessor::getBindableValue, + expressionEvaluator.getT1()); + return codec.decode(source, bindingContext); + } + + /** + * Obtain the {@link ParameterBindingDocumentCodec} used for parsing JSON expressions. + * + * @return never {@literal null}. 
+ * @since 3.4 + */ + protected Mono getParameterBindingCodec() { + return getCodecRegistry().map(ParameterBindingDocumentCodec::new); + } + + /** + * Obtain a {@link Mono publisher} emitting the {@link SpELExpressionEvaluator} suitable to evaluate expressions + * backed by the given dependencies. + * + * @param dependencies must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @return a {@link Mono} emitting the {@link SpELExpressionEvaluator} when ready. + * @since 3.4 + * @deprecated since 4.4.0, use + * {@link #getValueExpressionEvaluatorLater(ExpressionDependencies, MongoParameterAccessor)} instead + */ + @Deprecated(since = "4.4.0") + protected Mono getSpelEvaluatorFor(ExpressionDependencies dependencies, + MongoParameterAccessor accessor) { + return valueEvaluationContextProvider.getEvaluationContextLater(accessor.getValues(), dependencies) + .map(evaluationContext -> (SpELExpressionEvaluator) new DefaultSpELExpressionEvaluator( + new SpelExpressionParser(), evaluationContext.getEvaluationContext())) + .defaultIfEmpty(DefaultSpELExpressionEvaluator.unsupported()); + } + + /** + * Obtain a {@link ValueExpressionEvaluator} suitable to evaluate expressions. + * + * @param accessor must not be {@literal null}. + * @since 4.3 + */ + ValueExpressionEvaluator getValueExpressionEvaluator(MongoParameterAccessor accessor) { + + return new ValueExpressionEvaluator() { + + @Override + public T evaluate(String expressionString) { + ValueExpression expression = valueExpressionDelegate.parse(expressionString); + ValueEvaluationContext evaluationContext = valueEvaluationContextProvider + .getEvaluationContext(accessor.getValues(), expression.getExpressionDependencies()); + return (T) expression.evaluate(evaluationContext); + } + }; + } + + /** + * Obtain a {@link Mono publisher} emitting the {@link ValueExpressionEvaluator} suitable to evaluate expressions + * backed by the given dependencies. 
+ * + * @param dependencies must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @return a {@link Mono} emitting the {@link ValueExpressionEvaluator} when ready. + * @since 4.3 + */ + protected Mono getValueExpressionEvaluatorLater(ExpressionDependencies dependencies, + MongoParameterAccessor accessor) { + + return valueEvaluationContextProvider.getEvaluationContextLater(accessor.getValues(), dependencies) + .map(evaluationContext -> new ValueExpressionDelegateValueExpressionEvaluator(valueExpressionDelegate, + valueExpression -> evaluationContext)); + } + + /** + * @return a {@link Mono} emitting the {@link CodecRegistry} when ready. + * @since 2.4 + */ + protected Mono getCodecRegistry() { + + return Mono.from(operations.execute(db -> Mono.just(db.getCodecRegistry()))) + .defaultIfEmpty(MongoClientSettings.getDefaultCodecRegistry()); } /** @@ -195,7 +533,7 @@ protected Query createCountQuery(ConvertingParameterAccessor accessor) { * @param accessor must not be {@literal null}. * @return */ - protected abstract Query createQuery(ConvertingParameterAccessor accessor); + protected abstract Mono createQuery(ConvertingParameterAccessor accessor); /** * Returns whether the query should get a count projection applied. @@ -204,6 +542,14 @@ protected Query createCountQuery(ConvertingParameterAccessor accessor) { */ protected abstract boolean isCountQuery(); + /** + * Returns whether the query should get an exists projection applied. + * + * @return + * @since 2.0.9 + */ + protected abstract boolean isExistsQuery(); + /** * Return weather the query should delete matching documents. 
* diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java new file mode 100644 index 0000000000..6eb6a5da89 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java @@ -0,0 +1,376 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.query; + +import java.time.Duration; +import java.util.Map; +import java.util.function.Function; +import java.util.function.IntUnaryOperator; +import java.util.function.LongUnaryOperator; + +import org.bson.Document; + +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Meta; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ReturnedType; +import org.springframework.data.util.ReflectionUtils; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ReadPreference; + +/** + * Internal utility class to help avoid duplicate code required in both the reactive and the sync {@link Aggregation} + * support offered by repositories. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Divya Srivastava + * @since 2.2 + */ +abstract class AggregationUtils { + + private AggregationUtils() {} + + /** + * Apply a collation extracted from the given {@literal collationExpression} to the given + * {@link org.springframework.data.mongodb.core.aggregation.AggregationOptions.Builder}. Potentially replace parameter + * placeholders with values from the {@link ConvertingParameterAccessor accessor}. + * + * @param builder must not be {@literal null}. + * @param collationExpression must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @return the {@link Query} having proper {@link Collation}. + * @see AggregationOptions#getCollation() + */ + static AggregationOptions.Builder applyCollation(AggregationOptions.Builder builder, + @Nullable String collationExpression, ConvertingParameterAccessor accessor, ValueExpressionEvaluator evaluator) { + + Collation collation = CollationUtils.computeCollation(collationExpression, accessor, evaluator); + return collation == null ? builder : builder.collation(collation); + } + + /** + * Apply {@link Meta#getComment()} and {@link Meta#getCursorBatchSize()}. + * + * @param builder must not be {@literal null}. + * @param queryMethod must not be {@literal null}. + */ + static AggregationOptions.Builder applyMeta(AggregationOptions.Builder builder, MongoQueryMethod queryMethod) { + + Meta meta = queryMethod.getQueryMetaAttributes(); + + if (meta.hasComment()) { + builder.comment(meta.getComment()); + } + + if (meta.getCursorBatchSize() != null) { + builder.cursorBatchSize(meta.getCursorBatchSize()); + } + + if (meta.hasMaxTime()) { + builder.maxTime(Duration.ofMillis(meta.getRequiredMaxTimeMsec())); + } + + if (meta.getAllowDiskUse() != null) { + builder.allowDiskUse(meta.getAllowDiskUse()); + } + + return builder; + } + + /** + * If present apply the hint from the {@link org.springframework.data.mongodb.repository.Hint} annotation. 
+ * + * @param builder must not be {@literal null}. + * @return never {@literal null}. + * @since 4.1 + */ + static AggregationOptions.Builder applyHint(AggregationOptions.Builder builder, MongoQueryMethod queryMethod) { + + if (!queryMethod.hasAnnotatedHint()) { + return builder; + } + + return builder.hint(queryMethod.getAnnotatedHint()); + } + + /** + * If present apply the preference from the {@link org.springframework.data.mongodb.repository.ReadPreference} + * annotation. + * + * @param builder must not be {@literal null}. + * @return never {@literal null}. + * @since 4.2 + */ + static AggregationOptions.Builder applyReadPreference(AggregationOptions.Builder builder, + MongoQueryMethod queryMethod) { + + if (!queryMethod.hasAnnotatedReadPreference()) { + return builder; + } + + return builder.readPreference(ReadPreference.valueOf(queryMethod.getAnnotatedReadPreference())); + } + + static AggregationOptions computeOptions(MongoQueryMethod method, ConvertingParameterAccessor accessor, + AggregationPipeline pipeline, ValueExpressionEvaluator evaluator) { + + AggregationOptions.Builder builder = Aggregation.newAggregationOptions(); + + AggregationUtils.applyCollation(builder, method.getAnnotatedCollation(), accessor, evaluator); + AggregationUtils.applyMeta(builder, method); + AggregationUtils.applyHint(builder, method); + AggregationUtils.applyReadPreference(builder, method); + + TypeInformation returnType = method.getReturnType(); + if (returnType.getComponentType() != null) { + returnType = returnType.getRequiredComponentType(); + } + if (ReflectionUtils.isVoid(returnType.getType()) && pipeline.isOutOrMerge()) { + builder.skipOutput(); + } + + return builder.build(); + } + + /** + * Prepares the AggregationPipeline including type discovery and calling {@link AggregationCallback} to run the + * aggregation. 
+ */ + @Nullable + static T doAggregate(AggregationPipeline pipeline, MongoQueryMethod method, ResultProcessor processor, + ConvertingParameterAccessor accessor, + Function evaluatorFunction, AggregationCallback callback) { + + Class sourceType = method.getDomainClass(); + ReturnedType returnedType = processor.getReturnedType(); + // 🙈Interface Projections do not happen on the Aggregation level but through our repository infrastructure. + // Non-projections and raw results (AggregationResults<…>) are handled here. Interface projections read a Document + // and DTO projections read the returned type. + // We also support simple return types (String) that are read from a Document + TypeInformation returnType = method.getReturnType(); + Class returnElementType = (returnType.getComponentType() != null ? returnType.getRequiredComponentType() + : returnType).getType(); + Class entityType; + + boolean isRawAggregationResult = ClassUtils.isAssignable(AggregationResults.class, method.getReturnedObjectType()); + + if (returnElementType.equals(Document.class)) { + entityType = sourceType; + } else { + entityType = returnElementType; + } + + AggregationUtils.appendSortIfPresent(pipeline, accessor, entityType); + + if (method.isSliceQuery()) { + AggregationUtils.appendLimitAndOffsetIfPresent(pipeline, accessor, LongUnaryOperator.identity(), + limit -> limit + 1); + } else { + AggregationUtils.appendLimitAndOffsetIfPresent(pipeline, accessor); + } + + AggregationOptions options = AggregationUtils.computeOptions(method, accessor, pipeline, + evaluatorFunction.apply(accessor)); + TypedAggregation aggregation = new TypedAggregation<>(sourceType, pipeline.getOperations(), options); + + boolean isSimpleReturnType = MongoSimpleTypes.HOLDER.isSimpleType(returnElementType); + Class typeToRead; + + if (isSimpleReturnType) { + typeToRead = Document.class; + } else if (isRawAggregationResult) { + typeToRead = returnElementType; + } else { + + if (returnedType.isProjecting()) { + typeToRead 
= returnedType.getReturnedType().isInterface() ? Document.class : returnedType.getReturnedType(); + } else { + typeToRead = entityType; + } + } + + return callback.doAggregate(aggregation, sourceType, typeToRead, returnElementType, isSimpleReturnType, + isRawAggregationResult); + } + + static AggregationPipeline computePipeline(AbstractMongoQuery mongoQuery, MongoQueryMethod method, + ConvertingParameterAccessor accessor) { + return new AggregationPipeline(mongoQuery.parseAggregationPipeline(method.getAnnotatedAggregation(), accessor)); + } + + /** + * Append {@code $sort} aggregation stage if {@link ConvertingParameterAccessor#getSort()} is present. + * + * @param aggregationPipeline + * @param accessor + * @param targetType + */ + static void appendSortIfPresent(AggregationPipeline aggregationPipeline, ConvertingParameterAccessor accessor, + @Nullable Class targetType) { + + if (accessor.getSort().isUnsorted()) { + return; + } + + aggregationPipeline.add(ctx -> { + + Document sort = new Document(); + for (Order order : accessor.getSort()) { + sort.append(order.getProperty(), order.isAscending() ? 1 : -1); + } + + return ctx.getMappedObject(new Document("$sort", sort), targetType); + }); + } + + /** + * Append {@code $skip} and {@code $limit} aggregation stage if {@link ConvertingParameterAccessor#getSort()} is + * present. + * + * @param aggregationPipeline + * @param accessor + */ + static void appendLimitAndOffsetIfPresent(AggregationPipeline aggregationPipeline, + ConvertingParameterAccessor accessor) { + appendLimitAndOffsetIfPresent(aggregationPipeline, accessor, LongUnaryOperator.identity(), + IntUnaryOperator.identity()); + } + + /** + * Append {@code $skip} and {@code $limit} aggregation stage if {@link ConvertingParameterAccessor#getSort()} is + * present. 
+ * + * @param aggregationPipeline + * @param accessor + * @param offsetOperator + * @param limitOperator + * @since 3.3 + */ + static void appendLimitAndOffsetIfPresent(AggregationPipeline aggregationPipeline, + ConvertingParameterAccessor accessor, LongUnaryOperator offsetOperator, IntUnaryOperator limitOperator) { + + Pageable pageable = accessor.getPageable(); + if (pageable.isUnpaged()) { + return; + } + + if (pageable.getOffset() > 0) { + aggregationPipeline.add(Aggregation.skip(offsetOperator.applyAsLong(pageable.getOffset()))); + } + + aggregationPipeline.add(Aggregation.limit(limitOperator.applyAsInt(pageable.getPageSize()))); + } + + /** + * Extract a single entry from the given {@link Document}.
          + *
            + *
          1. empty source: {@literal null}
          2. + *
          3. single entry convert that one
          4. + *
          5. single entry when ignoring {@literal _id} field convert that one
          6. + *
          7. multiple entries first value assignable to the target type
          8. + *
          9. no match IllegalArgumentException
          10. + *
          + * + * @param + * @param source + * @param targetType + * @param converter + * @return can be {@literal null} if source {@link Document#isEmpty() is empty}. + * @throws IllegalArgumentException when none of the above rules is met. + */ + @Nullable + static T extractSimpleTypeResult(@Nullable Document source, Class targetType, MongoConverter converter) { + + if (ObjectUtils.isEmpty(source)) { + return null; + } + + if (source.size() == 1) { + return getPotentiallyConvertedSimpleTypeValue(converter, source.values().iterator().next(), targetType); + } + + Document intermediate = new Document(source); + intermediate.remove(FieldName.ID.name()); + + if (intermediate.size() == 1) { + return getPotentiallyConvertedSimpleTypeValue(converter, intermediate.values().iterator().next(), targetType); + } + + for (Map.Entry entry : intermediate.entrySet()) { + if (entry != null && ClassUtils.isAssignable(targetType, entry.getValue().getClass())) { + return targetType.cast(entry.getValue()); + } + } + + throw new IllegalArgumentException( + String.format("o_O no entry of type %s found in %s.", targetType.getSimpleName(), source.toJson())); + } + + @Nullable + @SuppressWarnings("unchecked") + private static T getPotentiallyConvertedSimpleTypeValue(MongoConverter converter, @Nullable Object value, + Class targetType) { + + if (value == null) { + return null; + } + + if (ClassUtils.isAssignableValue(targetType, value)) { + return (T) value; + } + + return converter.getConversionService().convert(value, targetType); + } + + /** + * Interface to invoke an aggregation along with source, intermediate, and target types. + * + * @param + */ + interface AggregationCallback { + + /** + * @param aggregation + * @param domainType + * @param typeToRead + * @param elementType + * @param simpleType whether the aggregation returns {@link Document} or a + * {@link org.springframework.data.mapping.model.SimpleTypeHolder simple type}. 
+ * @param rawResult whether the aggregation returns {@link AggregationResults}. + * @return + */ + @Nullable + T doAggregate(TypedAggregation aggregation, Class domainType, Class typeToRead, Class elementType, + boolean simpleType, boolean rawResult); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/BooleanUtil.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/BooleanUtil.java new file mode 100644 index 0000000000..74249b40d5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/BooleanUtil.java @@ -0,0 +1,49 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +/** + * Utility class containing methods to interact with boolean values. + * + * @author Mark Paluch + * @since 2.0.9 + */ +final class BooleanUtil { + + private BooleanUtil() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } + + /** + * Count the number of {@literal true} values. + * + * @param values + * @return the number of values that are {@literal true}. + */ + static int countBooleanTrueValues(boolean... 
values) { + + int count = 0; + + for (boolean value : values) { + + if (value) { + count++; + } + } + + return count; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/CollationUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/CollationUtils.java new file mode 100644 index 0000000000..2aac6b77a8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/CollationUtils.java @@ -0,0 +1,107 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import java.util.Locale; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.bson.Document; + +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.util.json.ParameterBindingContext; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; +import org.springframework.lang.Nullable; +import org.springframework.util.NumberUtils; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * Internal utility class to help avoid duplicate code required in both the reactive and the sync {@link Collation} + * support offered by repositories. 
+ * + * @author Christoph Strobl + * @since 2.2 + */ +abstract class CollationUtils { + + private static final ParameterBindingDocumentCodec CODEC = new ParameterBindingDocumentCodec(); + private static final Pattern PARAMETER_BINDING_PATTERN = Pattern.compile("\\?(\\d+)"); + + private CollationUtils() { + } + + /** + * Compute the {@link Collation} by inspecting the {@link ConvertingParameterAccessor#getCollation() parameter + * accessor} or parsing a potentially given {@literal collationExpression}. + * + * @param collationExpression + * @param accessor + * @param expressionEvaluator + * @return can be {@literal null} if neither {@link ConvertingParameterAccessor#getCollation()} nor + * {@literal collationExpression} are present. + */ + @Nullable + static Collation computeCollation(@Nullable String collationExpression, ConvertingParameterAccessor accessor, + ValueExpressionEvaluator expressionEvaluator) { + + if (accessor.getCollation() != null) { + return accessor.getCollation(); + } + + if (!StringUtils.hasText(collationExpression)) { + return null; + } + + if (collationExpression.stripLeading().startsWith("{")) { + + ParameterBindingContext bindingContext = ParameterBindingContext.forExpressions(accessor::getBindableValue, + expressionEvaluator); + + return Collation.from(CODEC.decode(collationExpression, bindingContext)); + } + + Matcher matcher = PARAMETER_BINDING_PATTERN.matcher(collationExpression); + if (!matcher.find()) { + return Collation.parse(collationExpression); + } + + String placeholder = matcher.group(); + Object placeholderValue = accessor.getBindableValue(computeParameterIndex(placeholder)); + + if (collationExpression.startsWith("?")) { + + if (placeholderValue instanceof String) { + return Collation.parse(placeholderValue.toString()); + } + if (placeholderValue instanceof Locale locale) { + return Collation.of(locale); + } + if (placeholderValue instanceof Document document) { + return Collation.from(document); + } + throw new 
IllegalArgumentException(String.format("Collation must be a String, Locale or Document but was %s", + ObjectUtils.nullSafeClassName(placeholderValue))); + } + + return Collation.parse(collationExpression.replace(placeholder, placeholderValue.toString())); + } + + private static int computeParameterIndex(String parameter) { + return NumberUtils.parseNumber(parameter.replace("?", ""), Integer.class); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessor.java index 0fbb86c0a1..dbf87f2f2e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,16 +20,19 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; -import java.util.Optional; +import org.springframework.data.domain.Limit; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Range; +import org.springframework.data.domain.ScrollPosition; import org.springframework.data.domain.Sort; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.convert.MongoWriter; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.repository.query.ParameterAccessor; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; @@ -59,98 +62,79 @@ public class ConvertingParameterAccessor implements MongoParameterAccessor { */ public ConvertingParameterAccessor(MongoWriter writer, MongoParameterAccessor delegate) { - Assert.notNull(writer, "MongoWriter must not be null!"); - Assert.notNull(delegate, "MongoParameterAccessor must not be null!"); + Assert.notNull(writer, "MongoWriter must not be null"); + Assert.notNull(delegate, "MongoParameterAccessor must not be null"); this.writer = writer; this.delegate = delegate; } - /* - * (non-Javadoc) - * - * @see java.lang.Iterable#iterator() - */ public PotentiallyConvertingIterator iterator() { return new ConvertingIterator(delegate.iterator()); } - /* - * (non-Javadoc) - * - * @see 
org.springframework.data.repository.query.ParameterAccessor#getPageable() - */ + @Override + public ScrollPosition getScrollPosition() { + return delegate.getScrollPosition(); + } + public Pageable getPageable() { return delegate.getPageable(); } - /* - * (non-Javadoc) - * - * @see org.springframework.data.repository.query.ParameterAccessor#getSort() - */ public Sort getSort() { return delegate.getSort(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#getDynamicProjection() - */ @Override - public Optional> getDynamicProjection() { - return delegate.getDynamicProjection(); + public Class findDynamicProjection() { + return delegate.findDynamicProjection(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#getBindableValue(int) - */ public Object getBindableValue(int index) { return getConvertedValue(delegate.getBindableValue(index), null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getDistanceRange() - */ @Override public Range getDistanceRange() { return delegate.getDistanceRange(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoParameterAccessor#getGeoNearLocation() - */ public Point getGeoNearLocation() { return delegate.getGeoNearLocation(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getFullText() - */ public TextCriteria getFullText() { return delegate.getFullText(); } + @Override + public Collation getCollation() { + return delegate.getCollation(); + } + + @Override + public UpdateDefinition getUpdate() { + return delegate.getUpdate(); + } + + @Override + public Limit getLimit() { + return delegate.getLimit(); + } + /** * Converts the given value with the underlying {@link MongoWriter}. * * @param value can be {@literal null}. * @param typeInformation can be {@literal null}. 
- * @return + * @return can be {@literal null}. */ @Nullable private Object getConvertedValue(Object value, @Nullable TypeInformation typeInformation) { return writer.convertToMongoType(value, typeInformation == null ? null : typeInformation.getActualType()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#hasBindableNullValue() - */ public boolean hasBindableNullValue() { return delegate.hasBindableNullValue(); } @@ -173,26 +157,14 @@ public ConvertingIterator(Iterator delegate) { this.delegate = delegate; } - /* - * (non-Javadoc) - * @see java.util.Iterator#hasNext() - */ public boolean hasNext() { return delegate.hasNext(); } - /* - * (non-Javadoc) - * @see java.util.Iterator#next() - */ public Object next() { return delegate.next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.ConvertingParameterAccessor.PotentiallConvertingIterator#nextConverted() - */ public Object nextConverted(MongoPersistentProperty property) { Object next = next(); @@ -204,8 +176,10 @@ public Object nextConverted(MongoPersistentProperty property) { if (property.isAssociation()) { if (next.getClass().isArray() || next instanceof Iterable) { - List dbRefs = new ArrayList(); - for (Object element : asCollection(next)) { + Collection values = asCollection(next); + + List dbRefs = new ArrayList<>(values.size()); + for (Object element : values) { dbRefs.add(writer.toDBRef(element, property)); } @@ -218,10 +192,6 @@ public Object nextConverted(MongoPersistentProperty property) { return getConvertedValue(next, property.getTypeInformation()); } - /* - * (non-Javadoc) - * @see java.util.Iterator#remove() - */ public void remove() { delegate.remove(); } @@ -232,18 +202,21 @@ public void remove() { * array. Will return an empty {@link Collection} in case {@literal null} is given. Will wrap all other types into a * single-element collection. 
* - * @param source - * @return + * @param source can be {@literal null}, returns an empty {@link List} in that case. + * @return never {@literal null}. */ private static Collection asCollection(@Nullable Object source) { - if (source instanceof Iterable) { + if (source instanceof Iterable iterable) { - List result = new ArrayList(); - for (Object element : (Iterable) source) { - result.add(element); + if(source instanceof Collection collection) { + return new ArrayList<>(collection); } + List result = new ArrayList<>(); + for (Object element : iterable) { + result.add(element); + } return result; } @@ -254,10 +227,6 @@ private static Collection asCollection(@Nullable Object source) { return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getValues() - */ @Override public Object[] getValues() { return delegate.getValues(); @@ -277,5 +246,4 @@ public interface PotentiallyConvertingIterator extends Iterator { */ Object nextConverted(MongoPersistentProperty property); } - } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/DefaultSpELExpressionEvaluator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/DefaultSpELExpressionEvaluator.java new file mode 100644 index 0000000000..16a1e55226 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/DefaultSpELExpressionEvaluator.java @@ -0,0 +1,69 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import org.springframework.data.mapping.model.SpELExpressionEvaluator; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.ExpressionParser; + +/** + * Simple {@link SpELExpressionEvaluator} implementation using {@link ExpressionParser} and {@link EvaluationContext}. + * + * @author Mark Paluch + * @since 3.1 + */ +class DefaultSpELExpressionEvaluator implements SpELExpressionEvaluator { + + private final ExpressionParser parser; + private final EvaluationContext context; + + DefaultSpELExpressionEvaluator(ExpressionParser parser, EvaluationContext context) { + this.parser = parser; + this.context = context; + } + + /** + * Return a {@link SpELExpressionEvaluator} that does not support expression evaluation. + * + * @return a {@link SpELExpressionEvaluator} that does not support expression evaluation. + * @since 3.1 + */ + public static SpELExpressionEvaluator unsupported() { + return NoOpExpressionEvaluator.INSTANCE; + } + + @Override + @SuppressWarnings("unchecked") + public T evaluate(String expression) { + return (T) parser.parseExpression(expression).getValue(context, Object.class); + } + + /** + * {@link SpELExpressionEvaluator} that does not support SpEL evaluation. 
+ * + * @author Mark Paluch + * @since 3.1 + */ + enum NoOpExpressionEvaluator implements SpELExpressionEvaluator { + + INSTANCE; + + @Override + public T evaluate(String expression) { + throw new UnsupportedOperationException("Expression evaluation not supported"); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/DtoInstantiatingConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/DtoInstantiatingConverter.java deleted file mode 100644 index 45de9923d0..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/DtoInstantiatingConverter.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright 2015-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.repository.query; - -import org.springframework.core.convert.converter.Converter; -import org.springframework.data.convert.EntityInstantiator; -import org.springframework.data.convert.EntityInstantiators; -import org.springframework.data.mapping.PersistentEntity; -import org.springframework.data.mapping.PersistentProperty; -import org.springframework.data.mapping.PersistentPropertyAccessor; -import org.springframework.data.mapping.PreferredConstructor; -import org.springframework.data.mapping.PreferredConstructor.Parameter; -import org.springframework.data.mapping.SimplePropertyHandler; -import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.model.ParameterValueProvider; -import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.util.Assert; - -/** - * {@link Converter} to instantiate DTOs from fully equipped domain objects. - * - * @author Oliver Gierke - * @author Mark Paluch - */ -class DtoInstantiatingConverter implements Converter { - - private final Class targetType; - private final MappingContext, ? extends PersistentProperty> context; - private final EntityInstantiator instantiator; - - /** - * Creates a new {@link Converter} to instantiate DTOs. - * - * @param dtoType must not be {@literal null}. - * @param context must not be {@literal null}. - * @param instantiators must not be {@literal null}. 
- */ - public DtoInstantiatingConverter(Class dtoType, - MappingContext, MongoPersistentProperty> context, - EntityInstantiators instantiator) { - - Assert.notNull(dtoType, "DTO type must not be null!"); - Assert.notNull(context, "MappingContext must not be null!"); - Assert.notNull(instantiator, "EntityInstantiators must not be null!"); - - this.targetType = dtoType; - this.context = context; - this.instantiator = instantiator.getInstantiatorFor(context.getRequiredPersistentEntity(dtoType)); - } - - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ - @Override - public Object convert(Object source) { - - if (targetType.isInterface()) { - return source; - } - - final PersistentEntity sourceEntity = context.getRequiredPersistentEntity(source.getClass()); - final PersistentPropertyAccessor sourceAccessor = sourceEntity.getPropertyAccessor(source); - final PersistentEntity targetEntity = context.getRequiredPersistentEntity(targetType); - final PreferredConstructor> constructor = targetEntity - .getPersistenceConstructor(); - - @SuppressWarnings({ "rawtypes", "unchecked" }) - Object dto = instantiator.createInstance(targetEntity, new ParameterValueProvider() { - - @Override - public Object getParameterValue(Parameter parameter) { - return sourceAccessor.getProperty(sourceEntity.getPersistentProperty(parameter.getName().toString())); - } - }); - - final PersistentPropertyAccessor dtoAccessor = targetEntity.getPropertyAccessor(dto); - - targetEntity.doWithProperties(new SimplePropertyHandler() { - - @Override - public void doWithPersistentProperty(PersistentProperty property) { - - if (constructor.isConstructorParameter(property)) { - return; - } - - dtoAccessor.setProperty(property, - sourceAccessor.getProperty(sourceEntity.getPersistentProperty(property.getName()))); - } - }); - - return dto; - } -} diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ExpressionEvaluatingParameterBinder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ExpressionEvaluatingParameterBinder.java deleted file mode 100644 index a43b3b027f..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ExpressionEvaluatingParameterBinder.java +++ /dev/null @@ -1,446 +0,0 @@ -/* - * Copyright 2015-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.repository.query; - -import lombok.EqualsAndHashCode; -import lombok.Value; -import lombok.experimental.UtilityClass; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import javax.xml.bind.DatatypeConverter; - -import org.bson.BSON; -import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery.ParameterBinding; -import org.springframework.data.repository.query.EvaluationContextProvider; -import org.springframework.expression.EvaluationContext; -import org.springframework.expression.Expression; -import org.springframework.expression.spel.standard.SpelExpressionParser; -import org.springframework.lang.Nullable; -import org.springframework.util.Assert; -import org.springframework.util.CollectionUtils; -import org.springframework.util.StringUtils; - -import com.mongodb.DBObject; -import com.mongodb.util.JSON; - -/** - * {@link ExpressionEvaluatingParameterBinder} allows to evaluate, convert and bind parameters to placeholders within a - * {@link String}. - * - * @author Christoph Strobl - * @author Thomas Darimont - * @author Oliver Gierke - * @author Mark Paluch - * @since 1.9 - */ -class ExpressionEvaluatingParameterBinder { - - private final SpelExpressionParser expressionParser; - private final EvaluationContextProvider evaluationContextProvider; - - /** - * Creates new {@link ExpressionEvaluatingParameterBinder} - * - * @param expressionParser must not be {@literal null}. - * @param evaluationContextProvider must not be {@literal null}. 
- */ - public ExpressionEvaluatingParameterBinder(SpelExpressionParser expressionParser, - EvaluationContextProvider evaluationContextProvider) { - - Assert.notNull(expressionParser, "ExpressionParser must not be null!"); - Assert.notNull(evaluationContextProvider, "EvaluationContextProvider must not be null!"); - - this.expressionParser = expressionParser; - this.evaluationContextProvider = evaluationContextProvider; - } - - /** - * Bind values provided by {@link MongoParameterAccessor} to placeholders in {@literal raw} while considering - * potential conversions and parameter types. - * - * @param raw can be empty. - * @param accessor must not be {@literal null}. - * @param bindingContext must not be {@literal null}. - * @return {@literal null} if given {@code raw} value is empty. - */ - public String bind(String raw, MongoParameterAccessor accessor, BindingContext bindingContext) { - - if (!StringUtils.hasText(raw)) { - return raw; - } - - return replacePlaceholders(raw, accessor, bindingContext); - } - - /** - * Replaced the parameter placeholders with the actual parameter values from the given {@link ParameterBinding}s. - * - * @param input must not be {@literal null} or empty. - * @param accessor must not be {@literal null}. - * @param bindingContext must not be {@literal null}. 
- * @return - */ - private String replacePlaceholders(String input, MongoParameterAccessor accessor, BindingContext bindingContext) { - - if (!bindingContext.hasBindings()) { - return input; - } - - if (input.matches("^\\?\\d+$")) { - return getParameterValueForBinding(accessor, bindingContext.getParameters(), - bindingContext.getBindings().iterator().next()); - } - - Matcher matcher = createReplacementPattern(bindingContext.getBindings()).matcher(input); - StringBuffer buffer = new StringBuffer(); - - int parameterIndex = 0; - while (matcher.find()) { - - Placeholder placeholder = extractPlaceholder(parameterIndex++, matcher); - ParameterBinding binding = bindingContext.getBindingFor(placeholder); - String valueForBinding = getParameterValueForBinding(accessor, bindingContext.getParameters(), binding); - - // appendReplacement does not like unescaped $ sign and others, so we need to quote that stuff first - matcher.appendReplacement(buffer, Matcher.quoteReplacement(valueForBinding)); - if (StringUtils.hasText(placeholder.getSuffix())) { - buffer.append(placeholder.getSuffix()); - } - - if (placeholder.isQuoted()) { - postProcessQuotedBinding(buffer, valueForBinding, - !binding.isExpression() ? accessor.getBindableValue(binding.getParameterIndex()) : null, - binding.isExpression()); - } - } - - matcher.appendTail(buffer); - return buffer.toString(); - } - - /** - * Sanitize String binding by replacing single quoted values with double quotes which prevents potential single quotes - * contained in replacement to interfere with the Json parsing. Also take care of complex objects by removing the - * quotation entirely. - * - * @param buffer the {@link StringBuffer} to operate upon. - * @param valueForBinding the actual binding value. - * @param raw the raw binding value - * @param isExpression {@literal true} if the binding value results from a SpEL expression. 
- */ - private void postProcessQuotedBinding(StringBuffer buffer, String valueForBinding, @Nullable Object raw, - boolean isExpression) { - - int quotationMarkIndex = buffer.length() - valueForBinding.length() - 1; - char quotationMark = buffer.charAt(quotationMarkIndex); - - while (quotationMark != '\'' && quotationMark != '"') { - - quotationMarkIndex--; - - if (quotationMarkIndex < 0) { - throw new IllegalArgumentException("Could not find opening quotes for quoted parameter"); - } - - quotationMark = buffer.charAt(quotationMarkIndex); - } - - // remove quotation char before the complex object string - if (valueForBinding.startsWith("{") && (raw instanceof DBObject || isExpression)) { - - buffer.deleteCharAt(quotationMarkIndex); - - } else { - - if (isExpression) { - - buffer.deleteCharAt(quotationMarkIndex); - return; - } - - if (quotationMark == '\'') { - buffer.replace(quotationMarkIndex, quotationMarkIndex + 1, "\""); - } - - buffer.append("\""); - } - } - - /** - * Returns the serialized value to be used for the given {@link ParameterBinding}. - * - * @param accessor must not be {@literal null}. - * @param parameters - * @param binding must not be {@literal null}. - * @return - */ - private String getParameterValueForBinding(MongoParameterAccessor accessor, MongoParameters parameters, - ParameterBinding binding) { - - Object value = binding.isExpression() - ? evaluateExpression(binding.getExpression(), parameters, accessor.getValues()) - : accessor.getBindableValue(binding.getParameterIndex()); - - if (value instanceof String && binding.isQuoted()) { - - if (binding.isExpression() && ((String) value).startsWith("{")) { - return (String) value; - } - - return binding.isExpression() ? 
JSON.serialize(value) : QuotedString.unquote(JSON.serialize(value)); - } - - if (value instanceof byte[]) { - - String base64representation = DatatypeConverter.printBase64Binary((byte[]) value); - - if (!binding.isQuoted()) { - return "{ '$binary' : '" + base64representation + "', '$type' : '" + BSON.B_GENERAL + "'}"; - } - - return base64representation; - } - - return JSON.serialize(value); - } - - /** - * Evaluates the given {@code expressionString}. - * - * @param expressionString must not be {@literal null} or empty. - * @param parameters must not be {@literal null}. - * @param parameterValues must not be {@literal null}. - * @return - */ - @Nullable - private Object evaluateExpression(String expressionString, MongoParameters parameters, Object[] parameterValues) { - - EvaluationContext evaluationContext = evaluationContextProvider.getEvaluationContext(parameters, parameterValues); - Expression expression = expressionParser.parseExpression(expressionString); - - return expression.getValue(evaluationContext, Object.class); - } - - /** - * Creates a replacement {@link Pattern} for all {@link ParameterBinding#getParameter() binding parameters} including - * a potentially trailing quotation mark. - * - * @param bindings - * @return - */ - private Pattern createReplacementPattern(List bindings) { - - StringBuilder regex = new StringBuilder(); - - for (ParameterBinding binding : bindings) { - - regex.append("|"); - regex.append("(" + Pattern.quote(binding.getParameter()) + ")"); - regex.append("([\\w.]*"); - regex.append("(\\W?['\"]|\\w*')?)"); - } - - return Pattern.compile(regex.substring(1)); - } - - /** - * Extract the placeholder stripping any trailing trailing quotation mark that might have resulted from the - * {@link #createReplacementPattern(List) pattern} used. - * - * @param parameterIndex The actual parameter index. - * @param matcher The actual {@link Matcher}. 
- * @return - */ - private Placeholder extractPlaceholder(int parameterIndex, Matcher matcher) { - - String rawPlaceholder = matcher.group(parameterIndex * 3 + 1); - String suffix = matcher.group(parameterIndex * 3 + 2); - - if (!StringUtils.hasText(rawPlaceholder)) { - - rawPlaceholder = matcher.group(); - if (rawPlaceholder.matches(".*\\d$")) { - suffix = ""; - } else { - int index = rawPlaceholder.replaceAll("[^\\?0-9]*$", "").length() - 1; - if (index > 0 && rawPlaceholder.length() > index) { - suffix = rawPlaceholder.substring(index + 1); - } - } - if (QuotedString.endsWithQuote(rawPlaceholder)) { - rawPlaceholder = rawPlaceholder.substring(0, - rawPlaceholder.length() - (StringUtils.hasText(suffix) ? suffix.length() : 1)); - } - } - - if (StringUtils.hasText(suffix)) { - - boolean quoted = QuotedString.endsWithQuote(suffix); - - return Placeholder.of(parameterIndex, rawPlaceholder, quoted, - quoted ? QuotedString.unquoteSuffix(suffix) : suffix); - } - return Placeholder.of(parameterIndex, rawPlaceholder, false, null); - } - - /** - * @author Christoph Strobl - * @author Mark Paluch - * @since 1.9 - */ - static class BindingContext { - - final MongoParameters parameters; - final Map bindings; - - /** - * Creates new {@link BindingContext}. - * - * @param parameters - * @param bindings - */ - public BindingContext(MongoParameters parameters, List bindings) { - - this.parameters = parameters; - this.bindings = mapBindings(bindings); - } - - /** - * @return {@literal true} when list of bindings is not empty. - */ - boolean hasBindings() { - return !CollectionUtils.isEmpty(bindings); - } - - /** - * Get unmodifiable list of {@link ParameterBinding}s. - * - * @return never {@literal null}. - */ - public List getBindings() { - return new ArrayList(bindings.values()); - } - - /** - * Get the concrete {@link ParameterBinding} for a given {@literal placeholder}. - * - * @param placeholder must not be {@literal null}. 
- * @return - * @throws java.util.NoSuchElementException - * @since 1.10 - */ - ParameterBinding getBindingFor(Placeholder placeholder) { - - if (!bindings.containsKey(placeholder)) { - throw new NoSuchElementException(String.format("Could not to find binding for placeholder '%s'.", placeholder)); - } - - return bindings.get(placeholder); - } - - /** - * Get the associated {@link MongoParameters}. - * - * @return - */ - public MongoParameters getParameters() { - return parameters; - } - - private static Map mapBindings(List bindings) { - - Map map = new LinkedHashMap(bindings.size(), 1); - - int parameterIndex = 0; - for (ParameterBinding binding : bindings) { - map.put(Placeholder.of(parameterIndex++, binding.getParameter(), binding.isQuoted(), null), binding); - } - - return map; - } - } - - /** - * Encapsulates a quoted/unquoted parameter placeholder. - * - * @author Mark Paluch - * @since 1.9 - */ - @Value(staticConstructor = "of") - @EqualsAndHashCode(exclude = { "quoted", "suffix" }) - static class Placeholder { - - private int parameterIndex; - private final String parameter; - private final boolean quoted; - private final @Nullable String suffix; - - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return quoted ? String.format("'%s'", parameter + (suffix != null ? suffix : "")) - : parameter + (suffix != null ? suffix : ""); - } - - } - - /** - * Utility to handle quoted strings using single/double quotes. - * - * @author Mark Paluch - */ - @UtilityClass - static class QuotedString { - - /** - * @param string - * @return {@literal true} if {@literal string} ends with a single/double quote. - */ - static boolean endsWithQuote(String string) { - return string.endsWith("'") || string.endsWith("\""); - } - - /** - * Remove trailing quoting from {@literal quoted}. - * - * @param quoted - * @return {@literal quoted} with removed quotes. 
- */ - public static String unquoteSuffix(String quoted) { - return quoted.substring(0, quoted.length() - 1); - } - - /** - * Remove leading and trailing quoting from {@literal quoted}. - * - * @param quoted - * @return {@literal quoted} with removed quotes. - */ - public static String unquote(String quoted) { - return quoted.substring(1, quoted.length() - 1); - } - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityInformation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityInformation.java index 04728b819a..8678e5a74c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityInformation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityInformation.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,15 @@ */ package org.springframework.data.mongodb.repository.query; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.repository.core.EntityInformation; +import org.springframework.lang.Nullable; /** * Mongo specific {@link EntityInformation}. 
* * @author Oliver Gierke + * @author Mark Paluch */ public interface MongoEntityInformation extends EntityInformation { @@ -37,4 +40,45 @@ public interface MongoEntityInformation extends EntityInformation * @return */ String getIdAttribute(); + + /** + * Returns whether the entity uses optimistic locking. + * + * @return true if the entity defines a {@link org.springframework.data.annotation.Version} property. + * @since 2.2 + */ + default boolean isVersioned() { + return false; + } + + /** + * Returns the version value for the entity or {@literal null} if the entity is not {@link #isVersioned() versioned}. + * + * @param entity must not be {@literal null} + * @return can be {@literal null}. + * @since 2.2 + */ + @Nullable + default Object getVersion(T entity) { + return null; + } + + /** + * Returns whether the entity defines a specific collation. + * + * @return {@literal true} if the entity defines a collation. + * @since 2.2 + */ + default boolean hasCollation() { + return getCollation() != null; + } + + /** + * Return the collation for the entity or {@literal null} if {@link #hasCollation() not defined}. + * + * @return can be {@literal null}. + * @since 2.2 + */ + @Nullable + Collation getCollation(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityMetadata.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityMetadata.java index 9f21064bae..9aa8af8cfe 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityMetadata.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoEntityMetadata.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -34,7 +34,7 @@ public interface MongoEntityMetadata extends EntityMetadata { /** * Returns the {@link MongoPersistentEntity} that supposed to determine the collection to be queried. - * + * * @return * @since 2.0.4 */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameterAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameterAccessor.java index f1798b1c68..5db853e810 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameterAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameterAccessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,7 +18,10 @@ import org.springframework.data.domain.Range; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.repository.query.ParameterAccessor; import org.springframework.lang.Nullable; @@ -45,6 +48,7 @@ public interface MongoParameterAccessor extends ParameterAccessor { * * @return */ + @Nullable Point getGeoNearLocation(); /** @@ -56,6 +60,15 @@ public interface MongoParameterAccessor extends ParameterAccessor { @Nullable TextCriteria getFullText(); + /** + * Returns the {@link Collation} to be used for the query. + * + * @return {@literal null} if not set. + * @since 2.2 + */ + @Nullable + Collation getCollation(); + /** * Returns the raw parameter values of the underlying query method. * @@ -63,4 +76,13 @@ public interface MongoParameterAccessor extends ParameterAccessor { * @since 1.8 */ Object[] getValues(); + + /** + * Returns the {@link Update} to be used for an update execution. + * + * @return {@literal null} if not present. 
+ * @since 3.4 + */ + @Nullable + UpdateDefinition getUpdate(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameters.java index 843fbf0be4..1f66d5b77d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParameters.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,7 @@ */ package org.springframework.data.mongodb.repository.query; +import java.io.Serializable; import java.lang.reflect.Method; import java.util.Arrays; import java.util.List; @@ -22,13 +23,18 @@ import org.springframework.core.MethodParameter; import org.springframework.data.domain.Range; import org.springframework.data.geo.Distance; +import org.springframework.data.geo.GeoPage; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.repository.Near; import org.springframework.data.mongodb.repository.query.MongoParameters.MongoParameter; import 
org.springframework.data.repository.query.Parameter; import org.springframework.data.repository.query.Parameters; -import org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.repository.query.ParametersSource; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; @@ -38,43 +44,71 @@ * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Thomas Darimont */ public class MongoParameters extends Parameters { + private static final List> GEO_NEAR_RESULTS = Arrays.asList(GeoResult.class, + GeoResults.class, GeoPage.class); + private final int rangeIndex; private final int maxDistanceIndex; private final @Nullable Integer fullTextIndex; private final @Nullable Integer nearIndex; + private final @Nullable Integer collationIndex; + private final int updateIndex; + private final TypeInformation domainType; + + /** + * Creates a new {@link MongoParameters} instance from the given {@link Method} and {@link MongoQueryMethod}. + * + * @param parametersSource must not be {@literal null}. + * @since 4.5 + */ + public MongoParameters(ParametersSource parametersSource) { + this(parametersSource, isGeoNearQuery(parametersSource.getMethod())); + } + + /** + * Creates a new {@link MongoParameters} instance from the given {@link Method} and {@link MongoQueryMethod}. + * + * @param parametersSource must not be {@literal null}. + * @param isGeoNearMethod indicate if this is a geo-spatial query method + */ + public MongoParameters(ParametersSource parametersSource, boolean isGeoNearMethod) { + this(parametersSource, new NearIndex(parametersSource, isGeoNearMethod)); + } /** * Creates a new {@link MongoParameters} instance from the given {@link Method} and {@link MongoQueryMethod}. * - * @param method must not be {@literal null}. - * @param queryMethod must not be {@literal null}. + * @param parametersSource must not be {@literal null}. + * @param nearIndex the near parameter index. 
*/ - public MongoParameters(Method method, boolean isGeoNearMethod) { + private MongoParameters(ParametersSource parametersSource, NearIndex nearIndex) { - super(method); + super(parametersSource, methodParameter -> new MongoParameter(methodParameter, + parametersSource.getDomainTypeInformation(), nearIndex.nearIndex)); + + Method method = parametersSource.getMethod(); List> parameterTypes = Arrays.asList(method.getParameterTypes()); + this.domainType = parametersSource.getDomainTypeInformation(); this.fullTextIndex = parameterTypes.indexOf(TextCriteria.class); - ClassTypeInformation declaringClassInfo = ClassTypeInformation.from(method.getDeclaringClass()); + TypeInformation declaringClassInfo = TypeInformation.of(parametersSource.getContainingClass()); List> parameterTypeInfo = declaringClassInfo.getParameterTypes(method); this.rangeIndex = getTypeIndex(parameterTypeInfo, Range.class, Distance.class); this.maxDistanceIndex = this.rangeIndex == -1 ? getTypeIndex(parameterTypeInfo, Distance.class, null) : -1; - - int index = findNearIndexInParameters(method); - if (index == -1 && isGeoNearMethod) { - index = getNearIndex(parameterTypes); - } - - this.nearIndex = index; + this.collationIndex = getTypeIndex(parameterTypeInfo, Collation.class, null); + this.updateIndex = QueryUtils.indexOfAssignableParameter(UpdateDefinition.class, parameterTypes); + this.nearIndex = nearIndex.nearIndex; } private MongoParameters(List parameters, int maxDistanceIndex, @Nullable Integer nearIndex, - @Nullable Integer fullTextIndex, int rangeIndex) { + @Nullable Integer fullTextIndex, int rangeIndex, @Nullable Integer collationIndex, int updateIndex, + TypeInformation domainType) { super(parameters); @@ -82,9 +116,45 @@ private MongoParameters(List parameters, int maxDistanceIndex, @ this.fullTextIndex = fullTextIndex; this.maxDistanceIndex = maxDistanceIndex; this.rangeIndex = rangeIndex; + this.collationIndex = collationIndex; + this.updateIndex = updateIndex; + this.domainType = 
domainType; + } + + static boolean isGeoNearQuery(Method method) { + + Class returnType = method.getReturnType(); + + for (Class type : GEO_NEAR_RESULTS) { + if (type.isAssignableFrom(returnType)) { + return true; + } + } + + if (Iterable.class.isAssignableFrom(returnType)) { + TypeInformation from = TypeInformation.fromReturnTypeOf(method); + return GeoResult.class.equals(from.getRequiredComponentType().getType()); + } + + return false; } - private final int getNearIndex(List> parameterTypes) { + static class NearIndex { + + private final @Nullable Integer nearIndex; + + public NearIndex(ParametersSource parametersSource, boolean isGeoNearMethod) { + + int index = findNearIndexInParameters(parametersSource.getMethod()); + if (index == -1 && isGeoNearMethod) { + index = getNearIndex(Arrays.asList(parametersSource.getMethod().getParameterTypes())); + } + + this.nearIndex = index; + } + } + + private static int getNearIndex(List> parameterTypes) { for (Class reference : Arrays.asList(Point.class, double[].class)) { @@ -97,25 +167,28 @@ private final int getNearIndex(List> parameterTypes) { if (nearIndex == parameterTypes.lastIndexOf(reference)) { return nearIndex; } else { - throw new IllegalStateException("Multiple Point parameters found but none annotated with @Near!"); + throw new IllegalStateException("Multiple Point parameters found but none annotated with @Near"); } } return -1; } - private int findNearIndexInParameters(Method method) { + static int findNearIndexInParameters(Method method) { int index = -1; for (java.lang.reflect.Parameter p : method.getParameters()) { - MongoParameter param = createParameter(MethodParameter.forParameter(p)); - if (param.isManuallyAnnotatedNearParameter()) { - if(index == -1) { - index = param.getIndex(); + MethodParameter methodParameter = MethodParameter.forParameter(p); + + if ((Point.class.isAssignableFrom(methodParameter.getParameterType()) + || methodParameter.getParameterType().equals(double[].class)) + && 
methodParameter.hasParameterAnnotation(Near.class)) { + if (index == -1) { + index = methodParameter.getParameterIndex(); } else { - throw new IllegalStateException(String.format("Found multiple @Near annotations ond method %s! Only one allowed!", - method.toString())); + throw new IllegalStateException( + String.format("Found multiple @Near annotations ond method %s; Only one allowed", method)); } } @@ -123,17 +196,6 @@ private int findNearIndexInParameters(Method method) { return index; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.Parameters#createParameter(org.springframework.core.MethodParameter) - */ - @Override - protected MongoParameter createParameter(MethodParameter parameter) { - return new MongoParameter(parameter); - } - - - public int getDistanceRangeIndex() { return -1; } @@ -158,13 +220,13 @@ public int getNearIndex() { } /** - * Returns ths inde of the parameter to be used as a textquery param + * Returns the index of the parameter to be used as a text query param * * @return * @since 1.6 */ public int getFullTextParameterIndex() { - return fullTextIndex != null ? fullTextIndex.intValue() : -1; + return fullTextIndex != null ? fullTextIndex : -1; } /** @@ -172,7 +234,7 @@ public int getFullTextParameterIndex() { * @since 1.6 */ public boolean hasFullTextParameter() { - return this.fullTextIndex != null && this.fullTextIndex.intValue() >= 0; + return this.fullTextIndex != null && this.fullTextIndex >= 0; } /** @@ -183,13 +245,30 @@ public int getRangeIndex() { return rangeIndex; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.Parameters#createFrom(java.util.List) + /** + * Returns the index of the {@link Collation} parameter or -1 if not present. + * + * @return -1 if not set. + * @since 2.2 */ + public int getCollationParameterIndex() { + return collationIndex != null ? collationIndex : -1; + } + + /** + * Returns the index of the {@link UpdateDefinition} parameter or -1 if not present. 
+ * + * @return -1 if not present. + * @since 3.4 + */ + public int getUpdateIndex() { + return updateIndex; + } + @Override protected MongoParameters createFrom(List parameters) { - return new MongoParameters(parameters, this.maxDistanceIndex, this.nearIndex, this.fullTextIndex, this.rangeIndex); + return new MongoParameters(parameters, this.maxDistanceIndex, this.nearIndex, this.fullTextIndex, this.rangeIndex, + this.collationIndex, this.updateIndex, this.domainType); } private int getTypeIndex(List> parameterTypes, Class type, @Nullable Class componentType) { @@ -216,36 +295,34 @@ private int getTypeIndex(List> parameterTypes, Class type, * * @author Oliver Gierke */ - class MongoParameter extends Parameter { + static class MongoParameter extends Parameter { private final MethodParameter parameter; + private final @Nullable Integer nearIndex; /** * Creates a new {@link MongoParameter}. * * @param parameter must not be {@literal null}. + * @param domainType must not be {@literal null}. 
*/ - MongoParameter(MethodParameter parameter) { - super(parameter); + MongoParameter(MethodParameter parameter, TypeInformation domainType, @Nullable Integer nearIndex) { + super(parameter, domainType); this.parameter = parameter; + this.nearIndex = nearIndex; if (!isPoint() && hasNearAnnotation()) { - throw new IllegalArgumentException("Near annotation is only allowed at Point parameter!"); + throw new IllegalArgumentException("Near annotation is only allowed at Point parameter"); } } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.Parameter#isSpecialParameter() - */ @Override public boolean isSpecialParameter() { return super.isSpecialParameter() || Distance.class.isAssignableFrom(getType()) || isNearParameter() - || TextCriteria.class.isAssignableFrom(getType()); + || TextCriteria.class.isAssignableFrom(getType()) || Collation.class.isAssignableFrom(getType()); } private boolean isNearParameter() { - Integer nearIndex = MongoParameters.this.nearIndex; return nearIndex != null && nearIndex.equals(getIndex()); } @@ -260,7 +337,6 @@ private boolean isPoint() { private boolean hasNearAnnotation() { return parameter.getParameterAnnotation(Near.class) != null; } - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java index d7d42d9c32..ac1931e10c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,14 @@ */ package org.springframework.data.mongodb.repository.query; -import java.util.Arrays; -import java.util.List; - import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Term; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.repository.query.ParametersParameterAccessor; import org.springframework.lang.Nullable; import org.springframework.util.Assert; @@ -38,8 +38,7 @@ */ public class MongoParametersParameterAccessor extends ParametersParameterAccessor implements MongoParameterAccessor { - private final MongoQueryMethod method; - private final List values; + final MongoQueryMethod method; /** * Creates a new {@link MongoParametersParameterAccessor}. @@ -52,7 +51,6 @@ public MongoParametersParameterAccessor(MongoQueryMethod method, Object[] values super(method.getParameters(), values); this.method = method; - this.values = Arrays.asList(values); } public Range getDistanceRange() { @@ -66,15 +64,12 @@ public Range getDistanceRange() { } int maxDistanceIndex = mongoParameters.getMaxDistanceIndex(); - Distance maxDistance = maxDistanceIndex == -1 ? null : (Distance) getValue(maxDistanceIndex); + Bound maxDistance = maxDistanceIndex == -1 ? 
Bound.unbounded() + : Bound.inclusive((Distance) getValue(maxDistanceIndex)); - return new Range(null, maxDistance); + return Range.of(Bound.unbounded(), maxDistance); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoParameterAccessor#getGeoNearLocation() - */ public Point getGeoNearLocation() { int nearIndex = method.getParameters().getNearIndex(); @@ -89,10 +84,9 @@ public Point getGeoNearLocation() { return null; } - if (value instanceof double[]) { - double[] typedValue = (double[]) value; + if (value instanceof double[] typedValue) { if (typedValue.length != 2) { - throw new IllegalArgumentException("The given double[] must have exactly 2 elements!"); + throw new IllegalArgumentException("The given double[] must have exactly 2 elements"); } else { return new Point(typedValue[0], typedValue[1]); } @@ -101,10 +95,6 @@ public Point getGeoNearLocation() { return (Point) value; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getFullText() - */ @Nullable @Override public TextCriteria getFullText() { @@ -116,16 +106,16 @@ protected TextCriteria potentiallyConvertFullText(Object fullText) { Assert.notNull(fullText, "Fulltext parameter must not be 'null'."); - if (fullText instanceof String) { - return TextCriteria.forDefaultLanguage().matching((String) fullText); + if (fullText instanceof String stringValue) { + return TextCriteria.forDefaultLanguage().matching(stringValue); } - if (fullText instanceof Term) { - return TextCriteria.forDefaultLanguage().matching((Term) fullText); + if (fullText instanceof Term term) { + return TextCriteria.forDefaultLanguage().matching(term); } - if (fullText instanceof TextCriteria) { - return ((TextCriteria) fullText); + if (fullText instanceof TextCriteria textCriteria) { + return textCriteria; } throw new IllegalArgumentException( @@ -133,12 +123,25 @@ protected TextCriteria potentiallyConvertFullText(Object fullText) { 
ClassUtils.getShortName(fullText.getClass()))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getValues() - */ + @Override + public Collation getCollation() { + + if (method.getParameters().getCollationParameterIndex() == -1) { + return null; + } + + return getValue(method.getParameters().getCollationParameterIndex()); + } + @Override public Object[] getValues() { - return values.toArray(); + return super.getValues(); + } + + @Override + public UpdateDefinition getUpdate() { + + int updateIndex = method.getParameters().getUpdateIndex(); + return updateIndex == -1 ? null : (UpdateDefinition) getValue(updateIndex); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryCreator.java index 6442146dcb..66a8870623 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryCreator.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,34 +21,41 @@ import java.util.Collection; import java.util.Iterator; import java.util.Optional; +import java.util.regex.Pattern; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.BsonRegularExpression; import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; import org.springframework.data.domain.Sort; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; import org.springframework.data.geo.Shape; +import org.springframework.data.mapping.PersistentPropertyPath; import org.springframework.data.mapping.PropertyPath; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.context.PersistentPropertyPath; +import org.springframework.data.mongodb.core.geo.GeoJson; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import org.springframework.data.mongodb.core.index.GeoSpatialIndexed; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.data.mongodb.core.query.MetricConversion; import org.springframework.data.mongodb.core.query.MongoRegexCreator; import org.springframework.data.mongodb.core.query.MongoRegexCreator.MatchMode; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor.PotentiallyConvertingIterator; import 
org.springframework.data.repository.query.parser.AbstractQueryCreator; import org.springframework.data.repository.query.parser.Part; import org.springframework.data.repository.query.parser.Part.IgnoreCaseType; import org.springframework.data.repository.query.parser.Part.Type; import org.springframework.data.repository.query.parser.PartTree; +import org.springframework.data.util.Streamable; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; /** * Custom query creator to create Mongo criterias. @@ -60,11 +67,11 @@ */ class MongoQueryCreator extends AbstractQueryCreator { - private static final Logger LOG = LoggerFactory.getLogger(MongoQueryCreator.class); - private final MongoParameterAccessor accessor; - private final boolean isGeoNearQuery; + private static final Log LOG = LogFactory.getLog(MongoQueryCreator.class); + private final MongoParameterAccessor accessor; private final MappingContext context; + private final boolean isGeoNearQuery; /** * Creates a new {@link MongoQueryCreator} from the given {@link PartTree}, {@link ConvertingParameterAccessor} and @@ -93,17 +100,13 @@ public MongoQueryCreator(PartTree tree, ConvertingParameterAccessor accessor, super(tree, accessor); - Assert.notNull(context, "MappingContext must not be null!"); + Assert.notNull(context, "MappingContext must not be null"); this.accessor = accessor; this.isGeoNearQuery = isGeoNearQuery; this.context = context; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.parser.AbstractQueryCreator#create(org.springframework.data.repository.query.parser.Part, java.util.Iterator) - */ @Override protected Criteria create(Part part, Iterator iterator) { @@ -113,15 +116,10 @@ protected Criteria create(Part part, Iterator iterator) { PersistentPropertyPath path = context.getPersistentPropertyPath(part.getProperty()); MongoPersistentProperty property = 
path.getLeafProperty(); - Criteria criteria = from(part, property, where(path.toDotPath()), (PotentiallyConvertingIterator) iterator); - return criteria; + return from(part, property, where(path.toDotPath()), iterator); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.parser.AbstractQueryCreator#and(org.springframework.data.repository.query.parser.Part, java.lang.Object, java.util.Iterator) - */ @Override protected Criteria and(Part part, Criteria base, Iterator iterator) { @@ -132,13 +130,9 @@ protected Criteria and(Part part, Criteria base, Iterator iterator) { PersistentPropertyPath path = context.getPersistentPropertyPath(part.getProperty()); MongoPersistentProperty property = path.getLeafProperty(); - return from(part, property, base.and(path.toDotPath()), (PotentiallyConvertingIterator) iterator); + return from(part, property, base.and(path.toDotPath()), iterator); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.parser.AbstractQueryCreator#or(java.lang.Object, java.lang.Object) - */ @Override protected Criteria or(Criteria base, Criteria criteria) { @@ -146,10 +140,6 @@ protected Criteria or(Criteria base, Criteria criteria) { return result.orOperator(base, criteria); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.parser.AbstractQueryCreator#complete(java.lang.Object, org.springframework.data.domain.Sort) - */ @Override protected Query complete(Criteria criteria, Sort sort) { @@ -187,15 +177,15 @@ private Criteria from(Part part, MongoPersistentProperty property, Criteria crit case LESS_THAN_EQUAL: return criteria.lte(parameters.next()); case BETWEEN: - return criteria.gt(parameters.next()).lt(parameters.next()); + return computeBetweenPart(criteria, parameters); case IS_NOT_NULL: return criteria.ne(null); case IS_NULL: return criteria.is(null); case NOT_IN: - return criteria.nin(nextAsArray(parameters)); + return criteria.nin(nextAsList(parameters, part)); case IN: - return 
criteria.in(nextAsArray(parameters)); + return criteria.in(nextAsList(parameters, part)); case LIKE: case STARTING_WITH: case ENDING_WITH: @@ -206,7 +196,9 @@ private Criteria from(Part part, MongoPersistentProperty property, Criteria crit case NOT_CONTAINING: return createContainingCriteria(part, property, criteria.not(), parameters); case REGEX: - return criteria.regex(parameters.next().toString()); + + Object param = parameters.next(); + return param instanceof Pattern pattern ? criteria.regex(pattern) : criteria.regex(param.toString()); case EXISTS: return criteria.exists((Boolean) parameters.next()); case TRUE: @@ -232,8 +224,14 @@ private Criteria from(Part part, MongoPersistentProperty property, Criteria crit criteria.near(pointToUse); } - criteria.maxDistance(it.getNormalizedValue()); - minDistance.ifPresent(min -> criteria.minDistance(min.getNormalizedValue())); + if (pointToUse instanceof GeoJson) { // using GeoJson distance is in meters. + + criteria.maxDistance(MetricConversion.getDistanceInMeters(it)); + minDistance.map(MetricConversion::getDistanceInMeters).ifPresent(criteria::minDistance); + } else { + criteria.maxDistance(it.getNormalizedValue()); + minDistance.map(Distance::getNormalizedValue).ifPresent(criteria::minDistance); + } return criteria; @@ -245,30 +243,25 @@ private Criteria from(Part part, MongoPersistentProperty property, Criteria crit return criteria.within((Shape) parameter); case SIMPLE_PROPERTY: - return isSimpleComparisionPossible(part) ? criteria.is(parameters.next()) + return isSimpleComparisonPossible(part) ? criteria.is(parameters.next()) : createLikeRegexCriteriaOrThrow(part, property, criteria, parameters, false); case NEGATING_SIMPLE_PROPERTY: - return isSimpleComparisionPossible(part) ? criteria.ne(parameters.next()) + return isSimpleComparisonPossible(part) ? 
criteria.ne(parameters.next()) : createLikeRegexCriteriaOrThrow(part, property, criteria, parameters, true); default: - throw new IllegalArgumentException("Unsupported keyword!"); + throw new IllegalArgumentException("Unsupported keyword"); } } - private boolean isSimpleComparisionPossible(Part part) { + private boolean isSimpleComparisonPossible(Part part) { - switch (part.shouldIgnoreCase()) { - case NEVER: - return true; - case WHEN_POSSIBLE: - return part.getProperty().getType() != String.class; - case ALWAYS: - return false; - default: - return true; - } + return switch (part.shouldIgnoreCase()) { + case NEVER -> true; + case WHEN_POSSIBLE -> part.getProperty().getType() != String.class; + case ALWAYS -> false; + }; } /** @@ -326,7 +319,7 @@ private Criteria createContainingCriteria(Part part, MongoPersistentProperty pro Iterator parameters) { if (property.isCollectionLike()) { - return criteria.in(nextAsArray(parameters)); + return criteria.in(nextAsList(parameters, part)); } return addAppropriateLikeRegexTo(criteria, part, parameters.next()); @@ -345,7 +338,7 @@ private Criteria addAppropriateLikeRegexTo(Criteria criteria, Part part, Object if (value == null) { throw new IllegalArgumentException(String.format( - "Argument for creating $regex pattern for property '%s' must not be null!", part.getProperty().getSegment())); + "Argument for creating $regex pattern for property '%s' must not be null", part.getProperty().getSegment())); } return criteria.regex(toLikeRegex(value.toString(), part), toRegexOptions(part)); @@ -355,6 +348,7 @@ private Criteria addAppropriateLikeRegexTo(Criteria criteria, Part part, Object * @param part * @return the regex options or {@literal null}. 
*/ + @Nullable private String toRegexOptions(Part part) { String regexOptions = null; @@ -386,20 +380,38 @@ private T nextAs(Iterator iterator, Class type) { } throw new IllegalArgumentException( - String.format("Expected parameter type of %s but got %s!", type, parameter.getClass())); + String.format("Expected parameter type of %s but got %s", type, parameter.getClass())); } - private Object[] nextAsArray(Iterator iterator) { + private java.util.List nextAsList(Iterator iterator, Part part) { + + Streamable streamable = asStreamable(iterator.next()); + if (!isSimpleComparisonPossible(part)) { + + MatchMode matchMode = toMatchMode(part.getType()); + String regexOptions = toRegexOptions(part); - Object next = iterator.next(); + streamable = streamable.map(it -> { + if (it instanceof String value) { - if (next instanceof Collection) { - return ((Collection) next).toArray(); - } else if (next != null && next.getClass().isArray()) { - return (Object[]) next; + return new BsonRegularExpression(MongoRegexCreator.INSTANCE.toRegularExpression(value, matchMode), + regexOptions); + } + return it; + }); } - return new Object[] { next }; + return streamable.toList(); + } + + private Streamable asStreamable(Object value) { + + if (value instanceof Collection collection) { + return Streamable.of(collection); + } else if (ObjectUtils.isArray(value)) { + return Streamable.of((Object[]) value); + } + return Streamable.of(value); } private String toLikeRegex(String source, Part part) { @@ -416,26 +428,60 @@ private boolean isSpherical(MongoPersistentProperty property) { return false; } - private static MatchMode toMatchMode(Type type) { + /** + * Compute a {@link Type#BETWEEN} typed {@link Part} using {@link Criteria#gt(Object) $gt}, + * {@link Criteria#gte(Object) $gte}, {@link Criteria#lt(Object) $lt} and {@link Criteria#lte(Object) $lte}. + *
          + * In case the first {@literal value} is actually a {@link Range} the lower and upper bounds of the {@link Range} are + * used according to their {@link Bound#isInclusive() inclusion} definition. Otherwise the {@literal value} is used + * for {@literal $gt} and {@link Iterator#next() parameters.next()} as {@literal $lt}. + * + * @param criteria must not be {@literal null}. + * @param parameters must not be {@literal null}. + * @return + * @since 2.2 + */ + private static Criteria computeBetweenPart(Criteria criteria, Iterator parameters) { - switch (type) { - case NOT_CONTAINING: - case CONTAINING: - return MatchMode.CONTAINING; - case STARTING_WITH: - return MatchMode.STARTING_WITH; - case ENDING_WITH: - return MatchMode.ENDING_WITH; - case LIKE: - case NOT_LIKE: - return MatchMode.LIKE; - case REGEX: - return MatchMode.REGEX; - case NEGATING_SIMPLE_PROPERTY: - case SIMPLE_PROPERTY: - return MatchMode.EXACT; - default: - return MatchMode.DEFAULT; + Object value = parameters.next(); + if (!(value instanceof Range range)) { + return criteria.gt(value).lt(parameters.next()); } + + Optional min = range.getLowerBound().getValue(); + Optional max = range.getUpperBound().getValue(); + + min.ifPresent(it -> { + + if (range.getLowerBound().isInclusive()) { + criteria.gte(it); + } else { + criteria.gt(it); + } + }); + + max.ifPresent(it -> { + + if (range.getUpperBound().isInclusive()) { + criteria.lte(it); + } else { + criteria.lt(it); + } + }); + + return criteria; + } + + private static MatchMode toMatchMode(Type type) { + + return switch (type) { + case NOT_CONTAINING, CONTAINING -> MatchMode.CONTAINING; + case STARTING_WITH -> MatchMode.STARTING_WITH; + case ENDING_WITH -> MatchMode.ENDING_WITH; + case LIKE, NOT_LIKE -> MatchMode.LIKE; + case REGEX -> MatchMode.REGEX; + case NEGATING_SIMPLE_PROPERTY, SIMPLE_PROPERTY, IN -> MatchMode.EXACT; + default -> MatchMode.DEFAULT; + }; } } diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryExecution.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryExecution.java index 4b5b480fa4..dd2b78de59 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryExecution.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryExecution.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,8 @@ */ package org.springframework.data.mongodb.repository.query; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - import java.util.List; +import java.util.function.Supplier; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; @@ -30,13 +28,20 @@ import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.ExecutableFindOperation; import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery; import org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind; +import org.springframework.data.mongodb.core.ExecutableUpdateOperation.ExecutableUpdate; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.query.NearQuery; import 
org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.repository.support.PageableExecutionUtils; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.util.SliceUtils; +import org.springframework.data.support.PageableExecutionUtils; import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; import com.mongodb.client.result.DeleteResult; @@ -52,6 +57,7 @@ @FunctionalInterface interface MongoQueryExecution { + @Nullable Object execute(Query query); /** @@ -61,16 +67,20 @@ interface MongoQueryExecution { * @author Christoph Strobl * @since 1.5 */ - @RequiredArgsConstructor final class SlicedExecution implements MongoQueryExecution { - private final @NonNull FindWithQuery find; - private final @NonNull Pageable pageable; + private final FindWithQuery find; + private final Pageable pageable; + + public SlicedExecution(ExecutableFindOperation.FindWithQuery find, Pageable pageable) { + + Assert.notNull(find, "Find must not be null"); + Assert.notNull(pageable, "Pageable must not be null"); + + this.find = find; + this.pageable = pageable; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoQueryExecution#execute(org.springframework.data.mongodb.core.query.Query) - */ @Override @SuppressWarnings({ "unchecked", "rawtypes" }) public Object execute(Query query) { @@ -78,7 +88,7 @@ public Object execute(Query query) { int pageSize = pageable.getPageSize(); // Apply Pageable but tweak limit to peek into next page - Query modifiedQuery = query.with(pageable).limit(pageSize + 1); + Query modifiedQuery = SliceUtils.limitResult(query, pageable).with(pageable.getSort()); List result = find.matching(modifiedQuery).all(); boolean hasNext = result.size() > pageSize; @@ -92,17 +102,22 @@ public Object execute(Query query) { * * 
@author Oliver Gierke * @author Mark Paluch + * @author Christoph Strobl */ - @RequiredArgsConstructor final class PagedExecution implements MongoQueryExecution { - private final @NonNull FindWithQuery operation; - private final @NonNull Pageable pageable; + private final FindWithQuery operation; + private final Pageable pageable; + + public PagedExecution(ExecutableFindOperation.FindWithQuery operation, Pageable pageable) { + + Assert.notNull(operation, "Operation must not be null"); + Assert.notNull(pageable, "Pageable must not be null"); + + this.operation = operation; + this.pageable = pageable; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoQueryExecution#execute(org.springframework.data.mongodb.core.query.Query) - */ @Override public Object execute(Query query) { @@ -120,7 +135,7 @@ public Object execute(Query query) { return PageableExecutionUtils.getPage(matching.all(), pageable, () -> { - long count = matching.count(); + long count = operation.matching(Query.of(query).skip(-1).limit(-1)).count(); return overallLimit != 0 ? 
Math.min(count, overallLimit) : count; }); } @@ -131,17 +146,24 @@ public Object execute(Query query) { * * @author Oliver Gierke */ - @RequiredArgsConstructor class GeoNearExecution implements MongoQueryExecution { - private final @NonNull FindWithQuery operation; - private final @NonNull MongoQueryMethod method; - private final @NonNull MongoParameterAccessor accessor; + private final FindWithQuery operation; + private final MongoQueryMethod method; + private final MongoParameterAccessor accessor; + + public GeoNearExecution(ExecutableFindOperation.FindWithQuery operation, MongoQueryMethod method, + MongoParameterAccessor accessor) { + + Assert.notNull(operation, "Operation must not be null"); + Assert.notNull(method, "Method must not be null"); + Assert.notNull(accessor, "Accessor must not be null"); + + this.operation = operation; + this.method = method; + this.accessor = accessor; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoQueryExecution#execute(org.springframework.data.mongodb.core.query.Query) - */ @Override public Object execute(Query query) { @@ -202,10 +224,6 @@ final class PagingGeoNearExecution extends GeoNearExecution { this.mongoQuery = query; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoQueryExecution.GeoNearExecution#execute(org.springframework.data.mongodb.core.query.Query) - */ @Override public Object execute(Query query) { @@ -228,18 +246,26 @@ public Object execute(Query query) { /** * {@link MongoQueryExecution} removing documents matching the query. 
* + * @author Oliver Gierke + * @author Mark Paluch + * @author Artyom Gabeev + * @author Christoph Strobl * @since 1.5 */ - @RequiredArgsConstructor final class DeleteExecution implements MongoQueryExecution { - private final @NonNull MongoOperations operations; - private final @NonNull MongoQueryMethod method; + private final MongoOperations operations; + private final MongoQueryMethod method; + + public DeleteExecution(MongoOperations operations, MongoQueryMethod method) { + + Assert.notNull(operations, "Operations must not be null"); + Assert.notNull(method, "Method must not be null"); + + this.operations = operations; + this.method = method; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoQueryExecution#execute(org.springframework.data.mongodb.core.query.Query) - */ @Override public Object execute(Query query) { @@ -250,8 +276,41 @@ public Object execute(Query query) { return operations.findAllAndRemove(query, type, collectionName); } + if (method.isQueryForEntity() && !ClassUtils.isPrimitiveOrWrapper(method.getReturnedObjectType())) { + return operations.findAndRemove(query, type, collectionName); + } + DeleteResult writeResult = operations.remove(query, type, collectionName); - return writeResult != null ? writeResult.getDeletedCount() : 0L; + return writeResult.wasAcknowledged() ? writeResult.getDeletedCount() : 0L; + } + } + + /** + * {@link MongoQueryExecution} updating documents matching the query. 
+ * + * @author Christph Strobl + * @since 3.4 + */ + final class UpdateExecution implements MongoQueryExecution { + + private final ExecutableUpdate updateOps; + private Supplier updateDefinitionSupplier; + private final MongoParameterAccessor accessor; + + UpdateExecution(ExecutableUpdate updateOps, MongoQueryMethod method, Supplier updateSupplier, + MongoParameterAccessor accessor) { + + this.updateOps = updateOps; + this.updateDefinitionSupplier = updateSupplier; + this.accessor = accessor; + } + + @Override + public Object execute(Query query) { + + return updateOps.matching(query.with(accessor.getSort())) // + .apply(updateDefinitionSupplier.get()) // + .all().getModifiedCount(); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java index e0a0184f5f..d3fe22b4ef 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,38 @@ */ package org.springframework.data.mongodb.repository.query; -import java.io.Serializable; +import java.lang.annotation.Annotation; import java.lang.reflect.Method; -import java.util.Arrays; -import java.util.List; +import java.util.Map; import java.util.Optional; +import java.util.function.Function; import org.springframework.core.annotation.AnnotatedElementUtils; -import org.springframework.core.annotation.AnnotationUtils; -import org.springframework.data.geo.GeoPage; -import org.springframework.data.geo.GeoResult; -import org.springframework.data.geo.GeoResults; import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.annotation.Collation; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.Aggregation; +import org.springframework.data.mongodb.repository.Hint; import org.springframework.data.mongodb.repository.Meta; import org.springframework.data.mongodb.repository.Query; +import org.springframework.data.mongodb.repository.ReadPreference; import org.springframework.data.mongodb.repository.Tailable; +import org.springframework.data.mongodb.repository.Update; +import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.query.ParametersSource; import org.springframework.data.repository.query.QueryMethod; -import 
org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.util.Lazy; +import org.springframework.data.util.ReactiveWrappers; +import org.springframework.data.util.ReflectionUtils; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; +import org.springframework.util.ConcurrentReferenceHashMap; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; @@ -49,16 +56,16 @@ * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Jorge Rodríguez */ public class MongoQueryMethod extends QueryMethod { - @SuppressWarnings("unchecked") private static final List> GEO_NEAR_RESULTS = Arrays - .asList(GeoResult.class, GeoResults.class, GeoPage.class); - private final Method method; private final MappingContext, MongoPersistentProperty> mappingContext; + private final Map, Optional> annotationCache; private @Nullable MongoEntityMetadata metadata; + private final Lazy isModifying = Lazy.of(this::resolveModifyingQueryIndicators); /** * Creates a new {@link MongoQueryMethod} from the given {@link Method}. @@ -70,22 +77,28 @@ public class MongoQueryMethod extends QueryMethod { */ public MongoQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory projectionFactory, MappingContext, MongoPersistentProperty> mappingContext) { + this(method, metadata, projectionFactory, mappingContext, MongoParameters::new); + } + + /** + * Creates a new {@link MongoQueryMethod} from the given {@link Method}. + * + * @param method must not be {@literal null}. + * @param metadata must not be {@literal null}. + * @param projectionFactory must not be {@literal null}. + * @param mappingContext must not be {@literal null}. 
+ */ + MongoQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory projectionFactory, + MappingContext, MongoPersistentProperty> mappingContext, + Function parametersFunction) { - super(method, metadata, projectionFactory); + super(method, metadata, projectionFactory, parametersFunction); - Assert.notNull(mappingContext, "MappingContext must not be null!"); + Assert.notNull(mappingContext, "MappingContext must not be null"); this.method = method; this.mappingContext = mappingContext; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryMethod#getParameters(java.lang.reflect.Method) - */ - @Override - protected MongoParameters createParameters(Method method) { - return new MongoParameters(method, isGeoNearQuery(method)); + this.annotationCache = new ConcurrentReferenceHashMap<>(); } /** @@ -110,9 +123,8 @@ String getAnnotatedQuery() { private Optional findAnnotatedQuery() { - return Optional.ofNullable(getQueryAnnotation()) // - .map(AnnotationUtils::getValue) // - .map(it -> (String) it) // + return lookupQueryAnnotation() // + .map(Query::value) // .filter(StringUtils::hasText); } @@ -121,18 +133,15 @@ private Optional findAnnotatedQuery() { * * @return */ + @Nullable String getFieldSpecification() { - return Optional.ofNullable(getQueryAnnotation()) // - .map(it -> (String) AnnotationUtils.getValue(it, "fields")) // + return lookupQueryAnnotation() // + .map(Query::fields) // .filter(StringUtils::hasText) // .orElse(null); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryMethod#getEntityInformation() - */ @Override @SuppressWarnings("unchecked") public MongoEntityMetadata getEntityInformation() { @@ -156,18 +165,17 @@ public MongoEntityMetadata getEntityInformation() { MongoPersistentEntity collectionEntity = domainClass.isAssignableFrom(returnedObjectType) ? 
returnedEntity : managedEntity; - this.metadata = new SimpleMongoEntityMetadata((Class) returnedEntity.getType(), - collectionEntity); + this.metadata = new SimpleMongoEntityMetadata<>((Class) returnedEntity.getType(), collectionEntity); } } return this.metadata; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryMethod#getParameters() - */ + protected Class getDomainClass() { + return super.getDomainClass(); + } + @Override public MongoParameters getParameters() { return (MongoParameters) super.getParameters(); @@ -179,25 +187,7 @@ public MongoParameters getParameters() { * @return */ public boolean isGeoNearQuery() { - return isGeoNearQuery(this.method); - } - - private boolean isGeoNearQuery(Method method) { - - Class returnType = method.getReturnType(); - - for (Class type : GEO_NEAR_RESULTS) { - if (type.isAssignableFrom(returnType)) { - return true; - } - } - - if (Iterable.class.isAssignableFrom(returnType)) { - TypeInformation from = ClassTypeInformation.fromReturnTypeOf(method); - return GeoResult.class.equals(from.getRequiredComponentType().getType()); - } - - return false; + return MongoParameters.isGeoNearQuery(this.method); } /** @@ -207,11 +197,15 @@ private boolean isGeoNearQuery(Method method) { */ @Nullable Query getQueryAnnotation() { - return AnnotatedElementUtils.findMergedAnnotation(method, Query.class); + return lookupQueryAnnotation().orElse(null); + } + + Optional lookupQueryAnnotation() { + return doFindAnnotation(Query.class); } TypeInformation getReturnType() { - return ClassTypeInformation.fromReturnTypeOf(method); + return TypeInformation.fromReturnTypeOf(method); } /** @@ -230,7 +224,7 @@ public boolean hasQueryMetaAttributes() { */ @Nullable Meta getMetaAnnotation() { - return AnnotatedElementUtils.findMergedAnnotation(method, Meta.class); + return doFindAnnotation(Meta.class).orElse(null); } /** @@ -241,7 +235,7 @@ Meta getMetaAnnotation() { */ @Nullable Tailable getTailableAnnotation() { - return 
AnnotatedElementUtils.findMergedAnnotation(method, Tailable.class); + return doFindAnnotation(Tailable.class).orElse(null); } /** @@ -262,18 +256,14 @@ public org.springframework.data.mongodb.core.query.Meta getQueryMetaAttributes() metaAttributes.setMaxTimeMsec(meta.maxExecutionTimeMs()); } - if (meta.maxScanDocuments() > 0) { - metaAttributes.setMaxScan(meta.maxScanDocuments()); + if (meta.cursorBatchSize() != 0) { + metaAttributes.setCursorBatchSize(meta.cursorBatchSize()); } if (StringUtils.hasText(meta.comment())) { metaAttributes.setComment(meta.comment()); } - if (meta.snapshot()) { - metaAttributes.setSnapshot(meta.snapshot()); - } - if (!ObjectUtils.isEmpty(meta.flags())) { for (org.springframework.data.mongodb.core.query.Meta.CursorOption option : meta.flags()) { @@ -281,6 +271,254 @@ public org.springframework.data.mongodb.core.query.Meta getQueryMetaAttributes() } } + if (meta.allowDiskUse()) { + metaAttributes.setAllowDiskUse(meta.allowDiskUse()); + } + return metaAttributes; } + + /** + * Check if the query method is decorated with a non-empty {@link Query#sort()}. + * + * @return true if method annotated with {@link Query} having a non-empty sort attribute. + * @since 2.1 + */ + public boolean hasAnnotatedSort() { + return lookupQueryAnnotation().map(Query::sort).filter(StringUtils::hasText).isPresent(); + } + + /** + * Get the sort value, used as default, extracted from the {@link Query} annotation. + * + * @return the {@link Query#sort()} value. + * @throws IllegalStateException if method not annotated with {@link Query}. Make sure to check + * {@link #hasAnnotatedQuery()} first. + * @since 2.1 + */ + public String getAnnotatedSort() { + + return lookupQueryAnnotation().map(Query::sort).orElseThrow(() -> new IllegalStateException( + "Expected to find @Query annotation but did not; Make sure to check hasAnnotatedSort() before.")); + } + + /** + * Check if the query method is decorated with a non-empty {@link ReadPreference}. 
+ * + * @return true if method annotated with {@link Query} or {@link Aggregation} having a non-empty collation attribute. + * @since 4.2 + */ + public boolean hasAnnotatedReadPreference() { + return doFindReadPreferenceAnnotation().map(ReadPreference::value).filter(StringUtils::hasText).isPresent(); + } + + /** + * Get the {@link com.mongodb.ReadPreference} extracted from the {@link ReadPreference} annotation. + * + * @return the name of the {@link ReadPreference}. + * @throws IllegalStateException if method not annotated with {@link Query}. Make sure to check + * {@link #hasAnnotatedReadPreference()} first. + * @since 4.2 + */ + public String getAnnotatedReadPreference() { + + return doFindReadPreferenceAnnotation().map(ReadPreference::value).orElseThrow(() -> new IllegalStateException( + "Expected to find @ReadPreference annotation but did not; Make sure to check hasAnnotatedReadPreference() before.")); + } + + /** + * Get {@link com.mongodb.ReadPreference#getName() name} from query. First check if the method is annotated. If not, + * check if the class is annotated. So if the method and the class are annotated with @ReadPreference, the method + * annotation takes precedence. + * + * @return the {@link ReadPreference} + * @since 4.2 + */ + private Optional doFindReadPreferenceAnnotation() { + return doFindAnnotation(ReadPreference.class).or(() -> doFindAnnotationInClass(ReadPreference.class)); + } + + /** + * Check if the query method is decorated with a non-empty {@link Query#collation()} or + * {@link Aggregation#collation()}. + * + * @return true if method annotated with {@link Query} or {@link Aggregation} having a non-empty collation attribute. + * @since 2.2 + */ + public boolean hasAnnotatedCollation() { + return doFindAnnotation(Collation.class).map(Collation::value).filter(StringUtils::hasText).isPresent(); + } + + /** + * Get the collation value extracted from the {@link Query} or {@link Aggregation} annotation. 
+ * + * @return the {@link Query#collation()} or {@link Aggregation#collation()} value. + * @throws IllegalStateException if method not annotated with {@link Query} or {@link Aggregation}. Make sure to check + * {@link #hasAnnotatedQuery()} first. + * @since 2.2 + */ + public String getAnnotatedCollation() { + + return doFindAnnotation(Collation.class).map(Collation::value) // + .orElseThrow(() -> new IllegalStateException( + "Expected to find @Collation annotation but did not; Make sure to check hasAnnotatedCollation() before.")); + } + + /** + * Returns whether the method has an annotated query. + * + * @return true if {@link Aggregation} is present. + * @since 2.2 + */ + public boolean hasAnnotatedAggregation() { + return findAnnotatedAggregation().isPresent(); + } + + /** + * Returns the aggregation pipeline declared in a {@link Aggregation} annotation. + * + * @return the aggregation pipeline. + * @throws IllegalStateException if method not annotated with {@link Aggregation}. Make sure to check + * {@link #hasAnnotatedAggregation()} first. + * @since 2.2 + */ + public String[] getAnnotatedAggregation() { + return findAnnotatedAggregation().orElseThrow(() -> new IllegalStateException( + "Expected to find @Aggregation annotation but did not; Make sure to check hasAnnotatedAggregation() before.")); + } + + /** + * @return {@literal true} if the {@link Hint} annotation is present and the index name is not empty. + * @since 4.1 + */ + public boolean hasAnnotatedHint() { + return doFindAnnotation(Hint.class).map(Hint::indexName).filter(StringUtils::hasText).isPresent(); + } + + /** + * Returns the aggregation pipeline declared via a {@link Hint} annotation. + * + * @return the index name (might be empty). 
+ * @throws IllegalStateException if the method is not annotated with {@link Hint} + * @since 4.1 + */ + public String getAnnotatedHint() { + return doFindAnnotation(Hint.class).map(Hint::indexName).orElseThrow(() -> new IllegalStateException( + "Expected to find @Hint annotation but did not; Make sure to check hasAnnotatedHint() before.")); + } + + private Optional findAnnotatedAggregation() { + + return lookupAggregationAnnotation() // + .map(Aggregation::pipeline) // + .filter(it -> !ObjectUtils.isEmpty(it)); + } + + Optional lookupAggregationAnnotation() { + return doFindAnnotation(Aggregation.class); + } + + Optional lookupUpdateAnnotation() { + return doFindAnnotation(Update.class); + } + + @SuppressWarnings("unchecked") + private Optional doFindAnnotation(Class annotationType) { + + return (Optional) this.annotationCache.computeIfAbsent(annotationType, + it -> Optional.ofNullable(AnnotatedElementUtils.findMergedAnnotation(method, it))); + } + + @SuppressWarnings("unchecked") + private Optional doFindAnnotationInClass(Class annotationType) { + + Optional mergedAnnotation = Optional + .ofNullable(AnnotatedElementUtils.findMergedAnnotation(method.getDeclaringClass(), annotationType)); + annotationCache.put(annotationType, mergedAnnotation); + + return (Optional) mergedAnnotation; + } + + @Override + public boolean isModifyingQuery() { + return isModifying.get(); + } + + private boolean resolveModifyingQueryIndicators() { + return hasAnnotatedUpdate() + || QueryUtils.indexOfAssignableParameter(UpdateDefinition.class, method.getParameterTypes()) != -1; + } + + /** + * @return {@literal true} if {@link Update} annotation is present. + * @since 3.4 + */ + public boolean hasAnnotatedUpdate() { + return lookupUpdateAnnotation().isPresent(); + } + + /** + * @return the {@link Update} or {@literal null} if not present. 
+ * @since 3.4 + */ + public Update getUpdateSource() { + return lookupUpdateAnnotation().orElse(null); + } + + /** + * Verify the actual {@link QueryMethod} is valid in terms of supported return and parameter types. + * + * @since 3.4 + * @throws IllegalStateException + */ + public void verify() { + + if (isModifyingQuery()) { + + if (isCollectionQuery() || isScrollQuery() || isSliceQuery() || isPageQuery() || isGeoNearQuery() + || !isNumericOrVoidReturnValue()) { // + throw new IllegalStateException( + String.format( + "Update method may be void or return a numeric value (the number of updated documents)." + + " Offending Method: %s.%s", + ClassUtils.getShortName(method.getDeclaringClass()), method.getName())); + } + + if (hasAnnotatedUpdate()) { // must define either an update or an update pipeline + if (!StringUtils.hasText(getUpdateSource().update()) && ObjectUtils.isEmpty(getUpdateSource().pipeline())) { + throw new IllegalStateException( + String.format( + "Update method must define either 'Update#update' or 'Update#pipeline' attribute;" + + " Offending Method: %s.%s", + ClassUtils.getShortName(method.getDeclaringClass()), method.getName())); + } + } + } + + if (hasAnnotatedAggregation()) { + for (String stage : getAnnotatedAggregation()) { + if (BsonUtils.isJsonArray(stage)) { + throw new IllegalStateException(String.format( + """ + Invalid aggregation pipeline. Please split the definition from @Aggregation("[{...}, {...}]") to @Aggregation({ "{...}", "{...}" }). 
+ Offending Method: %s.%s + """, + ClassUtils.getShortName(method.getDeclaringClass()), method.getName())); + } + } + } + } + + private boolean isNumericOrVoidReturnValue() { + + Class resultType = getReturnedObjectType(); + if (ReactiveWrappers.usesReactiveType(resultType)) { + resultType = getReturnType().getComponentType().getType(); + } + + boolean isUpdateCountReturnType = ClassUtils.isAssignable(Number.class, resultType); + boolean isVoidReturnType = ReflectionUtils.isVoid(resultType); + + return isUpdateCountReturnType || isVoidReturnType; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQuery.java index 9afc07441a..afabf9c37e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,6 +16,10 @@ package org.springframework.data.mongodb.repository.query; import org.bson.Document; +import org.bson.json.JsonParseException; + +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ValueExpressionParser; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.MongoTemplate; @@ -25,16 +29,16 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.TextCriteria; import org.springframework.data.repository.query.QueryMethod; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; import org.springframework.data.repository.query.RepositoryQuery; import org.springframework.data.repository.query.ResultProcessor; import org.springframework.data.repository.query.ReturnedType; +import org.springframework.data.repository.query.ValueExpressionDelegate; import org.springframework.data.repository.query.parser.PartTree; +import org.springframework.expression.ExpressionParser; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.util.JSON; -import com.mongodb.util.JSONParseException; - /** * {@link RepositoryQuery} implementation for Mongo. * @@ -55,10 +59,33 @@ public class PartTreeMongoQuery extends AbstractMongoQuery { * * @param method must not be {@literal null}. * @param mongoOperations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. 
+ * @param evaluationContextProvider must not be {@literal null}. + * @deprecated since 4.4, use the constructors accepting {@link QueryMethodValueEvaluationContextAccessor} instead. */ - public PartTreeMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations) { + @Deprecated(since = "4.4.0") + public PartTreeMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations, ExpressionParser expressionParser, + QueryMethodEvaluationContextProvider evaluationContextProvider) { + super(method, mongoOperations, expressionParser, evaluationContextProvider); - super(method, mongoOperations); + this.processor = method.getResultProcessor(); + this.tree = new PartTree(method.getName(), processor.getReturnedType().getDomainType()); + this.isGeoNearQuery = method.isGeoNearQuery(); + this.context = mongoOperations.getConverter().getMappingContext(); + } + + /** + * Creates a new {@link PartTreeMongoQuery} from the given {@link QueryMethod} and {@link MongoTemplate}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param delegate must not be {@literal null}. 
+ * @since 4.4.0 + */ + public PartTreeMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations, + ValueExpressionDelegate delegate) { + + super(method, mongoOperations, delegate); this.processor = method.getResultProcessor(); this.tree = new PartTree(method.getName(), processor.getReturnedType().getDomainType()); @@ -75,10 +102,6 @@ public PartTree getTree() { return tree; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor, boolean) - */ @Override protected Query createQuery(ConvertingParameterAccessor accessor) { @@ -114,57 +137,37 @@ protected Query createQuery(ConvertingParameterAccessor accessor) { try { - BasicQuery result = new BasicQuery(query.getQueryObject(), new Document((BasicDBObject) JSON.parse(fieldSpec))); + BasicQuery result = new BasicQuery(query.getQueryObject(), Document.parse(fieldSpec)); result.setSortObject(query.getSortObject()); return result; - } catch (JSONParseException o_O) { - throw new IllegalStateException(String.format("Invalid query or field specification in %s!", getQueryMethod()), + } catch (JsonParseException o_O) { + throw new IllegalStateException(String.format("Invalid query or field specification in %s", getQueryMethod()), o_O); } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createCountQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor) - */ @Override protected Query createCountQuery(ConvertingParameterAccessor accessor) { return new MongoQueryCreator(tree, accessor, context, false).createQuery(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isCountQuery() - */ @Override protected boolean isCountQuery() { return tree.isCountProjection(); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isExistsQuery() - */ @Override protected boolean isExistsQuery() { return tree.isExistsProjection(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isDeleteQuery() - */ @Override protected boolean isDeleteQuery() { return tree.isDelete(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isLimiting() - */ @Override protected boolean isLimiting() { return tree.isLimiting(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/QueryUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/QueryUtils.java new file mode 100644 index 0000000000..431510f11b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/QueryUtils.java @@ -0,0 +1,124 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.query; + +import java.util.Arrays; +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.Document; + +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +/** + * Internal utility class to help avoid duplicate code required in both the reactive and the sync {@link Query} support + * offered by repositories. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + * @currentRead Assassin's Apprentice - Robin Hobb + */ +class QueryUtils { + + protected static final Log LOGGER = LogFactory.getLog(QueryUtils.class); + + /** + * Decorate {@link Query} and add a default sort expression to the given {@link Query}. Attributes of the given + * {@code sort} may be overwritten by the sort explicitly defined by the {@link Query} itself. + * + * @param query the {@link Query} to decorate. + * @param defaultSort the default sort expression to apply to the query. + * @return the query having the given {@code sort} applied. + */ + static Query decorateSort(Query query, Document defaultSort) { + + if (defaultSort.isEmpty()) { + return query; + } + + BasicQuery defaultSortQuery = query instanceof BasicQuery bq ? bq : new BasicQuery(query); + + Document combinedSort = new Document(defaultSort); + combinedSort.putAll(defaultSortQuery.getSortObject()); + defaultSortQuery.setSortObject(combinedSort); + + return defaultSortQuery; + } + + /** + * Apply a collation extracted from the given {@literal collationExpression} to the given {@link Query}. 
Potentially + * replace parameter placeholders with values from the {@link ConvertingParameterAccessor accessor}. + * + * @param query must not be {@literal null}. + * @param collationExpression must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @param expressionEvaluator must not be {@literal null}. + * @return the {@link Query} having proper {@link Collation}. + * @see Query#collation(Collation) + * @since 2.2 + */ + static Query applyCollation(Query query, @Nullable String collationExpression, ConvertingParameterAccessor accessor, + ValueExpressionEvaluator expressionEvaluator) { + + Collation collation = CollationUtils.computeCollation(collationExpression, accessor, expressionEvaluator); + return collation == null ? query : query.collation(collation); + } + + /** + * Get the first index of the parameter that can be assigned to the given type. + * + * @param type the type to look for. + * @param parameters the actual parameters. + * @return -1 if not found. + * @since 3.4 + */ + static int indexOfAssignableParameter(Class type, Class[] parameters) { + return indexOfAssignableParameter(type, Arrays.asList(parameters)); + } + + /** + * Get the first index of the parameter that can be assigned to the given type. + * + * @param type the type to look for. + * @param parameters the actual parameters. + * @return -1 if not found. 
+ * @since 3.4 + */ + static int indexOfAssignableParameter(Class type, List> parameters) { + + if (parameters.isEmpty()) { + return -1; + } + + int i = 0; + for (Class parameterType : parameters) { + if (ClassUtils.isAssignable(type, parameterType)) { + return i; + } + i++; + } + return -1; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoParameterAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoParameterAccessor.java index 8df3926983..324f01d61f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoParameterAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoParameterAccessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,17 +17,20 @@ import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; -import reactor.core.publisher.MonoProcessor; import java.util.ArrayList; import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; +import org.reactivestreams.Publisher; import org.springframework.data.repository.util.ReactiveWrapperConverters; -import org.springframework.data.repository.util.ReactiveWrappers; +import org.springframework.data.util.ReactiveWrappers; /** * Reactive {@link org.springframework.data.repository.query.ParametersParameterAccessor} implementation that subscribes - * to reactive parameter wrapper types upon creation. This class performs synchronization when acessing parameters. + * to reactive parameter wrapper types upon creation. This class performs synchronization when accessing parameters. 
* * @author Mark Paluch * @author Christoph Strobl @@ -36,44 +39,12 @@ class ReactiveMongoParameterAccessor extends MongoParametersParameterAccessor { private final Object[] values; - private final List> subscriptions; public ReactiveMongoParameterAccessor(MongoQueryMethod method, Object[] values) { super(method, values); - this.values = values; - this.subscriptions = new ArrayList<>(values.length); - - for (int i = 0; i < values.length; i++) { - - Object value = values[i]; - - if (value == null || !ReactiveWrappers.supports(value.getClass())) { - subscriptions.add(null); - continue; - } - - if (ReactiveWrappers.isSingleValueType(value.getClass())) { - subscriptions.add(ReactiveWrapperConverters.toWrapper(value, Mono.class).toProcessor()); - } else { - subscriptions.add(ReactiveWrapperConverters.toWrapper(value, Flux.class).collectList().toProcessor()); - } - } - } - /* (non-Javadoc) - * @see org.springframework.data.repository.query.ParametersParameterAccessor#getValue(int) - */ - @SuppressWarnings("unchecked") - @Override - protected T getValue(int index) { - - if (subscriptions.get(index) != null) { - return (T) subscriptions.get(index).block(); - } - - return super.getValue(index); } /* (non-Javadoc) @@ -82,17 +53,71 @@ protected T getValue(int index) { @Override public Object[] getValues() { - Object[] result = new Object[values.length]; + Object[] result = new Object[super.getValues().length]; for (int i = 0; i < result.length; i++) { result[i] = getValue(i); } return result; } - /* (non-Javadoc) - * @see org.springframework.data.repository.query.ParametersParameterAccessor#getBindableValue(int) - */ public Object getBindableValue(int index) { return getValue(getParameters().getBindableParameter(index).getIndex()); } + + /** + * Resolve parameters that were provided through reactive wrapper types. Flux is collected into a list, values from + * Mono's are used directly. 
+ * + * @return + */ + @SuppressWarnings("unchecked") + public Mono resolveParameters() { + + boolean hasReactiveWrapper = false; + + for (Object value : values) { + if (value == null || !ReactiveWrappers.supports(value.getClass())) { + continue; + } + + hasReactiveWrapper = true; + break; + } + + if (!hasReactiveWrapper) { + return Mono.just(this); + } + + Object[] resolved = new Object[values.length]; + Map> holder = new ConcurrentHashMap<>(); + List> publishers = new ArrayList<>(); + + for (int i = 0; i < values.length; i++) { + + Object value = resolved[i] = values[i]; + if (value == null || !ReactiveWrappers.supports(value.getClass())) { + continue; + } + + if (ReactiveWrappers.isSingleValueType(value.getClass())) { + + int index = i; + publishers.add(ReactiveWrapperConverters.toWrapper(value, Mono.class) // + .map(Optional::of) // + .defaultIfEmpty(Optional.empty()) // + .doOnNext(it -> holder.put(index, (Optional) it))); + } else { + + int index = i; + publishers.add(ReactiveWrapperConverters.toWrapper(value, Flux.class) // + .collectList() // + .doOnNext(it -> holder.put(index, Optional.of(it)))); + } + } + + return Flux.merge(publishers).then().thenReturn(resolved).map(values -> { + holder.forEach((index, v) -> values[index] = v.orElse(null)); + return new ReactiveMongoParameterAccessor(method, values); + }); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecution.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecution.java index 267dfc6ec3..d18c6a989c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecution.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecution.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. 
+ * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,33 @@ */ package org.springframework.data.mongodb.repository.query; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import org.reactivestreams.Publisher; import org.springframework.core.convert.converter.Converter; -import org.springframework.data.convert.EntityInstantiators; +import org.springframework.data.convert.DtoInstantiatingConverter; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Range; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.Point; +import org.springframework.data.mapping.model.EntityInstantiators; import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveUpdateOperation.ReactiveUpdate; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.repository.query.ResultProcessor; import org.springframework.data.repository.query.ReturnedType; -import org.springframework.data.repository.util.ReactiveWrappers; +import org.springframework.data.util.ReactiveWrappers; +import org.springframework.data.util.ReflectionUtils; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; +import 
org.springframework.util.Assert; import org.springframework.util.ClassUtils; -import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.UpdateResult; /** * Set of classes to contain query execution strategies. Depending (mostly) on the return type of a @@ -49,50 +54,36 @@ */ interface ReactiveMongoQueryExecution { - Object execute(Query query, Class type, String collection); - - /** - * {@link ReactiveMongoQueryExecution} for collection returning queries using tailable cursors. - * - * @author Mark Paluch - */ - @RequiredArgsConstructor - final class TailExecution implements ReactiveMongoQueryExecution { - - private final @NonNull ReactiveMongoOperations operations; - private final Pageable pageable; - - @Override - public Object execute(Query query, Class type, String collection) { - return operations.tail(query.with(pageable), type, collection); - } - } + Publisher execute(Query query, Class type, String collection); /** * {@link MongoQueryExecution} to execute geo-near queries. 
* * @author Mark Paluch */ - @RequiredArgsConstructor - class GeoNearExecution implements ReactiveMongoQueryExecution { + final class GeoNearExecution implements ReactiveMongoQueryExecution { private final ReactiveMongoOperations operations; private final MongoParameterAccessor accessor; private final TypeInformation returnType; - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + public GeoNearExecution(ReactiveMongoOperations operations, MongoParameterAccessor accessor, + TypeInformation returnType) { + + this.operations = operations; + this.accessor = accessor; + this.returnType = returnType; + } + @Override - public Object execute(Query query, Class type, String collection) { + public Publisher execute(Query query, Class type, String collection) { Flux> results = doExecuteQuery(query, type, collection); return isStreamOfGeoResult() ? results : results.map(GeoResult::getContent); } @SuppressWarnings({ "unchecked", "rawtypes" }) - protected Flux> doExecuteQuery(@Nullable Query query, Class type, String collection) { + private Flux> doExecuteQuery(@Nullable Query query, Class type, String collection) { Point nearLocation = accessor.getGeoNearLocation(); NearQuery nearQuery = NearQuery.near(nearLocation); @@ -118,7 +109,7 @@ private boolean isStreamOfGeoResult() { } TypeInformation componentType = returnType.getComponentType(); - return componentType != null && GeoResult.class.equals(componentType.getType()); + return (componentType != null) && GeoResult.class.equals(componentType.getType()); } } @@ -126,25 +117,62 @@ private boolean isStreamOfGeoResult() { * {@link ReactiveMongoQueryExecution} removing documents matching the query. 
* * @author Mark Paluch + * @author Artyom Gabeev */ - @RequiredArgsConstructor final class DeleteExecution implements ReactiveMongoQueryExecution { private final ReactiveMongoOperations operations; private final MongoQueryMethod method; - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery.Execution#execute(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + public DeleteExecution(ReactiveMongoOperations operations, MongoQueryMethod method) { + this.operations = operations; + this.method = method; + } + @Override - public Object execute(Query query, Class type, String collection) { + public Publisher execute(Query query, Class type, String collection) { if (method.isCollectionQuery()) { return operations.findAllAndRemove(query, type, collection); } - return operations.remove(query, type, collection).map(DeleteResult::getDeletedCount); + if (method.isQueryForEntity() && !ClassUtils.isPrimitiveOrWrapper(method.getReturnedObjectType())) { + return operations.findAndRemove(query, type, collection); + } + + return operations.remove(query, type, collection) + .map(deleteResult -> deleteResult.wasAcknowledged() ? deleteResult.getDeletedCount() : 0L); + } + } + + /** + * {@link MongoQueryExecution} updating documents matching the query. 
+ * + * @author Christph Strobl + * @since 3.4 + */ + final class UpdateExecution implements ReactiveMongoQueryExecution { + + private final ReactiveUpdate updateOps; + private final MongoParameterAccessor accessor; + private Mono update; + + UpdateExecution(ReactiveUpdate updateOps, ReactiveMongoQueryMethod method, MongoParameterAccessor accessor, + Mono update) { + + this.updateOps = updateOps; + this.accessor = accessor; + this.update = update; + } + + @Override + public Publisher execute(Query query, Class type, String collection) { + + return update.flatMap(it -> updateOps.inCollection(collection) // + .matching(query.with(accessor.getSort())) // actually we could do it unsorted + .apply(it) // + .all() // + .map(UpdateResult::getModifiedCount)); } } @@ -152,15 +180,23 @@ public Object execute(Query query, Class type, String collection) { * An {@link ReactiveMongoQueryExecution} that wraps the results of the given delegate with the given result * processing. */ - @RequiredArgsConstructor final class ResultProcessingExecution implements ReactiveMongoQueryExecution { - private final @NonNull ReactiveMongoQueryExecution delegate; - private final @NonNull Converter converter; + private final ReactiveMongoQueryExecution delegate; + private final Converter converter; + + public ResultProcessingExecution(ReactiveMongoQueryExecution delegate, Converter converter) { + + Assert.notNull(delegate, "Delegate must not be null"); + Assert.notNull(converter, "Converter must not be null"); + + this.delegate = delegate; + this.converter = converter; + } @Override - public Object execute(Query query, Class type, String collection) { - return converter.convert(delegate.execute(query, type, collection)); + public Publisher execute(Query query, Class type, String collection) { + return (Publisher) converter.convert(delegate.execute(query, type, collection)); } } @@ -169,26 +205,48 @@ public Object execute(Query query, Class type, String collection) { * * @author Mark Paluch */ - 
@RequiredArgsConstructor final class ResultProcessingConverter implements Converter { - private final @NonNull ResultProcessor processor; - private final @NonNull ReactiveMongoOperations operations; - private final @NonNull EntityInstantiators instantiators; + private final ResultProcessor processor; + private final ReactiveMongoOperations operations; + private final EntityInstantiators instantiators; + + public ResultProcessingConverter(ResultProcessor processor, ReactiveMongoOperations operations, + EntityInstantiators instantiators) { + + Assert.notNull(processor, "Processor must not be null"); + Assert.notNull(operations, "Operations must not be null"); + Assert.notNull(instantiators, "Instantiators must not be null"); + + this.processor = processor; + this.operations = operations; + this.instantiators = instantiators; + } - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ @Override public Object convert(Object source) { ReturnedType returnedType = processor.getReturnedType(); + if (ReflectionUtils.isVoid(returnedType.getReturnedType())) { + + if (source instanceof Mono mono) { + return mono.then(); + } + + if (source instanceof Publisher publisher) { + return Flux.from(publisher).then(); + } + } + if (ClassUtils.isPrimitiveOrWrapper(returnedType.getReturnedType())) { return source; } + if (!operations.getConverter().getMappingContext().hasPersistentEntityFor(returnedType.getReturnedType())) { + return source; + } + Converter converter = new DtoInstantiatingConverter(returnedType.getReturnedType(), operations.getConverter().getMappingContext(), instantiators); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethod.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethod.java index 0ad0d91d5e..16354c2ff0 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethod.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethod.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,6 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.springframework.data.repository.util.ClassUtils.*; - import java.lang.reflect.Method; import org.springframework.dao.InvalidDataAccessApiUsageException; @@ -32,8 +30,9 @@ import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.repository.core.RepositoryMetadata; import org.springframework.data.repository.util.ReactiveWrapperConverters; -import org.springframework.data.repository.util.ReactiveWrappers; -import org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.util.Lazy; +import org.springframework.data.util.ReactiveWrappers; +import org.springframework.data.util.ReflectionUtils; import org.springframework.data.util.TypeInformation; import org.springframework.util.ClassUtils; @@ -46,10 +45,11 @@ */ public class ReactiveMongoQueryMethod extends MongoQueryMethod { - private static final ClassTypeInformation PAGE_TYPE = ClassTypeInformation.from(Page.class); - private static final ClassTypeInformation SLICE_TYPE = ClassTypeInformation.from(Slice.class); + private static final TypeInformation PAGE_TYPE = 
TypeInformation.of(Page.class); + private static final TypeInformation SLICE_TYPE = TypeInformation.of(Slice.class); private final Method method; + private final Lazy isCollectionQuery; /** * Creates a new {@link ReactiveMongoQueryMethod} from the given {@link Method}. @@ -62,98 +62,46 @@ public class ReactiveMongoQueryMethod extends MongoQueryMethod { public ReactiveMongoQueryMethod(Method method, RepositoryMetadata metadata, ProjectionFactory projectionFactory, MappingContext, MongoPersistentProperty> mappingContext) { - super(method, metadata, projectionFactory, mappingContext); - - if (hasParameterOfType(method, Pageable.class)) { - - TypeInformation returnType = ClassTypeInformation.fromReturnTypeOf(method); - - boolean multiWrapper = ReactiveWrappers.isMultiValueType(returnType.getType()); - boolean singleWrapperWithWrappedPageableResult = ReactiveWrappers.isSingleValueType(returnType.getType()) - && (PAGE_TYPE.isAssignableFrom(returnType.getRequiredComponentType()) - || SLICE_TYPE.isAssignableFrom(returnType.getRequiredComponentType())); - - if (singleWrapperWithWrappedPageableResult) { - throw new InvalidDataAccessApiUsageException( - String.format("'%s.%s' must not use sliced or paged execution. Please use Flux.buffer(size, skip).", - ClassUtils.getShortName(method.getDeclaringClass()), method.getName())); - } - - if (!multiWrapper && !singleWrapperWithWrappedPageableResult) { - throw new IllegalStateException(String.format( - "Method has to use a either multi-item reactive wrapper return type or a wrapped Page/Slice type. Offending method: %s", - method.toString())); - } - - if (hasParameterOfType(method, Sort.class)) { - throw new IllegalStateException(String.format("Method must not have Pageable *and* Sort parameter. " - + "Use sorting capabilities on Pageble instead! 
Offending method: %s", method.toString())); - } - } + super(method, metadata, projectionFactory, mappingContext, parametersSource -> { + return new MongoParameters(parametersSource, + MongoParameters.isGeoNearQuery(parametersSource.getMethod()) || isGeoNearQuery(parametersSource.getMethod())); + }); this.method = method; + this.isCollectionQuery = Lazy.of(() -> (!(isPageQuery() || isSliceQuery() || isScrollQuery()) + && ReactiveWrappers.isMultiValueType(metadata.getReturnType(method).getType()) || super.isCollectionQuery())); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoQueryMethod#createParameters(java.lang.reflect.Method) - */ - @Override - protected MongoParameters createParameters(Method method) { - return new MongoParameters(method, isGeoNearQuery(method)); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryMethod#isCollectionQuery() - */ @Override public boolean isCollectionQuery() { - return !(isPageQuery() || isSliceQuery()) && ReactiveWrappers.isMultiValueType(method.getReturnType()); + return isCollectionQuery.get(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoQueryMethod#isGeoNearQuery() - */ @Override public boolean isGeoNearQuery() { return isGeoNearQuery(method); } - private boolean isGeoNearQuery(Method method) { + private static boolean isGeoNearQuery(Method method) { if (ReactiveWrappers.supports(method.getReturnType())) { - TypeInformation from = ClassTypeInformation.fromReturnTypeOf(method); + TypeInformation from = TypeInformation.fromReturnTypeOf(method); return GeoResult.class.equals(from.getRequiredComponentType().getType()); } return false; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryMethod#isModifyingQuery() - */ @Override public boolean isModifyingQuery() { return super.isModifyingQuery(); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.repository.query.QueryMethod#isQueryForEntity() - */ @Override public boolean isQueryForEntity() { return super.isQueryForEntity(); } - /* - * All reactive query methods are streaming queries. - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryMethod#isStreamQuery() - */ @Override public boolean isStreamQuery() { return true; @@ -175,4 +123,40 @@ public boolean hasReactiveWrapperParameter() { return false; } + @Override + public void verify() { + + if (ReflectionUtils.hasParameterOfType(method, Pageable.class)) { + + TypeInformation returnType = TypeInformation.fromReturnTypeOf(method); + + boolean multiWrapper = ReactiveWrappers.isMultiValueType(returnType.getType()); + boolean singleWrapperWithWrappedPageableResult = ReactiveWrappers.isSingleValueType(returnType.getType()) + && (PAGE_TYPE.isAssignableFrom(returnType.getRequiredComponentType()) + || SLICE_TYPE.isAssignableFrom(returnType.getRequiredComponentType())); + + if (ReflectionUtils.hasParameterOfType(method, Sort.class)) { + throw new IllegalStateException(String.format("Method must not have Pageable *and* Sort parameter;" + + " Use sorting capabilities on Pageable instead; Offending method: %s", method)); + } + + if (isScrollQuery()) { + return; + } + + if (singleWrapperWithWrappedPageableResult) { + throw new InvalidDataAccessApiUsageException( + String.format("'%s.%s' must not use sliced or paged execution; Please use Flux.buffer(size, skip).", + ClassUtils.getShortName(method.getDeclaringClass()), method.getName())); + } + + if (!multiWrapper) { + throw new IllegalStateException(String.format( + "Method has to use a either multi-item reactive wrapper return type or a wrapped Page/Slice type; Offending method: %s", + method)); + } + } + + super.verify(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactivePartTreeMongoQuery.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactivePartTreeMongoQuery.java index 4f929506e5..5787cca5a5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactivePartTreeMongoQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactivePartTreeMongoQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,11 @@ */ package org.springframework.data.mongodb.repository.query; +import reactor.core.publisher.Mono; + import org.bson.Document; +import org.bson.json.JsonParseException; + import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.ReactiveMongoOperations; @@ -24,14 +28,16 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.TextCriteria; import org.springframework.data.repository.query.QueryMethod; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; import org.springframework.data.repository.query.RepositoryQuery; import org.springframework.data.repository.query.ResultProcessor; import org.springframework.data.repository.query.ReturnedType; +import org.springframework.data.repository.query.ValueExpressionDelegate; import 
org.springframework.data.repository.query.parser.PartTree; +import org.springframework.expression.ExpressionParser; import org.springframework.util.StringUtils; -import com.mongodb.util.JSONParseException; - /** * Reactive PartTree {@link RepositoryQuery} implementation for Mongo. * @@ -51,10 +57,33 @@ public class ReactivePartTreeMongoQuery extends AbstractReactiveMongoQuery { * * @param method must not be {@literal null}. * @param mongoOperations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. + * @deprecated since 4.4.0, use the constructors accepting {@link QueryMethodValueEvaluationContextAccessor} instead. */ - public ReactivePartTreeMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations mongoOperations) { + @Deprecated(since = "4.4.0") + public ReactivePartTreeMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations mongoOperations, + ExpressionParser expressionParser, ReactiveQueryMethodEvaluationContextProvider evaluationContextProvider) { + super(method, mongoOperations, expressionParser, evaluationContextProvider); - super(method, mongoOperations); + this.processor = method.getResultProcessor(); + this.tree = new PartTree(method.getName(), processor.getReturnedType().getDomainType()); + this.isGeoNearQuery = method.isGeoNearQuery(); + this.context = mongoOperations.getConverter().getMappingContext(); + } + + /** + * Creates a new {@link ReactivePartTreeMongoQuery} from the given {@link QueryMethod} and {@link MongoTemplate}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param delegate must not be {@literal null}. 
+ * @since 4.4.0 + */ + public ReactivePartTreeMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations mongoOperations, + ValueExpressionDelegate delegate) { + + super(method, mongoOperations, delegate); this.processor = method.getResultProcessor(); this.tree = new PartTree(method.getName(), processor.getReturnedType().getDomainType()); @@ -71,16 +100,25 @@ public PartTree getTree() { return tree; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor, boolean) - */ @Override - protected Query createQuery(ConvertingParameterAccessor accessor) { + protected Mono createQuery(ConvertingParameterAccessor accessor) { + return Mono.fromSupplier(() -> createQueryInternal(accessor, false)); + } + + @Override + protected Mono createCountQuery(ConvertingParameterAccessor accessor) { + return Mono.fromSupplier(() -> createQueryInternal(accessor, true)); + } + + private Query createQueryInternal(ConvertingParameterAccessor accessor, boolean isCountQuery) { - MongoQueryCreator creator = new MongoQueryCreator(tree, accessor, context, isGeoNearQuery); + MongoQueryCreator creator = new MongoQueryCreator(tree, accessor, context, !isCountQuery && isGeoNearQuery); Query query = creator.createQuery(); + if (isCountQuery) { + return query; + } + if (tree.isLimiting()) { query.limit(tree.getMaxResults()); } @@ -109,44 +147,27 @@ protected Query createQuery(ConvertingParameterAccessor accessor) { result.setSortObject(query.getSortObject()); return result; - - } catch (JSONParseException o_O) { - throw new IllegalStateException(String.format("Invalid query or field specification in %s!", getQueryMethod()), + } catch (JsonParseException o_O) { + throw new IllegalStateException(String.format("Invalid query or field specification in %s", getQueryMethod()), o_O); } } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#createCountQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor) - */ @Override - protected Query createCountQuery(ConvertingParameterAccessor accessor) { - return new MongoQueryCreator(tree, accessor, context, false).createQuery(); + protected boolean isCountQuery() { + return tree.isCountProjection(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#isCountQuery() - */ @Override - protected boolean isCountQuery() { - return tree.isCountProjection(); + protected boolean isExistsQuery() { + return tree.isExistsProjection(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#isDeleteQuery() - */ @Override protected boolean isDeleteQuery() { return tree.isDelete(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#isLimiting() - */ @Override protected boolean isLimiting() { return tree.isLimiting(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedAggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedAggregation.java new file mode 100644 index 0000000000..ff01d8f8a3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedAggregation.java @@ -0,0 +1,154 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.List; + +import org.bson.Document; +import org.reactivestreams.Publisher; + +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.aggregation.AggregationOperation; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.util.ReflectionUtils; +import org.springframework.expression.ExpressionParser; +import org.springframework.lang.Nullable; + +/** + * A reactive {@link org.springframework.data.repository.query.RepositoryQuery} to use a plain JSON String to create an + * {@link AggregationOperation aggregation} pipeline to actually execute. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + */ +public class ReactiveStringBasedAggregation extends AbstractReactiveMongoQuery { + + private final ReactiveMongoOperations reactiveMongoOperations; + private final MongoConverter mongoConverter; + + /** + * @param method must not be {@literal null}. + * @param reactiveMongoOperations must not be {@literal null}. + * @param expressionParser must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. + * @deprecated since 4.4.0, use the constructors accepting {@link ValueExpressionDelegate} instead. + */ + @Deprecated(since = "4.4.0") + public ReactiveStringBasedAggregation(ReactiveMongoQueryMethod method, + ReactiveMongoOperations reactiveMongoOperations, ExpressionParser expressionParser, + ReactiveQueryMethodEvaluationContextProvider evaluationContextProvider) { + + super(method, reactiveMongoOperations, expressionParser, evaluationContextProvider); + + this.reactiveMongoOperations = reactiveMongoOperations; + this.mongoConverter = reactiveMongoOperations.getConverter(); + } + + /** + * @param method must not be {@literal null}. + * @param reactiveMongoOperations must not be {@literal null}. + * @param delegate must not be {@literal null}. 
+ * @since 4.4.0 + */ + public ReactiveStringBasedAggregation(ReactiveMongoQueryMethod method, + ReactiveMongoOperations reactiveMongoOperations, ValueExpressionDelegate delegate) { + + super(method, reactiveMongoOperations, delegate); + + this.reactiveMongoOperations = reactiveMongoOperations; + this.mongoConverter = reactiveMongoOperations.getConverter(); + } + + @Override + @SuppressWarnings("ReactiveStreamsNullableInLambdaInTransform") + protected Publisher doExecute(ReactiveMongoQueryMethod method, ResultProcessor processor, + ConvertingParameterAccessor accessor, @Nullable Class ignored) { + + return computePipeline(accessor).flatMapMany(it -> { + + return AggregationUtils.doAggregate(new AggregationPipeline(it), method, processor, accessor, + this::getValueExpressionEvaluator, + (aggregation, sourceType, typeToRead, elementType, simpleType, rawResult) -> { + + Flux flux = reactiveMongoOperations.aggregate(aggregation, typeToRead); + if (ReflectionUtils.isVoid(elementType)) { + return flux.then(); + } + + ReactiveMongoQueryExecution.ResultProcessingConverter resultProcessing = getResultProcessing(processor); + + if (simpleType && !rawResult && !elementType.equals(Document.class)) { + + flux = flux.handle((item, sink) -> { + + Object result = AggregationUtils.extractSimpleTypeResult((Document) item, elementType, mongoConverter); + + if (result != null) { + sink.next(result); + } + }); + } + + flux = flux.map(resultProcessing::convert); + + return method.isCollectionQuery() ? 
flux : flux.next(); + }); + }); + } + + private boolean isSimpleReturnType(Class targetType) { + return MongoSimpleTypes.HOLDER.isSimpleType(targetType); + } + + private Mono> computePipeline(ConvertingParameterAccessor accessor) { + return parseAggregationPipeline(getQueryMethod().getAnnotatedAggregation(), accessor); + } + + @Override + protected Mono createQuery(ConvertingParameterAccessor accessor) { + throw new UnsupportedOperationException("No query support for aggregation"); + } + + @Override + protected boolean isCountQuery() { + return false; + } + + @Override + protected boolean isExistsQuery() { + return false; + } + + @Override + protected boolean isDeleteQuery() { + return false; + } + + @Override + protected boolean isLimiting() { + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQuery.java index 32495f41db..0e980fcfaf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,20 +15,26 @@ */ package org.springframework.data.mongodb.repository.query; -import java.util.ArrayList; -import java.util.List; +import reactor.core.publisher.Mono; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.Document; + +import org.springframework.data.expression.ValueExpressionParser; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.ReactiveMongoOperations; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.repository.query.ExpressionEvaluatingParameterBinder.BindingContext; -import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery.ParameterBinding; -import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery.ParameterBindingParser; -import org.springframework.data.repository.query.EvaluationContextProvider; +import org.springframework.data.mongodb.util.json.ParameterBindingContext; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; +import org.springframework.data.repository.query.ReactiveExtensionAwareQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.expression.ExpressionParser; import org.springframework.expression.spel.standard.SpelExpressionParser; +import 
org.springframework.lang.NonNull; import org.springframework.util.Assert; /** @@ -40,17 +46,17 @@ */ public class ReactiveStringBasedMongoQuery extends AbstractReactiveMongoQuery { - private static final String COUND_AND_DELETE = "Manually defined query for %s cannot be both a count and delete query at the same time!"; - private static final Logger LOG = LoggerFactory.getLogger(ReactiveStringBasedMongoQuery.class); - private static final ParameterBindingParser BINDING_PARSER = ParameterBindingParser.INSTANCE; + private static final String COUNT_EXISTS_AND_DELETE = "Manually defined query for %s cannot be a count and exists or delete query at the same time"; + private static final Log LOG = LogFactory.getLog(ReactiveStringBasedMongoQuery.class); private final String query; private final String fieldSpec; + + private final ValueExpressionParser expressionParser; + private final boolean isCountQuery; + private final boolean isExistsQuery; private final boolean isDeleteQuery; - private final List queryParameterBindings; - private final List fieldSpecParameterBindings; - private final ExpressionEvaluatingParameterBinder parameterBinder; /** * Creates a new {@link ReactiveStringBasedMongoQuery} for the given {@link MongoQueryMethod} and @@ -60,94 +66,168 @@ public class ReactiveStringBasedMongoQuery extends AbstractReactiveMongoQuery { * @param mongoOperations must not be {@literal null}. * @param expressionParser must not be {@literal null}. * @param evaluationContextProvider must not be {@literal null}. + * @deprecated since 4.4.0, use the constructors accepting {@link ValueExpressionDelegate} instead. 
*/ + @Deprecated(since = "4.4.0") public ReactiveStringBasedMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations mongoOperations, - SpelExpressionParser expressionParser, EvaluationContextProvider evaluationContextProvider) { + ExpressionParser expressionParser, ReactiveQueryMethodEvaluationContextProvider evaluationContextProvider) { this(method.getAnnotatedQuery(), method, mongoOperations, expressionParser, evaluationContextProvider); } /** * Creates a new {@link ReactiveStringBasedMongoQuery} for the given {@link String}, {@link MongoQueryMethod}, - * {@link MongoOperations}, {@link SpelExpressionParser} and {@link EvaluationContextProvider}. + * {@link MongoOperations}, {@link SpelExpressionParser} and + * {@link ReactiveExtensionAwareQueryMethodEvaluationContextProvider}. * * @param query must not be {@literal null}. * @param method must not be {@literal null}. * @param mongoOperations must not be {@literal null}. * @param expressionParser must not be {@literal null}. + * @deprecated since 4.4.0, use the constructors accepting {@link ValueExpressionDelegate} instead. 
*/ + @Deprecated(since = "4.4.0") public ReactiveStringBasedMongoQuery(String query, ReactiveMongoQueryMethod method, - ReactiveMongoOperations mongoOperations, SpelExpressionParser expressionParser, - EvaluationContextProvider evaluationContextProvider) { + ReactiveMongoOperations mongoOperations, ExpressionParser expressionParser, + ReactiveQueryMethodEvaluationContextProvider evaluationContextProvider) { + super(method, mongoOperations, expressionParser, evaluationContextProvider); - super(method, mongoOperations); + Assert.notNull(query, "Query must not be null"); - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(expressionParser, "SpelExpressionParser must not be null!"); + this.query = query; + this.expressionParser = ValueExpressionParser.create(() -> expressionParser); + this.fieldSpec = method.getFieldSpecification(); - this.queryParameterBindings = new ArrayList(); - this.query = BINDING_PARSER.parseAndCollectParameterBindingsFromQueryIntoBindings(query, - this.queryParameterBindings); + if (method.hasAnnotatedQuery()) { - this.fieldSpecParameterBindings = new ArrayList(); - this.fieldSpec = BINDING_PARSER.parseAndCollectParameterBindingsFromQueryIntoBindings( - method.getFieldSpecification(), this.fieldSpecParameterBindings); + org.springframework.data.mongodb.repository.Query queryAnnotation = method.getQueryAnnotation(); - this.isCountQuery = method.hasAnnotatedQuery() ? method.getQueryAnnotation().count() : false; - this.isDeleteQuery = method.hasAnnotatedQuery() ? 
method.getQueryAnnotation().delete() : false; + this.isCountQuery = queryAnnotation.count(); + this.isExistsQuery = queryAnnotation.exists(); + this.isDeleteQuery = queryAnnotation.delete(); - if (isCountQuery && isDeleteQuery) { - throw new IllegalArgumentException(String.format(COUND_AND_DELETE, method)); + if (hasAmbiguousProjectionFlags(this.isCountQuery, this.isExistsQuery, this.isDeleteQuery)) { + throw new IllegalArgumentException(String.format(COUNT_EXISTS_AND_DELETE, method)); + } + + } else { + + this.isCountQuery = false; + this.isExistsQuery = false; + this.isDeleteQuery = false; } + } - this.parameterBinder = new ExpressionEvaluatingParameterBinder(expressionParser, evaluationContextProvider); + /** + * Creates a new {@link ReactiveStringBasedMongoQuery} for the given {@link MongoQueryMethod}, + * {@link MongoOperations} and {@link ValueExpressionDelegate}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param delegate must not be {@literal null}. + * @since 4.4.0 + */ + public ReactiveStringBasedMongoQuery(ReactiveMongoQueryMethod method, ReactiveMongoOperations mongoOperations, + ValueExpressionDelegate delegate) { + this(method.getAnnotatedQuery(), method, mongoOperations, delegate); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#createQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor) + /** + * Creates a new {@link ReactiveStringBasedMongoQuery} for the given {@link String}, {@link MongoQueryMethod}, + * {@link MongoOperations}, {@link ValueExpressionDelegate}. + * + * @param query must not be {@literal null}. + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param delegate must not be {@literal null}. 
+ * @since 4.4.0 */ - @Override - protected Query createQuery(ConvertingParameterAccessor accessor) { + public ReactiveStringBasedMongoQuery(@NonNull String query, ReactiveMongoQueryMethod method, + ReactiveMongoOperations mongoOperations, ValueExpressionDelegate delegate) { + + super(method, mongoOperations, delegate); + + Assert.notNull(query, "Query must not be null"); + + this.query = query; + this.expressionParser = delegate.getValueExpressionParser(); + this.fieldSpec = method.getFieldSpecification(); + + if (method.hasAnnotatedQuery()) { - String queryString = parameterBinder.bind(this.query, accessor, - new BindingContext(getQueryMethod().getParameters(), queryParameterBindings)); - String fieldsString = parameterBinder.bind(this.fieldSpec, accessor, - new BindingContext(getQueryMethod().getParameters(), fieldSpecParameterBindings)); + org.springframework.data.mongodb.repository.Query queryAnnotation = method.getQueryAnnotation(); - Query query = new BasicQuery(queryString, fieldsString).with(accessor.getSort()); + this.isCountQuery = queryAnnotation.count(); + this.isExistsQuery = queryAnnotation.exists(); + this.isDeleteQuery = queryAnnotation.delete(); - if (LOG.isDebugEnabled()) { - LOG.debug(String.format("Created query %s for %s fields.", query.getQueryObject(), query.getFieldsObject())); + if (hasAmbiguousProjectionFlags(this.isCountQuery, this.isExistsQuery, this.isDeleteQuery)) { + throw new IllegalArgumentException(String.format(COUNT_EXISTS_AND_DELETE, method)); + } + + } else { + + this.isCountQuery = false; + this.isExistsQuery = false; + this.isDeleteQuery = false; } + } + + @Override + protected Mono createQuery(ConvertingParameterAccessor accessor) { + + return getCodecRegistry().map(ParameterBindingDocumentCodec::new).flatMap(codec -> { - return query; + Mono queryObject = getBindingContext(query, accessor, codec) + .map(context -> codec.decode(query, context)); + Mono fieldsObject = getBindingContext(fieldSpec, accessor, codec) + 
.map(context -> codec.decode(fieldSpec, context)); + + return queryObject.zipWith(fieldsObject).map(tuple -> { + + Query query = new BasicQuery(tuple.getT1(), tuple.getT2()).with(accessor.getSort()); + + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("Created query %s for %s fields.", query.getQueryObject(), query.getFieldsObject())); + } + + return query; + }); + }); + } + + private Mono getBindingContext(String json, ConvertingParameterAccessor accessor, + ParameterBindingDocumentCodec codec) { + + ExpressionDependencies dependencies = codec.captureExpressionDependencies(json, accessor::getBindableValue, + expressionParser); + + return getValueExpressionEvaluatorLater(dependencies, accessor) + .map(it -> new ParameterBindingContext(accessor::getBindableValue, it)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#isCountQuery() - */ @Override protected boolean isCountQuery() { return isCountQuery; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#isDeleteQuery() - */ + @Override + protected boolean isExistsQuery() { + return isExistsQuery; + } + @Override protected boolean isDeleteQuery() { return this.isDeleteQuery; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#isLimiting() - */ @Override protected boolean isLimiting() { return false; } + private static boolean hasAmbiguousProjectionFlags(boolean isCountQuery, boolean isExistsQuery, + boolean isDeleteQuery) { + return BooleanUtil.countBooleanTrueValues(isCountQuery, isExistsQuery, isDeleteQuery) > 1; + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/SimpleMongoEntityMetadata.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/SimpleMongoEntityMetadata.java index e34486516c..2c2af25e10 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/SimpleMongoEntityMetadata.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/SimpleMongoEntityMetadata.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,6 @@ */ package org.springframework.data.mongodb.repository.query; -import lombok.Getter; - import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.util.Assert; @@ -28,7 +26,7 @@ class SimpleMongoEntityMetadata implements MongoEntityMetadata { private final Class type; - private final @Getter MongoPersistentEntity collectionEntity; + private final MongoPersistentEntity collectionEntity; /** * Creates a new {@link SimpleMongoEntityMetadata} using the given type and {@link MongoPersistentEntity} to use for @@ -39,26 +37,22 @@ class SimpleMongoEntityMetadata implements MongoEntityMetadata { */ public SimpleMongoEntityMetadata(Class type, MongoPersistentEntity collectionEntity) { - Assert.notNull(type, "Type must not be null!"); - Assert.notNull(collectionEntity, "Collection entity must not be null or empty!"); + Assert.notNull(type, "Type must not be null"); + Assert.notNull(collectionEntity, "Collection entity must not be null or empty"); this.type = type; this.collectionEntity = collectionEntity; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.EntityMetadata#getJavaType() - */ public Class getJavaType() { 
return type; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoEntityMetadata#getCollectionName() - */ public String getCollectionName() { return collectionEntity.getCollection(); } + + public MongoPersistentEntity getCollectionEntity() { + return this.collectionEntity; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringAggregationOperation.java new file mode 100644 index 0000000000..724c8f29ef --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringAggregationOperation.java @@ -0,0 +1,61 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import java.util.function.Function; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.AggregationOperation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.lang.Nullable; + +/** + * String-based aggregation operation for a repository query method. 
+ * + * @author Christoph Strobl + * @since 4.3.1 + */ +class StringAggregationOperation implements AggregationOperation { + + private static final Pattern OPERATOR_PATTERN = Pattern.compile("\\$\\w+"); + + private final String source; + private final Class domainType; + private final Function bindFunction; + private final @Nullable String operator; + + StringAggregationOperation(String source, Class domainType, Function bindFunction) { + + this.source = source; + this.domainType = domainType; + this.bindFunction = bindFunction; + + Matcher matcher = OPERATOR_PATTERN.matcher(source); + this.operator = matcher.find() ? matcher.group() : null; + } + + @Override + public Document toDocument(AggregationOperationContext context) { + return context.getMappedObject(bindFunction.apply(source), domainType); + } + + @Override + public String getOperator() { + return operator != null ? operator : AggregationOperation.super.getOperator(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java new file mode 100644 index 0000000000..7ad5d78fa6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java @@ -0,0 +1,194 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Stream; + +import org.bson.Document; + +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.SliceImpl; +import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ResultProcessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.util.ReflectionUtils; +import org.springframework.expression.ExpressionParser; +import org.springframework.lang.Nullable; + +/** + * {@link AbstractMongoQuery} implementation to run string-based aggregations using + * {@link org.springframework.data.mongodb.repository.Aggregation}. + * + * @author Christoph Strobl + * @author Divya Srivastava + * @author Mark Paluch + * @since 2.2 + */ +public class StringBasedAggregation extends AbstractMongoQuery { + + private final MongoOperations mongoOperations; + private final MongoConverter mongoConverter; + + /** + * Creates a new {@link StringBasedAggregation} from the given {@link MongoQueryMethod} and {@link MongoOperations}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. 
+ * @param expressionParser must not be {@literal null}. + * @param evaluationContextProvider must not be {@literal null}. + * @deprecated since 4.4.0, use the constructors accepting {@link QueryMethodValueEvaluationContextAccessor} instead. + */ + @Deprecated(since = "4.4.0") + public StringBasedAggregation(MongoQueryMethod method, MongoOperations mongoOperations, + ExpressionParser expressionParser, QueryMethodEvaluationContextProvider evaluationContextProvider) { + super(method, mongoOperations, expressionParser, evaluationContextProvider); + + if (method.isPageQuery()) { + throw new InvalidMongoDbApiUsageException(String.format( + "Repository aggregation method '%s' does not support '%s' return type; Please use 'Slice' or 'List' instead", + method.getName(), method.getReturnType().getType().getSimpleName())); + } + + this.mongoOperations = mongoOperations; + this.mongoConverter = mongoOperations.getConverter(); + } + + /** + * Creates a new {@link StringBasedAggregation} from the given {@link MongoQueryMethod} and {@link MongoOperations}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param delegate must not be {@literal null}. 
+ * @since 4.4.0 + */ + public StringBasedAggregation(MongoQueryMethod method, MongoOperations mongoOperations, + ValueExpressionDelegate delegate) { + super(method, mongoOperations, delegate); + + if (method.isPageQuery()) { + throw new InvalidMongoDbApiUsageException(String.format( + "Repository aggregation method '%s' does not support '%s' return type; Please use 'Slice' or 'List' instead", + method.getName(), method.getReturnType().getType().getSimpleName())); + } + + this.mongoOperations = mongoOperations; + this.mongoConverter = mongoOperations.getConverter(); + } + + @SuppressWarnings("unchecked") + @Override + @Nullable + protected Object doExecute(MongoQueryMethod method, ResultProcessor processor, ConvertingParameterAccessor accessor, + @Nullable Class ignore) { + + return AggregationUtils.doAggregate(AggregationUtils.computePipeline(this, method, accessor), method, processor, + accessor, this::getExpressionEvaluatorFor, + (aggregation, sourceType, typeToRead, elementType, simpleType, rawResult) -> { + + if (method.isStreamQuery()) { + + Stream stream = mongoOperations.aggregateStream(aggregation, typeToRead); + + if (!simpleType || elementType.equals(Document.class)) { + return stream; + } + + return stream + .map(it -> AggregationUtils.extractSimpleTypeResult((Document) it, elementType, mongoConverter)); + } + + AggregationResults result = (AggregationResults) mongoOperations.aggregate(aggregation, + typeToRead); + + if (ReflectionUtils.isVoid(elementType)) { + return null; + } + + if (rawResult) { + return result; + } + + List results = result.getMappedResults(); + if (method.isCollectionQuery()) { + return simpleType ? convertResults(elementType, (List) results) : results; + } + + if (method.isSliceQuery()) { + + Pageable pageable = accessor.getPageable(); + int pageSize = pageable.getPageSize(); + List resultsToUse = simpleType ? 
convertResults(elementType, (List) results) + : (List) results; + boolean hasNext = resultsToUse.size() > pageSize; + return new SliceImpl<>(hasNext ? resultsToUse.subList(0, pageSize) : resultsToUse, pageable, hasNext); + } + + Object uniqueResult = result.getUniqueMappedResult(); + + return simpleType + ? AggregationUtils.extractSimpleTypeResult((Document) uniqueResult, elementType, mongoConverter) + : uniqueResult; + }); + } + + private List convertResults(Class targetType, List mappedResults) { + + List list = new ArrayList<>(mappedResults.size()); + for (Document it : mappedResults) { + Object extractSimpleTypeResult = AggregationUtils.extractSimpleTypeResult(it, targetType, mongoConverter); + list.add(extractSimpleTypeResult); + } + return list; + } + + private boolean isSimpleReturnType(Class targetType) { + return MongoSimpleTypes.HOLDER.isSimpleType(targetType); + } + + @Override + protected Query createQuery(ConvertingParameterAccessor accessor) { + throw new UnsupportedOperationException("No query support for aggregation"); + } + + @Override + protected boolean isCountQuery() { + return false; + } + + @Override + protected boolean isExistsQuery() { + return false; + } + + @Override + protected boolean isDeleteQuery() { + return false; + } + + @Override + protected boolean isLimiting() { + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQuery.java index 2a48d74ff7..abc158f88a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,19 @@ */ package org.springframework.data.mongodb.repository.query; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.bson.Document; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; + import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.repository.query.ExpressionEvaluatingParameterBinder.BindingContext; -import org.springframework.data.repository.query.EvaluationContextProvider; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.expression.ExpressionParser; import org.springframework.expression.spel.standard.SpelExpressionParser; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import org.springframework.util.StringUtils; - -import com.mongodb.DBObject; -import com.mongodb.DBRef; -import com.mongodb.util.JSON; /** * Query to use a plain JSON String to create the {@link Query} to actually execute. 
@@ -48,58 +39,90 @@ */ public class StringBasedMongoQuery extends AbstractMongoQuery { - private static final String COUNT_EXISTS_AND_DELETE = "Manually defined query for %s cannot be a count and exists or delete query at the same time!"; - private static final Logger LOG = LoggerFactory.getLogger(StringBasedMongoQuery.class); - private static final ParameterBindingParser BINDING_PARSER = ParameterBindingParser.INSTANCE; + private static final String COUNT_EXISTS_AND_DELETE = "Manually defined query for %s cannot be a count and exists or delete query at the same time"; + private static final Log LOG = LogFactory.getLog(StringBasedMongoQuery.class); private final String query; private final String fieldSpec; + private final boolean isCountQuery; private final boolean isExistsQuery; private final boolean isDeleteQuery; - private final List queryParameterBindings; - private final List fieldSpecParameterBindings; - private final ExpressionEvaluatingParameterBinder parameterBinder; /** - * Creates a new {@link StringBasedMongoQuery} for the given {@link MongoQueryMethod} and {@link MongoOperations}. + * Creates a new {@link StringBasedMongoQuery} for the given {@link MongoQueryMethod}, {@link MongoOperations}, + * {@link SpelExpressionParser} and {@link QueryMethodEvaluationContextProvider}. * * @param method must not be {@literal null}. * @param mongoOperations must not be {@literal null}. * @param expressionParser must not be {@literal null}. * @param evaluationContextProvider must not be {@literal null}. + * @deprecated since 4.4.0, use the constructors accepting {@link ValueExpressionDelegate} instead. 
+ */ + @Deprecated(since = "4.4.0") + public StringBasedMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations, + ExpressionParser expressionParser, QueryMethodEvaluationContextProvider evaluationContextProvider) { + super(method, mongoOperations, expressionParser, evaluationContextProvider); + + String query = method.getAnnotatedQuery(); + Assert.notNull(query, "Query must not be null"); + + this.query = query; + this.fieldSpec = method.getFieldSpecification(); + + if (method.hasAnnotatedQuery()) { + + org.springframework.data.mongodb.repository.Query queryAnnotation = method.getQueryAnnotation(); + + this.isCountQuery = queryAnnotation.count(); + this.isExistsQuery = queryAnnotation.exists(); + this.isDeleteQuery = queryAnnotation.delete(); + + if (hasAmbiguousProjectionFlags(this.isCountQuery, this.isExistsQuery, this.isDeleteQuery)) { + throw new IllegalArgumentException(String.format(COUNT_EXISTS_AND_DELETE, method)); + } + + } else { + + this.isCountQuery = false; + this.isExistsQuery = false; + this.isDeleteQuery = false; + } + } + + /** + * Creates a new {@link StringBasedMongoQuery} for the given {@link MongoQueryMethod}, {@link MongoOperations}, + * {@link ValueExpressionDelegate}. + * + * @param method must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param expressionSupport must not be {@literal null}. 
+ * @since 4.4.0 */ public StringBasedMongoQuery(MongoQueryMethod method, MongoOperations mongoOperations, - SpelExpressionParser expressionParser, EvaluationContextProvider evaluationContextProvider) { - this(method.getAnnotatedQuery(), method, mongoOperations, expressionParser, evaluationContextProvider); + ValueExpressionDelegate expressionSupport) { + this(method.getAnnotatedQuery(), method, mongoOperations, expressionSupport); } /** * Creates a new {@link StringBasedMongoQuery} for the given {@link String}, {@link MongoQueryMethod}, - * {@link MongoOperations}, {@link SpelExpressionParser} and {@link EvaluationContextProvider}. + * {@link MongoOperations}, {@link ValueExpressionDelegate}, {@link QueryMethodValueEvaluationContextAccessor}. * * @param query must not be {@literal null}. * @param method must not be {@literal null}. * @param mongoOperations must not be {@literal null}. - * @param expressionParser must not be {@literal null}. + * @param expressionSupport must not be {@literal null}. 
+ * @since 4.3 */ public StringBasedMongoQuery(String query, MongoQueryMethod method, MongoOperations mongoOperations, - SpelExpressionParser expressionParser, EvaluationContextProvider evaluationContextProvider) { - - super(method, mongoOperations); - - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(expressionParser, "SpelExpressionParser must not be null!"); + ValueExpressionDelegate expressionSupport) { - this.queryParameterBindings = new ArrayList(); - this.query = BINDING_PARSER.parseAndCollectParameterBindingsFromQueryIntoBindings(query, - this.queryParameterBindings); + super(method, mongoOperations, expressionSupport); - this.fieldSpecParameterBindings = new ArrayList(); - this.fieldSpec = BINDING_PARSER.parseAndCollectParameterBindingsFromQueryIntoBindings( - method.getFieldSpecification(), this.fieldSpecParameterBindings); + Assert.notNull(query, "Query must not be null"); - this.parameterBinder = new ExpressionEvaluatingParameterBinder(expressionParser, evaluationContextProvider); + this.query = query; + this.fieldSpec = method.getFieldSpecification(); if (method.hasAnnotatedQuery()) { @@ -121,19 +144,13 @@ public StringBasedMongoQuery(String query, MongoQueryMethod method, MongoOperati } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor) - */ @Override protected Query createQuery(ConvertingParameterAccessor accessor) { - String queryString = parameterBinder.bind(this.query, accessor, - new BindingContext(getQueryMethod().getParameters(), queryParameterBindings)); - String fieldsString = parameterBinder.bind(this.fieldSpec, accessor, - new BindingContext(getQueryMethod().getParameters(), fieldSpecParameterBindings)); + Document queryObject = decode(this.query, prepareBindingContext(this.query, accessor)); + Document fieldsObject = decode(this.fieldSpec, prepareBindingContext(this.fieldSpec, 
accessor)); - Query query = new BasicQuery(queryString, fieldsString).with(accessor.getSort()); + Query query = new BasicQuery(queryObject, fieldsObject).with(accessor.getSort()); if (LOG.isDebugEnabled()) { LOG.debug(String.format("Created query %s for %s fields.", query.getQueryObject(), query.getFieldsObject())); @@ -142,284 +159,28 @@ protected Query createQuery(ConvertingParameterAccessor accessor) { return query; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isCountQuery() - */ @Override protected boolean isCountQuery() { return isCountQuery; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isExistsQuery() - */ @Override protected boolean isExistsQuery() { return isExistsQuery; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isDeleteQuery() - */ @Override protected boolean isDeleteQuery() { return this.isDeleteQuery; } - private static boolean hasAmbiguousProjectionFlags(boolean isCountQuery, boolean isExistsQuery, - boolean isDeleteQuery) { - return countBooleanValues(isCountQuery, isExistsQuery, isDeleteQuery) > 1; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isLimiting() - */ @Override protected boolean isLimiting() { return false; } - private static int countBooleanValues(boolean... values) { - - int count = 0; - - for (boolean value : values) { - - if (value) { - count++; - } - } - - return count; - } - - /** - * A parser that extracts the parameter bindings from a given query string. 
- * - * @author Thomas Darimont - */ - enum ParameterBindingParser { - - INSTANCE; - - private static final String EXPRESSION_PARAM_QUOTE = "'"; - private static final String EXPRESSION_PARAM_PREFIX = "?expr"; - private static final String INDEX_BASED_EXPRESSION_PARAM_START = "?#{"; - private static final String NAME_BASED_EXPRESSION_PARAM_START = ":#{"; - private static final char CURRLY_BRACE_OPEN = '{'; - private static final char CURRLY_BRACE_CLOSE = '}'; - private static final String PARAMETER_PREFIX = "_param_"; - private static final String PARSEABLE_PARAMETER = "\"" + PARAMETER_PREFIX + "$1\""; - private static final Pattern PARAMETER_BINDING_PATTERN = Pattern.compile("\\?(\\d+)"); - private static final Pattern PARSEABLE_BINDING_PATTERN = Pattern.compile("\"?" + PARAMETER_PREFIX + "(\\d+)\"?"); - - private final static int PARAMETER_INDEX_GROUP = 1; - - /** - * Returns a list of {@link ParameterBinding}s found in the given {@code input} or an - * {@link Collections#emptyList()}. - * - * @param input can be empty. - * @param bindings must not be {@literal null}. 
- * @return - */ - public String parseAndCollectParameterBindingsFromQueryIntoBindings(String input, List bindings) { - - if (!StringUtils.hasText(input)) { - return input; - } - - Assert.notNull(bindings, "Parameter bindings must not be null!"); - - String transformedInput = transformQueryAndCollectExpressionParametersIntoBindings(input, bindings); - String parseableInput = makeParameterReferencesParseable(transformedInput); - - collectParameterReferencesIntoBindings(bindings, JSON.parse(parseableInput)); - - return transformedInput; - } - - private static String transformQueryAndCollectExpressionParametersIntoBindings(String input, - List bindings) { - - StringBuilder result = new StringBuilder(); - - int startIndex = 0; - int currentPos = 0; - int exprIndex = 0; - - while (currentPos < input.length()) { - - int indexOfExpressionParameter = getIndexOfExpressionParameter(input, currentPos); - - // no expression parameter found - if (indexOfExpressionParameter < 0) { - break; - } - - int exprStart = indexOfExpressionParameter + 3; - currentPos = exprStart; - - // eat parameter expression - int curlyBraceOpenCnt = 1; - - while (curlyBraceOpenCnt > 0) { - switch (input.charAt(currentPos++)) { - case CURRLY_BRACE_OPEN: - curlyBraceOpenCnt++; - break; - case CURRLY_BRACE_CLOSE: - curlyBraceOpenCnt--; - break; - default: - } - } - - result.append(input.subSequence(startIndex, indexOfExpressionParameter)); - result.append(EXPRESSION_PARAM_QUOTE).append(EXPRESSION_PARAM_PREFIX); - result.append(exprIndex); - result.append(EXPRESSION_PARAM_QUOTE); - - bindings.add(new ParameterBinding(exprIndex, true, input.substring(exprStart, currentPos - 1))); - - startIndex = currentPos; - - exprIndex++; - } - - return result.append(input.subSequence(currentPos, input.length())).toString(); - } - - private static String makeParameterReferencesParseable(String input) { - - Matcher matcher = PARAMETER_BINDING_PATTERN.matcher(input); - return matcher.replaceAll(PARSEABLE_PARAMETER); - } - 
- private static void collectParameterReferencesIntoBindings(List bindings, Object value) { - - if (value instanceof String) { - - String string = ((String) value).trim(); - potentiallyAddBinding(string, bindings); - - } else if (value instanceof Pattern) { - - String string = value.toString().trim(); - Matcher valueMatcher = PARSEABLE_BINDING_PATTERN.matcher(string); - - while (valueMatcher.find()) { - - int paramIndex = Integer.parseInt(valueMatcher.group(PARAMETER_INDEX_GROUP)); - - /* - * The pattern is used as a direct parameter replacement, e.g. 'field': ?1, - * therefore we treat it as not quoted to remain backwards compatible. - */ - boolean quoted = !string.equals(PARAMETER_PREFIX + paramIndex); - - bindings.add(new ParameterBinding(paramIndex, quoted)); - } - - } else if (value instanceof DBRef) { - - DBRef dbref = (DBRef) value; - - potentiallyAddBinding(dbref.getCollectionName(), bindings); - potentiallyAddBinding(dbref.getId().toString(), bindings); - - } else if (value instanceof Document) { - - Document document = (Document) value; - - for (String field : document.keySet()) { - collectParameterReferencesIntoBindings(bindings, field); - collectParameterReferencesIntoBindings(bindings, document.get(field)); - } - } else if (value instanceof DBObject) { - - DBObject dbo = (DBObject) value; - - for (String field : dbo.keySet()) { - collectParameterReferencesIntoBindings(bindings, field); - collectParameterReferencesIntoBindings(bindings, dbo.get(field)); - } - } - } - - private static void potentiallyAddBinding(String source, List bindings) { - - Matcher valueMatcher = PARSEABLE_BINDING_PATTERN.matcher(source); - - while (valueMatcher.find()) { - - int paramIndex = Integer.parseInt(valueMatcher.group(PARAMETER_INDEX_GROUP)); - boolean quoted = source.startsWith("'") || source.startsWith("\""); - - bindings.add(new ParameterBinding(paramIndex, quoted)); - } - } - - private static int getIndexOfExpressionParameter(String input, int position) { - - int 
indexOfExpressionParameter = input.indexOf(INDEX_BASED_EXPRESSION_PARAM_START, position); - - return indexOfExpressionParameter < 0 ? input.indexOf(NAME_BASED_EXPRESSION_PARAM_START, position) - : indexOfExpressionParameter; - } - } - - /** - * A generic parameter binding with name or position information. - * - * @author Thomas Darimont - */ - static class ParameterBinding { - - private final int parameterIndex; - private final boolean quoted; - private final @Nullable String expression; - - /** - * Creates a new {@link ParameterBinding} with the given {@code parameterIndex} and {@code quoted} information. - * - * @param parameterIndex - * @param quoted whether or not the parameter is already quoted. - */ - public ParameterBinding(int parameterIndex, boolean quoted) { - this(parameterIndex, quoted, null); - } - - public ParameterBinding(int parameterIndex, boolean quoted, @Nullable String expression) { - - this.parameterIndex = parameterIndex; - this.quoted = quoted; - this.expression = expression; - } - - public boolean isQuoted() { - return quoted; - } - - public int getParameterIndex() { - return parameterIndex; - } - - public String getParameter() { - return "?" + (isExpression() ? 
"expr" : "") + parameterIndex; - } - - @Nullable - public String getExpression() { - return expression; - } - - public boolean isExpression() { - return this.expression != null; - } + private static boolean hasAmbiguousProjectionFlags(boolean isCountQuery, boolean isExistsQuery, + boolean isDeleteQuery) { + return BooleanUtil.countBooleanTrueValues(isCountQuery, isExistsQuery, isDeleteQuery) > 1; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ValueExpressionDelegateValueExpressionEvaluator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ValueExpressionDelegateValueExpressionEvaluator.java new file mode 100644 index 0000000000..c479f3faa9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/ValueExpressionDelegateValueExpressionEvaluator.java @@ -0,0 +1,41 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.query; + +import java.util.function.Function; + +import org.springframework.data.expression.ValueEvaluationContext; +import org.springframework.data.expression.ValueExpression; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.repository.query.ValueExpressionDelegate; + +class ValueExpressionDelegateValueExpressionEvaluator implements ValueExpressionEvaluator { + + private final ValueExpressionDelegate delegate; + private final Function expressionToContext; + + ValueExpressionDelegateValueExpressionEvaluator(ValueExpressionDelegate delegate, Function expressionToContext) { + this.delegate = delegate; + this.expressionToContext = expressionToContext; + } + + @SuppressWarnings("unchecked") + @Override + public T evaluate(String expressionString) { + ValueExpression expression = delegate.parse(expressionString); + return (T) expression.evaluate(expressionToContext.apply(expression)); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/CrudMethodMetadata.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/CrudMethodMetadata.java new file mode 100644 index 0000000000..6f1049e01f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/CrudMethodMetadata.java @@ -0,0 +1,39 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import java.util.Optional; + +import com.mongodb.ReadPreference; + +/** + * Interface to abstract {@link CrudMethodMetadata} that provide the {@link ReadPreference} to be used for query + * execution. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.2 + */ +public interface CrudMethodMetadata { + + /** + * Returns the {@link ReadPreference} to be used. + * + * @return the {@link ReadPreference} to be used. + */ + Optional getReadPreference(); + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/CrudMethodMetadataPostProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/CrudMethodMetadataPostProcessor.java new file mode 100644 index 0000000000..f59a995170 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/CrudMethodMetadataPostProcessor.java @@ -0,0 +1,232 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import java.lang.reflect.AnnotatedElement; +import java.lang.reflect.Method; +import java.util.HashSet; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +import org.aopalliance.intercept.MethodInterceptor; +import org.aopalliance.intercept.MethodInvocation; +import org.springframework.aop.TargetSource; +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.beans.factory.BeanClassLoaderAware; +import org.springframework.core.NamedThreadLocal; +import org.springframework.core.annotation.AnnotatedElementUtils; +import org.springframework.data.repository.core.RepositoryInformation; +import org.springframework.data.repository.core.support.RepositoryProxyPostProcessor; +import org.springframework.lang.Nullable; +import org.springframework.transaction.support.TransactionSynchronizationManager; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.ReadPreference; + +/** + * {@link RepositoryProxyPostProcessor} that sets up interceptors to read metadata information from the invoked method. + * This is necessary to allow redeclaration of CRUD methods in repository interfaces and configure read preference + * information or query hints on them. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.2 + */ +class CrudMethodMetadataPostProcessor implements RepositoryProxyPostProcessor, BeanClassLoaderAware { + + private @Nullable ClassLoader classLoader = ClassUtils.getDefaultClassLoader(); + + @Override + public void setBeanClassLoader(ClassLoader classLoader) { + this.classLoader = classLoader; + } + + @Override + public void postProcess(ProxyFactory factory, RepositoryInformation repositoryInformation) { + factory.addAdvice(new CrudMethodMetadataPopulatingMethodInterceptor(repositoryInformation)); + } + + /** + * Returns a {@link CrudMethodMetadata} proxy that will lookup the actual target object by obtaining a thread bound + * instance from the {@link TransactionSynchronizationManager} later. + */ + CrudMethodMetadata getCrudMethodMetadata() { + + ProxyFactory factory = new ProxyFactory(); + + factory.addInterface(CrudMethodMetadata.class); + factory.setTargetSource(new ThreadBoundTargetSource()); + + return (CrudMethodMetadata) factory.getProxy(this.classLoader); + } + + /** + * {@link MethodInterceptor} to build and cache {@link DefaultCrudMethodMetadata} instances for the invoked methods. + * Will bind the found information to a {@link TransactionSynchronizationManager} for later lookup. 
+ * + * @see DefaultCrudMethodMetadata + */ + static class CrudMethodMetadataPopulatingMethodInterceptor implements MethodInterceptor { + + private static final ThreadLocal currentInvocation = new NamedThreadLocal<>( + "Current AOP method invocation"); + + private final ConcurrentMap metadataCache = new ConcurrentHashMap<>(); + private final Set implementations = new HashSet<>(); + private final RepositoryInformation repositoryInformation; + + CrudMethodMetadataPopulatingMethodInterceptor(RepositoryInformation repositoryInformation) { + + this.repositoryInformation = repositoryInformation; + + ReflectionUtils.doWithMethods(repositoryInformation.getRepositoryInterface(), implementations::add, + method -> !repositoryInformation.isQueryMethod(method)); + } + + /** + * Return the AOP Alliance {@link MethodInvocation} object associated with the current invocation. + * + * @return the invocation object associated with the current invocation. + * @throws IllegalStateException if there is no AOP invocation in progress, or if the + * {@link CrudMethodMetadataPopulatingMethodInterceptor} was not added to this interceptor chain. 
+ */ + static MethodInvocation currentInvocation() throws IllegalStateException { + + MethodInvocation invocation = currentInvocation.get(); + + if (invocation != null) { + return invocation; + } + + throw new IllegalStateException( + "No MethodInvocation found: Check that an AOP invocation is in progress, and that the " + + "CrudMethodMetadataPopulatingMethodInterceptor is upfront in the interceptor chain."); + } + + @Override + public Object invoke(MethodInvocation invocation) throws Throwable { + + Method method = invocation.getMethod(); + + if (!implementations.contains(method)) { + return invocation.proceed(); + } + + MethodInvocation oldInvocation = currentInvocation.get(); + currentInvocation.set(invocation); + + try { + + CrudMethodMetadata metadata = (CrudMethodMetadata) TransactionSynchronizationManager.getResource(method); + + if (metadata != null) { + return invocation.proceed(); + } + + CrudMethodMetadata methodMetadata = metadataCache.get(method); + + if (methodMetadata == null) { + + methodMetadata = new DefaultCrudMethodMetadata(repositoryInformation.getRepositoryInterface(), method); + CrudMethodMetadata tmp = metadataCache.putIfAbsent(method, methodMetadata); + + if (tmp != null) { + methodMetadata = tmp; + } + } + + TransactionSynchronizationManager.bindResource(method, methodMetadata); + + try { + return invocation.proceed(); + } finally { + TransactionSynchronizationManager.unbindResource(method); + } + } finally { + currentInvocation.set(oldInvocation); + } + } + } + + /** + * Default implementation of {@link CrudMethodMetadata} that will inspect the backing method for annotations. + */ + static class DefaultCrudMethodMetadata implements CrudMethodMetadata { + + private final Optional readPreference; + + /** + * Creates a new {@link DefaultCrudMethodMetadata} for the given {@link Method}. + * + * @param repositoryInterface the target repository interface. + * @param method must not be {@literal null}. 
+ */ + DefaultCrudMethodMetadata(Class repositoryInterface, Method method) { + + Assert.notNull(repositoryInterface, "Repository interface must not be null"); + Assert.notNull(method, "Method must not be null"); + + this.readPreference = findReadPreference(method, repositoryInterface); + } + + private static Optional findReadPreference(AnnotatedElement... annotatedElements) { + + for (AnnotatedElement element : annotatedElements) { + + org.springframework.data.mongodb.repository.ReadPreference preference = AnnotatedElementUtils + .findMergedAnnotation(element, org.springframework.data.mongodb.repository.ReadPreference.class); + + if (preference != null) { + return Optional.of(com.mongodb.ReadPreference.valueOf(preference.value())); + } + } + + return Optional.empty(); + } + + @Override + public Optional getReadPreference() { + return readPreference; + } + } + + private static class ThreadBoundTargetSource implements TargetSource { + + @Override + public Class getTargetClass() { + return CrudMethodMetadata.class; + } + + @Override + public boolean isStatic() { + return false; + } + + @Override + public Object getTarget() { + + MethodInvocation invocation = CrudMethodMetadataPopulatingMethodInterceptor.currentInvocation(); + return TransactionSynchronizationManager.getResource(invocation.getMethod()); + } + + @Override + public void releaseTarget(Object target) {} + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/FetchableFluentQuerySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/FetchableFluentQuerySupport.java new file mode 100644 index 0000000000..8590768b8b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/FetchableFluentQuerySupport.java @@ -0,0 +1,116 @@ +/* + * Copyright 2021-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.springframework.data.domain.Sort; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.util.Assert; + +/** + * Support class for {@link org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery} implementations. + * + * @author Mark Paluch + * @since 3.3 + */ +abstract class FetchableFluentQuerySupport implements FluentQuery.FetchableFluentQuery { + + private final P predicate; + private final Sort sort; + + private final int limit; + + private final Class resultType; + private final List fieldsToInclude; + + FetchableFluentQuerySupport(P predicate, Sort sort, int limit, Class resultType, List fieldsToInclude) { + this.predicate = predicate; + this.sort = sort; + this.limit = limit; + this.resultType = resultType; + this.fieldsToInclude = fieldsToInclude; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery#sortBy(org.springframework.data.domain.Sort) + */ + @Override + public FluentQuery.FetchableFluentQuery sortBy(Sort sort) { + + Assert.notNull(sort, "Sort must not be null"); + + return create(predicate, sort, limit, resultType, fieldsToInclude); + } + + @Override + public FluentQuery.FetchableFluentQuery limit(int limit) { + + 
Assert.isTrue(limit > 0, "Limit must be greater zero"); + + return create(predicate, sort, limit, resultType, fieldsToInclude); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery#as(java.lang.Class) + */ + @Override + public FluentQuery.FetchableFluentQuery as(Class projection) { + + Assert.notNull(projection, "Projection target type must not be null"); + + return create(predicate, sort, limit, projection, fieldsToInclude); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery#project(java.util.Collection) + */ + @Override + public FluentQuery.FetchableFluentQuery project(Collection properties) { + + Assert.notNull(properties, "Projection properties must not be null"); + + return create(predicate, sort, limit, resultType, new ArrayList<>(properties)); + } + + protected abstract FetchableFluentQuerySupport create(P predicate, Sort sort, int limit, + Class resultType, List fieldsToInclude); + + P getPredicate() { + return predicate; + } + + Sort getSort() { + return sort; + } + + int getLimit() { + return limit; + } + + Class getResultType() { + return resultType; + } + + List getFieldsToInclude() { + return fieldsToInclude; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListener.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListener.java index e082a488db..23f161890d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListener.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,21 @@ */ package org.springframework.data.mongodb.repository.support; -import java.util.Arrays; -import java.util.HashSet; +import java.lang.reflect.Field; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.core.annotation.AnnotatedElementUtils; +import org.springframework.dao.DataIntegrityViolationException; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.domain.Sort.Order; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.index.Index; import org.springframework.data.mongodb.core.index.IndexOperationsProvider; +import org.springframework.data.mongodb.core.mapping.Unwrapped; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.repository.query.MongoEntityMetadata; import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery; import org.springframework.data.repository.core.support.QueryCreationListener; @@ -34,6 +37,9 @@ import org.springframework.data.repository.query.parser.Part.Type; import org.springframework.data.repository.query.parser.PartTree; import org.springframework.util.Assert; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.MongoException; /** * {@link QueryCreationListener} inspecting {@link PartTreeMongoQuery}s and creating an index for the properties it @@ -45,8 
+51,8 @@ */ class IndexEnsuringQueryCreationListener implements QueryCreationListener { - private static final Set GEOSPATIAL_TYPES = new HashSet(Arrays.asList(Type.NEAR, Type.WITHIN)); - private static final Logger LOG = LoggerFactory.getLogger(IndexEnsuringQueryCreationListener.class); + private static final Set GEOSPATIAL_TYPES = Set.of(Type.NEAR, Type.WITHIN); + private static final Log LOG = LogFactory.getLog(IndexEnsuringQueryCreationListener.class); private final IndexOperationsProvider indexOperationsProvider; @@ -57,14 +63,10 @@ class IndexEnsuringQueryCreationListener implements QueryCreationListener metadata = query.getQueryMethod().getEntityInformation(); - indexOperationsProvider.indexOps(metadata.getCollectionName()).ensureIndex(index); - LOG.debug(String.format("Created %s!", index)); + try { + indexOperationsProvider.indexOps(metadata.getCollectionName(), metadata.getJavaType()).ensureIndex(index); + } catch (DataIntegrityViolationException e) { + + if (e.getCause() instanceof MongoException mongoException) { + + /* + * As of MongoDB 4.2 index creation raises an error when creating an index for the very same keys with + * different name, whereas previous versions silently ignored this. + * Because an index is by default named after the repository finder method it is not uncommon that an index + * for the very same property combination might already exist with a different name. + * So you see, that's why we need to ignore the error here. 
+ * + * For details please see: https://docs.mongodb.com/master/release-notes/4.2-compatibility/#indexes + */ + if (mongoException.getCode() != 85) { + throw e; + } + } + } + + if (LOG.isDebugEnabled()) { + LOG.debug(String.format("Created %s", index)); + } + } + + public boolean isIndexOnUnwrappedType(Part part) { + + // TODO we could do it for nested fields in the + Field field = ReflectionUtils.findField(part.getProperty().getOwningType().getType(), + part.getProperty().getSegment()); + + if (field == null) { + return false; + } + + return AnnotatedElementUtils.hasAnnotation(field, Unwrapped.class); } private static Direction toDirection(Sort sort, String property) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MappingMongoEntityInformation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MappingMongoEntityInformation.java index dbd5e17edc..1d876289be 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MappingMongoEntityInformation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MappingMongoEntityInformation.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +16,9 @@ package org.springframework.data.mongodb.repository.support; import org.bson.types.ObjectId; +import org.springframework.data.mapping.PersistentPropertyAccessor; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.repository.query.MongoEntityInformation; import org.springframework.data.repository.core.support.PersistentEntityInformation; import org.springframework.lang.Nullable; @@ -87,26 +89,15 @@ private MappingMongoEntityInformation(MongoPersistentEntity entity, @Nullable this.fallbackIdType = idType != null ? idType : (Class) ObjectId.class; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoEntityInformation#getCollectionName() - */ public String getCollectionName() { return customCollectionName == null ? entityMetadata.getCollection() : customCollectionName; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoEntityInformation#getIdAttribute() - */ public String getIdAttribute() { - return entityMetadata.getRequiredIdProperty().getName(); + return entityMetadata.hasIdProperty() ? 
entityMetadata.getRequiredIdProperty().getName() : "_id"; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.PersistentEntityInformation#getIdType() - */ @Override - @SuppressWarnings("unchecked") public Class getIdType() { if (this.entityMetadata.hasIdProperty()) { @@ -115,4 +106,27 @@ public Class getIdType() { return fallbackIdType; } + + @Override + public boolean isVersioned() { + return this.entityMetadata.hasVersionProperty(); + } + + @Override + public Object getVersion(T entity) { + + if (!isVersioned()) { + return null; + } + + PersistentPropertyAccessor accessor = this.entityMetadata.getPropertyAccessor(entity); + + return accessor.getProperty(this.entityMetadata.getRequiredVersionProperty()); + } + + @Nullable + public Collation getCollation() { + return this.entityMetadata.getCollation(); + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoAnnotationProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoAnnotationProcessor.java index af3dd05a60..3c029ee5aa 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoAnnotationProcessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoAnnotationProcessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,6 +24,7 @@ import javax.tools.Diagnostic; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.lang.Nullable; import com.querydsl.apt.AbstractQuerydslProcessor; import com.querydsl.apt.Configuration; @@ -33,29 +34,25 @@ import com.querydsl.core.annotations.QueryEntities; import com.querydsl.core.annotations.QuerySupertype; import com.querydsl.core.annotations.QueryTransient; -import org.springframework.lang.Nullable; /** * Annotation processor to create Querydsl query types for QueryDsl annotated classes. * * @author Oliver Gierke + * @author Owen Q */ -@SupportedAnnotationTypes({ "com.mysema.query.annotations.*", "org.springframework.data.mongodb.core.mapping.*" }) +@SupportedAnnotationTypes({ "com.querydsl.core.annotations.*", "org.springframework.data.mongodb.core.mapping.*" }) @SupportedSourceVersion(SourceVersion.RELEASE_6) public class MongoAnnotationProcessor extends AbstractQuerydslProcessor { - /* - * (non-Javadoc) - * @see com.mysema.query.apt.AbstractQuerydslProcessor#createConfiguration(javax.annotation.processing.RoundEnvironment) - */ @Override protected Configuration createConfiguration(@Nullable RoundEnvironment roundEnv) { processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, "Running " + getClass().getSimpleName()); - DefaultConfiguration configuration = new DefaultConfiguration(roundEnv, processingEnv.getOptions(), - Collections. 
emptySet(), QueryEntities.class, Document.class, QuerySupertype.class, - QueryEmbeddable.class, QueryEmbedded.class, QueryTransient.class); + DefaultConfiguration configuration = new DefaultConfiguration(processingEnv, roundEnv, Collections.emptySet(), + QueryEntities.class, Document.class, QuerySupertype.class, QueryEmbeddable.class, QueryEmbedded.class, + QueryTransient.class); configuration.setUnknownAsEmbedded(true); return configuration; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoEntityInformationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoEntityInformationSupport.java index 2c1fc57e67..d0a3f7a1e4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoEntityInformationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoEntityInformationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,10 @@ */ package org.springframework.data.mongodb.repository.support; -import org.springframework.data.domain.Persistable; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.repository.query.MongoEntityInformation; import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import org.springframework.util.ClassUtils; /** * Support class responsible for creating {@link MongoEntityInformation} instances for a given @@ -45,12 +43,8 @@ private MongoEntityInformationSupport() {} static MongoEntityInformation entityInformationFor(MongoPersistentEntity entity, @Nullable Class idType) { - Assert.notNull(entity, "Entity must not be null!"); + Assert.notNull(entity, "Entity must not be null"); - MappingMongoEntityInformation entityInformation = new MappingMongoEntityInformation( - (MongoPersistentEntity) entity, (Class) idType); - - return ClassUtils.isAssignable(Persistable.class, entity.getType()) - ? 
new PersistableMongoEntityInformation(entityInformation) : entityInformation; + return new MappingMongoEntityInformation<>((MongoPersistentEntity) entity, (Class) idType); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java index 0090227457..baf069c3a4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,6 +21,7 @@ import java.lang.reflect.Method; import java.util.Optional; +import org.springframework.beans.factory.BeanFactory; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.MongoOperations; @@ -30,6 +31,7 @@ import org.springframework.data.mongodb.repository.query.MongoEntityInformation; import org.springframework.data.mongodb.repository.query.MongoQueryMethod; import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery; +import org.springframework.data.mongodb.repository.query.StringBasedAggregation; import org.springframework.data.mongodb.repository.query.StringBasedMongoQuery; import 
org.springframework.data.projection.ProjectionFactory; import org.springframework.data.querydsl.QuerydslPredicateExecutor; @@ -38,12 +40,11 @@ import org.springframework.data.repository.core.RepositoryMetadata; import org.springframework.data.repository.core.support.RepositoryComposition.RepositoryFragments; import org.springframework.data.repository.core.support.RepositoryFactorySupport; -import org.springframework.data.repository.core.support.RepositoryFragment; -import org.springframework.data.repository.query.EvaluationContextProvider; import org.springframework.data.repository.query.QueryLookupStrategy; import org.springframework.data.repository.query.QueryLookupStrategy.Key; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; import org.springframework.data.repository.query.RepositoryQuery; -import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.data.repository.query.ValueExpressionDelegate; import org.springframework.lang.Nullable; import org.springframework.util.Assert; @@ -57,10 +58,10 @@ */ public class MongoRepositoryFactory extends RepositoryFactorySupport { - private static final SpelExpressionParser EXPRESSION_PARSER = new SpelExpressionParser(); - + private final CrudMethodMetadataPostProcessor crudMethodMetadataPostProcessor = new CrudMethodMetadataPostProcessor(); private final MongoOperations operations; private final MappingContext, MongoPersistentProperty> mappingContext; + @Nullable private QueryMethodValueEvaluationContextAccessor accessor; /** * Creates a new {@link MongoRepositoryFactory} with the given {@link MongoOperations}. 
@@ -69,29 +70,48 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport { */ public MongoRepositoryFactory(MongoOperations mongoOperations) { - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); this.operations = mongoOperations; this.mappingContext = mongoOperations.getConverter().getMappingContext(); + + addRepositoryProxyPostProcessor(crudMethodMetadataPostProcessor); + } + + @Override + public void setBeanClassLoader(ClassLoader classLoader) { + + super.setBeanClassLoader(classLoader); + crudMethodMetadataPostProcessor.setBeanClassLoader(classLoader); + } + + @Override + protected ProjectionFactory getProjectionFactory(ClassLoader classLoader, BeanFactory beanFactory) { + return this.operations.getConverter().getProjectionFactory(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getRepositoryBaseClass(org.springframework.data.repository.core.RepositoryMetadata) - */ @Override protected Class getRepositoryBaseClass(RepositoryMetadata metadata) { return SimpleMongoRepository.class; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getRepositoryFragments(org.springframework.data.repository.core.RepositoryMetadata) - */ @Override protected RepositoryFragments getRepositoryFragments(RepositoryMetadata metadata) { + return getRepositoryFragments(metadata, operations); + } - RepositoryFragments fragments = RepositoryFragments.empty(); + /** + * Creates {@link RepositoryFragments} based on {@link RepositoryMetadata} to add Mongo-specific extensions. Typically + * adds a {@link QuerydslMongoPredicateExecutor} if the repository interface uses Querydsl. + *

          + * Can be overridden by subclasses to customize {@link RepositoryFragments}. + * + * @param metadata repository metadata. + * @param operations the MongoDB operations manager. + * @return + * @since 3.2.1 + */ + protected RepositoryFragments getRepositoryFragments(RepositoryMetadata metadata, MongoOperations operations) { boolean isQueryDslRepository = QUERY_DSL_PRESENT && QuerydslPredicateExecutor.class.isAssignableFrom(metadata.getRepositoryInterface()); @@ -103,42 +123,33 @@ protected RepositoryFragments getRepositoryFragments(RepositoryMetadata metadata "Cannot combine Querydsl and reactive repository support in a single interface"); } - MongoEntityInformation entityInformation = getEntityInformation(metadata.getDomainType(), - metadata); - - fragments = fragments.append(RepositoryFragment.implemented( - getTargetRepositoryViaReflection(QuerydslMongoPredicateExecutor.class, entityInformation, operations))); + return RepositoryFragments + .just(new QuerydslMongoPredicateExecutor<>(getEntityInformation(metadata.getDomainType()), operations)); } - return fragments; + return RepositoryFragments.empty(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getTargetRepository(org.springframework.data.repository.core.RepositoryInformation) - */ @Override protected Object getTargetRepository(RepositoryInformation information) { MongoEntityInformation entityInformation = getEntityInformation(information.getDomainType(), information); - return getTargetRepositoryViaReflection(information, entityInformation, operations); + Object targetRepository = getTargetRepositoryViaReflection(information, entityInformation, operations); + + if (targetRepository instanceof SimpleMongoRepository repository) { + repository.setRepositoryMethodMetadata(crudMethodMetadataPostProcessor.getCrudMethodMetadata()); + } + + return targetRepository; } - /* - * (non-Javadoc) - * @see 
org.springframework.data.repository.core.support.RepositoryFactorySupport#getQueryLookupStrategy(org.springframework.data.repository.query.QueryLookupStrategy.Key, org.springframework.data.repository.query.EvaluationContextProvider) - */ @Override protected Optional getQueryLookupStrategy(@Nullable Key key, - EvaluationContextProvider evaluationContextProvider) { - return Optional.of(new MongoQueryLookupStrategy(operations, evaluationContextProvider, mappingContext)); + ValueExpressionDelegate valueExpressionDelegate) { + return Optional.of(new MongoQueryLookupStrategy(operations, mappingContext, valueExpressionDelegate)); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getEntityInformation(java.lang.Class) - */ public MongoEntityInformation getEntityInformation(Class domainClass) { return getEntityInformation(domainClass, null); } @@ -157,39 +168,28 @@ private MongoEntityInformation getEntityInformation(Class doma * @author Oliver Gierke * @author Thomas Darimont */ - private static class MongoQueryLookupStrategy implements QueryLookupStrategy { - - private final MongoOperations operations; - private final EvaluationContextProvider evaluationContextProvider; - MappingContext, MongoPersistentProperty> mappingContext; - - public MongoQueryLookupStrategy(MongoOperations operations, EvaluationContextProvider evaluationContextProvider, - MappingContext, MongoPersistentProperty> mappingContext) { + private record MongoQueryLookupStrategy(MongoOperations operations, + MappingContext, MongoPersistentProperty> mappingContext, + ValueExpressionDelegate expressionSupport) implements QueryLookupStrategy { - this.operations = operations; - this.evaluationContextProvider = evaluationContextProvider; - this.mappingContext = mappingContext; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryLookupStrategy#resolveQuery(java.lang.reflect.Method, 
org.springframework.data.repository.core.RepositoryMetadata, org.springframework.data.projection.ProjectionFactory, org.springframework.data.repository.core.NamedQueries) - */ @Override public RepositoryQuery resolveQuery(Method method, RepositoryMetadata metadata, ProjectionFactory factory, NamedQueries namedQueries) { MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, mappingContext); + queryMethod.verify(); + String namedQueryName = queryMethod.getNamedQueryName(); if (namedQueries.hasQuery(namedQueryName)) { String namedQuery = namedQueries.getQuery(namedQueryName); - return new StringBasedMongoQuery(namedQuery, queryMethod, operations, EXPRESSION_PARSER, - evaluationContextProvider); + return new StringBasedMongoQuery(namedQuery, queryMethod, operations, expressionSupport); + } else if (queryMethod.hasAnnotatedAggregation()) { + return new StringBasedAggregation(queryMethod, operations, expressionSupport); } else if (queryMethod.hasAnnotatedQuery()) { - return new StringBasedMongoQuery(queryMethod, operations, EXPRESSION_PARSER, evaluationContextProvider); + return new StringBasedMongoQuery(queryMethod, operations, expressionSupport); } else { - return new PartTreeMongoQuery(queryMethod, operations); + return new PartTreeMongoQuery(queryMethod, operations, expressionSupport); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java index 274a454273..c98d38c5f5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. 
+ * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -65,32 +65,21 @@ public void setCreateIndexesForQueryMethods(boolean createIndexesForQueryMethods this.createIndexesForQueryMethods = createIndexesForQueryMethods; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport#setMappingContext(org.springframework.data.mapping.context.MappingContext) - */ @Override - protected void setMappingContext(MappingContext mappingContext) { + public void setMappingContext(MappingContext mappingContext) { super.setMappingContext(mappingContext); this.mappingContextConfigured = true; } - /* - * (non-Javadoc) - * - * @see - * org.springframework.data.repository.support.RepositoryFactoryBeanSupport - * #createRepositoryFactory() - */ @Override - protected final RepositoryFactorySupport createRepositoryFactory() { + protected RepositoryFactorySupport createRepositoryFactory() { RepositoryFactorySupport factory = getFactoryInstance(operations); if (createIndexesForQueryMethods) { factory.addQueryCreationListener( - new IndexEnsuringQueryCreationListener(collectionName -> operations.indexOps(collectionName))); + new IndexEnsuringQueryCreationListener((collectionName, javaType) -> operations.indexOps(javaType))); } return factory; @@ -106,18 +95,11 @@ protected RepositoryFactorySupport getFactoryInstance(MongoOperations operations return new MongoRepositoryFactory(operations); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.data.repository.support.RepositoryFactoryBeanSupport - * #afterPropertiesSet() - */ 
@Override public void afterPropertiesSet() { super.afterPropertiesSet(); - Assert.state(operations != null, "MongoTemplate must not be null!"); + Assert.state(operations != null, "MongoTemplate must not be null"); if (!mappingContextConfigured) { setMappingContext(operations.getConverter().getMappingContext()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/PersistableMongoEntityInformation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/PersistableMongoEntityInformation.java deleted file mode 100644 index 8f46da3ea7..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/PersistableMongoEntityInformation.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.repository.support; - -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - -import org.springframework.data.domain.Persistable; -import org.springframework.data.mongodb.repository.query.MongoEntityInformation; - -/** - * {@link MongoEntityInformation} implementation wrapping an existing {@link MongoEntityInformation} considering - * {@link Persistable} types by delegating {@link #isNew(Object)} and {@link #getId(Object)} to the corresponding - * {@link Persistable#isNew()} and {@link Persistable#getId()} implementations. - * - * @author Christoph Strobl - * @author Oliver Gierke - * @since 1.10 - */ -@RequiredArgsConstructor -class PersistableMongoEntityInformation implements MongoEntityInformation { - - private final @NonNull MongoEntityInformation delegate; - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoEntityInformation#getCollectionName() - */ - @Override - public String getCollectionName() { - return delegate.getCollectionName(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoEntityInformation#getIdAttribute() - */ - @Override - public String getIdAttribute() { - return delegate.getIdAttribute(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.EntityInformation#isNew(java.lang.Object) - */ - @Override - @SuppressWarnings("unchecked") - public boolean isNew(T t) { - - if (t instanceof Persistable) { - return ((Persistable) t).isNew(); - } - - return delegate.isNew(t); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.EntityInformation#getId(java.lang.Object) - */ - @Override - @SuppressWarnings("unchecked") - public ID getId(T t) { - - if (t instanceof Persistable) { - return ((Persistable) t).getId(); - } - - return delegate.getId(t); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.PersistentEntityInformation#getIdType() - */ - 
@Override - public Class getIdType() { - return delegate.getIdType(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.EntityMetadata#getJavaType() - */ - @Override - public Class getJavaType() { - return delegate.getJavaType(); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java index 19bf1782b2..ec845510ce 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,35 @@ */ package org.springframework.data.mongodb.repository.support; +import java.util.Collections; import java.util.List; import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Stream; + +import org.bson.Document; import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Slice; import org.springframework.data.domain.Sort; -import org.springframework.data.domain.Sort.Order; +import org.springframework.data.domain.Window; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.repository.query.MongoEntityInformation; import org.springframework.data.querydsl.EntityPathResolver; -import org.springframework.data.querydsl.QSort; import org.springframework.data.querydsl.QuerydslPredicateExecutor; import org.springframework.data.querydsl.SimpleEntityPathResolver; -import org.springframework.data.repository.core.EntityInformation; -import org.springframework.data.repository.support.PageableExecutionUtils; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.data.support.PageableExecutionUtils; import org.springframework.util.Assert; import com.querydsl.core.NonUniqueResultException; import com.querydsl.core.types.EntityPath; -import com.querydsl.core.types.Expression; import com.querydsl.core.types.OrderSpecifier; import com.querydsl.core.types.Predicate; -import com.querydsl.core.types.dsl.PathBuilder; 
-import com.querydsl.mongodb.AbstractMongodbQuery; /** * MongoDB-specific {@link QuerydslPredicateExecutor} that allows execution {@link Predicate}s in various forms. @@ -51,10 +55,9 @@ * @author Mark Paluch * @since 2.0 */ -public class QuerydslMongoPredicateExecutor implements QuerydslPredicateExecutor { +public class QuerydslMongoPredicateExecutor extends QuerydslPredicateExecutorSupport + implements QuerydslPredicateExecutor { - private final PathBuilder builder; - private final EntityInformation entityInformation; private final MongoOperations mongoOperations; /** @@ -81,23 +84,15 @@ public QuerydslMongoPredicateExecutor(MongoEntityInformation entityInforma public QuerydslMongoPredicateExecutor(MongoEntityInformation entityInformation, MongoOperations mongoOperations, EntityPathResolver resolver) { - Assert.notNull(resolver, "EntityPathResolver must not be null!"); - - EntityPath path = resolver.createPath(entityInformation.getJavaType()); - - this.builder = new PathBuilder(path.getType(), path.getMetadata()); - this.entityInformation = entityInformation; + super(mongoOperations.getConverter(), pathBuilderFor(resolver.createPath(entityInformation.getJavaType())), + entityInformation); this.mongoOperations = mongoOperations; } - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QuerydslPredicateExecutor#findById(com.querydsl.core.types.Predicate) - */ @Override public Optional findOne(Predicate predicate) { - Assert.notNull(predicate, "Predicate must not be null!"); + Assert.notNull(predicate, "Predicate must not be null"); try { return Optional.ofNullable(createQueryFor(predicate).fetchOne()); @@ -106,164 +101,219 @@ public Optional findOne(Predicate predicate) { } } - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QuerydslPredicateExecutor#findAll(com.querydsl.core.types.Predicate) - */ @Override public List findAll(Predicate predicate) { - Assert.notNull(predicate, "Predicate must not be null!"); + Assert.notNull(predicate, 
"Predicate must not be null"); return createQueryFor(predicate).fetch(); } - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QuerydslPredicateExecutor#findAll(com.querydsl.core.types.Predicate, com.querydsl.core.types.OrderSpecifier[]) - */ @Override public List findAll(Predicate predicate, OrderSpecifier... orders) { - Assert.notNull(predicate, "Predicate must not be null!"); - Assert.notNull(orders, "Order specifiers must not be null!"); + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(orders, "Order specifiers must not be null"); return createQueryFor(predicate).orderBy(orders).fetch(); } - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QuerydslPredicateExecutor#findAll(com.querydsl.core.types.Predicate, org.springframework.data.domain.Sort) - */ @Override public List findAll(Predicate predicate, Sort sort) { - Assert.notNull(predicate, "Predicate must not be null!"); - Assert.notNull(sort, "Sort must not be null!"); + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(sort, "Sort must not be null"); return applySorting(createQueryFor(predicate), sort).fetch(); } - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QuerydslPredicateExecutor#findAll(com.querydsl.core.types.OrderSpecifier[]) - */ @Override public Iterable findAll(OrderSpecifier... 
orders) { - Assert.notNull(orders, "Order specifiers must not be null!"); + Assert.notNull(orders, "Order specifiers must not be null"); return createQuery().orderBy(orders).fetch(); } - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QuerydslPredicateExecutor#findAll(com.querydsl.core.types.Predicate, org.springframework.data.domain.Pageable) - */ @Override public Page findAll(Predicate predicate, Pageable pageable) { - Assert.notNull(predicate, "Predicate must not be null!"); - Assert.notNull(pageable, "Pageable must not be null!"); + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(pageable, "Pageable must not be null"); - AbstractMongodbQuery> query = createQueryFor(predicate); + SpringDataMongodbQuery query = createQueryFor(predicate); return PageableExecutionUtils.getPage(applyPagination(query, pageable).fetch(), pageable, query::fetchCount); } - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QuerydslPredicateExecutor#count(com.querydsl.core.types.Predicate) - */ @Override public long count(Predicate predicate) { - Assert.notNull(predicate, "Predicate must not be null!"); + Assert.notNull(predicate, "Predicate must not be null"); return createQueryFor(predicate).fetchCount(); } - /* - * (non-Javadoc) - * @see org.springframework.data.querydsl.QuerydslPredicateExecutor#exists(com.querydsl.core.types.Predicate) - */ @Override public boolean exists(Predicate predicate) { - Assert.notNull(predicate, "Predicate must not be null!"); + Assert.notNull(predicate, "Predicate must not be null"); return createQueryFor(predicate).fetchCount() > 0; } + @Override + @SuppressWarnings("unchecked") + public R findBy(Predicate predicate, + Function, R> queryFunction) { + + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(queryFunction, "Query function must not be null"); + + return queryFunction.apply(new FluentQuerydsl<>(predicate, (Class) typeInformation().getJavaType())); + } + /** - * Creates 
a {@link AbstractMongodbQuery} for the given {@link Predicate}. + * Creates a {@link SpringDataMongodbQuery} for the given {@link Predicate}. * * @param predicate * @return */ - private AbstractMongodbQuery> createQueryFor(Predicate predicate) { + private SpringDataMongodbQuery createQueryFor(Predicate predicate) { return createQuery().where(predicate); } /** - * Creates a {@link AbstractMongodbQuery}. + * Creates a {@link SpringDataMongodbQuery}. * * @return */ - private AbstractMongodbQuery> createQuery() { - return new SpringDataMongodbQuery<>(mongoOperations, entityInformation.getJavaType()); + private SpringDataMongodbQuery createQuery() { + return new SpringDataMongodbQuery<>(mongoOperations, typeInformation().getJavaType()); } /** - * Applies the given {@link Pageable} to the given {@link MongodbQuery}. + * Applies the given {@link Pageable} to the given {@link SpringDataMongodbQuery}. * * @param query * @param pageable * @return */ - private AbstractMongodbQuery> applyPagination( - AbstractMongodbQuery> query, Pageable pageable) { + private SpringDataMongodbQuery applyPagination(SpringDataMongodbQuery query, Pageable pageable) { + + if (pageable.isPaged()) { + query = query.offset(pageable.getOffset()).limit(pageable.getPageSize()); + } - query = query.offset(pageable.getOffset()).limit(pageable.getPageSize()); return applySorting(query, pageable.getSort()); } /** - * Applies the given {@link Sort} to the given {@link MongodbQuery}. + * Applies the given {@link Sort} to the given {@link SpringDataMongodbQuery}. 
* * @param query * @param sort * @return */ - private AbstractMongodbQuery> applySorting( - AbstractMongodbQuery> query, Sort sort) { - - // TODO: find better solution than instanceof check - if (sort instanceof QSort) { - - List> orderSpecifiers = ((QSort) sort).getOrderSpecifiers(); - query.orderBy(orderSpecifiers.toArray(new OrderSpecifier[orderSpecifiers.size()])); - - return query; - } - - sort.stream().map(this::toOrder).forEach(query::orderBy); + private SpringDataMongodbQuery applySorting(SpringDataMongodbQuery query, Sort sort) { + toOrderSpecifiers(sort).forEach(query::orderBy); return query; } /** - * Transforms a plain {@link Order} into a Querydsl specific {@link OrderSpecifier}. + * {@link org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery} using Querydsl + * {@link Predicate}. * - * @param order - * @return + * @author Mark Paluch + * @since 3.3 */ - @SuppressWarnings({ "rawtypes", "unchecked" }) - private OrderSpecifier toOrder(Order order) { + class FluentQuerydsl extends FetchableFluentQuerySupport { + + FluentQuerydsl(Predicate predicate, Class resultType) { + this(predicate, Sort.unsorted(), 0, resultType, Collections.emptyList()); + } - Expression property = builder.get(order.getProperty()); + FluentQuerydsl(Predicate predicate, Sort sort, int limit, Class resultType, List fieldsToInclude) { + super(predicate, sort, limit, resultType, fieldsToInclude); + } + + @Override + protected FluentQuerydsl create(Predicate predicate, Sort sort, int limit, Class resultType, + List fieldsToInclude) { + return new FluentQuerydsl<>(predicate, sort, limit, resultType, fieldsToInclude); + } - return new OrderSpecifier( - order.isAscending() ? 
com.querydsl.core.types.Order.ASC : com.querydsl.core.types.Order.DESC, property); + @Override + public T oneValue() { + return createQuery().fetchOne(); + } + + @Override + public T firstValue() { + return createQuery().fetchFirst(); + } + + @Override + public List all() { + return createQuery().fetch(); + } + + @Override + public Window scroll(ScrollPosition scrollPosition) { + return createQuery().scroll(scrollPosition); + } + + @Override + public Page page(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + return createQuery().fetchPage(pageable); + } + + @Override + public Slice slice(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + return createQuery().fetchSlice(pageable); + } + + @Override + public Stream stream() { + return createQuery().stream(); + } + + @Override + public long count() { + return createQuery().fetchCount(); + } + + @Override + public boolean exists() { + return count() > 0; + } + + private SpringDataMongodbQuery createQuery() { + return new SpringDataMongodbQuery<>(mongoOperations, typeInformation().getJavaType(), getResultType(), + mongoOperations.getCollectionName(typeInformation().getJavaType()), this::customize).where(getPredicate()); + } + + private void customize(BasicQuery query) { + + List fieldsToInclude = getFieldsToInclude(); + if (!fieldsToInclude.isEmpty()) { + Document fields = new Document(); + fieldsToInclude.forEach(field -> fields.put(field, 1)); + query.setFieldsObject(fields); + } + + if (getSort().isSorted()) { + query.with(getSort()); + } + + query.limit(getLimit()); + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoRepository.java deleted file mode 100644 index 7e525e6eb4..0000000000 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoRepository.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.repository.support; - -import java.io.Serializable; - -import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.repository.query.MongoEntityInformation; -import org.springframework.data.querydsl.EntityPathResolver; -import org.springframework.data.querydsl.QuerydslPredicateExecutor; - -import com.querydsl.core.types.Predicate; - -/** - * Special Querydsl based repository implementation that allows execution {@link Predicate}s in various forms. - * - * @author Oliver Gierke - * @author Thomas Darimont - * @author Mark Paluch - * @author Christoph Strobl - * @deprecated since 2.0. Querydsl execution is now linked via composable repositories and no longer requires to be a - * subclass of {@link SimpleMongoRepository}. Use {@link QuerydslMongoPredicateExecutor} for standalone - * Querydsl {@link Predicate} execution. 
- */ -@Deprecated -public class QuerydslMongoRepository extends QuerydslMongoPredicateExecutor - implements QuerydslPredicateExecutor { - - public QuerydslMongoRepository(MongoEntityInformation entityInformation, MongoOperations mongoOperations) { - super(entityInformation, mongoOperations); - } - - public QuerydslMongoRepository(MongoEntityInformation entityInformation, MongoOperations mongoOperations, - EntityPathResolver resolver) { - super(entityInformation, mongoOperations, resolver); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslPredicateExecutorSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslPredicateExecutorSupport.java new file mode 100644 index 0000000000..02c5a67d7e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslPredicateExecutorSupport.java @@ -0,0 +1,92 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import java.util.List; +import java.util.stream.Collectors; + +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.querydsl.QSort; +import org.springframework.data.repository.core.EntityInformation; + +import com.querydsl.core.types.EntityPath; +import com.querydsl.core.types.Expression; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.core.types.dsl.PathBuilder; + +/** + * @author Christoph Strobl + * @since 2.2 + */ +abstract class QuerydslPredicateExecutorSupport { + + private final SpringDataMongodbSerializer serializer; + private final PathBuilder builder; + private final EntityInformation entityInformation; + + QuerydslPredicateExecutorSupport(MongoConverter converter, PathBuilder builder, + EntityInformation entityInformation) { + + this.serializer = new SpringDataMongodbSerializer(converter); + this.builder = builder; + this.entityInformation = entityInformation; + } + + protected static PathBuilder pathBuilderFor(EntityPath path) { + return new PathBuilder<>(path.getType(), path.getMetadata()); + } + + protected EntityInformation typeInformation() { + return entityInformation; + } + + protected SpringDataMongodbSerializer mongodbSerializer() { + return serializer; + } + + /** + * Transforms a plain {@link Order} into a Querydsl specific {@link OrderSpecifier}. + * + * @param order + * @return + */ + @SuppressWarnings({ "rawtypes", "unchecked" }) + protected OrderSpecifier toOrder(Order order) { + + Expression property = builder.get(order.getProperty()); + + return new OrderSpecifier( + order.isAscending() ? com.querydsl.core.types.Order.ASC : com.querydsl.core.types.Order.DESC, property); + } + + /** + * Converts the given {@link Sort} to {@link OrderSpecifier}. 
+ * + * @param sort + * @return + */ + protected List> toOrderSpecifiers(Sort sort) { + + if (sort instanceof QSort qSort) { + return qSort.getOrderSpecifiers(); + } + + return sort.stream().map(this::toOrder).collect(Collectors.toList()); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupport.java index da39c9e73d..3d46babd69 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,7 +21,6 @@ import org.springframework.util.Assert; import com.querydsl.core.types.EntityPath; -import com.querydsl.mongodb.AbstractMongodbQuery; /** * Base class to create repository implementations based on Querydsl. @@ -41,37 +40,37 @@ public abstract class QuerydslRepositorySupport { */ public QuerydslRepositorySupport(MongoOperations operations) { - Assert.notNull(operations, "MongoOperations must not be null!"); + Assert.notNull(operations, "MongoOperations must not be null"); this.template = operations; this.context = operations.getConverter().getMappingContext(); } /** - * Returns a {@link MongodbQuery} for the given {@link EntityPath}. 
The collection being queried is derived from the + * Returns a {@link SpringDataMongodbQuery} for the given {@link EntityPath}. The collection being queried is derived from the * entity metadata. * * @param path * @return */ - protected AbstractMongodbQuery> from(final EntityPath path) { + protected SpringDataMongodbQuery from(final EntityPath path) { - Assert.notNull(path, "EntityPath must not be null!"); + Assert.notNull(path, "EntityPath must not be null"); MongoPersistentEntity entity = context.getRequiredPersistentEntity(path.getType()); return from(path, entity.getCollection()); } /** - * Returns a {@link MongodbQuery} for the given {@link EntityPath} querying the given collection. + * Returns a {@link SpringDataMongodbQuery} for the given {@link EntityPath} querying the given collection. * * @param path must not be {@literal null} * @param collection must not be blank or {@literal null} * @return */ - protected AbstractMongodbQuery> from(final EntityPath path, String collection) { + protected SpringDataMongodbQuery from(final EntityPath path, String collection) { - Assert.notNull(path, "EntityPath must not be null!"); - Assert.hasText(collection, "Collection name must not be null or empty!"); + Assert.notNull(path, "EntityPath must not be null"); + Assert.hasText(collection, "Collection name must not be null or empty"); return new SpringDataMongodbQuery(template, path.getType(), collection); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveFluentQuerySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveFluentQuerySupport.java new file mode 100644 index 0000000000..1b1c9c3275 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveFluentQuerySupport.java @@ -0,0 +1,114 @@ +/* + * Copyright 2021-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.springframework.data.domain.Sort; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.util.Assert; + +/** + * Support class for {@link org.springframework.data.repository.query.FluentQuery.ReactiveFluentQuery} implementations. + * + * @author Mark Paluch + * @since 3.3 + */ +abstract class ReactiveFluentQuerySupport implements FluentQuery.ReactiveFluentQuery { + + private final P predicate; + private final Sort sort; + private final int limit; + private final Class resultType; + private final List fieldsToInclude; + + ReactiveFluentQuerySupport(P predicate, Sort sort, int limit, Class resultType, List fieldsToInclude) { + this.predicate = predicate; + this.sort = sort; + this.limit = limit; + this.resultType = resultType; + this.fieldsToInclude = fieldsToInclude; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.FluentQuery.ReactiveFluentQuery#sortBy(org.springframework.data.domain.Sort) + */ + @Override + public ReactiveFluentQuery sortBy(Sort sort) { + + Assert.notNull(sort, "Sort must not be null"); + + return create(predicate, sort, limit, resultType, fieldsToInclude); + } + + @Override + public ReactiveFluentQuery limit(int limit) { + + Assert.isTrue(limit > 0, "Limit must 
be greater zero"); + + return create(predicate, sort, limit, resultType, fieldsToInclude); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.FluentQuery.ReactiveFluentQuery#as(java.lang.Class) + */ + @Override + public ReactiveFluentQuery as(Class projection) { + + Assert.notNull(projection, "Projection target type must not be null"); + + return create(predicate, sort, limit, projection, fieldsToInclude); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.repository.query.FluentQuery.ReactiveFluentQuery#project(java.util.Collection) + */ + @Override + public ReactiveFluentQuery project(Collection properties) { + + Assert.notNull(properties, "Projection properties must not be null"); + + return create(predicate, sort, limit, resultType, new ArrayList<>(properties)); + } + + protected abstract ReactiveFluentQuerySupport create(P predicate, Sort sort, int limit, Class resultType, + List fieldsToInclude); + + P getPredicate() { + return predicate; + } + + Sort getSort() { + return sort; + } + + int getLimit() { + return limit; + } + + Class getResultType() { + return resultType; + } + + List getFieldsToInclude() { + return fieldsToInclude; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactory.java index d5f234df32..3edfcdd2db 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,14 @@ */ package org.springframework.data.mongodb.repository.support; -import lombok.AccessLevel; -import lombok.RequiredArgsConstructor; +import static org.springframework.data.querydsl.QuerydslUtils.*; import java.io.Serializable; import java.lang.reflect.Method; import java.util.Optional; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanFactory; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.ReactiveMongoOperations; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; @@ -30,17 +31,22 @@ import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery; import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryMethod; import org.springframework.data.mongodb.repository.query.ReactivePartTreeMongoQuery; +import org.springframework.data.mongodb.repository.query.ReactiveStringBasedAggregation; import org.springframework.data.mongodb.repository.query.ReactiveStringBasedMongoQuery; import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.querydsl.ReactiveQuerydslPredicateExecutor; import org.springframework.data.repository.core.NamedQueries; import org.springframework.data.repository.core.RepositoryInformation; import org.springframework.data.repository.core.RepositoryMetadata; import org.springframework.data.repository.core.support.ReactiveRepositoryFactorySupport; -import org.springframework.data.repository.query.EvaluationContextProvider; +import 
org.springframework.data.repository.core.support.RepositoryComposition.RepositoryFragments; +import org.springframework.data.repository.core.support.RepositoryFragment; import org.springframework.data.repository.query.QueryLookupStrategy; import org.springframework.data.repository.query.QueryLookupStrategy.Key; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; import org.springframework.data.repository.query.RepositoryQuery; -import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.data.repository.query.ValueExpressionDelegate; import org.springframework.lang.Nullable; import org.springframework.util.Assert; @@ -54,10 +60,10 @@ */ public class ReactiveMongoRepositoryFactory extends ReactiveRepositoryFactorySupport { - private static final SpelExpressionParser EXPRESSION_PARSER = new SpelExpressionParser(); - + private final CrudMethodMetadataPostProcessor crudMethodMetadataPostProcessor = new CrudMethodMetadataPostProcessor(); private final ReactiveMongoOperations operations; private final MappingContext, MongoPersistentProperty> mappingContext; + @Nullable private QueryMethodValueEvaluationContextAccessor accessor; /** * Creates a new {@link ReactiveMongoRepositoryFactory} with the given {@link ReactiveMongoOperations}. 
@@ -66,92 +72,114 @@ public class ReactiveMongoRepositoryFactory extends ReactiveRepositoryFactorySup */ public ReactiveMongoRepositoryFactory(ReactiveMongoOperations mongoOperations) { - Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null!"); + Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null"); this.operations = mongoOperations; this.mappingContext = mongoOperations.getConverter().getMappingContext(); + + setEvaluationContextProvider(ReactiveQueryMethodEvaluationContextProvider.DEFAULT); + addRepositoryProxyPostProcessor(crudMethodMetadataPostProcessor); + } + + @Override + public void setBeanClassLoader(ClassLoader classLoader) { + + super.setBeanClassLoader(classLoader); + crudMethodMetadataPostProcessor.setBeanClassLoader(classLoader); + } + + @Override + protected ProjectionFactory getProjectionFactory(ClassLoader classLoader, BeanFactory beanFactory) { + return this.operations.getConverter().getProjectionFactory(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getRepositoryBaseClass(org.springframework.data.repository.core.RepositoryMetadata) - */ @Override protected Class getRepositoryBaseClass(RepositoryMetadata metadata) { return SimpleReactiveMongoRepository.class; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getTargetRepository(org.springframework.data.repository.core.RepositoryInformation) - */ + @Override + protected RepositoryFragments getRepositoryFragments(RepositoryMetadata metadata) { + + RepositoryFragments fragments = RepositoryFragments.empty(); + + boolean isQueryDslRepository = QUERY_DSL_PRESENT + && ReactiveQuerydslPredicateExecutor.class.isAssignableFrom(metadata.getRepositoryInterface()); + + if (isQueryDslRepository) { + + MongoEntityInformation entityInformation = getEntityInformation(metadata.getDomainType(), + metadata); + + fragments = 
fragments.append(RepositoryFragment + .implemented(instantiateClass(ReactiveQuerydslMongoPredicateExecutor.class, entityInformation, operations))); + } + + return fragments; + } + @Override protected Object getTargetRepository(RepositoryInformation information) { MongoEntityInformation entityInformation = getEntityInformation(information.getDomainType(), information); - return getTargetRepositoryViaReflection(information, entityInformation, operations); + Object targetRepository = getTargetRepositoryViaReflection(information, entityInformation, operations); + + if (targetRepository instanceof SimpleReactiveMongoRepository repository) { + repository.setRepositoryMethodMetadata(crudMethodMetadataPostProcessor.getCrudMethodMetadata()); + } + + return targetRepository; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getQueryLookupStrategy(org.springframework.data.repository.query.QueryLookupStrategy.Key, org.springframework.data.repository.query.EvaluationContextProvider) - */ - @Override - protected Optional getQueryLookupStrategy(@Nullable Key key, - EvaluationContextProvider evaluationContextProvider) { - return Optional.of(new MongoQueryLookupStrategy(operations, evaluationContextProvider, mappingContext)); + @Override protected Optional getQueryLookupStrategy(Key key, + ValueExpressionDelegate valueExpressionDelegate) { + return Optional.of(new MongoQueryLookupStrategy(operations, mappingContext, valueExpressionDelegate)); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactorySupport#getEntityInformation(java.lang.Class) - */ + @Override public MongoEntityInformation getEntityInformation(Class domainClass) { return getEntityInformation(domainClass, null); } @SuppressWarnings("unchecked") private MongoEntityInformation getEntityInformation(Class domainClass, - @Nullable RepositoryInformation information) { + @Nullable RepositoryMetadata metadata) { 
MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(domainClass); - return new MappingMongoEntityInformation((MongoPersistentEntity) entity, - information != null ? (Class) information.getIdType() : null); + return new MappingMongoEntityInformation<>((MongoPersistentEntity) entity, + metadata != null ? (Class) metadata.getIdType() : null); } /** * {@link QueryLookupStrategy} to create {@link PartTreeMongoQuery} instances. * * @author Mark Paluch + * @author Christoph Strobl */ - @RequiredArgsConstructor(access = AccessLevel.PACKAGE) - private static class MongoQueryLookupStrategy implements QueryLookupStrategy { - - private final ReactiveMongoOperations operations; - private final EvaluationContextProvider evaluationContextProvider; - private final MappingContext, MongoPersistentProperty> mappingContext; + private record MongoQueryLookupStrategy(ReactiveMongoOperations operations, + MappingContext, MongoPersistentProperty> mappingContext, + ValueExpressionDelegate delegate) implements QueryLookupStrategy { - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryLookupStrategy#resolveQuery(java.lang.reflect.Method, org.springframework.data.repository.core.RepositoryMetadata, org.springframework.data.projection.ProjectionFactory, org.springframework.data.repository.core.NamedQueries) - */ @Override public RepositoryQuery resolveQuery(Method method, RepositoryMetadata metadata, ProjectionFactory factory, NamedQueries namedQueries) { ReactiveMongoQueryMethod queryMethod = new ReactiveMongoQueryMethod(method, metadata, factory, mappingContext); + queryMethod.verify(); + String namedQueryName = queryMethod.getNamedQueryName(); if (namedQueries.hasQuery(namedQueryName)) { String namedQuery = namedQueries.getQuery(namedQueryName); - return new ReactiveStringBasedMongoQuery(namedQuery, queryMethod, operations, EXPRESSION_PARSER, - evaluationContextProvider); + return new ReactiveStringBasedMongoQuery(namedQuery, queryMethod, 
operations, delegate); + } else if (queryMethod.hasAnnotatedAggregation()) { + return new ReactiveStringBasedAggregation(queryMethod, operations, delegate); } else if (queryMethod.hasAnnotatedQuery()) { - return new ReactiveStringBasedMongoQuery(queryMethod, operations, EXPRESSION_PARSER, evaluationContextProvider); + return new ReactiveStringBasedMongoQuery(queryMethod, operations, delegate); } else { - return new ReactivePartTreeMongoQuery(queryMethod, operations); + return new ReactivePartTreeMongoQuery(queryMethod, operations, delegate); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java index 8965e3d343..4f9c0d945c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,13 +16,17 @@ package org.springframework.data.mongodb.repository.support; import java.io.Serializable; +import java.util.Optional; +import org.springframework.beans.factory.ListableBeanFactory; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.ReactiveMongoOperations; import org.springframework.data.mongodb.core.index.IndexOperationsAdapter; import org.springframework.data.repository.Repository; import org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport; import org.springframework.data.repository.core.support.RepositoryFactorySupport; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.ReactiveExtensionAwareQueryMethodEvaluationContextProvider; import org.springframework.lang.Nullable; import org.springframework.util.Assert; @@ -34,7 +38,7 @@ * @author Christoph Strobl * @since 2.0 * @see org.springframework.data.repository.reactive.ReactiveSortingRepository - * @see org.springframework.data.repository.reactive.RxJava2SortingRepository + * @see org.springframework.data.repository.reactive.RxJava3SortingRepository */ public class ReactiveMongoRepositoryFactoryBean, S, ID extends Serializable> extends RepositoryFactoryBeanSupport { @@ -70,37 +74,32 @@ public void setCreateIndexesForQueryMethods(boolean createIndexesForQueryMethods this.createIndexesForQueryMethods = createIndexesForQueryMethods; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport#setMappingContext(org.springframework.data.mapping.context.MappingContext) - */ @Override - protected void 
setMappingContext(MappingContext mappingContext) { + public void setMappingContext(MappingContext mappingContext) { super.setMappingContext(mappingContext); this.mappingContextConfigured = true; } - /* - * (non-Javadoc) - * - * @see - * org.springframework.data.repository.support.RepositoryFactoryBeanSupport - * #createRepositoryFactory() - */ @Override - protected final RepositoryFactorySupport createRepositoryFactory() { + protected RepositoryFactorySupport createRepositoryFactory() { RepositoryFactorySupport factory = getFactoryInstance(operations); if (createIndexesForQueryMethods) { factory.addQueryCreationListener(new IndexEnsuringQueryCreationListener( - collectionName -> IndexOperationsAdapter.blocking(operations.indexOps(collectionName)))); + (collectionName, javaType) -> IndexOperationsAdapter.blocking(operations.indexOps(javaType)))); } return factory; } + @Override + protected Optional createDefaultQueryMethodEvaluationContextProvider( + ListableBeanFactory beanFactory) { + return Optional.of(new ReactiveExtensionAwareQueryMethodEvaluationContextProvider(beanFactory)); + } + /** * Creates and initializes a {@link RepositoryFactorySupport} instance. 
* @@ -111,18 +110,11 @@ protected RepositoryFactorySupport getFactoryInstance(ReactiveMongoOperations op return new ReactiveMongoRepositoryFactory(operations); } - /* - * (non-Javadoc) - * - * @see - * org.springframework.data.repository.support.RepositoryFactoryBeanSupport - * #afterPropertiesSet() - */ @Override public void afterPropertiesSet() { super.afterPropertiesSet(); - Assert.state(operations != null, "ReactiveMongoOperations must not be null!"); + Assert.state(operations != null, "ReactiveMongoOperations must not be null"); if (!mappingContextConfigured) { setMappingContext(operations.getConverter().getMappingContext()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactivePageableExecutionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactivePageableExecutionUtils.java new file mode 100644 index 0000000000..fcf420212a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactivePageableExecutionUtils.java @@ -0,0 +1,69 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import reactor.core.publisher.Mono; + +import java.util.List; + +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.util.Assert; + +/** + * Support for query execution using {@link Pageable}. Using {@link ReactivePageableExecutionUtils} assumes that data + * queries are cheaper than {@code COUNT} queries and so some cases can take advantage of optimizations. + * + * @author Mark Paluch + * @since 3.3 + */ +abstract class ReactivePageableExecutionUtils { + + private ReactivePageableExecutionUtils() {} + + /** + * Constructs a {@link Page} based on the given {@code content}, {@link Pageable} and {@link Mono} applying + * optimizations. The construction of {@link Page} omits a count query if the total can be determined based on the + * result size and {@link Pageable}. + * + * @param content must not be {@literal null}. + * @param pageable must not be {@literal null}. + * @param totalSupplier must not be {@literal null}. + * @return the {@link Page}. 
+ */ + public static Mono> getPage(List content, Pageable pageable, Mono totalSupplier) { + + Assert.notNull(content, "Content must not be null"); + Assert.notNull(pageable, "Pageable must not be null"); + Assert.notNull(totalSupplier, "TotalSupplier must not be null"); + + if (pageable.isUnpaged() || pageable.getOffset() == 0) { + + if (pageable.isUnpaged() || pageable.getPageSize() > content.size()) { + return Mono.just(new PageImpl<>(content, pageable, content.size())); + } + + return totalSupplier.map(total -> new PageImpl<>(content, pageable, total)); + } + + if (content.size() != 0 && pageable.getPageSize() > content.size()) { + return Mono.just(new PageImpl<>(content, pageable, pageable.getOffset() + content.size())); + } + + return totalSupplier.map(total -> new PageImpl<>(content, pageable, total)); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveQuerydslMongoPredicateExecutor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveQuerydslMongoPredicateExecutor.java new file mode 100644 index 0000000000..9a1482823f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveQuerydslMongoPredicateExecutor.java @@ -0,0 +1,286 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.Collections; +import java.util.List; +import java.util.function.Function; + +import org.bson.Document; +import org.reactivestreams.Publisher; + +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.querydsl.EntityPathResolver; +import org.springframework.data.querydsl.QuerydslPredicateExecutor; +import org.springframework.data.querydsl.ReactiveQuerydslPredicateExecutor; +import org.springframework.data.querydsl.SimpleEntityPathResolver; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.util.Assert; + +import com.querydsl.core.types.EntityPath; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.core.types.Predicate; + +/** + * MongoDB-specific {@link QuerydslPredicateExecutor} that allows execution {@link Predicate}s in various forms. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.2 + */ +public class ReactiveQuerydslMongoPredicateExecutor extends QuerydslPredicateExecutorSupport + implements ReactiveQuerydslPredicateExecutor { + + private final ReactiveMongoOperations mongoOperations; + + /** + * Creates a new {@link ReactiveQuerydslMongoPredicateExecutor} for the given {@link MongoEntityInformation} and + * {@link ReactiveMongoOperations}. Uses the {@link SimpleEntityPathResolver} to create an {@link EntityPath} for the + * given domain class. 
+ * + * @param entityInformation must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + */ + public ReactiveQuerydslMongoPredicateExecutor(MongoEntityInformation entityInformation, + ReactiveMongoOperations mongoOperations) { + + this(entityInformation, mongoOperations, SimpleEntityPathResolver.INSTANCE); + } + + /** + * Creates a new {@link ReactiveQuerydslMongoPredicateExecutor} for the given {@link MongoEntityInformation}, + * {@link ReactiveMongoOperations} and {@link EntityPathResolver}. + * + * @param entityInformation must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. + * @param resolver must not be {@literal null}. + */ + public ReactiveQuerydslMongoPredicateExecutor(MongoEntityInformation entityInformation, + ReactiveMongoOperations mongoOperations, EntityPathResolver resolver) { + + super(mongoOperations.getConverter(), pathBuilderFor(resolver.createPath(entityInformation.getJavaType())), + entityInformation); + this.mongoOperations = mongoOperations; + } + + @Override + public Mono findOne(Predicate predicate) { + + Assert.notNull(predicate, "Predicate must not be null"); + + return createQueryFor(predicate).fetchOne(); + } + + @Override + public Flux findAll(Predicate predicate) { + + Assert.notNull(predicate, "Predicate must not be null"); + + return createQueryFor(predicate).fetch(); + } + + @Override + public Flux findAll(Predicate predicate, OrderSpecifier... orders) { + + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(orders, "Order specifiers must not be null"); + + return createQueryFor(predicate).orderBy(orders).fetch(); + } + + @Override + public Flux findAll(Predicate predicate, Sort sort) { + + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(sort, "Sort must not be null"); + + return applySorting(createQueryFor(predicate), sort).fetch(); + } + + @Override + public Flux findAll(OrderSpecifier... 
orders) { + + Assert.notNull(orders, "Order specifiers must not be null"); + + return createQuery().orderBy(orders).fetch(); + } + + @Override + public Mono count(Predicate predicate) { + + Assert.notNull(predicate, "Predicate must not be null"); + + return createQueryFor(predicate).fetchCount(); + } + + @Override + public Mono exists(Predicate predicate) { + + Assert.notNull(predicate, "Predicate must not be null"); + + return createQueryFor(predicate).fetchCount().map(it -> it != 0); + } + + @Override + public > P findBy(Predicate predicate, + Function, P> queryFunction) { + + Assert.notNull(predicate, "Predicate must not be null"); + Assert.notNull(queryFunction, "Query function must not be null"); + + return queryFunction.apply(new ReactiveFluentQuerydsl(predicate, (Class) typeInformation().getJavaType())); + } + + /** + * Creates a {@link ReactiveSpringDataMongodbQuery} for the given {@link Predicate}. + * + * @param predicate + * @return + */ + private ReactiveSpringDataMongodbQuery createQueryFor(Predicate predicate) { + return createQuery().where(predicate); + } + + /** + * Creates a {@link ReactiveSpringDataMongodbQuery}. + * + * @return + */ + private ReactiveSpringDataMongodbQuery createQuery() { + + Class javaType = typeInformation().getJavaType(); + return new ReactiveSpringDataMongodbQuery<>(mongoOperations, javaType, javaType, + mongoOperations.getCollectionName(javaType), it -> {}); + } + + /** + * Applies the given {@link Sort} to the given {@link ReactiveSpringDataMongodbQuery}. + * + * @param query + * @param sort + * @return + */ + private ReactiveSpringDataMongodbQuery applySorting(ReactiveSpringDataMongodbQuery query, Sort sort) { + + toOrderSpecifiers(sort).forEach(query::orderBy); + return query; + } + + /** + * {@link org.springframework.data.repository.query.FluentQuery.ReactiveFluentQuery} using Querydsl {@link Predicate}. 
+ * + * @since 3.3 + * @author Mark Paluch + */ + class ReactiveFluentQuerydsl extends ReactiveFluentQuerySupport { + + ReactiveFluentQuerydsl(Predicate predicate, Class resultType) { + this(predicate, Sort.unsorted(), 0, resultType, Collections.emptyList()); + } + + ReactiveFluentQuerydsl(Predicate predicate, Sort sort, int limit, Class resultType, + List fieldsToInclude) { + super(predicate, sort, limit, resultType, fieldsToInclude); + } + + @Override + protected ReactiveFluentQuerydsl create(Predicate predicate, Sort sort, int limit, Class resultType, + List fieldsToInclude) { + return new ReactiveFluentQuerydsl<>(predicate, sort, limit, resultType, fieldsToInclude); + } + + @Override + public Mono one() { + return createQuery().fetchOne(); + } + + @Override + public Mono first() { + return createQuery().fetchFirst(); + } + + @Override + public Flux all() { + return createQuery().fetch(); + } + + @Override + public Mono> scroll(ScrollPosition scrollPosition) { + return createQuery().scroll(scrollPosition); + } + + @Override + public Mono> page(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + return createQuery().fetchPage(pageable); + } + + @Override + public Mono> slice(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + return createQuery().fetchSlice(pageable); + } + + @Override + public Mono count() { + return createQuery().fetchCount(); + } + + @Override + public Mono exists() { + return count().map(it -> it > 0).defaultIfEmpty(false); + } + + private ReactiveSpringDataMongodbQuery createQuery() { + + return new ReactiveSpringDataMongodbQuery<>(mongoOperations, typeInformation().getJavaType(), getResultType(), + mongoOperations.getCollectionName(typeInformation().getJavaType()), this::customize).where(getPredicate()); + } + + private void customize(BasicQuery query) { + + List fieldsToInclude = getFieldsToInclude(); + + if (!fieldsToInclude.isEmpty()) { + Document fields = new Document(); + 
fieldsToInclude.forEach(field -> fields.put(field, 1)); + query.setFieldsObject(fields); + } + + if (getSort().isSorted()) { + query.with(getSort()); + } + + query.limit(getLimit()); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java new file mode 100644 index 0000000000..cf5191fd42 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java @@ -0,0 +1,310 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import org.springframework.data.mongodb.repository.util.SliceUtils; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.function.Consumer; + +import org.bson.Document; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.ReactiveFindOperation; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; +import org.springframework.util.StringUtils; + +import com.querydsl.core.JoinExpression; +import com.querydsl.core.QueryMetadata; +import com.querydsl.core.QueryModifiers; +import com.querydsl.core.types.Expression; +import com.querydsl.core.types.ExpressionUtils; +import com.querydsl.core.types.Operation; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.core.types.Path; +import com.querydsl.core.types.Predicate; +import com.querydsl.mongodb.MongodbOps; + +/** + * MongoDB query utilizing {@link ReactiveMongoOperations} for command execution. + * + * @implNote This class uses {@link MongoOperations} to directly convert documents into the target entity type. Also, we + * want entities to participate in lifecycle events and entity callbacks. 
+ * @param result type + * @author Mark Paluch + * @author Christoph Strobl + * @author Rocco Lagrotteria + * @since 2.2 + */ +class ReactiveSpringDataMongodbQuery extends SpringDataMongodbQuerySupport> { + + private final ReactiveMongoOperations mongoOperations; + private final Consumer queryCustomizer; + private final ReactiveFindOperation.FindWithQuery find; + + ReactiveSpringDataMongodbQuery(ReactiveMongoOperations mongoOperations, Class entityClass) { + this(mongoOperations, entityClass, entityClass, null, it -> {}); + } + + @SuppressWarnings("unchecked") + ReactiveSpringDataMongodbQuery(ReactiveMongoOperations mongoOperations, Class domainType, + Class resultType, @Nullable String collection, Consumer queryCustomizer) { + + super(new SpringDataMongodbSerializer(mongoOperations.getConverter())); + + this.mongoOperations = mongoOperations; + this.queryCustomizer = queryCustomizer; + this.find = (StringUtils.hasText(collection) ? mongoOperations.query(domainType).inCollection(collection) + : mongoOperations.query(domainType)).as((Class) resultType); + } + + /** + * Fetch all matching query results. + * + * @return {@link Flux} emitting all query results or {@link Flux#empty()} if there are none. + */ + Flux fetch() { + return createQuery().flatMapMany(it -> find.matching(it).all()); + } + + Mono> scroll(ScrollPosition scrollPosition) { + return createQuery().flatMap(it -> find.matching(it).scroll(scrollPosition)); + } + + /** + * Fetch all matching query results as page. + * + * @return {@link Mono} emitting the requested page. + */ + Mono> fetchPage(Pageable pageable) { + + Mono> content = createQuery().map(it -> it.with(pageable)).flatMapMany(it -> find.matching(it).all()) + .collectList(); + + return content.flatMap(it -> ReactivePageableExecutionUtils.getPage(it, pageable, fetchCount())); + } + + /** + * Fetch all matching query results as Slice. 
+ * + * @param pageable defines range and sort of requested slice + * @return {@link Mono} emitting the requested Slice. + * @since 4.5 + */ + Mono> fetchSlice(Pageable pageable) { + + Mono> content = createQuery().map(it -> SliceUtils.limitResult(it, pageable).with(pageable.getSort())) + .flatMapMany(it -> find.matching(it).all()).collectList(); + + return content.map(it -> SliceUtils.sliceResult(it, pageable)); + } + + /** + * Fetch the one matching query result. + * + * @return {@link Mono} emitting the first query result or {@link Mono#empty()} if there are none. + * @throws org.springframework.dao.IncorrectResultSizeDataAccessException if more than one match found. + */ + Mono fetchOne() { + return createQuery().flatMap(it -> find.matching(it).one()); + } + + /** + * Fetch the first matching query result. @return {@link Mono} emitting the first query result or {@link Mono#empty()} + * if there are none. + * + * @since 3.3 + */ + Mono fetchFirst() { + return createQuery().flatMap(it -> find.matching(it).first()); + } + + /** + * Fetch the count of matching query results. + * + * @return {@link Mono} emitting the first query result count. Emits always a count even item. + */ + Mono fetchCount() { + return createQuery().flatMap(it -> find.matching(it).count()); + } + + protected Mono createQuery() { + + QueryMetadata metadata = getQueryMixin().getMetadata(); + + return createQuery(createReactiveFilter(metadata), metadata.getProjection(), metadata.getModifiers(), + metadata.getOrderBy()); + } + + /** + * Creates a MongoDB query that is emitted through a {@link Mono} given {@link Mono} of {@link Predicate}. + * + * @param filter must not be {@literal null}. + * @param projection can be {@literal null} if no projection is given. Query requests all fields in such case. + * @param modifiers must not be {@literal null}. + * @param orderBy must not be {@literal null}. + * @return {@link Mono} emitting the {@link Query}. 
+ */ + protected Mono createQuery(Mono filter, @Nullable Expression projection, + QueryModifiers modifiers, List> orderBy) { + + return filter.map(this::createQuery) // + .defaultIfEmpty(createQuery(null)) // + .map(it -> { + + Document fields = createProjection(projection); + BasicQuery basicQuery = new BasicQuery(it, fields == null ? new Document() : fields); + + Integer limit = modifiers.getLimitAsInteger(); + Integer offset = modifiers.getOffsetAsInteger(); + + if (limit != null) { + basicQuery.limit(limit); + } + if (offset != null) { + basicQuery.skip(offset); + } + if (orderBy.size() > 0) { + basicQuery.setSortObject(createSort(orderBy)); + } + + queryCustomizer.accept(basicQuery); + + return basicQuery; + }); + } + + protected Mono createReactiveFilter(QueryMetadata metadata) { + + if (!metadata.getJoins().isEmpty()) { + + return createReactiveJoinFilter(metadata).map(it -> ExpressionUtils.allOf(metadata.getWhere(), it)) + .switchIfEmpty(Mono.justOrEmpty(metadata.getWhere())); + } + + return Mono.justOrEmpty(metadata.getWhere()); + } + + /** + * Creates a Join filter by querying {@link com.mongodb.DBRef references}. 
+ * + * @param metadata + * @return + */ + @SuppressWarnings("unchecked") + protected Mono createReactiveJoinFilter(QueryMetadata metadata) { + + MultiValueMap, Mono> predicates = new LinkedMultiValueMap<>(); + List joins = metadata.getJoins(); + + for (int i = joins.size() - 1; i >= 0; i--) { + + JoinExpression join = joins.get(i); + Path source = (Path) ((Operation) join.getTarget()).getArg(0); + Path target = (Path) ((Operation) join.getTarget()).getArg(1); + Collection> extraFilters = predicates.get(target.getRoot()); + + Mono filter = allOf(extraFilters).map(it -> ExpressionUtils.allOf(join.getCondition(), it)) + .switchIfEmpty(Mono.justOrEmpty(join.getCondition())); + + Mono predicate = getIds(target.getType(), filter) // + .collectList() // + .handle((it, sink) -> { + + if (it.isEmpty()) { + sink.error(new NoMatchException(source)); + return; + } + + Path path = ExpressionUtils.path(String.class, source, "$id"); + sink.next(ExpressionUtils.in((Path) path, it)); + }); + + predicates.add(source.getRoot(), predicate); + } + + Path source = (Path) ((Operation) joins.get(0).getTarget()).getArg(0); + return allOf(predicates.get(source.getRoot())).onErrorResume(NoMatchException.class, + e -> Mono.just(ExpressionUtils.predicate(MongodbOps.NO_MATCH, e.source))); + } + + private Mono allOf(@Nullable Collection> predicates) { + return predicates != null ? Flux.concat(predicates).collectList().map(ExpressionUtils::allOf) : Mono.empty(); + } + + /** + * Fetch the list of ids matching a given condition. + * + * @param targetType must not be {@literal null}. + * @param condition must not be {@literal null}. + * @return empty {@link List} if none found. + */ + protected Flux getIds(Class targetType, Mono condition) { + + return condition.flatMapMany(it -> getJoinIds(targetType, it)) + .switchIfEmpty(Flux.defer(() -> getJoinIds(targetType, null))); + } + + /** + * Fetch the list of ids matching a given condition. + * + * @param targetType must not be {@literal null}. 
+ * @param condition must not be {@literal null}. + * @return empty {@link List} if none found. + */ + protected Flux getJoinIds(Class targetType, @Nullable Predicate condition) { + + return createQuery(Mono.justOrEmpty(condition), null, QueryModifiers.EMPTY, Collections.emptyList()) + .flatMapMany(query -> mongoOperations.findDistinct(query, FieldName.ID.name(), targetType, Object.class)); + } + + @Override + protected List getIds(Class aClass, Predicate predicate) { + throw new UnsupportedOperationException( + "Use create Flux getIds(Class targetType, Mono condition)"); + } + + /** + * Marker exception to indicate no matches for a query using reference Id's. + */ + static class NoMatchException extends RuntimeException { + + final Path source; + + NoMatchException(Path source) { + this.source = source; + } + + @Override + public synchronized Throwable fillInStackTrace() { + return null; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java index e8c2a400f8..2f4c30ee7a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,28 +17,42 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; +import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Optional; +import java.util.function.Function; +import java.util.function.UnaryOperator; import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.data.domain.Example; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Slice; import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.ExecutableFindOperation; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.MongoRepository; import org.springframework.data.mongodb.repository.query.MongoEntityInformation; -import org.springframework.data.repository.support.PageableExecutionUtils; +import org.springframework.data.mongodb.repository.util.SliceUtils; +import org.springframework.data.support.PageableExecutionUtils; import org.springframework.data.util.StreamUtils; import org.springframework.data.util.Streamable; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import com.mongodb.ReadPreference; 
+import com.mongodb.client.result.DeleteResult; + /** * Repository base implementation for Mongo. * @@ -46,11 +60,15 @@ * @author Christoph Strobl * @author Thomas Darimont * @author Mark Paluch + * @author Mehran Behnam + * @author Jens Schauder + * @author Kirill Egorov */ public class SimpleMongoRepository implements MongoRepository { - private final MongoOperations mongoOperations; + private @Nullable CrudMethodMetadata crudMethodMetadata; private final MongoEntityInformation entityInformation; + private final MongoOperations mongoOperations; /** * Creates a new {@link SimpleMongoRepository} for the given {@link MongoEntityInformation} and {@link MongoTemplate}. @@ -60,292 +78,300 @@ public class SimpleMongoRepository implements MongoRepository { */ public SimpleMongoRepository(MongoEntityInformation metadata, MongoOperations mongoOperations) { - Assert.notNull(metadata, "MongoEntityInformation must not be null!"); - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); + Assert.notNull(metadata, "MongoEntityInformation must not be null"); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); this.entityInformation = metadata; this.mongoOperations = mongoOperations; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#save(java.lang.Object) - */ + // ------------------------------------------------------------------------- + // Methods from CrudRepository + // ------------------------------------------------------------------------- + @Override public S save(S entity) { - Assert.notNull(entity, "Entity must not be null!"); + Assert.notNull(entity, "Entity must not be null"); if (entityInformation.isNew(entity)) { - mongoOperations.insert(entity, entityInformation.getCollectionName()); - } else { - mongoOperations.save(entity, entityInformation.getCollectionName()); + return mongoOperations.insert(entity, entityInformation.getCollectionName()); } - return entity; + return 
mongoOperations.save(entity, entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoRepository#saveAll(java.lang.Iterable) - */ @Override public List saveAll(Iterable entities) { - Assert.notNull(entities, "The given Iterable of entities not be null!"); + Assert.notNull(entities, "The given Iterable of entities not be null"); Streamable source = Streamable.of(entities); - boolean allNew = source.stream().allMatch(it -> entityInformation.isNew(it)); + boolean allNew = source.stream().allMatch(entityInformation::isNew); if (allNew) { List result = source.stream().collect(Collectors.toList()); - mongoOperations.insert(result, entityInformation.getCollectionName()); - return result; - - } else { - return source.stream().map(this::save).collect(Collectors.toList()); + return new ArrayList<>(mongoOperations.insert(result, entityInformation.getCollectionName())); } + + return source.stream().map(this::save).collect(Collectors.toList()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#findById(java.io.Serializable) - */ @Override public Optional findById(ID id) { - Assert.notNull(id, "The given id must not be null!"); + Assert.notNull(id, "The given id must not be null"); + + Query query = getIdQuery(id); + getReadPreference().ifPresent(query::withReadPreference); return Optional.ofNullable( - mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName())); + mongoOperations.findOne(query, entityInformation.getJavaType(), entityInformation.getCollectionName())); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#existsById(java.lang.Object) - */ @Override public boolean existsById(ID id) { - Assert.notNull(id, "The given id must not be null!"); + Assert.notNull(id, "The given id must not be null"); + + Query query = getIdQuery(id); + getReadPreference().ifPresent(query::withReadPreference); - return 
mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(), - entityInformation.getCollectionName()); + return mongoOperations.exists(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); + } + + @Override + public List findAll() { + return findAll(new Query()); + } + + @Override + public List findAllById(Iterable ids) { + + Assert.notNull(ids, "The given Ids of entities not be null"); + + return findAll(getIdQuery(ids)); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#count() - */ @Override public long count() { - return mongoOperations.getCollection(entityInformation.getCollectionName()).count(); + + Query query = new Query(); + getReadPreference().ifPresent(query::withReadPreference); + return mongoOperations.count(query, entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#deleteById(java.lang.Object) - */ @Override public void deleteById(ID id) { - Assert.notNull(id, "The given id must not be null!"); + Assert.notNull(id, "The given id must not be null"); - mongoOperations.remove(getIdQuery(id), entityInformation.getJavaType(), entityInformation.getCollectionName()); + Query query = getIdQuery(id); + getReadPreference().ifPresent(query::withReadPreference); + mongoOperations.remove(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#delete(java.lang.Object) - */ @Override public void delete(T entity) { - Assert.notNull(entity, "The given entity must not be null!"); + Assert.notNull(entity, "The given entity must not be null"); + + DeleteResult deleteResult = mongoOperations.remove(entity, entityInformation.getCollectionName()); - deleteById(entityInformation.getRequiredId(entity)); + if (entityInformation.isVersioned() && deleteResult.wasAcknowledged() && deleteResult.getDeletedCount() == 0) { + throw new 
OptimisticLockingFailureException(String.format( + "The entity with id %s with version %s in %s cannot be deleted; Was it modified or deleted in the meantime", + entityInformation.getId(entity), entityInformation.getVersion(entity), + entityInformation.getCollectionName())); + } } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#delete(java.lang.Iterable) - */ @Override - public void deleteAll(Iterable entities) { + public void deleteAllById(Iterable ids) { - Assert.notNull(entities, "The given Iterable of entities not be null!"); + Assert.notNull(ids, "The given Iterable of ids must not be null"); - entities.forEach(this::delete); + Query query = getIdQuery(ids); + getReadPreference().ifPresent(query::withReadPreference); + mongoOperations.remove(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#deleteAll() - */ @Override - public void deleteAll() { - mongoOperations.remove(new Query(), entityInformation.getCollectionName()); - } + public void deleteAll(Iterable entities) { - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#findAll() - */ - @Override - public List findAll() { - return findAll(new Query()); + Assert.notNull(entities, "The given Iterable of entities must not be null"); + + entities.forEach(this::delete); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.CrudRepository#findAllById(java.lang.Iterable) - */ @Override - public Iterable findAllById(Iterable ids) { + public void deleteAll() { - return findAll(new Query(new Criteria(entityInformation.getIdAttribute()) - .in(Streamable.of(ids).stream().collect(StreamUtils.toUnmodifiableList())))); + Query query = new Query(); + getReadPreference().ifPresent(query::withReadPreference); + + mongoOperations.remove(query, entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.repository.PagingAndSortingRepository#findAll(org.springframework.data.domain.Pageable) - */ + // ------------------------------------------------------------------------- + // Methods from PagingAndSortingRepository + // ------------------------------------------------------------------------- + @Override public Page findAll(Pageable pageable) { - Assert.notNull(pageable, "Pageable must not be null!"); + Assert.notNull(pageable, "Pageable must not be null"); - Long count = count(); + long count = count(); List list = findAll(new Query().with(pageable)); - return new PageImpl(list, pageable, count); + return new PageImpl<>(list, pageable, count); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.PagingAndSortingRepository#findAll(org.springframework.data.domain.Sort) - */ @Override public List findAll(Sort sort) { - Assert.notNull(sort, "Sort must not be null!"); + Assert.notNull(sort, "Sort must not be null"); - return findAll(new Query().with(sort)); + Query query = new Query().with(sort); + return findAll(query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoRepository#insert(java.lang.Object) - */ + // ------------------------------------------------------------------------- + // Methods from MongoRepository + // ------------------------------------------------------------------------- + @Override public S insert(S entity) { - Assert.notNull(entity, "Entity must not be null!"); + Assert.notNull(entity, "Entity must not be null"); - mongoOperations.insert(entity, entityInformation.getCollectionName()); - return entity; + return mongoOperations.insert(entity, entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoRepository#insert(java.lang.Iterable) - */ @Override public List insert(Iterable entities) { - Assert.notNull(entities, "The given Iterable of entities not be null!"); + Assert.notNull(entities, "The given 
Iterable of entities not be null"); - List list = Streamable.of(entities).stream().collect(StreamUtils.toUnmodifiableList()); + Collection list = toCollection(entities); if (list.isEmpty()) { - return list; + return Collections.emptyList(); } - mongoOperations.insertAll(list); - return list; + return new ArrayList<>(mongoOperations.insertAll(list)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoRepository#findAllByExample(org.springframework.data.domain.Example, org.springframework.data.domain.Pageable) - */ + // ------------------------------------------------------------------------- + // Methods from QueryByExampleExecutor + // ------------------------------------------------------------------------- + @Override - public Page findAll(final Example example, Pageable pageable) { + public Optional findOne(Example example) { - Assert.notNull(example, "Sample must not be null!"); - Assert.notNull(pageable, "Pageable must not be null!"); + Assert.notNull(example, "Sample must not be null"); - Query q = new Query(new Criteria().alike(example)).with(pageable); - List list = mongoOperations.find(q, example.getProbeType(), entityInformation.getCollectionName()); + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()); + getReadPreference().ifPresent(query::withReadPreference); - return PageableExecutionUtils.getPage(list, pageable, - () -> mongoOperations.count(q, example.getProbeType(), entityInformation.getCollectionName())); + return Optional + .ofNullable(mongoOperations.findOne(query, example.getProbeType(), entityInformation.getCollectionName())); + } + + @Override + public List findAll(Example example) { + return findAll(example, Sort.unsorted()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoRepository#findAllByExample(org.springframework.data.domain.Example, org.springframework.data.domain.Sort) - */ @Override public List findAll(Example 
example, Sort sort) { - Assert.notNull(example, "Sample must not be null!"); - Assert.notNull(sort, "Sort must not be null!"); + Assert.notNull(example, "Sample must not be null"); + Assert.notNull(sort, "Sort must not be null"); - Query q = new Query(new Criteria().alike(example)).with(sort); + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()) // + .with(sort); + getReadPreference().ifPresent(query::withReadPreference); - return mongoOperations.find(q, example.getProbeType(), entityInformation.getCollectionName()); + return mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoRepository#findAllByExample(org.springframework.data.domain.Example) - */ @Override - public List findAll(Example example) { - return findAll(example, Sort.unsorted()); - } + public Page findAll(Example example, Pageable pageable) { - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryByExampleExecutor#findOne(org.springframework.data.domain.Example) - */ - @Override - public Optional findOne(Example example) { + Assert.notNull(example, "Sample must not be null"); + Assert.notNull(pageable, "Pageable must not be null"); - Assert.notNull(example, "Sample must not be null!"); + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()).with(pageable); // + getReadPreference().ifPresent(query::withReadPreference); - Query q = new Query(new Criteria().alike(example)); - return Optional - .ofNullable(mongoOperations.findOne(q, example.getProbeType(), entityInformation.getCollectionName())); + List list = mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName()); + + return PageableExecutionUtils.getPage(list, pageable, () -> mongoOperations + .count(Query.of(query).limit(-1).skip(-1), example.getProbeType(), 
entityInformation.getCollectionName())); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryByExampleExecutor#count(org.springframework.data.domain.Example) - */ @Override public long count(Example example) { - Assert.notNull(example, "Sample must not be null!"); + Assert.notNull(example, "Sample must not be null"); - Query q = new Query(new Criteria().alike(example)); - return mongoOperations.count(q, example.getProbeType(), entityInformation.getCollectionName()); + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()); + getReadPreference().ifPresent(query::withReadPreference); + + return mongoOperations.count(query, example.getProbeType(), entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.QueryByExampleExecutor#exists(org.springframework.data.domain.Example) - */ @Override public boolean exists(Example example) { - Assert.notNull(example, "Sample must not be null!"); + Assert.notNull(example, "Sample must not be null"); + + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()); + getReadPreference().ifPresent(query::withReadPreference); + + return mongoOperations.exists(query, example.getProbeType(), entityInformation.getCollectionName()); + } + + @Override + public R findBy(Example example, + Function, R> queryFunction) { + + Assert.notNull(example, "Sample must not be null"); + Assert.notNull(queryFunction, "Query function must not be null"); + + return queryFunction.apply(new FluentQueryByExample<>(example, example.getProbeType())); + } + + // ------------------------------------------------------------------------- + // Utility methods + // ------------------------------------------------------------------------- + + /** + * Configures a custom {@link CrudMethodMetadata} to be used to detect {@link ReadPreference}s and query hints to be + * applied to queries. 
+ * + * @param crudMethodMetadata + * @since 4.2 + */ + void setRepositoryMethodMetadata(CrudMethodMetadata crudMethodMetadata) { + this.crudMethodMetadata = crudMethodMetadata; + } + + private Optional getReadPreference() { + + if (crudMethodMetadata == null) { + return Optional.empty(); + } - Query q = new Query(new Criteria().alike(example)); - return mongoOperations.exists(q, example.getProbeType(), entityInformation.getCollectionName()); + return crudMethodMetadata.getReadPreference(); } private Query getIdQuery(Object id) { @@ -356,12 +382,131 @@ private Criteria getIdCriteria(Object id) { return where(entityInformation.getIdAttribute()).is(id); } + private Query getIdQuery(Iterable ids) { + + Query query = new Query(new Criteria(entityInformation.getIdAttribute()).in(toCollection(ids))); + getReadPreference().ifPresent(query::withReadPreference); + return query; + } + + private static Collection toCollection(Iterable ids) { + return ids instanceof Collection collection ? collection + : StreamUtils.createStreamFromIterator(ids.iterator()).collect(Collectors.toList()); + } + private List findAll(@Nullable Query query) { if (query == null) { return Collections.emptyList(); } + getReadPreference().ifPresent(query::withReadPreference); return mongoOperations.find(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); } + + /** + * {@link org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery} using {@link Example}. 
+ * + * @author Mark Paluch + * @since 3.3 + */ + class FluentQueryByExample extends FetchableFluentQuerySupport, T> { + + FluentQueryByExample(Example example, Class resultType) { + this(example, Sort.unsorted(), 0, resultType, Collections.emptyList()); + } + + FluentQueryByExample(Example example, Sort sort, int limit, Class resultType, List fieldsToInclude) { + super(example, sort, limit, resultType, fieldsToInclude); + } + + @Override + protected FluentQueryByExample create(Example predicate, Sort sort, int limit, Class resultType, + List fieldsToInclude) { + return new FluentQueryByExample<>(predicate, sort, limit, resultType, fieldsToInclude); + } + + @Override + public T oneValue() { + return createQuery().oneValue(); + } + + @Override + public T firstValue() { + return createQuery().firstValue(); + } + + @Override + public List all() { + return createQuery().all(); + } + + @Override + public Window scroll(ScrollPosition scrollPosition) { + return createQuery().scroll(scrollPosition); + } + + @Override + public Page page(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + List list = createQuery(q -> q.with(pageable)).all(); + + return PageableExecutionUtils.getPage(list, pageable, this::count); + } + + @Override + public Slice slice(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + List resultList = createQuery(q -> SliceUtils.limitResult(q, pageable).with(pageable.getSort())).all(); + + return SliceUtils.sliceResult(resultList, pageable); + } + + @Override + public Stream stream() { + return createQuery().stream(); + } + + @Override + public long count() { + return createQuery().count(); + } + + @Override + public boolean exists() { + return createQuery().exists(); + } + + private ExecutableFindOperation.TerminatingFind createQuery() { + return createQuery(UnaryOperator.identity()); + } + + private ExecutableFindOperation.TerminatingFind createQuery(UnaryOperator queryCustomizer) { + + Query 
query = new Query(new Criteria().alike(getPredicate())) // + .collation(entityInformation.getCollation()); + + if (getSort().isSorted()) { + query.with(getSort()); + } + + query.limit(getLimit()); + + if (!getFieldsToInclude().isEmpty()) { + query.fields().include(getFieldsToInclude()); + } + + getReadPreference().ifPresent(query::withReadPreference); + + query = queryCustomizer.apply(query); + + return mongoOperations.query(getPredicate().getProbeType()).inCollection(entityInformation.getCollectionName()) + .as(getResultType()).matching(query); + } + + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepository.java index 3a99ef5a88..1c1df2c9a1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepository.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,381 +17,444 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; +import org.springframework.data.mongodb.repository.util.SliceUtils; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.List; -import java.util.stream.Collectors; +import java.util.Optional; +import java.util.function.Function; +import java.util.function.UnaryOperator; import org.reactivestreams.Publisher; + import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.data.domain.Example; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Slice; import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.ReactiveFindOperation; import org.springframework.data.mongodb.core.ReactiveMongoOperations; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.ReactiveMongoRepository; import org.springframework.data.mongodb.repository.query.MongoEntityInformation; -import org.springframework.data.util.StreamUtils; -import org.springframework.data.util.Streamable; +import 
org.springframework.data.repository.query.FluentQuery; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import com.mongodb.ReadPreference; +import com.mongodb.client.result.DeleteResult; + /** * Reactive repository base implementation for Mongo. * * @author Mark Paluch * @author Oliver Gierke * @author Christoph Strobl + * @author Ruben J Garcia + * @author Jens Schauder + * @author Clément Petit + * @author Kirill Egorov * @since 2.0 */ -@RequiredArgsConstructor public class SimpleReactiveMongoRepository implements ReactiveMongoRepository { - private final @NonNull MongoEntityInformation entityInformation; - private final @NonNull ReactiveMongoOperations mongoOperations; + private @Nullable CrudMethodMetadata crudMethodMetadata; + private final MongoEntityInformation entityInformation; + private final ReactiveMongoOperations mongoOperations; - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#findById(java.lang.Object) + /** + * Creates a new {@link SimpleReactiveMongoRepository} for the given {@link MongoEntityInformation} and + * {@link MongoTemplate}. + * + * @param entityInformation must not be {@literal null}. + * @param mongoOperations must not be {@literal null}. 
*/ - @Override - public Mono findById(ID id) { + public SimpleReactiveMongoRepository(MongoEntityInformation entityInformation, + ReactiveMongoOperations mongoOperations) { - Assert.notNull(id, "The given id must not be null!"); + Assert.notNull(entityInformation, "EntityInformation must not be null"); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); - return mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName()); + this.entityInformation = entityInformation; + this.mongoOperations = mongoOperations; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#findById(org.reactivestreams.Publisher) - */ + // ------------------------------------------------------------------------- + // Methods from ReactiveCrudRepository + // ------------------------------------------------------------------------- + @Override - public Mono findById(Publisher publisher) { + public Mono save(S entity) { - Assert.notNull(publisher, "The given id must not be null!"); + Assert.notNull(entity, "Entity must not be null"); - return Mono.from(publisher).flatMap( - id -> mongoOperations.findById(id, entityInformation.getJavaType(), entityInformation.getCollectionName())); + if (entityInformation.isNew(entity)) { + return mongoOperations.insert(entity, entityInformation.getCollectionName()); + } + + return mongoOperations.save(entity, entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#findOne(org.springframework.data.domain.Example) - */ @Override - public Mono findOne(Example example) { + public Flux saveAll(Iterable entities) { - Assert.notNull(example, "Sample must not be null!"); + Assert.notNull(entities, "The given Iterable of entities must not be null"); - Query q = new Query(new Criteria().alike(example)); - q.limit(2); + List source = toList(entities); + return 
source.stream().allMatch(entityInformation::isNew) ? // + insert(source) : concatMapSequentially(source, this::save); + } - return mongoOperations.find(q, example.getProbeType(), entityInformation.getCollectionName()).buffer(2) - .flatMap(vals -> { + @Override + public Flux saveAll(Publisher publisher) { - if (vals.size() > 1) { - return Mono.error(new IncorrectResultSizeDataAccessException(1)); - } - return Mono.just(vals.iterator().next()); - }).single(); + Assert.notNull(publisher, "The given Publisher of entities must not be null"); + + return concatMapSequentially(publisher, this::save); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#existsById(java.lang.Object) - */ @Override - public Mono existsById(ID id) { + public Mono findById(ID id) { - Assert.notNull(id, "The given id must not be null!"); + Assert.notNull(id, "The given id must not be null"); - return mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(), - entityInformation.getCollectionName()); + Query query = getIdQuery(id); + getReadPreference().ifPresent(query::withReadPreference); + return mongoOperations.findOne(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#existsById(org.reactivestreams.Publisher) - */ @Override - public Mono existsById(Publisher publisher) { + public Mono findById(Publisher publisher) { - Assert.notNull(publisher, "The given id must not be null!"); + Assert.notNull(publisher, "The given id must not be null"); + Optional readPreference = getReadPreference(); - return Mono.from(publisher).flatMap(id -> mongoOperations.exists(getIdQuery(id), entityInformation.getJavaType(), - entityInformation.getCollectionName())); + return Mono.from(publisher).flatMap(id -> { + Query query = getIdQuery(id); + readPreference.ifPresent(query::withReadPreference); + return 
mongoOperations.findOne(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); + }); + } + + @Override + public Mono existsById(ID id) { + Assert.notNull(id, "The given id must not be null"); + + Query query = getIdQuery(id); + getReadPreference().ifPresent(query::withReadPreference); + return mongoOperations.exists(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#exists(org.springframework.data.domain.Example) - */ @Override - public Mono exists(Example example) { + public Mono existsById(Publisher publisher) { - Assert.notNull(example, "Sample must not be null!"); + Assert.notNull(publisher, "The given id must not be null"); + Optional readPreference = getReadPreference(); - Query q = new Query(new Criteria().alike(example)); - return mongoOperations.exists(q, example.getProbeType(), entityInformation.getCollectionName()); + return Mono.from(publisher).flatMap(id -> { + Query query = getIdQuery(id); + readPreference.ifPresent(query::withReadPreference); + return mongoOperations.exists(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); + }); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveSortingRepository#findAll() - */ @Override public Flux findAll() { return findAll(new Query()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#findAllById(java.lang.Iterable) - */ @Override public Flux findAllById(Iterable ids) { - Assert.notNull(ids, "The given Iterable of Id's must not be null!"); + Assert.notNull(ids, "The given Iterable of Id's must not be null"); - return findAll(new Query(new Criteria(entityInformation.getIdAttribute()) - .in(Streamable.of(ids).stream().collect(StreamUtils.toUnmodifiableList())))); + return findAll(getIdQuery(ids)); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.repository.reactive.ReactiveCrudRepository#findAllById(org.reactivestreams.Publisher) - */ @Override public Flux findAllById(Publisher ids) { - Assert.notNull(ids, "The given Publisher of Id's must not be null!"); + Assert.notNull(ids, "The given Publisher of Id's must not be null"); - return Flux.from(ids).buffer().flatMap(this::findAllById); + Optional readPreference = getReadPreference(); + return Flux.from(ids).buffer().flatMapSequential(listOfIds -> { + Query query = getIdQuery(listOfIds); + readPreference.ifPresent(query::withReadPreference); + return mongoOperations.find(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); + }); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveSortingRepository#findAll(org.springframework.data.domain.Sort) - */ @Override - public Flux findAll(Sort sort) { + public Mono count() { - Assert.notNull(sort, "Sort must not be null!"); + Query query = new Query(); + getReadPreference().ifPresent(query::withReadPreference); + return mongoOperations.count(query, entityInformation.getCollectionName()); + } - return findAll(new Query().with(sort)); + @Override + public Mono deleteById(ID id) { + + Assert.notNull(id, "The given id must not be null"); + + return deleteById(id, getReadPreference()); + } + + private Mono deleteById(ID id, Optional readPreference) { + + Assert.notNull(id, "The given id must not be null"); + + Query query = getIdQuery(id); + readPreference.ifPresent(query::withReadPreference); + return mongoOperations.remove(query, entityInformation.getJavaType(), entityInformation.getCollectionName()).then(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort) - */ @Override - public Flux findAll(Example example, Sort sort) { + public Mono deleteById(Publisher publisher) { - Assert.notNull(example, 
"Sample must not be null!"); - Assert.notNull(sort, "Sort must not be null!"); + Assert.notNull(publisher, "Id must not be null"); - Query query = new Query(new Criteria().alike(example)).with(sort); + Optional readPreference = getReadPreference(); - return mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName()); + return Mono.from(publisher).flatMap(id -> { + Query query = getIdQuery(id); + readPreference.ifPresent(query::withReadPreference); + return mongoOperations.remove(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); + }).then(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#findAll(org.springframework.data.domain.Example) - */ @Override - public Flux findAll(Example example) { + public Mono delete(T entity) { - Assert.notNull(example, "Example must not be null!"); + Assert.notNull(entity, "The given entity must not be null"); - return findAll(example, Sort.unsorted()); + Mono remove = mongoOperations.remove(entity, entityInformation.getCollectionName()); + + if (entityInformation.isVersioned()) { + + remove = remove.handle((deleteResult, sink) -> { + + if (deleteResult.wasAcknowledged() && deleteResult.getDeletedCount() == 0) { + sink.error(new OptimisticLockingFailureException(String.format( + "The entity with id %s with version %s in %s cannot be deleted; Was it modified or deleted in the meantime", + entityInformation.getId(entity), entityInformation.getVersion(entity), + entityInformation.getCollectionName()))); + } else { + sink.next(deleteResult); + } + }); + } + + return remove.then(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#count() - */ @Override - public Mono count() { - return mongoOperations.count(new Query(), entityInformation.getCollectionName()); + public Mono deleteAllById(Iterable ids) { + + Assert.notNull(ids, "The given Iterable of Id's must not be null"); + + 
return deleteAllById(ids, getReadPreference()); + } + + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") + private Mono deleteAllById(Iterable ids, Optional readPreference) { + + Query query = getIdQuery(ids); + readPreference.ifPresent(query::withReadPreference); + + return mongoOperations.remove(query, entityInformation.getJavaType(), entityInformation.getCollectionName()).then(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#count(org.springframework.data.domain.Example) - */ @Override - public Mono count(Example example) { + public Mono deleteAll(Iterable entities) { - Assert.notNull(example, "Sample must not be null!"); + Assert.notNull(entities, "The given Iterable of entities must not be null"); - Query q = new Query(new Criteria().alike(example)); - return mongoOperations.count(q, example.getProbeType(), entityInformation.getCollectionName()); + Optional readPreference = getReadPreference(); + return Flux.fromIterable(entities).map(entityInformation::getRequiredId).collectList() + .flatMap(ids -> deleteAllById(ids, readPreference)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#insert(java.lang.Object) - */ @Override - public Mono insert(S entity) { + public Mono deleteAll(Publisher entityStream) { - Assert.notNull(entity, "Entity must not be null!"); + Assert.notNull(entityStream, "The given Publisher of entities must not be null"); - return mongoOperations.insert(entity, entityInformation.getCollectionName()); + Optional readPreference = getReadPreference(); + return Flux.from(entityStream)// + .map(entityInformation::getRequiredId)// + .concatMap(id -> deleteById(id, readPreference))// + .then(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#insert(java.lang.Iterable) - */ @Override - public Flux insert(Iterable entities) { + public Mono deleteAll() { + Query query = new 
Query(); + getReadPreference().ifPresent(query::withReadPreference); + return mongoOperations.remove(query, entityInformation.getCollectionName()).then(Mono.empty()); + } - Assert.notNull(entities, "The given Iterable of entities must not be null!"); + // ------------------------------------------------------------------------- + // Methods from ReactiveSortingRepository + // ------------------------------------------------------------------------- - List source = Streamable.of(entities).stream().collect(StreamUtils.toUnmodifiableList()); + @Override + public Flux findAll(Sort sort) { + + Assert.notNull(sort, "Sort must not be null"); - return source.isEmpty() ? Flux.empty() : Flux.from(mongoOperations.insertAll(source)); + return findAll(new Query().with(sort)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.ReactiveMongoRepository#insert(org.reactivestreams.Publisher) - */ + // ------------------------------------------------------------------------- + // Methods from ReactiveMongoRepository + // ------------------------------------------------------------------------- + @Override - public Flux insert(Publisher entities) { + public Mono insert(S entity) { - Assert.notNull(entities, "The given Publisher of entities must not be null!"); + Assert.notNull(entity, "Entity must not be null"); - return Flux.from(entities).flatMap(entity -> mongoOperations.insert(entity, entityInformation.getCollectionName())); + return mongoOperations.insert(entity, entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#save(java.lang.Object) - */ @Override - public Mono save(S entity) { + public Flux insert(Iterable entities) { - Assert.notNull(entity, "Entity must not be null!"); + Assert.notNull(entities, "The given Iterable of entities must not be null"); - if (entityInformation.isNew(entity)) { - return mongoOperations.insert(entity, 
entityInformation.getCollectionName()); - } + return insert(toCollection(entities)); + } - return mongoOperations.save(entity, entityInformation.getCollectionName()); + private Flux insert(Collection entities) { + return entities.isEmpty() ? Flux.empty() : mongoOperations.insert(entities, entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#saveAll(java.lang.Iterable) - */ @Override - public Flux saveAll(Iterable entities) { - - Assert.notNull(entities, "The given Iterable of entities must not be null!"); + public Flux insert(Publisher entities) { - Streamable source = Streamable.of(entities); + Assert.notNull(entities, "The given Publisher of entities must not be null"); - return source.stream().allMatch(it -> entityInformation.isNew(it)) ? // - mongoOperations.insertAll(source.stream().collect(Collectors.toList())) : // - Flux.fromIterable(entities).flatMap(this::save); + return Flux.from(entities).concatMap(this::insert); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#saveAll(org.reactivestreams.Publisher) - */ + // ------------------------------------------------------------------------- + // Methods from ReactiveMongoRepository + // ------------------------------------------------------------------------- + @Override - public Flux saveAll(Publisher entityStream) { + public Mono findOne(Example example) { - Assert.notNull(entityStream, "The given Publisher of entities must not be null!"); + Assert.notNull(example, "Sample must not be null"); - return Flux.from(entityStream) - .flatMap(entity -> entityInformation.isNew(entity) ? 
// - mongoOperations.insert(entity, entityInformation.getCollectionName()).then(Mono.just(entity)) : // - mongoOperations.save(entity, entityInformation.getCollectionName()).then(Mono.just(entity))); + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()) // + .limit(2); + getReadPreference().ifPresent(query::withReadPreference); + + return mongoOperations.find(query, example.getProbeType(), entityInformation.getCollectionName()).buffer(2) + .map(vals -> { + + if (vals.size() > 1) { + throw new IncorrectResultSizeDataAccessException(1); + } + return vals.iterator().next(); + }).next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#deleteById(java.lang.Object) - */ @Override - public Mono deleteById(ID id) { + public Flux findAll(Example example) { - Assert.notNull(id, "The given id must not be null!"); + Assert.notNull(example, "Example must not be null"); - return mongoOperations - .remove(getIdQuery(id), entityInformation.getJavaType(), entityInformation.getCollectionName()).then(); + return findAll(example, Sort.unsorted()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#deleteById(org.reactivestreams.Publisher) - */ @Override - public Mono deleteById(Publisher publisher) { + public Flux findAll(Example example, Sort sort) { + + Assert.notNull(example, "Sample must not be null"); + Assert.notNull(sort, "Sort must not be null"); - Assert.notNull(publisher, "Id must not be null!"); + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()) // + .with(sort); + getReadPreference().ifPresent(query::withReadPreference); - return Mono.from(publisher).flatMap(id -> mongoOperations.remove(getIdQuery(id), entityInformation.getJavaType(), - entityInformation.getCollectionName())).then(); + return mongoOperations.find(query, example.getProbeType(), 
entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#delete(java.lang.Object) - */ @Override - public Mono delete(T entity) { + public Mono count(Example example) { + + Assert.notNull(example, "Sample must not be null"); - Assert.notNull(entity, "The given entity must not be null!"); + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()); + getReadPreference().ifPresent(query::withReadPreference); - return deleteById(entityInformation.getRequiredId(entity)); + return mongoOperations.count(query, example.getProbeType(), entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#deleteAll(java.lang.Iterable) - */ @Override - public Mono deleteAll(Iterable entities) { + public Mono exists(Example example) { - Assert.notNull(entities, "The given Iterable of entities must not be null!"); + Assert.notNull(example, "Sample must not be null"); - return Flux.fromIterable(entities).flatMap(entity -> deleteById(entityInformation.getRequiredId(entity))).then(); + Query query = new Query(new Criteria().alike(example)) // + .collation(entityInformation.getCollation()); + getReadPreference().ifPresent(query::withReadPreference); + + return mongoOperations.exists(query, example.getProbeType(), entityInformation.getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#deleteAll(org.reactivestreams.Publisher) - */ @Override - public Mono deleteAll(Publisher entityStream) { + public > P findBy(Example example, + Function, P> queryFunction) { - Assert.notNull(entityStream, "The given Publisher of entities must not be null!"); + Assert.notNull(example, "Sample must not be null"); + Assert.notNull(queryFunction, "Query function must not be null"); - return Flux.from(entityStream)// - 
.map(entityInformation::getRequiredId)// - .flatMap(this::deleteById)// - .then(); + return queryFunction + .apply(new ReactiveFluentQueryByExample<>(example, example.getProbeType(), getReadPreference())); } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.reactive.ReactiveCrudRepository#deleteAll() + /** + * Configures a custom {@link CrudMethodMetadata} to be used to detect {@link ReadPreference}s and query hints to be + * applied to queries. + * + * @param crudMethodMetadata + * @since 4.2 */ - @Override - public Mono deleteAll() { - return mongoOperations.remove(new Query(), entityInformation.getCollectionName()).then(Mono.empty()); + void setRepositoryMethodMetadata(CrudMethodMetadata crudMethodMetadata) { + this.crudMethodMetadata = crudMethodMetadata; + } + + private Flux findAll(Query query) { + + getReadPreference().ifPresent(query::withReadPreference); + return mongoOperations.find(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); + } + + private Optional getReadPreference() { + + if (crudMethodMetadata == null) { + return Optional.empty(); + } + + return crudMethodMetadata.getReadPreference(); } private Query getIdQuery(Object id) { @@ -402,8 +465,178 @@ private Criteria getIdCriteria(Object id) { return where(entityInformation.getIdAttribute()).is(id); } - private Flux findAll(Query query) { + private Query getIdQuery(Iterable ids) { + return new Query(where(entityInformation.getIdAttribute()).in(toCollection(ids))); + } + + /** + * Transform the elements emitted by this Flux into Publishers, then flatten these inner publishers into a single + * Flux. The operation does not allow interleaving between performing the map operation for the first and second + * source element guaranteeing the mapping operation completed before subscribing to its following inners, that will + * then be subscribed to eagerly emitting elements in order of their source. + * + *
          +	 * Flux.just(first-element).flatMap(...)
          +	 *     .concatWith(Flux.fromIterable(remaining-elements).flatMapSequential(...))
          +	 * </pre>
          + * + * @param source the collection of elements to transform. + * @param mapper the transformation {@link Function}. Must not be {@literal null}. + * @return never {@literal null}. + * @param source type + */ + static Flux concatMapSequentially(List source, + Function> mapper) { + + return switch (source.size()) { + case 0 -> Flux.empty(); + case 1 -> Flux.just(source.get(0)).flatMap(mapper); + case 2 -> Flux.fromIterable(source).concatMap(mapper); + default -> { + + Flux first = Flux.just(source.get(0)).flatMap(mapper); + Flux theRest = Flux.fromIterable(source.subList(1, source.size())).flatMapSequential(mapper); + yield first.concatWith(theRest); + } + }; + } + + static Flux concatMapSequentially(Publisher publisher, + Function> mapper) { + + return Flux.from(publisher).switchOnFirst((signal, source) -> { + + if (!signal.hasValue()) { + return source.concatMap(mapper); + } + + Mono firstCall = Mono.from(mapper.apply(signal.get())); + return firstCall.concatWith(source.skip(1).flatMapSequential(mapper)); + }); + } + + private static List toList(Iterable source) { + + Collection collection = toCollection(source); + + if (collection instanceof List list) { + return list; + } + + return new ArrayList<>(collection); + } + + private static Collection toCollection(Iterable source) { + + if (source instanceof Collection collection) { + return collection; + } + + List list = new ArrayList<>(); + source.forEach(list::add); + + return list; + } + + /** + * {@link org.springframework.data.repository.query.FluentQuery.ReactiveFluentQuery} using {@link Example}. 
+ * + * @author Mark Paluch + * @since 3.3 + */ + class ReactiveFluentQueryByExample extends ReactiveFluentQuerySupport, T> { + + private final Optional readPreference; + + ReactiveFluentQueryByExample(Example example, Class resultType, Optional readPreference) { + this(example, Sort.unsorted(), 0, resultType, Collections.emptyList(), readPreference); + } + + ReactiveFluentQueryByExample(Example example, Sort sort, int limit, Class resultType, + List fieldsToInclude, Optional readPreference) { + super(example, sort, limit, resultType, fieldsToInclude); + this.readPreference = readPreference; + } + + @Override + protected ReactiveFluentQueryByExample create(Example predicate, Sort sort, int limit, + Class resultType, List fieldsToInclude) { + return new ReactiveFluentQueryByExample<>(predicate, sort, limit, resultType, fieldsToInclude, readPreference); + } + + @Override + public Mono one() { + return createQuery().one(); + } + + @Override + public Mono first() { + return createQuery().first(); + } + + @Override + public Flux all() { + return createQuery().all(); + } + + @Override + public Mono> scroll(ScrollPosition scrollPosition) { + return createQuery().scroll(scrollPosition); + } + + @Override + public Mono> page(Pageable pageable) { + + Assert.notNull(pageable, "Pageable must not be null"); + + Mono> items = createQuery(q -> q.with(pageable)).all().collectList(); + + return items.flatMap(content -> ReactivePageableExecutionUtils.getPage(content, pageable, this.count())); + } + + @Override + public Mono> slice(Pageable pageable) { + + return createQuery(q -> SliceUtils.limitResult(q, pageable).with(pageable.getSort())).all().collectList() + .map(it -> SliceUtils.sliceResult(it, pageable)); + } + + @Override + public Mono count() { + return createQuery().count(); + } + + @Override + public Mono exists() { + return createQuery().exists(); + } + + private ReactiveFindOperation.TerminatingFind createQuery() { + return createQuery(UnaryOperator.identity()); + } + + 
private ReactiveFindOperation.TerminatingFind createQuery(UnaryOperator queryCustomizer) { + + Query query = new Query(new Criteria().alike(getPredicate())) // + .collation(entityInformation.getCollation()); + + if (getSort().isSorted()) { + query.with(getSort()); + } + + query.limit(getLimit()); + + if (!getFieldsToInclude().isEmpty()) { + query.fields().include(getFieldsToInclude()); + } + + readPreference.ifPresent(query::withReadPreference); + + query = queryCustomizer.apply(query); + + return mongoOperations.query(getPredicate().getProbeType()).inCollection(entityInformation.getCollectionName()) + .as(getResultType()).matching(query); + } - return mongoOperations.find(query, entityInformation.getJavaType(), entityInformation.getCollectionName()); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java index fa7185468d..0ef6c38744 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,53 @@ */ package org.springframework.data.mongodb.repository.support; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.function.Consumer; +import java.util.stream.Stream; + import org.bson.Document; + +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.ExecutableFindOperation; import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.util.SliceUtils; +import org.springframework.data.support.PageableExecutionUtils; import org.springframework.lang.Nullable; -import com.google.common.base.Function; -import com.mongodb.BasicDBObject; -import com.mongodb.DBCollection; -import com.mongodb.DBObject; -import com.querydsl.mongodb.AbstractMongodbQuery; +import com.mysema.commons.lang.CloseableIterator; +import com.mysema.commons.lang.EmptyCloseableIterator; +import com.querydsl.core.Fetchable; +import com.querydsl.core.QueryMetadata; +import com.querydsl.core.QueryModifiers; +import com.querydsl.core.QueryResults; +import com.querydsl.core.types.Expression; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.core.types.Predicate; /** - 
* Spring Data specific {@link AbstractMongodbQuery} implementation. + * Spring Data specific simple {@link com.querydsl.core.Fetchable} {@link com.querydsl.core.SimpleQuery Query} + * implementation. * * @author Oliver Gierke * @author Mark Paluch + * @author Christoph Strobl */ -public class SpringDataMongodbQuery extends AbstractMongodbQuery> { +public class SpringDataMongodbQuery extends SpringDataMongodbQuerySupport> + implements Fetchable { - private final MongoOperations operations; + private final MongoOperations mongoOperations; + private final Consumer queryCustomizer; + private final ExecutableFindOperation.FindWithQuery find; /** * Creates a new {@link SpringDataMongodbQuery}. @@ -42,7 +69,7 @@ public class SpringDataMongodbQuery extends AbstractMongodbQuery type) { + public SpringDataMongodbQuery(MongoOperations operations, Class type) { this(operations, type, operations.getCollectionName(type)); } @@ -53,28 +80,212 @@ public SpringDataMongodbQuery(final MongoOperations operations, final Class type, - String collectionName) { + public SpringDataMongodbQuery(MongoOperations operations, Class type, String collectionName) { + this(operations, type, type, collectionName, it -> {}); + } - super(((MongoTemplate) operations).getMongoDbFactory().getLegacyDb().getCollection(collectionName), - new Function() { + /** + * Creates a new {@link SpringDataMongodbQuery}. + * + * @param operations must not be {@literal null}. + * @param domainType must not be {@literal null}. + * @param resultType must not be {@literal null}. + * @param collectionName must not be {@literal null} or empty. 
+ * @since 3.3 + */ + SpringDataMongodbQuery(MongoOperations operations, Class domainType, Class resultType, + String collectionName, Consumer queryCustomizer) { + super(new SpringDataMongodbSerializer(operations.getConverter())); - @Override - public T apply(@Nullable DBObject input) { - return operations.getConverter().read(type, new Document((BasicDBObject) input)); - } - }, new SpringDataMongodbSerializer(operations.getConverter())); + Class resultType1 = (Class) resultType; + this.mongoOperations = operations; + this.queryCustomizer = queryCustomizer; + this.find = mongoOperations.query(domainType).inCollection(collectionName).as(resultType1); + } - this.operations = operations; + @Override + public CloseableIterator iterate() { + + try { + Stream stream = stream(); + Iterator iterator = stream.iterator(); + + return new CloseableIterator() { + + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public T next() { + return iterator.next(); + } + + @Override + public void remove() { + throw new UnsupportedOperationException("Cannot remove from iterator while streaming data."); + } + + @Override + public void close() { + stream.close(); + } + }; + } catch (RuntimeException e) { + return handleException(e, new EmptyCloseableIterator<>()); + } } - /* - * (non-Javadoc) - * @see com.querydsl.mongodb.AbstractMongodbQuery#getCollection(java.lang.Class) + public Window scroll(ScrollPosition scrollPosition) { + + try { + return find.matching(createQuery()).scroll(scrollPosition); + } catch (RuntimeException e) { + return handleException(e, Window.from(Collections.emptyList(), value -> { + throw new UnsupportedOperationException(); + })); + } + } + + @Override + public Stream stream() { + + try { + return find.matching(createQuery()).stream(); + } catch (RuntimeException e) { + return handleException(e, Stream.empty()); + } + } + + @Override + public List fetch() { + try { + return find.matching(createQuery()).all(); + } catch 
(RuntimeException e) { + return handleException(e, Collections.emptyList()); + } + } + + /** + * Fetch a {@link Page}. + * + * @param pageable + * @return + */ + public Page fetchPage(Pageable pageable) { + + try { + + List content = find.matching(createQuery().with(pageable)).all(); + + return PageableExecutionUtils.getPage(content, pageable, this::fetchCount); + } catch (RuntimeException e) { + return handleException(e, new PageImpl<>(Collections.emptyList(), pageable, 0)); + } + } + + /** + * Fetch a {@link Slice}. + * + * @param pageable defines range and sort of requested slice + * @return new instance of {@link Slice} containing matching results within range. + * @since 4.5 */ + public Slice fetchSlice(Pageable pageable) { + + List content = find.matching(SliceUtils.limitResult(createQuery(), pageable).with(pageable.getSort())).all(); + + return SliceUtils.sliceResult(content, pageable); + } + + @Override + public T fetchFirst() { + try { + return find.matching(createQuery()).firstValue(); + } catch (RuntimeException e) { + return handleException(e, null); + } + } + + @Override + public T fetchOne() { + try { + return find.matching(createQuery()).oneValue(); + } catch (RuntimeException e) { + return handleException(e, null); + } + } + + @Override + public QueryResults fetchResults() { + + long total = fetchCount(); + return total > 0L ? 
new QueryResults<>(fetch(), getQueryMixin().getMetadata().getModifiers(), total) + : QueryResults.emptyResults(); + } + @Override - protected DBCollection getCollection(Class type) { - return ((MongoTemplate) operations).getMongoDbFactory().getLegacyDb() - .getCollection(operations.getCollectionName(type)); + public long fetchCount() { + try { + return find.matching(Query.of(createQuery()).skip(-1).limit(-1)).count(); + } catch (RuntimeException e) { + return handleException(e, 0L); + } + } + + protected org.springframework.data.mongodb.core.query.Query createQuery() { + + QueryMetadata metadata = getQueryMixin().getMetadata(); + + return createQuery(createFilter(metadata), metadata.getProjection(), metadata.getModifiers(), + metadata.getOrderBy()); } + + protected org.springframework.data.mongodb.core.query.Query createQuery(@Nullable Predicate filter, + @Nullable Expression projection, QueryModifiers modifiers, List> orderBy) { + + Document fields = createProjection(projection); + BasicQuery basicQuery = new BasicQuery(createQuery(filter), fields == null ? new Document() : fields); + + Integer limit = modifiers.getLimitAsInteger(); + Integer offset = modifiers.getOffsetAsInteger(); + + if (limit != null) { + basicQuery.limit(limit); + } + if (offset != null) { + basicQuery.skip(offset); + } + if (orderBy.size() > 0) { + basicQuery.setSortObject(createSort(orderBy)); + } + + queryCustomizer.accept(basicQuery); + + return basicQuery; + } + + /** + * Fetch the list of ids matching a given condition. + * + * @param targetType must not be {@literal null}. + * @param condition must not be {@literal null}. + * @return empty {@link List} if none found. 
+ */ + protected List getIds(Class targetType, Predicate condition) { + + Query query = createQuery(condition, null, QueryModifiers.EMPTY, Collections.emptyList()); + return mongoOperations.findDistinct(query, FieldName.ID.name(), targetType, Object.class); + } + + private static T handleException(RuntimeException e, T defaultValue) { + + if (e.getClass().getName().endsWith("$NoResults")) { + return defaultValue; + } + + throw e; + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java new file mode 100644 index 0000000000..a64f666f3f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java @@ -0,0 +1,139 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import java.util.List; + +import org.bson.Document; +import org.bson.codecs.DocumentCodec; +import org.bson.json.JsonMode; +import org.bson.json.JsonWriterSettings; + +import com.mongodb.MongoClientSettings; +import com.querydsl.core.support.QueryMixin; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.mongodb.document.AbstractMongodbQuery; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; + +/** + * Support query type to augment Spring Data-specific {@link #toString} representations and + * {@link org.springframework.data.domain.Sort} creation. + * + * @author Mark Paluch + * @since 3.3 + */ +abstract class SpringDataMongodbQuerySupport> + extends AbstractMongodbQuery { + + private final QueryMixin superQueryMixin; + + private static final JsonWriterSettings JSON_WRITER_SETTINGS = JsonWriterSettings.builder().outputMode(JsonMode.SHELL) + .build(); + + private final MongodbDocumentSerializer serializer; + + @SuppressWarnings("unchecked") + SpringDataMongodbQuerySupport(MongodbDocumentSerializer serializer) { + + super(serializer); + this.serializer = serializer; + this.superQueryMixin = super.getQueryMixin(); + } + + /** + * Returns the {@literal Mongo Shell} representation of the query.
          + * The following query + * + *
          +	 *
          +	 * where(p.lastname.eq("Matthews")).orderBy(p.firstname.asc()).offset(1).limit(5);
          +	 * 
          + * + * results in + * + *
          +	 *
          +	 * find({"lastname" : "Matthews"}).sort({"firstname" : 1}).skip(1).limit(5)
          +	 * 
          + * + * Note that encoding to {@link String} may fail when using data types that cannot be encoded or DBRef's without an + * identifier. + * + * @return never {@literal null}. + */ + @Override + public String toString() { + + Document projection = createProjection(getQueryMixin().getMetadata().getProjection()); + Document sort = createSort(getQueryMixin().getMetadata().getOrderBy()); + DocumentCodec codec = new DocumentCodec(MongoClientSettings.getDefaultCodecRegistry()); + + StringBuilder sb = new StringBuilder("find(" + asDocument().toJson(JSON_WRITER_SETTINGS, codec)); + if (projection != null && projection.isEmpty()) { + sb.append(", ").append(projection.toJson(JSON_WRITER_SETTINGS, codec)); + } + sb.append(")"); + if (!sort.isEmpty()) { + sb.append(".sort(").append(sort.toJson(JSON_WRITER_SETTINGS, codec)).append(")"); + } + if (getQueryMixin().getMetadata().getModifiers().getOffset() != null) { + sb.append(".skip(").append(getQueryMixin().getMetadata().getModifiers().getOffset()).append(")"); + } + if (getQueryMixin().getMetadata().getModifiers().getLimit() != null) { + sb.append(".limit(").append(getQueryMixin().getMetadata().getModifiers().getLimit()).append(")"); + } + return sb.toString(); + } + + /** + * Get the where definition as a Document instance + * + * @return + */ + public Document asDocument() { + return createQuery(getQueryMixin().getMetadata().getWhere()); + } + + /** + * Obtain the {@literal Mongo Shell} json query representation. + * + * @return never {@literal null}. + */ + public String toJson() { + return toJson(JSON_WRITER_SETTINGS); + } + + /** + * Obtain the json query representation applying given {@link JsonWriterSettings settings}. + * + * @param settings must not be {@literal null}. + * @return never {@literal null}. 
+ */ + public String toJson(JsonWriterSettings settings) { + return asDocument().toJson(settings); + } + + /** + * Compute the sort {@link Document} from the given list of {@link OrderSpecifier order specifiers}. + * + * @param orderSpecifiers can be {@literal null}. + * @return an empty {@link Document} if predicate is {@literal null}. + * @see MongodbDocumentSerializer#toSort(List) + */ + protected Document createSort(List> orderSpecifiers) { + return serializer.toSort(orderSpecifiers); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java index 83f850fe55..d9a550a0f7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,6 @@ */ package org.springframework.data.mongodb.repository.support; -import java.util.Collections; -import java.util.HashSet; import java.util.Optional; import java.util.Set; import java.util.regex.Pattern; @@ -25,16 +23,15 @@ import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; import com.mongodb.DBRef; -import com.mongodb.util.JSON; import com.querydsl.core.types.Constant; import com.querydsl.core.types.Expression; import com.querydsl.core.types.Operation; @@ -42,6 +39,7 @@ import com.querydsl.core.types.PathMetadata; import com.querydsl.core.types.PathType; import com.querydsl.mongodb.MongodbSerializer; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; /** * Custom {@link MongodbSerializer} to take mapping information into account when building keys for constraints. 
@@ -50,42 +48,29 @@ * @author Christoph Strobl * @author Mark Paluch */ -class SpringDataMongodbSerializer extends MongodbSerializer { +class SpringDataMongodbSerializer extends MongodbDocumentSerializer { - private static final String ID_KEY = "_id"; - private static final Set PATH_TYPES; - - static { - - Set pathTypes = new HashSet(); - pathTypes.add(PathType.VARIABLE); - pathTypes.add(PathType.PROPERTY); - - PATH_TYPES = Collections.unmodifiableSet(pathTypes); - } + private static final String ID_KEY = FieldName.ID.name(); + private static final Set PATH_TYPES = Set.of(PathType.VARIABLE, PathType.PROPERTY); private final MongoConverter converter; private final MappingContext, MongoPersistentProperty> mappingContext; private final QueryMapper mapper; /** - * Creates a new {@link SpringDataMongodbSerializer} for the given {@link MappingContext}. + * Creates a new {@link SpringDataMongodbSerializer} for the given {@link MongoConverter}. * - * @param mappingContext must not be {@literal null}. + * @param converter must not be {@literal null}. 
*/ public SpringDataMongodbSerializer(MongoConverter converter) { - Assert.notNull(converter, "MongoConverter must not be null!"); + Assert.notNull(converter, "MongoConverter must not be null"); this.mappingContext = converter.getMappingContext(); this.converter = converter; this.mapper = new QueryMapper(converter); } - /* - * (non-Javadoc) - * @see com.querydsl.mongodb.MongodbSerializer#visit(com.querydsl.core.types.Constant, java.lang.Void) - */ @Override public Object visit(Constant expr, Void context) { @@ -96,10 +81,6 @@ public Object visit(Constant expr, Void context) { return converter.convertToMongoType(expr.getConstant()); } - /* - * (non-Javadoc) - * @see com.querydsl.mongodb.MongodbSerializer#getKeyForPath(com.querydsl.core.types.Path, com.querydsl.core.types.PathMetadata) - */ @Override protected String getKeyForPath(Path expr, PathMetadata metadata) { @@ -114,88 +95,103 @@ protected String getKeyForPath(Path expr, PathMetadata metadata) { return property == null ? super.getKeyForPath(expr, metadata) : property.getFieldName(); } - /* - * (non-Javadoc) - * @see com.querydsl.mongodb.MongodbSerializer#asDocument(java.lang.String, java.lang.Object) - */ @Override - protected DBObject asDBObject(@Nullable String key, @Nullable Object value) { + protected Document asDocument(@Nullable String key, @Nullable Object value) { - value = value instanceof Optional ? ((Optional) value).orElse(null) : value; + value = value instanceof Optional optional ? optional.orElse(null) : value; - if (ID_KEY.equals(key)) { - DBObject superIdValue = super.asDBObject(key, value); - Document mappedIdValue = mapper.getMappedObject((BasicDBObject) superIdValue, Optional.empty()); - return (DBObject) JSON.parse(mappedIdValue.toJson()); - } - return super.asDBObject(key, value instanceof Pattern ? value : converter.convertToMongoType(value)); + return super.asDocument(key, value instanceof Pattern ? 
value : converter.convertToMongoType(value)); } - /* - * (non-Javadoc) - * @see com.querydsl.mongodb.MongodbSerializer#isReference(com.querydsl.core.types.Path) - */ @Override protected boolean isReference(@Nullable Path path) { MongoPersistentProperty property = getPropertyForPotentialDbRef(path); - return property == null ? false : property.isAssociation(); + return property != null && property.isAssociation(); } - /* - * (non-Javadoc) - * @see com.querydsl.mongodb.MongodbSerializer#asReference(java.lang.Object) - */ @Override protected DBRef asReference(@Nullable Object constant) { return asReference(constant, null); } - protected DBRef asReference(Object constant, Path path) { + protected DBRef asReference(Object constant, @Nullable Path path) { return converter.toDBRef(constant, getPropertyForPotentialDbRef(path)); } - /* - * (non-Javadoc) - * @see com.querydsl.mongodb.MongodbSerializer#asDBKey(com.querydsl.core.types.Operation, int) - */ @Override protected String asDBKey(@Nullable Operation expr, int index) { Expression arg = expr.getArg(index); String key = super.asDBKey(expr, index); - if (!(arg instanceof Path)) { + if (!(arg instanceof Path path)) { return key; } - Path path = (Path) arg; - if (!isReference(path)) { return key; } MongoPersistentProperty property = getPropertyFor(path); - return property.isIdProperty() ? key.replaceAll("." + ID_KEY + "$", "") : key; + return property != null && property.getOwner().isIdProperty(property) ? key.replaceAll("." + ID_KEY + "$", "") + : key; } - /* - * (non-Javadoc) - * @see com.querydsl.mongodb.MongodbSerializer#convert(com.querydsl.core.types.Path, com.querydsl.core.types.Constant) - */ + @Override + protected boolean isId(Path arg) { + MongoPersistentProperty propertyFor = getPropertyFor(arg); + return propertyFor == null ? 
super.isId(arg) : propertyFor.getOwner().isIdProperty(propertyFor); + } + + @Override + @Nullable protected Object convert(@Nullable Path path, @Nullable Constant constant) { + if (constant == null) { + return null; + } + if (!isReference(path)) { - return super.convert(path, constant); + + MongoPersistentProperty property = getPropertyFor(path); + if (property == null) { + return super.convert(path, constant); + } + + if (property.getOwner().isIdProperty(property)) { + return mapper.convertId(constant.getConstant(), property.getFieldType()); + } + + if (property.hasExplicitWriteTarget()) { + return converter.convertToMongoType(constant.getConstant(), TypeInformation.of(property.getFieldType())); + } + + return converter.convertToMongoType(constant.getConstant()); } MongoPersistentProperty property = getPropertyFor(path); - return property.isIdProperty() ? asReference(constant.getConstant(), path.getMetadata().getParent()) - : asReference(constant.getConstant(), path); + if (property != null) { + if (property.isDocumentReference()) { + return converter.toDocumentPointer(constant.getConstant(), property).getPointer(); + } + + if (property.getOwner().isIdProperty(property)) { + + MongoPersistentProperty propertyForPotentialDbRef = getPropertyForPotentialDbRef(path); + if (propertyForPotentialDbRef != null && propertyForPotentialDbRef.isDocumentReference()) { + return converter.toDocumentPointer(constant.getConstant(), propertyForPotentialDbRef).getPointer(); + } + return asReference(constant.getConstant(), path.getMetadata().getParent()); + } + } + + return asReference(constant.getConstant(), path); } + @Nullable private MongoPersistentProperty getPropertyFor(Path path) { Path parent = path.getMetadata().getParent(); @@ -205,7 +201,7 @@ private MongoPersistentProperty getPropertyFor(Path path) { } MongoPersistentEntity entity = mappingContext.getPersistentEntity(parent.getType()); - return entity != null ? 
entity.getRequiredPersistentProperty(path.getMetadata().getName()) : null; + return entity != null ? entity.getPersistentProperty(path.getMetadata().getName()) : null; } /** @@ -216,7 +212,8 @@ private MongoPersistentProperty getPropertyFor(Path path) { * @param path * @return */ - private MongoPersistentProperty getPropertyForPotentialDbRef(Path path) { + @Nullable + private MongoPersistentProperty getPropertyForPotentialDbRef(@Nullable Path path) { if (path == null) { return null; @@ -225,7 +222,8 @@ private MongoPersistentProperty getPropertyForPotentialDbRef(Path path) { MongoPersistentProperty property = getPropertyFor(path); PathMetadata metadata = path.getMetadata(); - if (property != null && property.isIdProperty() && metadata != null && metadata.getParent() != null) { + if (property != null && property.getOwner().isIdProperty(property) && metadata != null + && metadata.getParent() != null) { return getPropertyFor(metadata.getParent()); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/util/SliceUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/util/SliceUtils.java new file mode 100644 index 0000000000..b570687cb5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/util/SliceUtils.java @@ -0,0 +1,74 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.util; + +import java.util.List; + +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.SliceImpl; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Utility methods for {@link Slice} handling. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.5 + */ +public class SliceUtils { + + /** + * Creates a {@link Slice} given {@link Pageable} and {@link List} of results. + * + * @param the element type. + * @param resultList the source list holding the result of the request. If the result list contains more elements + * (indicating a next slice is available) it is trimmed to the {@link Pageable#getPageSize() page size}. + * @param pageable the source pageable. + * @return new instance of {@link Slice}. + */ + public static Slice sliceResult(List resultList, Pageable pageable) { + + boolean hasNext = resultList.size() > pageable.getPageSize(); + + if (hasNext) { + resultList = resultList.subList(0, pageable.getPageSize()); + } + + return new SliceImpl<>(resultList, pageable, hasNext); + } + + /** + * Customize query for {@link #sliceResult sliced result} retrieval. If {@link Pageable#isPaged() paged} the + * {@link Query#limit(int) limit} is set to {@code pagesize + 1} in order to determine if more data is available. + * + * @param query the source query + * @param pageable paging to apply. + * @return new instance of {@link Query} if either {@link Pageable#isPaged() paged}, the source query otherwise. 
+ */ + public static Query limitResult(Query query, Pageable pageable) { + + if (pageable.isUnpaged()) { + return query; + } + + Query target = Query.of(query); + target.skip(pageable.getOffset()); + target.limit(pageable.getPageSize() + 1); + + return target; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java index 95ab969c11..cbbd4a37a9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,52 +15,271 @@ */ package org.springframework.data.mongodb.util; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; import java.util.Date; +import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; +import java.util.Map.Entry; +import java.util.StringJoiner; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.stream.StreamSupport; -import org.bson.BsonValue; -import org.bson.Document; +import org.bson.*; +import org.bson.codecs.Codec; +import org.bson.codecs.DocumentCodec; +import org.bson.codecs.EncoderContext; +import org.bson.codecs.configuration.CodecConfigurationException; +import org.bson.codecs.configuration.CodecRegistry; 
import org.bson.conversions.Bson; +import org.bson.json.JsonParseException; +import org.bson.types.Binary; +import org.bson.types.Decimal128; +import org.bson.types.ObjectId; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.FieldName.Type; import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.CollectionUtils; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; import com.mongodb.BasicDBObject; import com.mongodb.DBObject; import com.mongodb.DBRef; +import com.mongodb.MongoClientSettings; /** + * Internal API for operations on {@link Bson} elements that can be either {@link Document} or {@link DBObject}. + * * @author Christoph Strobl * @author Mark Paluch * @since 2.0 */ public class BsonUtils { + /** + * The empty document (immutable). This document is serializable. + * + * @since 3.2.5 + */ + public static final Document EMPTY_DOCUMENT = new EmptyDocument(); + @SuppressWarnings("unchecked") @Nullable public static T get(Bson bson, String key) { return (T) asMap(bson).get(key); } + /** + * Return the {@link Bson} object as {@link Map}. Depending on the input type, the return value can be either a casted + * version of {@code bson} or a converted (detached from the original value). + * + * @param bson + * @return + */ public static Map asMap(Bson bson) { - if (bson instanceof Document) { - return (Document) bson; + return asMap(bson, MongoClientSettings.getDefaultCodecRegistry()); + } + + /** + * Return the {@link Bson} object as {@link Map}. 
Depending on the input type, the return value can be either a casted + * version of {@code bson} or a converted (detached from the original value) using the given {@link CodecRegistry} to + * obtain {@link org.bson.codecs.Codec codecs} that might be required for conversion. + * + * @param bson can be {@literal null}. + * @param codecRegistry must not be {@literal null}. + * @return never {@literal null}. Returns an empty {@link Map} if input {@link Bson} is {@literal null}. + * @since 4.0 + */ + public static Map asMap(@Nullable Bson bson, CodecRegistry codecRegistry) { + + if (bson == null) { + return Collections.emptyMap(); + } + + if (bson instanceof Document document) { + return document; + } + if (bson instanceof BasicDBObject dbo) { + return dbo; } - if (bson instanceof BasicDBObject) { - return ((BasicDBObject) bson); + if (bson instanceof DBObject dbo) { + return dbo.toMap(); } - throw new IllegalArgumentException("o_O what's that? Cannot read values from " + bson.getClass()); + + return new Document(bson.toBsonDocument(Document.class, codecRegistry)); + } + + /** + * Return the {@link Bson} object as {@link Document}. Depending on the input type, the return value can be either a + * casted version of {@code bson} or a converted (detached from the original value). + * + * @param bson + * @return + * @since 3.2.5 + */ + public static Document asDocument(Bson bson) { + return asDocument(bson, MongoClientSettings.getDefaultCodecRegistry()); + } + + /** + * Return the {@link Bson} object as {@link Document}. Depending on the input type, the return value can be either a + * casted version of {@code bson} or a converted (detached from the original value) using the given + * {@link CodecRegistry} to obtain {@link org.bson.codecs.Codec codecs} that might be required for conversion. + * + * @param bson + * @param codecRegistry must not be {@literal null}. + * @return never {@literal null}. 
+ * @since 4.0 + */ + public static Document asDocument(Bson bson, CodecRegistry codecRegistry) { + + Map map = asMap(bson, codecRegistry); + + if (map instanceof Document document) { + return document; + } + + return new Document(map); + } + + /** + * Return the {@link Bson} object as mutable {@link Document} containing all entries from {@link Bson}. + * + * @param bson + * @return a mutable {@link Document} containing all entries from {@link Bson}. + * @since 3.2.5 + */ + public static Document asMutableDocument(Bson bson) { + + if (bson instanceof EmptyDocument) { + bson = new Document(asDocument(bson)); + } + + if (bson instanceof Document document) { + return document; + } + + Map map = asMap(bson); + + if (map instanceof Document document) { + return document; + } + + return new Document(map); } public static void addToMap(Bson bson, String key, @Nullable Object value) { - if (bson instanceof Document) { - ((Document) bson).put(key, value); + if (bson instanceof Document document) { + + document.put(key, value); + return; + } + if (bson instanceof BSONObject bsonObject) { + + bsonObject.put(key, value); + return; + } + + throw new IllegalArgumentException(String.format( + "Cannot add key/value pair to %s; as map given Bson must be a Document or BSONObject", bson.getClass())); + } + + /** + * Add all entries from the given {@literal source} {@link Map} to the {@literal target}. + * + * @param target must not be {@literal null}. + * @param source must not be {@literal null}. 
+ * @since 3.2 + */ + public static void addAllToMap(Bson target, Map source) { + + if (target instanceof Document document) { + + document.putAll(source); + return; + } + + if (target instanceof BSONObject bsonObject) { + + bsonObject.putAll(source); return; } - if (bson instanceof DBObject) { - ((DBObject) bson).put(key, value); + + throw new IllegalArgumentException( + String.format("Cannot add all to %s; Given Bson must be a Document or BSONObject.", target.getClass())); + } + + /** + * Check if a given entry (key/value pair) is present in the given {@link Bson}. + * + * @param bson must not be {@literal null}. + * @param key must not be {@literal null}. + * @param value can be {@literal null}. + * @return {@literal true} if (key/value pair) is present. + * @since 3.2 + */ + public static boolean contains(Bson bson, String key, @Nullable Object value) { + + if (bson instanceof Document document) { + return document.containsKey(key) && ObjectUtils.nullSafeEquals(document.get(key), value); + } + if (bson instanceof BSONObject bsonObject) { + return bsonObject.containsField(key) && ObjectUtils.nullSafeEquals(bsonObject.get(key), value); + } + + Map map = asMap(bson); + return map.containsKey(key) && ObjectUtils.nullSafeEquals(map.get(key), value); + } + + /** + * Remove {@code _id : null} from the given {@link Bson} if present. + * + * @param bson must not be {@literal null}. + * @since 3.2 + */ + public static boolean removeNullId(Bson bson) { + + if (!contains(bson, FieldName.ID.name(), null)) { + return false; + } + + removeFrom(bson, FieldName.ID.name()); + return true; + } + + /** + * Remove the given {@literal key} from the {@link Bson} value. + * + * @param bson must not be {@literal null}. + * @param key must not be {@literal null}. 
+ * @since 3.2 + */ + static void removeFrom(Bson bson, String key) { + + if (bson instanceof Document document) { + + document.remove(key); + return; + } + + if (bson instanceof BSONObject bsonObject) { + + bsonObject.removeField(key); return; } - throw new IllegalArgumentException("o_O what's that? Cannot add value to " + bson.getClass()); + + throw new IllegalArgumentException( + String.format("Cannot remove from %s given Bson must be a Document or BSONObject.", bson.getClass())); } /** @@ -73,35 +292,669 @@ public static void addToMap(Bson bson, String key, @Nullable Object value) { */ public static Object toJavaType(BsonValue value) { - switch (value.getBsonType()) { - case INT32: - return value.asInt32().getValue(); - case INT64: - return value.asInt64().getValue(); - case STRING: - return value.asString().getValue(); - case DECIMAL128: - return value.asDecimal128().doubleValue(); - case DOUBLE: - return value.asDouble().getValue(); - case BOOLEAN: - return value.asBoolean().getValue(); - case OBJECT_ID: - return value.asObjectId().getValue(); - case DB_POINTER: - return new DBRef(value.asDBPointer().getNamespace(), value.asDBPointer().getId()); - case BINARY: - return value.asBinary().getData(); - case DATE_TIME: - return new Date(value.asDateTime().getValue()); - case SYMBOL: - return value.asSymbol().getSymbol(); - case ARRAY: - return value.asArray().toArray(); - case DOCUMENT: - return Document.parse(value.asDocument().toJson()); - default: - return value; + return switch (value.getBsonType()) { + case INT32 -> value.asInt32().getValue(); + case INT64 -> value.asInt64().getValue(); + case STRING -> value.asString().getValue(); + case DECIMAL128 -> value.asDecimal128().doubleValue(); + case DOUBLE -> value.asDouble().getValue(); + case BOOLEAN -> value.asBoolean().getValue(); + case OBJECT_ID -> value.asObjectId().getValue(); + case DB_POINTER -> new DBRef(value.asDBPointer().getNamespace(), value.asDBPointer().getId()); + case BINARY -> { + + BsonBinary 
binary = value.asBinary(); + if(binary.getType() != BsonBinarySubType.VECTOR.getValue()) { + yield binary.getData(); + } + yield value.asBinary().asVector(); + } + case DATE_TIME -> new Date(value.asDateTime().getValue()); + case SYMBOL -> value.asSymbol().getSymbol(); + case ARRAY -> value.asArray().toArray(); + case DOCUMENT -> Document.parse(value.asDocument().toJson()); + + default -> value; + }; + } + + /** + * Convert a given simple value (eg. {@link String}, {@link Long}) to its corresponding {@link BsonValue}. + * + * @param source must not be {@literal null}. + * @return the corresponding {@link BsonValue} representation. + * @throws IllegalArgumentException if {@literal source} does not correspond to a {@link BsonValue} type. + * @since 3.0 + */ + public static BsonValue simpleToBsonValue(Object source) { + return simpleToBsonValue(source, MongoClientSettings.getDefaultCodecRegistry()); + } + + /** + * Convert a given simple value (eg. {@link String}, {@link Long}) to its corresponding {@link BsonValue}. + * + * @param source must not be {@literal null}. + * @param codecRegistry The {@link CodecRegistry} used as a fallback to convert types using native {@link Codec}. Must + * not be {@literal null}. + * @return the corresponding {@link BsonValue} representation. + * @throws IllegalArgumentException if {@literal source} does not correspond to a {@link BsonValue} type. 
+ * @since 4.2 + */ + @SuppressWarnings("unchecked") + public static BsonValue simpleToBsonValue(Object source, CodecRegistry codecRegistry) { + + if (source instanceof BsonValue bsonValue) { + return bsonValue; + } + + if (source instanceof ObjectId objectId) { + return new BsonObjectId(objectId); + } + + if (source instanceof String stringValue) { + return new BsonString(stringValue); + } + + if (source instanceof Double doubleValue) { + return new BsonDouble(doubleValue); + } + + if (source instanceof Integer integerValue) { + return new BsonInt32(integerValue); + } + + if (source instanceof Long longValue) { + return new BsonInt64(longValue); + } + + if (source instanceof byte[] byteArray) { + return new BsonBinary(byteArray); + } + + if (source instanceof Boolean booleanValue) { + return new BsonBoolean(booleanValue); + } + + if (source instanceof Float floatValue) { + return new BsonDouble(floatValue); + } + + if (source instanceof Binary binary) { + return new BsonBinary(binary.getType(), binary.getData()); + } + + if (source instanceof Date date) { + new BsonDateTime(date.getTime()); + } + + try { + + Object value = source; + if (ClassUtils.isPrimitiveArray(source.getClass())) { + value = CollectionUtils.arrayToList(source); + } + + Codec codec = codecRegistry.get(value.getClass()); + BsonCapturingWriter writer = new BsonCapturingWriter(value.getClass()); + codec.encode(writer, value, + ObjectUtils.isArray(value) || value instanceof Collection ? EncoderContext.builder().build() : null); + return writer.getCapturedValue(); + } catch (CodecConfigurationException e) { + throw new IllegalArgumentException( + String.format("Unable to convert %s to BsonValue.", source != null ? source.getClass().getName() : "null")); + } + } + + /** + * Merge the given {@link Document documents} into on in the given order. Keys contained within multiple documents are + * overwritten by their follow-ups. + * + * @param documents must not be {@literal null}. Can be empty. 
+ * @return the document containing all key value pairs. + * @since 2.2 + */ + public static Document merge(Document... documents) { + + if (ObjectUtils.isEmpty(documents)) { + return new Document(); + } + + if (documents.length == 1) { + return documents[0]; + } + + Document target = new Document(); + Arrays.asList(documents).forEach(target::putAll); + return target; + } + + /** + * @param source + * @param orElse + * @return + * @since 2.2 + */ + public static Document toDocumentOrElse(String source, Function orElse) { + + if (source.stripLeading().startsWith("{")) { + return Document.parse(source); + } + + return orElse.apply(source); + } + + /** + * Serialize the given {@link Document} as Json applying default codecs if necessary. + * + * @param source + * @return + * @since 2.2.1 + */ + @Nullable + public static String toJson(@Nullable Document source) { + + if (source == null) { + return null; + } + + try { + return source.toJson(); + } catch (Exception e) { + return toJson((Object) source); + } + } + + /** + * Check if a given String looks like {@link Document#parse(String) parsable} json. + * + * @param value can be {@literal null}. + * @return {@literal true} if the given value looks like a json document. + * @since 3.0 + */ + public static boolean isJsonDocument(@Nullable String value) { + + if (!StringUtils.hasText(value)) { + return false; + } + + String potentialJson = value.trim(); + return potentialJson.startsWith("{") && potentialJson.endsWith("}"); + } + + /** + * Check if a given String looks like {@link org.bson.BsonArray#parse(String) parsable} json array. + * + * @param value can be {@literal null}. + * @return {@literal true} if the given value looks like a json array. 
+ * @since 3.0 + */ + public static boolean isJsonArray(@Nullable String value) { + return StringUtils.hasText(value) && (value.startsWith("[") && value.endsWith("]")); + } + + /** + * Parse the given {@literal json} to {@link Document} applying transformations as specified by a potentially given + * {@link org.bson.codecs.Codec}. + * + * @param json must not be {@literal null}. + * @param codecRegistryProvider can be {@literal null}. In that case the default {@link DocumentCodec} is used. + * @return never {@literal null}. + * @throws IllegalArgumentException if the required argument is {@literal null}. + * @since 3.0 + */ + public static Document parse(String json, @Nullable CodecRegistryProvider codecRegistryProvider) { + + Assert.notNull(json, "Json must not be null"); + + if (codecRegistryProvider == null) { + return Document.parse(json); + } + + return Document.parse(json, codecRegistryProvider.getCodecFor(Document.class) + .orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry()))); + } + + /** + * Resolve the value for a given key. If the given {@link Bson} value contains the key the value is immediately + * returned. If not and the key contains a path using the dot ({@code .}) notation it will try to resolve the path by + * inspecting the individual parts. If one of the intermediate ones is {@literal null} or cannot be inspected further + * (wrong) type, {@literal null} is returned. + * + * @param bson the source to inspect. Must not be {@literal null}. + * @param key the key to lookup. Must not be {@literal null}. + * @return can be {@literal null}. + * @since 3.0.8 + */ + @Nullable + public static Object resolveValue(Bson bson, String key) { + return resolveValue(asMap(bson), key); + } + + /** + * Resolve the value for a given {@link FieldName field name}. If the given name is a {@link Type#KEY} the value is + * obtained from the target {@link Bson} immediately. 
If the given fieldName is a {@link Type#PATH} maybe using the + * dot ({@code .}) notation it will try to resolve the path by inspecting the individual parts. If one of the + * intermediate ones is {@literal null} or cannot be inspected further (wrong) type, {@literal null} is returned. + * + * @param bson the source to inspect. Must not be {@literal null}. + * @param fieldName the name to lookup. Must not be {@literal null}. + * @return can be {@literal null}. + * @since 4.2 + */ + public static Object resolveValue(Bson bson, FieldName fieldName) { + return resolveValue(asMap(bson), fieldName); + } + + /** + * Resolve the value for a given {@link FieldName field name}. If the given name is a {@link Type#KEY} the value is + * obtained from the target {@link Bson} immediately. If the given fieldName is a {@link Type#PATH} maybe using the + * dot ({@code .}) notation it will try to resolve the path by inspecting the individual parts. If one of the + * intermediate ones is {@literal null} or cannot be inspected further (wrong) type, {@literal null} is returned. + * + * @param source the source to inspect. Must not be {@literal null}. + * @param fieldName the key to lookup. Must not be {@literal null}. + * @return can be {@literal null}. + * @since 4.2 + */ + @Nullable + public static Object resolveValue(Map source, FieldName fieldName) { + + if (fieldName.isKey()) { + return source.get(fieldName.name()); + } + + String[] parts = fieldName.parts(); + + for (int i = 1; i < parts.length; i++) { + + Object result = source.get(parts[i - 1]); + + if (!(result instanceof Bson resultBson)) { + return null; + } + + source = asMap(resultBson); + } + + return source.get(parts[parts.length - 1]); + } + + /** + * Resolve the value for a given key. If the given {@link Map} value contains the key the value is immediately + * returned. If not and the key contains a path using the dot ({@code .}) notation it will try to resolve the path by + * inspecting the individual parts. 
If one of the intermediate ones is {@literal null} or cannot be inspected further + * (wrong) type, {@literal null} is returned. + * + * @param source the source to inspect. Must not be {@literal null}. + * @param key the key to lookup. Must not be {@literal null}. + * @return can be {@literal null}. + * @since 4.1 + */ + @Nullable + public static Object resolveValue(Map source, String key) { + + if (source.containsKey(key)) { + return source.get(key); + } + + return resolveValue(source, FieldName.path(key)); + } + + public static boolean hasValue(Bson bson, FieldName fieldName) { + + Map source = asMap(bson); + if (fieldName.isKey()) { + return source.containsKey(fieldName.name()); + } + + String[] parts = fieldName.parts(); + Object result; + + for (int i = 1; i < parts.length; i++) { + + result = source.get(parts[i - 1]); + source = getAsMap(result); + + if (source == null) { + return false; + } + } + + return source.containsKey(parts[parts.length - 1]); + + } + + /** + * Returns whether the underlying {@link Bson bson} has a value ({@literal null} or non-{@literal null}) for the given + * {@code key}. + * + * @param bson the source to inspect. Must not be {@literal null}. + * @param key the key to lookup. Must not be {@literal null}. + * @return {@literal true} if no non {@literal null} value present. + * @since 3.0.8 + */ + public static boolean hasValue(Bson bson, String key) { + return hasValue(bson, FieldName.path(key)); + } + + /** + * Returns the given source object as map, i.e. {@link Document}s and maps as is or {@literal null} otherwise. + * + * @param source can be {@literal null}. + * @return can be {@literal null}. 
+ */ + @Nullable + @SuppressWarnings("unchecked") + private static Map getAsMap(Object source) { + + if (source instanceof Document document) { + return document; + } + + if (source instanceof BasicDBObject basicDBObject) { + return basicDBObject; + } + + if (source instanceof DBObject dbObject) { + return dbObject.toMap(); + } + + if (source instanceof Map) { + return (Map) source; + } + + return null; + } + + /** + * Returns the given source object as {@link Bson}, i.e. {@link Document}s and maps as is or throw + * {@link IllegalArgumentException}. + * + * @param source + * @return the converted/casted source object. + * @throws IllegalArgumentException if {@code source} cannot be converted/cast to {@link Bson}. + * @since 3.2.3 + * @see #supportsBson(Object) + */ + @SuppressWarnings("unchecked") + public static Bson asBson(Object source) { + + if (source instanceof Document document) { + return document; + } + + if (source instanceof BasicDBObject basicDBObject) { + return basicDBObject; + } + + if (source instanceof DBObject dbObject) { + return new Document(dbObject.toMap()); + } + + if (source instanceof Map) { + return new Document((Map) source); + } + + throw new IllegalArgumentException(String.format("Cannot convert %s to Bson", source)); + } + + /** + * Returns the given source can be used/converted as {@link Bson}. + * + * @param source + * @return {@literal true} if the given source can be converted to {@link Bson}. + * @since 3.2.3 + */ + public static boolean supportsBson(Object source) { + return source instanceof DBObject || source instanceof Map; + } + + /** + * Returns given object as {@link Collection}. Will return the {@link Collection} as is if the source is a + * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element + * collection for everything else. + * + * @param source must not be {@literal null}. + * @return never {@literal null}. 
+ * @since 3.2 + */ + public static Collection asCollection(Object source) { + + if (source instanceof Collection collection) { + return collection; + } + + return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source); + } + + public static Document mapValues(Document source, BiFunction valueMapper) { + return mapEntries(source, Entry::getKey, entry -> valueMapper.apply(entry.getKey(), entry.getValue())); + } + + public static Document mapEntries(Document source, Function, String> keyMapper, + Function, Object> valueMapper) { + + if (source.isEmpty()) { + return source; + } + + Map target = new LinkedHashMap<>(source.size(), 1f); + for (Entry entry : source.entrySet()) { + target.put(keyMapper.apply(entry), valueMapper.apply(entry)); + } + return new Document(target); + } + + @Nullable + private static String toJson(@Nullable Object value) { + + if (value == null) { + return null; + } + + try { + return value instanceof Document document + ? document.toJson(MongoClientSettings.getDefaultCodecRegistry().get(Document.class)) + : serializeValue(value); + + } catch (Exception e) { + + if (value instanceof Collection collection) { + return toString(collection); + } else if (value instanceof Map map) { + return toString(map); + } else if (ObjectUtils.isArray(value)) { + return toString(Arrays.asList(ObjectUtils.toObjectArray(value))); + } + + throw e instanceof JsonParseException jsonParseException ? 
jsonParseException : new JsonParseException(e); + } + } + + private static String serializeValue(@Nullable Object value) { + + if (value == null) { + return "null"; + } + + String documentJson = new Document("toBeEncoded", value).toJson(); + return documentJson.substring(documentJson.indexOf(':') + 1, documentJson.length() - 1).trim(); + } + + private static String toString(Map source) { + + // Avoid String.format for performance + return iterableToDelimitedString(source.entrySet(), "{ ", " }", + entry -> "\"" + entry.getKey() + "\" : " + toJson(entry.getValue())); + } + + private static String toString(Collection source) { + return iterableToDelimitedString(source, "[ ", " ]", BsonUtils::toJson); + } + + private static String iterableToDelimitedString(Iterable source, String prefix, String suffix, + Converter transformer) { + + StringJoiner joiner = new StringJoiner(", ", prefix, suffix); + + StreamSupport.stream(source.spliterator(), false).map(transformer::convert).forEach(joiner::add); + + return joiner.toString(); + } + + static class BsonCapturingWriter extends AbstractBsonWriter { + + private final List values = new ArrayList<>(0); + + public BsonCapturingWriter(Class type) { + super(new BsonWriterSettings()); + + if (ClassUtils.isAssignable(Map.class, type)) { + setContext(new Context(null, BsonContextType.DOCUMENT)); + } else if (ClassUtils.isAssignable(List.class, type) || type.isArray()) { + setContext(new Context(null, BsonContextType.ARRAY)); + } else { + setContext(new Context(null, BsonContextType.DOCUMENT)); + } + } + + @Nullable + BsonValue getCapturedValue() { + + if (values.isEmpty()) { + return null; + } + if (!getContext().getContextType().equals(BsonContextType.ARRAY)) { + return values.get(0); + } + + return new BsonArray(values); + } + + @Override + protected void doWriteStartDocument() { + + } + + @Override + protected void doWriteEndDocument() { + + } + + @Override + public void writeStartArray() { + setState(State.VALUE); + } + + 
@Override + public void writeEndArray() { + setState(State.NAME); + } + + @Override + protected void doWriteStartArray() { + + } + + @Override + protected void doWriteEndArray() { + + } + + @Override + protected void doWriteBinaryData(BsonBinary value) { + values.add(value); + } + + @Override + protected void doWriteBoolean(boolean value) { + values.add(BsonBoolean.valueOf(value)); + } + + @Override + protected void doWriteDateTime(long value) { + values.add(new BsonDateTime(value)); + } + + @Override + protected void doWriteDBPointer(BsonDbPointer value) { + values.add(value); + } + + @Override + protected void doWriteDouble(double value) { + values.add(new BsonDouble(value)); + } + + @Override + protected void doWriteInt32(int value) { + values.add(new BsonInt32(value)); + } + + @Override + protected void doWriteInt64(long value) { + values.add(new BsonInt64(value)); + } + + @Override + protected void doWriteDecimal128(Decimal128 value) { + values.add(new BsonDecimal128(value)); + } + + @Override + protected void doWriteJavaScript(String value) { + values.add(new BsonJavaScript(value)); + } + + @Override + protected void doWriteJavaScriptWithScope(String value) { + throw new UnsupportedOperationException("Cannot capture JavaScriptWith"); + } + + @Override + protected void doWriteMaxKey() {} + + @Override + protected void doWriteMinKey() {} + + @Override + protected void doWriteNull() { + values.add(new BsonNull()); + } + + @Override + protected void doWriteObjectId(ObjectId value) { + values.add(new BsonObjectId(value)); + } + + @Override + protected void doWriteRegularExpression(BsonRegularExpression value) { + values.add(value); + } + + @Override + protected void doWriteString(String value) { + values.add(new BsonString(value)); + } + + @Override + protected void doWriteSymbol(String value) { + values.add(new BsonSymbol(value)); + } + + @Override + protected void doWriteTimestamp(BsonTimestamp value) { + values.add(value); + } + + @Override + protected void 
doWriteUndefined() { + values.add(new BsonUndefined()); + } + + @Override + public void flush() { + values.clear(); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DBObjectUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DBObjectUtils.java index c20f9f9d21..48f2e9b84f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DBObjectUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DBObjectUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,21 @@ */ package org.springframework.data.mongodb.util; +import java.util.Arrays; + import com.mongodb.BasicDBList; /** * @author Thomas Darimont + * @deprecated since 4.2.0 */ +@Deprecated(since = "4.2.0", forRemoval = true) public class DBObjectUtils { public static BasicDBList dbList(Object... items) { BasicDBList list = new BasicDBList(); - for (Object item : items) { - list.add(item); - } + list.addAll(Arrays.asList(items)); return list; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DotPath.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DotPath.java new file mode 100644 index 0000000000..191c7d24d3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DotPath.java @@ -0,0 +1,89 @@ +/* + * Copyright 2021-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Value object representing a dot path. + * + * @author Mark Paluch + * @since 3.2 + */ +public class DotPath { + + private static final DotPath EMPTY = new DotPath(""); + + private final String path; + + private DotPath(String path) { + this.path = path; + } + + /** + * Creates a new {@link DotPath} from {@code dotPath}. + * + * @param dotPath the dot path, can be empty or {@literal null}. + * @return the {@link DotPath} representing {@code dotPath}. + */ + public static DotPath from(@Nullable String dotPath) { + + if (StringUtils.hasLength(dotPath)) { + return new DotPath(dotPath); + } + + return EMPTY; + } + + /** + * Returns an empty dotpath. + * + * @return an empty dotpath. + */ + public static DotPath empty() { + return EMPTY; + } + + /** + * Append a segment to the dotpath. If the dotpath is not empty, then segments are separated with a dot. + * + * @param segment the segment to append. + * @return + */ + public DotPath append(String segment) { + + if (isEmpty()) { + return new DotPath(segment); + } + + return new DotPath(path + "." + segment); + } + + /** + * Returns whether this dotpath is empty. + * + * @return whether this dotpath is empty. 
+ */ + public boolean isEmpty() { + return !StringUtils.hasLength(path); + } + + @Override + public String toString() { + return path; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DurationUtil.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DurationUtil.java new file mode 100644 index 0000000000..67255b878a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/DurationUtil.java @@ -0,0 +1,96 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import java.time.Duration; +import java.util.function.Supplier; + +import org.springframework.core.env.Environment; +import org.springframework.data.expression.ValueEvaluationContext; +import org.springframework.data.expression.ValueExpression; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.format.datetime.standard.DurationFormatterUtils; +import org.springframework.lang.Nullable; + +/** + * Helper to evaluate Duration from expressions. 
+ * + * @author Christoph Strobl + * @since 4.4 + */ +public class DurationUtil { + + private static final ValueExpressionParser PARSER = ValueExpressionParser.create(SpelExpressionParser::new); + + /** + * Evaluates and potentially parses the given string representation into a {@link Duration} value. + * + * @param value the {@link String} representation of the duration to evaluate. + * @param evaluationContext context supplier for property and expression language evaluation. + * @return the evaluated duration. + */ + public static Duration evaluate(String value, ValueEvaluationContext evaluationContext) { + + ValueExpression expression = PARSER.parse(value); + Object evaluatedTimeout = expression.evaluate(evaluationContext); + + if (evaluatedTimeout == null) { + return Duration.ZERO; + } + + if (evaluatedTimeout instanceof Duration duration) { + return duration; + } + + return parse(evaluatedTimeout.toString()); + } + + /** + * Evaluates and potentially parses the given string representation into a {@link Duration} value. + * + * @param value the {@link String} representation of the duration to evaluate. + * @param evaluationContext context supplier for expression language evaluation. + * @return the evaluated duration. + */ + public static Duration evaluate(String value, Supplier evaluationContext) { + + return evaluate(value, new ValueEvaluationContext() { + @Nullable + @Override + public Environment getEnvironment() { + return null; + } + + @Nullable + @Override + public EvaluationContext getEvaluationContext() { + return evaluationContext.get(); + } + }); + } + + /** + * + * @param duration duration string to parse. + * @return parsed {@link Duration}. 
+ * @see DurationFormatterUtils + */ + public static Duration parse(String duration) { + return DurationFormatterUtils.detectAndParse(duration); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java new file mode 100644 index 0000000000..ffc97402fe --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java @@ -0,0 +1,95 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; + +import org.bson.Document; +import org.jetbrains.annotations.Nullable; + +/** + * Empty variant of {@link Document}. 
+ * + * @author Mark Paluch + */ +class EmptyDocument extends Document { + + @Override + public Document append(String key, Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public Object put(String key, Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public Object remove(Object key) { + throw new UnsupportedOperationException(); + } + + @Override + public void putAll(Map map) { + throw new UnsupportedOperationException(); + } + + @Override + public void replaceAll(BiFunction function) { + throw new UnsupportedOperationException(); + } + + @Override + public boolean remove(Object key, Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public boolean replace(String key, Object oldValue, Object newValue) { + throw new UnsupportedOperationException(); + } + + @Nullable + @Override + public Object replace(String key, Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public Set> entrySet() { + return Collections.emptySet(); + } + + @Override + public Collection values() { + return Collections.emptyList(); + } + + @Override + public Set keySet() { + return Collections.emptySet(); + } + + @Override + public void clear() { + throw new UnsupportedOperationException(); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoClientVersion.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoClientVersion.java index a14a6bf7d8..8fc4b108ff 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoClientVersion.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoClientVersion.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,45 +15,104 @@ */ package org.springframework.data.mongodb.util; +import java.lang.reflect.Field; + +import org.springframework.data.util.Version; +import org.springframework.lang.Nullable; import org.springframework.util.ClassUtils; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.internal.build.MongoDriverVersion; /** * {@link MongoClientVersion} holds information about the used mongo-java client and is used to distinguish between * different versions. * * @author Christoph Strobl + * @author Mark Paluch * @since 1.7 */ public class MongoClientVersion { - private static final boolean IS_MONGO_30 = ClassUtils.isPresent("com.mongodb.binding.SingleServerBinding", - MongoClientVersion.class.getClassLoader()); + private static final boolean SYNC_CLIENT_PRESENT = ClassUtils.isPresent("com.mongodb.MongoClient", + MongoClientVersion.class.getClassLoader()) + || ClassUtils.isPresent("com.mongodb.client.MongoClient", MongoClientVersion.class.getClassLoader()); - private static final boolean IS_MONGO_34 = ClassUtils.isPresent("org.bson.types.Decimal128", + private static final boolean ASYNC_CLIENT_PRESENT = ClassUtils.isPresent("com.mongodb.async.client.MongoClient", MongoClientVersion.class.getClassLoader()); - private static final boolean IS_ASYNC_CLIENT = ClassUtils.isPresent("com.mongodb.async.client.MongoClient", - MongoClientVersion.class.getClassLoader()); + private static final boolean REACTIVE_CLIENT_PRESENT = ClassUtils + .isPresent("com.mongodb.reactivestreams.client.MongoClient", MongoClientVersion.class.getClassLoader()); + + 
private static final boolean IS_VERSION_5_OR_NEWER; + + private static final Version CLIENT_VERSION; + + static { + + ClassLoader classLoader = MongoClientVersion.class.getClassLoader(); + Version version = getMongoDbDriverVersion(classLoader); + + CLIENT_VERSION = version; + IS_VERSION_5_OR_NEWER = CLIENT_VERSION.isGreaterThanOrEqualTo(Version.parse("5.0")); + } /** - * @return {@literal true} if MongoDB Java driver version 3.0 or later is on classpath. + * @return {@literal true} if the async MongoDB Java driver is on classpath. */ - public static boolean isMongo3Driver() { - return IS_MONGO_30; + public static boolean isAsyncClient() { + return ASYNC_CLIENT_PRESENT; } /** - * @return {@literal true} if MongoDB Java driver version 3.4 or later is on classpath. - * @since 1.10 + * @return {@literal true} if the sync MongoDB Java driver is on classpath. + * @since 2.1 */ - public static boolean isMongo34Driver() { - return IS_MONGO_34; + public static boolean isSyncClientPresent() { + return SYNC_CLIENT_PRESENT; } /** - * @return {lliteral true} if MongoDB Java driver is on classpath. + * @return {@literal true} if the reactive MongoDB Java driver is on classpath. + * @since 2.1 */ - public static boolean isAsyncClient() { - return IS_ASYNC_CLIENT; + public static boolean isReactiveClientPresent() { + return REACTIVE_CLIENT_PRESENT; + } + + /** + * @return {@literal true} if the MongoDB Java driver version is 5 or newer. + * @since 4.3 + */ + public static boolean isVersion5orNewer() { + return IS_VERSION_5_OR_NEWER; + } + + private static Version getMongoDbDriverVersion(ClassLoader classLoader) { + + Version version = getVersionFromPackage(classLoader); + return version == null ? 
guessDriverVersionFromClassPath(classLoader) : version; + } + + @Nullable + private static Version getVersionFromPackage(ClassLoader classLoader) { + + if (ClassUtils.isPresent("com.mongodb.internal.build.MongoDriverVersion", classLoader)) { + try { + Field field = ReflectionUtils.findField(MongoDriverVersion.class, "VERSION"); + return field != null ? Version.parse("" + field.get(null)) : null; + } catch (ReflectiveOperationException | IllegalArgumentException exception) { + // well not much we can do, right? + } + } + return null; + } + + private static Version guessDriverVersionFromClassPath(ClassLoader classLoader) { + + if (ClassUtils.isPresent("com.mongodb.internal.connection.StreamFactoryFactory", classLoader)) { + return Version.parse("5"); + } + return Version.parse("4.11"); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapter.java new file mode 100644 index 0000000000..8bd422c493 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapter.java @@ -0,0 +1,417 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.util; + +import java.lang.reflect.Method; +import java.net.InetSocketAddress; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.reactivestreams.Publisher; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; +import com.mongodb.ServerAddress; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MapReduceIterable; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.MongoIterable; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.client.model.vault.RangeOptions; +import com.mongodb.reactivestreams.client.MapReducePublisher; + +/** + * Compatibility adapter to bridge functionality across different MongoDB driver versions. + *

          + * This class is for internal use within the framework and should not be used by applications. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.3 + */ +public class MongoCompatibilityAdapter { + + private static final String NO_LONGER_SUPPORTED = "%s is no longer supported on Mongo Client 5 or newer"; + private static final String NOT_SUPPORTED_ON_4 = "%s is not supported on Mongo Client 4"; + + private static final @Nullable Method getStreamFactoryFactory = ReflectionUtils.findMethod(MongoClientSettings.class, + "getStreamFactoryFactory"); + + private static final @Nullable Method setBucketSize = ReflectionUtils.findMethod(IndexOptions.class, "bucketSize", + Double.class); + + private static final @Nullable Method setTrimFactor; + + static { + + // method name changed in between + Method trimFactor = ReflectionUtils.findMethod(RangeOptions.class, "setTrimFactor", Integer.class); + if (trimFactor != null) { + setTrimFactor = trimFactor; + } else { + setTrimFactor = ReflectionUtils.findMethod(RangeOptions.class, "trimFactor", Integer.class); + } + } + + /** + * Return a compatibility adapter for {@link MongoClientSettings.Builder}. + * + * @param builder + * @return + */ + public static ClientSettingsBuilderAdapter clientSettingsBuilderAdapter(MongoClientSettings.Builder builder) { + return new MongoStreamFactoryFactorySettingsConfigurer(builder)::setStreamFactory; + } + + /** + * Return a compatibility adapter for {@link MongoClientSettings}. 
+ * + * @param clientSettings + * @return + */ + public static ClientSettingsAdapter clientSettingsAdapter(MongoClientSettings clientSettings) { + return new ClientSettingsAdapter() { + @Override + public T getStreamFactoryFactory() { + + if (MongoClientVersion.isVersion5orNewer() || getStreamFactoryFactory == null) { + return null; + } + + return (T) ReflectionUtils.invokeMethod(getStreamFactoryFactory, clientSettings); + } + }; + } + + /** + * Return a compatibility adapter for {@link IndexOptions}. + * + * @param options + * @return + */ + public static IndexOptionsAdapter indexOptionsAdapter(IndexOptions options) { + return bucketSize -> { + + if (MongoClientVersion.isVersion5orNewer() || setBucketSize == null) { + throw new UnsupportedOperationException(NO_LONGER_SUPPORTED.formatted("IndexOptions.bucketSize")); + } + + ReflectionUtils.invokeMethod(setBucketSize, options, bucketSize); + }; + } + + /** + * Return a compatibility adapter for {@code MapReduceIterable}. + * + * @param iterable + * @return + */ + @SuppressWarnings("deprecation") + public static MapReduceIterableAdapter mapReduceIterableAdapter(Object iterable) { + return sharded -> { + + if (MongoClientVersion.isVersion5orNewer()) { + throw new UnsupportedOperationException(NO_LONGER_SUPPORTED.formatted("sharded")); + } + + // Use MapReduceIterable to avoid package-protected access violations to + // com.mongodb.client.internal.MapReduceIterableImpl + Method shardedMethod = ReflectionUtils.findMethod(MapReduceIterable.class, "sharded", boolean.class); + ReflectionUtils.invokeMethod(shardedMethod, iterable, sharded); + }; + } + + /** + * Return a compatibility adapter for {@link RangeOptions}. 
+ * + * @param options + * @return + */ + public static RangeOptionsAdapter rangeOptionsAdapter(RangeOptions options) { + return trimFactor -> { + + if (!MongoClientVersion.isVersion5orNewer() || setTrimFactor == null) { + throw new UnsupportedOperationException(NOT_SUPPORTED_ON_4.formatted("RangeOptions.trimFactor")); + } + + ReflectionUtils.invokeMethod(setTrimFactor, options, trimFactor); + }; + } + + /** + * Return a compatibility adapter for {@code MapReducePublisher}. + * + * @param publisher + * @return + */ + @SuppressWarnings("deprecation") + public static MapReducePublisherAdapter mapReducePublisherAdapter(Object publisher) { + return sharded -> { + + if (MongoClientVersion.isVersion5orNewer()) { + throw new UnsupportedOperationException(NO_LONGER_SUPPORTED.formatted("sharded")); + } + + // Use MapReducePublisher to avoid package-protected access violations to MapReducePublisherImpl + Method shardedMethod = ReflectionUtils.findMethod(MapReducePublisher.class, "sharded", boolean.class); + ReflectionUtils.invokeMethod(shardedMethod, publisher, sharded); + }; + } + + /** + * Return a compatibility adapter for {@link ServerAddress}. + * + * @param serverAddress + * @return + */ + public static ServerAddressAdapter serverAddressAdapter(ServerAddress serverAddress) { + return () -> { + + if (MongoClientVersion.isVersion5orNewer()) { + return null; + } + + Method serverAddressMethod = ReflectionUtils.findMethod(ServerAddress.class, "getSocketAddress"); + Object value = ReflectionUtils.invokeMethod(serverAddressMethod, serverAddress); + return value != null ? 
InetSocketAddress.class.cast(value) : null; + }; + } + + public static MongoDatabaseAdapterBuilder mongoDatabaseAdapter() { + return MongoDatabaseAdapter::new; + } + + public static ReactiveMongoDatabaseAdapterBuilder reactiveMongoDatabaseAdapter() { + return ReactiveMongoDatabaseAdapter::new; + } + + public interface IndexOptionsAdapter { + void setBucketSize(double bucketSize); + } + + public interface ClientSettingsAdapter { + @Nullable + T getStreamFactoryFactory(); + } + + public interface ClientSettingsBuilderAdapter { + void setStreamFactoryFactory(T streamFactory); + } + + public interface MapReduceIterableAdapter { + void sharded(boolean sharded); + } + + public interface MapReducePublisherAdapter { + void sharded(boolean sharded); + } + + public interface ServerAddressAdapter { + @Nullable + InetSocketAddress getSocketAddress(); + } + + public interface MongoDatabaseAdapterBuilder { + MongoDatabaseAdapter forDb(com.mongodb.client.MongoDatabase db); + } + + public interface RangeOptionsAdapter { + void trimFactor(Integer trimFactor); + } + + @SuppressWarnings({ "unchecked", "DataFlowIssue" }) + public static class MongoDatabaseAdapter { + + @Nullable // + private static final Method LIST_COLLECTION_NAMES_METHOD; + + @Nullable // + private static final Method LIST_COLLECTION_NAMES_METHOD_SESSION; + + private static final Class collectionNamesReturnType; + + private final MongoDatabase db; + + static { + + if (MongoClientVersion.isSyncClientPresent()) { + + LIST_COLLECTION_NAMES_METHOD = ReflectionUtils.findMethod(MongoDatabase.class, "listCollectionNames"); + LIST_COLLECTION_NAMES_METHOD_SESSION = ReflectionUtils.findMethod(MongoDatabase.class, "listCollectionNames", + ClientSession.class); + + if (MongoClientVersion.isVersion5orNewer()) { + try { + collectionNamesReturnType = ClassUtils.forName("com.mongodb.client.ListCollectionNamesIterable", + MongoDatabaseAdapter.class.getClassLoader()); + } catch (ClassNotFoundException e) { + throw new 
IllegalStateException("Unable to load com.mongodb.client.ListCollectionNamesIterable", e); + } + } else { + try { + collectionNamesReturnType = ClassUtils.forName("com.mongodb.client.MongoIterable", + MongoDatabaseAdapter.class.getClassLoader()); + } catch (ClassNotFoundException e) { + throw new IllegalStateException("Unable to load com.mongodb.client.ListCollectionNamesIterable", e); + } + } + } else { + LIST_COLLECTION_NAMES_METHOD = null; + LIST_COLLECTION_NAMES_METHOD_SESSION = null; + collectionNamesReturnType = Object.class; + } + } + + public MongoDatabaseAdapter(MongoDatabase db) { + this.db = db; + } + + public Class> collectionNameIterableType() { + return (Class>) collectionNamesReturnType; + } + + public MongoIterable listCollectionNames() { + + Assert.state(LIST_COLLECTION_NAMES_METHOD != null, "No method listCollectionNames present for %s".formatted(db)); + return (MongoIterable) ReflectionUtils.invokeMethod(LIST_COLLECTION_NAMES_METHOD, db); + } + + public MongoIterable listCollectionNames(ClientSession clientSession) { + Assert.state(LIST_COLLECTION_NAMES_METHOD != null, + "No method listCollectionNames(ClientSession) present for %s".formatted(db)); + return (MongoIterable) ReflectionUtils.invokeMethod(LIST_COLLECTION_NAMES_METHOD_SESSION, db, + clientSession); + } + } + + public interface ReactiveMongoDatabaseAdapterBuilder { + ReactiveMongoDatabaseAdapter forDb(com.mongodb.reactivestreams.client.MongoDatabase db); + } + + @SuppressWarnings({ "unchecked", "DataFlowIssue" }) + public static class ReactiveMongoDatabaseAdapter { + + @Nullable // + private static final Method LIST_COLLECTION_NAMES_METHOD; + + @Nullable // + private static final Method LIST_COLLECTION_NAMES_METHOD_SESSION; + + private static final Class collectionNamesReturnType; + + private final com.mongodb.reactivestreams.client.MongoDatabase db; + + static { + + if (MongoClientVersion.isReactiveClientPresent()) { + + LIST_COLLECTION_NAMES_METHOD = ReflectionUtils + 
.findMethod(com.mongodb.reactivestreams.client.MongoDatabase.class, "listCollectionNames"); + LIST_COLLECTION_NAMES_METHOD_SESSION = ReflectionUtils.findMethod( + com.mongodb.reactivestreams.client.MongoDatabase.class, "listCollectionNames", + com.mongodb.reactivestreams.client.ClientSession.class); + + if (MongoClientVersion.isVersion5orNewer()) { + try { + collectionNamesReturnType = ClassUtils.forName( + "com.mongodb.reactivestreams.client.ListCollectionNamesPublisher", + ReactiveMongoDatabaseAdapter.class.getClassLoader()); + } catch (ClassNotFoundException e) { + throw new IllegalStateException("com.mongodb.reactivestreams.client.ListCollectionNamesPublisher", e); + } + } else { + try { + collectionNamesReturnType = ClassUtils.forName("org.reactivestreams.Publisher", + ReactiveMongoDatabaseAdapter.class.getClassLoader()); + } catch (ClassNotFoundException e) { + throw new IllegalStateException("org.reactivestreams.Publisher", e); + } + } + } else { + LIST_COLLECTION_NAMES_METHOD = null; + LIST_COLLECTION_NAMES_METHOD_SESSION = null; + collectionNamesReturnType = Object.class; + } + } + + ReactiveMongoDatabaseAdapter(com.mongodb.reactivestreams.client.MongoDatabase db) { + this.db = db; + } + + public Class> collectionNamePublisherType() { + return (Class>) collectionNamesReturnType; + + } + + public Publisher listCollectionNames() { + Assert.state(LIST_COLLECTION_NAMES_METHOD != null, "No method listCollectionNames present for %s".formatted(db)); + return (Publisher) ReflectionUtils.invokeMethod(LIST_COLLECTION_NAMES_METHOD, db); + } + + public Publisher listCollectionNames(com.mongodb.reactivestreams.client.ClientSession clientSession) { + Assert.state(LIST_COLLECTION_NAMES_METHOD != null, + "No method listCollectionNames(ClientSession) present for %s".formatted(db)); + return (Publisher) ReflectionUtils.invokeMethod(LIST_COLLECTION_NAMES_METHOD_SESSION, db, clientSession); + } + } + + static class MongoStreamFactoryFactorySettingsConfigurer { + + private 
static final Log logger = LogFactory.getLog(MongoStreamFactoryFactorySettingsConfigurer.class); + + private static final String STREAM_FACTORY_NAME = "com.mongodb.connection.StreamFactoryFactory"; + private static final boolean STREAM_FACTORY_PRESENT = ClassUtils.isPresent(STREAM_FACTORY_NAME, + MongoCompatibilityAdapter.class.getClassLoader()); + private final MongoClientSettings.Builder settingsBuilder; + + static boolean isStreamFactoryPresent() { + return STREAM_FACTORY_PRESENT; + } + + public MongoStreamFactoryFactorySettingsConfigurer(Builder settingsBuilder) { + this.settingsBuilder = settingsBuilder; + } + + void setStreamFactory(Object streamFactory) { + + if (MongoClientVersion.isVersion5orNewer() && isStreamFactoryPresent()) { + logger.warn("StreamFactoryFactory is no longer available. Use TransportSettings instead."); + return; + } + + try { + Class streamFactoryType = ClassUtils.forName(STREAM_FACTORY_NAME, streamFactory.getClass().getClassLoader()); + + if (!ClassUtils.isAssignable(streamFactoryType, streamFactory.getClass())) { + throw new IllegalArgumentException("Expected %s but found %s".formatted(streamFactoryType, streamFactory)); + } + + Method setter = ReflectionUtils.findMethod(settingsBuilder.getClass(), "streamFactoryFactory", + streamFactoryType); + if (setter != null) { + ReflectionUtils.invokeMethod(setter, settingsBuilder, streamFactoryType.cast(streamFactory)); + } + } catch (ReflectiveOperationException e) { + throw new IllegalArgumentException("Cannot set StreamFactoryFactory for %s".formatted(settingsBuilder), e); + } + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoDbErrorCodes.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoDbErrorCodes.java index 73ccc381bd..326a5c1e88 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoDbErrorCodes.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoDbErrorCodes.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,11 +19,14 @@ import org.springframework.lang.Nullable; +import com.mongodb.MongoException; + /** - * {@link MongoDbErrorCodes} holds MongoDB specific error codes outlined in {@literal mongo/base/error_codes.err}. + * {@link MongoDbErrorCodes} holds MongoDB specific error codes outlined in {@literal mongo/base/error_codes.yml}. * * @author Christoph Strobl * @author Mark Paluch + * @author SangHyuk Lee * @since 1.8 */ public final class MongoDbErrorCodes { @@ -31,14 +34,16 @@ public final class MongoDbErrorCodes { static HashMap dataAccessResourceFailureCodes; static HashMap dataIntegrityViolationCodes; static HashMap duplicateKeyCodes; - static HashMap invalidDataAccessApiUsageExeption; + static HashMap invalidDataAccessApiUsageException; static HashMap permissionDeniedCodes; + static HashMap clientSessionCodes; + static HashMap transactionCodes; static HashMap errorCodes; static { - dataAccessResourceFailureCodes = new HashMap(10); + dataAccessResourceFailureCodes = new HashMap<>(12, 1f); dataAccessResourceFailureCodes.put(6, "HostUnreachable"); dataAccessResourceFailureCodes.put(7, "HostNotFound"); dataAccessResourceFailureCodes.put(89, "NetworkTimeout"); @@ -52,7 +57,7 @@ public final class MongoDbErrorCodes { dataAccessResourceFailureCodes.put(13441, "BadOffsetInFile"); dataAccessResourceFailureCodes.put(13640, 
"DataFileHeaderCorrupt"); - dataIntegrityViolationCodes = new HashMap(6); + dataIntegrityViolationCodes = new HashMap<>(6, 1f); dataIntegrityViolationCodes.put(67, "CannotCreateIndex"); dataIntegrityViolationCodes.put(68, "IndexAlreadyExists"); dataIntegrityViolationCodes.put(85, "IndexOptionsConflict"); @@ -60,84 +65,212 @@ public final class MongoDbErrorCodes { dataIntegrityViolationCodes.put(112, "WriteConflict"); dataIntegrityViolationCodes.put(117, "ConflictingOperationInProgress"); - duplicateKeyCodes = new HashMap(3); + duplicateKeyCodes = new HashMap<>(4, 1f); duplicateKeyCodes.put(3, "OBSOLETE_DuplicateKey"); duplicateKeyCodes.put(84, "DuplicateKeyValue"); duplicateKeyCodes.put(11000, "DuplicateKey"); duplicateKeyCodes.put(11001, "DuplicateKey"); - invalidDataAccessApiUsageExeption = new HashMap(); - invalidDataAccessApiUsageExeption.put(5, "GraphContainsCycle"); - invalidDataAccessApiUsageExeption.put(9, "FailedToParse"); - invalidDataAccessApiUsageExeption.put(14, "TypeMismatch"); - invalidDataAccessApiUsageExeption.put(15, "Overflow"); - invalidDataAccessApiUsageExeption.put(16, "InvalidLength"); - invalidDataAccessApiUsageExeption.put(20, "IllegalOperation"); - invalidDataAccessApiUsageExeption.put(21, "EmptyArrayOperation"); - invalidDataAccessApiUsageExeption.put(22, "InvalidBSON"); - invalidDataAccessApiUsageExeption.put(23, "AlreadyInitialized"); - invalidDataAccessApiUsageExeption.put(29, "NonExistentPath"); - invalidDataAccessApiUsageExeption.put(30, "InvalidPath"); - invalidDataAccessApiUsageExeption.put(40, "ConflictingUpdateOperators"); - invalidDataAccessApiUsageExeption.put(45, "UserDataInconsistent"); - invalidDataAccessApiUsageExeption.put(30, "DollarPrefixedFieldName"); - invalidDataAccessApiUsageExeption.put(52, "InvalidPath"); - invalidDataAccessApiUsageExeption.put(53, "InvalidIdField"); - invalidDataAccessApiUsageExeption.put(54, "NotSingleValueField"); - invalidDataAccessApiUsageExeption.put(55, "InvalidDBRef"); - 
invalidDataAccessApiUsageExeption.put(56, "EmptyFieldName"); - invalidDataAccessApiUsageExeption.put(57, "DottedFieldName"); - invalidDataAccessApiUsageExeption.put(59, "CommandNotFound"); - invalidDataAccessApiUsageExeption.put(60, "DatabaseNotFound"); - invalidDataAccessApiUsageExeption.put(61, "ShardKeyNotFound"); - invalidDataAccessApiUsageExeption.put(62, "OplogOperationUnsupported"); - invalidDataAccessApiUsageExeption.put(66, "ImmutableField"); - invalidDataAccessApiUsageExeption.put(72, "InvalidOptions"); - invalidDataAccessApiUsageExeption.put(115, "CommandNotSupported"); - invalidDataAccessApiUsageExeption.put(116, "DocTooLargeForCapped"); - invalidDataAccessApiUsageExeption.put(130, "SymbolNotFound"); - invalidDataAccessApiUsageExeption.put(17280, "KeyTooLong"); - invalidDataAccessApiUsageExeption.put(13334, "ShardKeyTooBig"); - - permissionDeniedCodes = new HashMap(); + invalidDataAccessApiUsageException = new HashMap<>(31, 1f); + invalidDataAccessApiUsageException.put(5, "GraphContainsCycle"); + invalidDataAccessApiUsageException.put(9, "FailedToParse"); + invalidDataAccessApiUsageException.put(14, "TypeMismatch"); + invalidDataAccessApiUsageException.put(15, "Overflow"); + invalidDataAccessApiUsageException.put(16, "InvalidLength"); + invalidDataAccessApiUsageException.put(20, "IllegalOperation"); + invalidDataAccessApiUsageException.put(21, "EmptyArrayOperation"); + invalidDataAccessApiUsageException.put(22, "InvalidBSON"); + invalidDataAccessApiUsageException.put(23, "AlreadyInitialized"); + invalidDataAccessApiUsageException.put(29, "NonExistentPath"); + invalidDataAccessApiUsageException.put(30, "InvalidPath"); + invalidDataAccessApiUsageException.put(40, "ConflictingUpdateOperators"); + invalidDataAccessApiUsageException.put(45, "UserDataInconsistent"); + invalidDataAccessApiUsageException.put(52, "DollarPrefixedFieldName"); + invalidDataAccessApiUsageException.put(53, "InvalidIdField"); + invalidDataAccessApiUsageException.put(54, 
"NotSingleValueField"); + invalidDataAccessApiUsageException.put(55, "InvalidDBRef"); + invalidDataAccessApiUsageException.put(56, "EmptyFieldName"); + invalidDataAccessApiUsageException.put(57, "DottedFieldName"); + invalidDataAccessApiUsageException.put(59, "CommandNotFound"); + invalidDataAccessApiUsageException.put(60, "DatabaseNotFound"); + invalidDataAccessApiUsageException.put(61, "ShardKeyNotFound"); + invalidDataAccessApiUsageException.put(62, "OplogOperationUnsupported"); + invalidDataAccessApiUsageException.put(66, "ImmutableField"); + invalidDataAccessApiUsageException.put(72, "InvalidOptions"); + invalidDataAccessApiUsageException.put(115, "CommandNotSupported"); + invalidDataAccessApiUsageException.put(116, "DocTooLargeForCapped"); + invalidDataAccessApiUsageException.put(10003, "CannotGrowDocumentInCappedNamespace"); + invalidDataAccessApiUsageException.put(130, "SymbolNotFound"); + invalidDataAccessApiUsageException.put(17280, "KeyTooLong"); + invalidDataAccessApiUsageException.put(13334, "ShardKeyTooBig"); + + permissionDeniedCodes = new HashMap<>(8, 1f); permissionDeniedCodes.put(11, "UserNotFound"); permissionDeniedCodes.put(18, "AuthenticationFailed"); permissionDeniedCodes.put(31, "RoleNotFound"); permissionDeniedCodes.put(32, "RolesNotRelated"); - permissionDeniedCodes.put(33, "PrvilegeNotFound"); + permissionDeniedCodes.put(33, "PrivilegeNotFound"); permissionDeniedCodes.put(15847, "CannotAuthenticate"); permissionDeniedCodes.put(16704, "CannotAuthenticateToAdminDB"); permissionDeniedCodes.put(16705, "CannotAuthenticateToAdminDB"); - errorCodes = new HashMap(); + clientSessionCodes = new HashMap<>(4, 1f); + clientSessionCodes.put(206, "NoSuchSession"); + clientSessionCodes.put(213, "DuplicateSession"); + clientSessionCodes.put(217, "IncompleteTransactionHistory"); + clientSessionCodes.put(225, "TransactionTooOld"); + clientSessionCodes.put(228, "SessionTransferIncomplete"); + clientSessionCodes.put(244, "TransactionAborted"); + 
clientSessionCodes.put(251, "NoSuchTransaction"); + clientSessionCodes.put(256, "TransactionCommitted"); + clientSessionCodes.put(257, "TransactionToLarge"); + clientSessionCodes.put(261, "TooManyLogicalSessions"); + clientSessionCodes.put(263, "OperationNotSupportedInTransaction"); + clientSessionCodes.put(264, "TooManyLogicalSessions"); + + errorCodes = new HashMap<>( + dataAccessResourceFailureCodes.size() + dataIntegrityViolationCodes.size() + duplicateKeyCodes.size() + + invalidDataAccessApiUsageException.size() + permissionDeniedCodes.size() + clientSessionCodes.size(), + 1f); errorCodes.putAll(dataAccessResourceFailureCodes); errorCodes.putAll(dataIntegrityViolationCodes); errorCodes.putAll(duplicateKeyCodes); - errorCodes.putAll(invalidDataAccessApiUsageExeption); + errorCodes.putAll(invalidDataAccessApiUsageException); errorCodes.putAll(permissionDeniedCodes); + errorCodes.putAll(clientSessionCodes); + } + + @Nullable + public static String getErrorDescription(@Nullable Integer errorCode) { + return errorCode == null ? null : errorCodes.get(errorCode); } public static boolean isDataIntegrityViolationCode(@Nullable Integer errorCode) { - return errorCode == null ? false : dataIntegrityViolationCodes.containsKey(errorCode); + return errorCode != null && dataIntegrityViolationCodes.containsKey(errorCode); + } + + /** + * @param exception can be {@literal null}. + * @return + * @since 4.4 + */ + public static boolean isDataIntegrityViolationError(Exception exception) { + + if (exception instanceof MongoException me) { + return isDataIntegrityViolationCode(me.getCode()); + } + return false; } public static boolean isDataAccessResourceFailureCode(@Nullable Integer errorCode) { - return errorCode == null ? false : dataAccessResourceFailureCodes.containsKey(errorCode); + return errorCode != null && dataAccessResourceFailureCodes.containsKey(errorCode); + } + + /** + * @param exception can be {@literal null}. 
+ * @return + * @since 4.4 + */ + public static boolean isDataAccessResourceError(Exception exception) { + + if (exception instanceof MongoException me) { + return isDataAccessResourceFailureCode(me.getCode()); + } + return false; } public static boolean isDuplicateKeyCode(@Nullable Integer errorCode) { - return errorCode == null ? false : duplicateKeyCodes.containsKey(errorCode); + return errorCode != null && duplicateKeyCodes.containsKey(errorCode); + } + + /** + * @param exception can be {@literal null}. + * @return + * @since 4.4 + */ + public static boolean isDuplicateKeyError(Exception exception) { + + if (exception instanceof MongoException me) { + return isDuplicateKeyCode(me.getCode()); + } + return false; + } + + /** + * @param exception can be {@literal null}. + * @return + * @since 4.4 + */ + public static boolean isDataDuplicateKeyError(Exception exception) { + return isDuplicateKeyError(exception); } public static boolean isPermissionDeniedCode(@Nullable Integer errorCode) { - return errorCode == null ? false : permissionDeniedCodes.containsKey(errorCode); + return errorCode != null && permissionDeniedCodes.containsKey(errorCode); + } + + /** + * @param exception can be {@literal null}. + * @return + * @since 4.4 + */ + public static boolean isPermissionDeniedError(Exception exception) { + + if (exception instanceof MongoException) { + return isPermissionDeniedCode(((MongoException) exception).getCode()); + } + return false; } public static boolean isInvalidDataAccessApiUsageCode(@Nullable Integer errorCode) { - return errorCode == null ? false : invalidDataAccessApiUsageExeption.containsKey(errorCode); + return errorCode != null && invalidDataAccessApiUsageException.containsKey(errorCode); } - public static String getErrorDescription(@Nullable Integer errorCode) { - return errorCode == null ? null : errorCodes.get(errorCode); + /** + * @param exception can be {@literal null}. 
+ * @return + * @since 4.4 + */ + public static boolean isInvalidDataAccessApiUsageError(Exception exception) { + + if (exception instanceof MongoException me) { + return isInvalidDataAccessApiUsageCode(me.getCode()); + } + return false; + } + + /** + * Check if the given error code matches a known session related error. + * + * @param errorCode the error code to check. + * @return {@literal true} if error matches. + * @since 2.1 + */ + public static boolean isClientSessionFailureCode(@Nullable Integer errorCode) { + return errorCode != null && clientSessionCodes.containsKey(errorCode); + } + + /** + * Check if the given error code matches a known transaction related error. + * + * @param errorCode the error code to check. + * @return {@literal true} if error matches. + * @since 2.1 + */ + public static boolean isTransactionFailureCode(@Nullable Integer errorCode) { + return errorCode != null && transactionCodes.containsKey(errorCode); + } + + /** + * @param exception can be {@literal null}. + * @return + * @since 4.4 + */ + public static boolean isClientSessionFailure(Exception exception) { + + if (exception instanceof MongoException me) { + return isClientSessionFailureCode(me.getCode()); + } + return false; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java new file mode 100644 index 0000000000..23c96f9e46 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java @@ -0,0 +1,116 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import java.util.regex.Pattern; + +import org.springframework.lang.Nullable; + +/** + * Utility to translate {@link Pattern#flags() regex flags} to MongoDB regex options and vice versa. + * + * @author Mark Paluch + * @since 3.3 + */ +public abstract class RegexFlags { + + private static final int[] FLAG_LOOKUP = new int[Character.MAX_VALUE]; + + static { + FLAG_LOOKUP['g'] = 256; + FLAG_LOOKUP['i'] = Pattern.CASE_INSENSITIVE; + FLAG_LOOKUP['m'] = Pattern.MULTILINE; + FLAG_LOOKUP['s'] = Pattern.DOTALL; + FLAG_LOOKUP['c'] = Pattern.CANON_EQ; + FLAG_LOOKUP['x'] = Pattern.COMMENTS; + FLAG_LOOKUP['d'] = Pattern.UNIX_LINES; + FLAG_LOOKUP['t'] = Pattern.LITERAL; + FLAG_LOOKUP['u'] = Pattern.UNICODE_CASE; + } + + private RegexFlags() { + + } + + /** + * Lookup the MongoDB specific options from given {@link Pattern#flags() flags}. + * + * @param flags the Regex flags to look up. + * @return the options string. May be empty. + */ + public static String toRegexOptions(int flags) { + + if (flags == 0) { + return ""; + } + + StringBuilder buf = new StringBuilder(); + + for (int i = 'a'; i < 'z'; i++) { + + if (FLAG_LOOKUP[i] == 0) { + continue; + } + + if ((flags & FLAG_LOOKUP[i]) > 0) { + buf.append((char) i); + } + } + + return buf.toString(); + } + + /** + * Lookup the MongoDB specific flags for a given regex option string. + * + * @param s the Regex option/flag to look up. Can be {@literal null}. + * @return zero if given {@link String} is {@literal null} or empty. 
+ * @since 2.2 + */ + public static int toRegexFlags(@Nullable String s) { + + int flags = 0; + + if (s == null) { + return flags; + } + + for (char f : s.toLowerCase().toCharArray()) { + flags |= toRegexFlag(f); + } + + return flags; + } + + /** + * Lookup the MongoDB specific flags for a given character. + * + * @param c the Regex option/flag to look up. + * @return + * @throws IllegalArgumentException for unknown flags + * @since 2.2 + */ + public static int toRegexFlag(char c) { + + int flag = FLAG_LOOKUP[c]; + + if (flag == 0) { + throw new IllegalArgumentException(String.format("Unrecognized flag [%c]", c)); + } + + return flag; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java new file mode 100644 index 0000000000..344244717e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java @@ -0,0 +1,86 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.util.aggregation; + +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.data.mongodb.core.aggregation.Field; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * @author Christoph Strobl + */ +public class TestAggregationContext implements AggregationOperationContext { + + private final AggregationOperationContext delegate; + + private TestAggregationContext(AggregationOperationContext delegate) { + this.delegate = delegate; + } + + public static AggregationOperationContext contextFor(@Nullable Class type) { + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return contextFor(type, mongoConverter); + } + + public static AggregationOperationContext contextFor(@Nullable Class type, MongoConverter mongoConverter) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + return new TestAggregationContext(new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)).continueOnMissingFieldReference()); + } + + @Override + public Document getMappedObject(Document document) { + return 
delegate.getMappedObject(document); + } + + @Override + public Document getMappedObject(Document document, @Nullable Class type) { + return delegate.getMappedObject(document, type); + } + + @Override + public FieldReference getReference(Field field) { + return delegate.getReference(field); + } + + @Override + public FieldReference getReference(String name) { + return delegate.getReference(name); + } + + @Override + public CodecRegistry getCodecRegistry() { + return delegate.getCodecRegistry(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java new file mode 100644 index 0000000000..9dd3f1d8fb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java @@ -0,0 +1,73 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.util.encryption; + +import java.util.Base64; +import java.util.UUID; +import java.util.function.Supplier; + +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.bson.types.Binary; +import org.springframework.data.mongodb.util.spel.ExpressionUtils; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.Expression; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Internal utility class for dealing with encryption related matters. + * + * @author Christoph Strobl + * @since 3.3 + */ +public final class EncryptionUtils { + + /** + * Resolve a given plain {@link String} value into the store native {@literal keyId} format, considering potential + * {@link Expression expressions}.
          + * The potential keyId is probed against an {@link UUID#fromString(String) UUID value} or decoded from the + * {@literal base64} representation prior to conversion into its {@link Binary} format. + * + * @param value the source value to resolve the keyId for. Must not be {@literal null}. + * @param evaluationContext a {@link Supplier} used to provide the {@link EvaluationContext} in case an + * {@link Expression} is {@link ExpressionUtils#detectExpression(String) detected}. + * @return can be {@literal null}. + * @throws IllegalArgumentException if one of the required arguments is {@literal null}. + */ + @Nullable + public static Object resolveKeyId(String value, Supplier evaluationContext) { + + Assert.notNull(value, "Value must not be null"); + + Object potentialKeyId = value; + Expression expression = ExpressionUtils.detectExpression(value); + if (expression != null) { + potentialKeyId = expression.getValue(evaluationContext.get()); + if (!(potentialKeyId instanceof String)) { + return potentialKeyId; + } + } + + try { + return new Binary(BsonBinarySubType.UUID_STANDARD, + new BsonBinary(UUID.fromString(potentialKeyId.toString())).getData()); + } catch (IllegalArgumentException e) { + + return new Binary(BsonBinarySubType.UUID_STANDARD, Base64.getDecoder().decode(potentialKeyId.toString())); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/DateTimeFormatter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/DateTimeFormatter.java new file mode 100644 index 0000000000..b5c26755cf --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/DateTimeFormatter.java @@ -0,0 +1,56 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import static java.time.format.DateTimeFormatter.*; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; + +/** + * DateTimeFormatter implementation borrowed from MongoDB + * Inc. licensed under the Apache License, Version 2.0.
          + * Formatted and modified. + * + * @author Jeff Yemin + * @author Ross Lawley + * @since 2.2 + */ +class DateTimeFormatter { + + private static final int DATE_STRING_LENGTH = "1970-01-01".length(); + + static long parse(final String dateTimeString) { + // ISO_OFFSET_DATE_TIME will not parse date strings consisting of just year-month-day, so use ISO_LOCAL_DATE for + // those + if (dateTimeString.length() == DATE_STRING_LENGTH) { + return LocalDate.parse(dateTimeString, ISO_LOCAL_DATE).atStartOfDay().toInstant(ZoneOffset.UTC).toEpochMilli(); + } else { + return ISO_OFFSET_DATE_TIME.parse(dateTimeString, Instant::from).toEpochMilli(); + } + } + + static String format(final long dateTime) { + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(dateTime), ZoneId.of("Z")).format(ISO_OFFSET_DATE_TIME); + } + + private DateTimeFormatter() { + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/EvaluationContextExpressionEvaluator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/EvaluationContextExpressionEvaluator.java new file mode 100644 index 0000000000..6c31a9721f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/EvaluationContextExpressionEvaluator.java @@ -0,0 +1,67 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.util.json; + +import java.util.Collections; +import java.util.Map; + +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.Expression; +import org.springframework.expression.ExpressionParser; +import org.springframework.expression.spel.support.StandardEvaluationContext; +import org.springframework.lang.Nullable; + +/** + * @author Christoph Strobl + * @since 3.3.5 + */ +class EvaluationContextExpressionEvaluator implements ValueExpressionEvaluator { + + final ValueProvider valueProvider; + final ExpressionParser expressionParser; + + EvaluationContextExpressionEvaluator(ValueProvider valueProvider, ExpressionParser expressionParser) { + + this.valueProvider = valueProvider; + this.expressionParser = expressionParser; + } + + @Nullable + @Override + public T evaluate(String expression) { + return evaluateExpression(expression, Collections.emptyMap()); + } + + EvaluationContext getEvaluationContext(String expressionString) { + return new StandardEvaluationContext(); + } + + Expression getParsedExpression(String expressionString) { + return expressionParser.parseExpression(expressionString); + } + + @SuppressWarnings("unchecked") + T evaluateExpression(String expressionString, Map variables) { + + Expression expression = getParsedExpression(expressionString); + EvaluationContext ctx = getEvaluationContext(expressionString); + variables.forEach(ctx::setVariable); + + Object result = expression.getValue(ctx, Object.class); + return (T) result; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonBuffer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonBuffer.java new file mode 100644 index 0000000000..4b4b497dae --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonBuffer.java @@ 
-0,0 +1,73 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import org.bson.json.JsonParseException; + +/** + * JsonBuffer implementation borrowed from MongoDB + * Inc. licensed under the Apache License, Version 2.0.
          + * Formatted and modified. + * + * @author Jeff Yemin + * @author Ross Lawley + * @since 2.2 + */ +class JsonBuffer { + + private final String buffer; + private int position; + private boolean eof; + + JsonBuffer(final String buffer) { + this.buffer = buffer; + } + + public int getPosition() { + return position; + } + + public void setPosition(final int position) { + this.position = position; + } + + public int read() { + if (eof) { + throw new JsonParseException("Trying to read past EOF."); + } else if (position >= buffer.length()) { + eof = true; + return -1; + } else { + return buffer.charAt(position++); + } + } + + public void unread(final int c) { + eof = false; + if (c != -1 && buffer.charAt(position - 1) == c) { + position--; + } + } + + public String substring(final int beginIndex) { + return buffer.substring(beginIndex); + } + + public String substring(final int beginIndex, final int endIndex) { + return buffer.substring(beginIndex, endIndex); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonScanner.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonScanner.java new file mode 100644 index 0000000000..ca4fbddd60 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonScanner.java @@ -0,0 +1,623 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.util.json; + +import org.bson.BsonRegularExpression; +import org.bson.json.JsonParseException; + +/** + * Parses the string representation of a JSON object into a set of {@link JsonToken}-derived objects.
          + * JsonScanner implementation borrowed from MongoDB + * Inc. licensed under the Apache License, Version 2.0.
          + * Formatted and modified to allow reading Spring Data specific placeholder values. + * + * @author Jeff Yemin + * @author Trisha Gee + * @author Robert Guo + * @author Ross Lawley + * @author Christoph Strobl + * @since 2.2 + */ +class JsonScanner { + + private final JsonBuffer buffer; + + JsonScanner(final String json) { + this(new JsonBuffer(json)); + } + + JsonScanner(final JsonBuffer buffer) { + this.buffer = buffer; + } + + /** + * @param newPosition the new position of the cursor position in the buffer + */ + public void setBufferPosition(final int newPosition) { + buffer.setPosition(newPosition); + } + + /** + * @return the current location of the cursor in the buffer + */ + public int getBufferPosition() { + return buffer.getPosition(); + } + + /** + * Finds and returns the next complete token from this scanner. If scanner reached the end of the source, it will + * return a token with {@code JSONTokenType.END_OF_FILE} type. + * + * @return The next token. + * @throws JsonParseException if source is invalid. 
+ */ + public JsonToken nextToken() { + + int c = buffer.read(); + while (c != -1 && Character.isWhitespace(c)) { + c = buffer.read(); + } + if (c == -1) { + return new JsonToken(JsonTokenType.END_OF_FILE, ""); + } + + switch (c) { + case '{': + return new JsonToken(JsonTokenType.BEGIN_OBJECT, "{"); + case '}': + return new JsonToken(JsonTokenType.END_OBJECT, "}"); + case '[': + return new JsonToken(JsonTokenType.BEGIN_ARRAY, "["); + case ']': + return new JsonToken(JsonTokenType.END_ARRAY, "]"); + case '(': + return new JsonToken(JsonTokenType.LEFT_PAREN, "("); + case ')': + return new JsonToken(JsonTokenType.RIGHT_PAREN, ")"); + case ':': + + c = buffer.read(); + buffer.unread(c); + + if (c == '#') { // for binding the SQL style ':#{#firstname}"' + return scanBindString(); + } + + return new JsonToken(JsonTokenType.COLON, ":"); + case ',': + return new JsonToken(JsonTokenType.COMMA, ","); + case '\'': + case '"': + return scanString((char) c); + case '/': + return scanRegularExpression(); + default: + if (c == '-' || Character.isDigit(c)) { + return scanNumber((char) c); + } else if (c == '$' || c == '_' || Character.isLetter(c)) { + return scanUnquotedString(); + } else if (c == '?') { // for binding parameters. Both simple and SpEL ones. + return scanBindString(); + } else { + int position = buffer.getPosition(); + buffer.unread(c); + throw new JsonParseException("Invalid JSON input; Position: %d; Character: '%c'.", position, c); + } + } + } + + /** + * Reads {@code RegularExpressionToken} from source. The following variants of lexemes are possible: + * + *

          +	 *  /pattern/
          +	 *  /\(pattern\)/
          +	 *  /pattern/ims
          +	 * 
          + * + * Options can include 'i','m','x','s' + * + * @return The regular expression token. + * @throws JsonParseException if regular expression representation is not valid. + */ + private JsonToken scanRegularExpression() { + + int start = buffer.getPosition() - 1; + int options = -1; + + RegularExpressionState state = RegularExpressionState.IN_PATTERN; + while (true) { + int c = buffer.read(); + switch (state) { + case IN_PATTERN: + switch (c) { + case -1: + state = RegularExpressionState.INVALID; + break; + case '/': + state = RegularExpressionState.IN_OPTIONS; + options = buffer.getPosition(); + break; + case '\\': + state = RegularExpressionState.IN_ESCAPE_SEQUENCE; + break; + default: + state = RegularExpressionState.IN_PATTERN; + break; + } + break; + case IN_ESCAPE_SEQUENCE: + state = RegularExpressionState.IN_PATTERN; + break; + case IN_OPTIONS: + switch (c) { + case 'i': + case 'm': + case 'x': + case 's': + state = RegularExpressionState.IN_OPTIONS; + break; + case ',': + case '}': + case ']': + case ')': + case -1: + state = RegularExpressionState.DONE; + break; + default: + if (Character.isWhitespace(c)) { + state = RegularExpressionState.DONE; + } else { + state = RegularExpressionState.INVALID; + } + break; + } + break; + default: + break; + } + + switch (state) { + case DONE: + buffer.unread(c); + int end = buffer.getPosition(); + BsonRegularExpression regex = new BsonRegularExpression(buffer.substring(start + 1, options - 1), + buffer.substring(options, end)); + return new JsonToken(JsonTokenType.REGULAR_EXPRESSION, regex); + case INVALID: + throw new JsonParseException("Invalid JSON regular expression; Position: %d.", buffer.getPosition()); + default: + } + } + } + + /** + * Reads {@code StringToken} from source. + * + * @return The string token. 
+ */ + private JsonToken scanBindString() { + + int start = buffer.getPosition() - 1; + int c = buffer.read(); + + int charCount = 0; + boolean isExpression = false; + int parenthesisCount = 0; + + while (c == '$' || c == '_' || Character.isLetterOrDigit(c) || c == '#' || c == '{' || c == '[' + || (isExpression && isExpressionAllowedChar(c))) { + + if (charCount == 0 && (c == '#' || c == '$')) { + isExpression = true; + } else if (isExpression) { + if (c == '{') { + parenthesisCount++; + } else if (c == '}') { + + parenthesisCount--; + if (parenthesisCount == 0) { + c = buffer.read(); + break; + } + } + } + charCount++; + c = buffer.read(); + } + buffer.unread(c); + String lexeme = buffer.substring(start, buffer.getPosition()); + + return new JsonToken(JsonTokenType.UNQUOTED_STRING, lexeme); + } + + private static boolean isExpressionAllowedChar(int c) { + + return (c == '+' || // + c == '-' || // + c == ':' || // + c == '.' || // + c == ',' || // + c == '*' || // + c == '/' || // + c == '%' || // + c == '(' || // + c == ')' || // + c == '[' || // + c == ']' || // + c == '#' || // + c == '{' || // + c == '}' || // + c == '@' || // + c == '^' || // + c == '!' || // + c == '=' || // + c == '&' || // + c == '|' || // + c == '?' || // + c == '$' || // + c == '>' || // + c == '<' || // + c == '"' || // + c == '\'' || // + c == ' '); + } + + /** + * Reads {@code StringToken} from source. + * + * @return The string token. + */ + private JsonToken scanUnquotedString() { + int start = buffer.getPosition() - 1; + int c = buffer.read(); + while (c == '$' || c == '_' || Character.isLetterOrDigit(c)) { + c = buffer.read(); + } + buffer.unread(c); + String lexeme = buffer.substring(start, buffer.getPosition()); + return new JsonToken(JsonTokenType.UNQUOTED_STRING, lexeme); + } + + /** + * Reads number token from source. The following variants of lexemes are possible: + * + *
          +	 *  12
          +	 *  123
          +	 *  -0
          +	 *  -345
          +	 *  -0.0
          +	 *  0e1
          +	 *  0e-1
          +	 *  -0e-1
          +	 *  1e12
          +	 *  -Infinity
          +	 * 
          + * + * @return The number token. + * @throws JsonParseException if number representation is invalid. + */ + // CHECKSTYLE:OFF + private JsonToken scanNumber(final char firstChar) { + + int c = firstChar; + + int start = buffer.getPosition() - 1; + + NumberState state; + + switch (c) { + case '-': + state = NumberState.SAW_LEADING_MINUS; + break; + case '0': + state = NumberState.SAW_LEADING_ZERO; + break; + default: + state = NumberState.SAW_INTEGER_DIGITS; + break; + } + + JsonTokenType type = JsonTokenType.INT64; + + while (true) { + c = buffer.read(); + switch (state) { + case SAW_LEADING_MINUS: + switch (c) { + case '0': + state = NumberState.SAW_LEADING_ZERO; + break; + case 'I': + state = NumberState.SAW_MINUS_I; + break; + default: + if (Character.isDigit(c)) { + state = NumberState.SAW_INTEGER_DIGITS; + } else { + state = NumberState.INVALID; + } + break; + } + break; + case SAW_LEADING_ZERO: + switch (c) { + case '.': + state = NumberState.SAW_DECIMAL_POINT; + break; + case 'e': + case 'E': + state = NumberState.SAW_EXPONENT_LETTER; + break; + case ',': + case '}': + case ']': + case ')': + case -1: + state = NumberState.DONE; + break; + default: + if (Character.isDigit(c)) { + state = NumberState.SAW_INTEGER_DIGITS; + } else if (Character.isWhitespace(c)) { + state = NumberState.DONE; + } else { + state = NumberState.INVALID; + } + break; + } + break; + case SAW_INTEGER_DIGITS: + switch (c) { + case '.': + state = NumberState.SAW_DECIMAL_POINT; + break; + case 'e': + case 'E': + state = NumberState.SAW_EXPONENT_LETTER; + break; + case ',': + case '}': + case ']': + case ')': + case -1: + state = NumberState.DONE; + break; + default: + if (Character.isDigit(c)) { + state = NumberState.SAW_INTEGER_DIGITS; + } else if (Character.isWhitespace(c)) { + state = NumberState.DONE; + } else { + state = NumberState.INVALID; + } + break; + } + break; + case SAW_DECIMAL_POINT: + type = JsonTokenType.DOUBLE; + if (Character.isDigit(c)) { + state = 
NumberState.SAW_FRACTION_DIGITS; + } else { + state = NumberState.INVALID; + } + break; + case SAW_FRACTION_DIGITS: + switch (c) { + case 'e': + case 'E': + state = NumberState.SAW_EXPONENT_LETTER; + break; + case ',': + case '}': + case ']': + case ')': + case -1: + state = NumberState.DONE; + break; + default: + if (Character.isDigit(c)) { + state = NumberState.SAW_FRACTION_DIGITS; + } else if (Character.isWhitespace(c)) { + state = NumberState.DONE; + } else { + state = NumberState.INVALID; + } + break; + } + break; + case SAW_EXPONENT_LETTER: + type = JsonTokenType.DOUBLE; + switch (c) { + case '+': + case '-': + state = NumberState.SAW_EXPONENT_SIGN; + break; + default: + if (Character.isDigit(c)) { + state = NumberState.SAW_EXPONENT_DIGITS; + } else { + state = NumberState.INVALID; + } + break; + } + break; + case SAW_EXPONENT_SIGN: + if (Character.isDigit(c)) { + state = NumberState.SAW_EXPONENT_DIGITS; + } else { + state = NumberState.INVALID; + } + break; + case SAW_EXPONENT_DIGITS: + switch (c) { + case ',': + case '}': + case ']': + case ')': + state = NumberState.DONE; + break; + default: + if (Character.isDigit(c)) { + state = NumberState.SAW_EXPONENT_DIGITS; + } else if (Character.isWhitespace(c)) { + state = NumberState.DONE; + } else { + state = NumberState.INVALID; + } + break; + } + break; + case SAW_MINUS_I: + boolean sawMinusInfinity = true; + char[] nfinity = new char[] { 'n', 'f', 'i', 'n', 'i', 't', 'y' }; + for (int i = 0; i < nfinity.length; i++) { + if (c != nfinity[i]) { + sawMinusInfinity = false; + break; + } + c = buffer.read(); + } + if (sawMinusInfinity) { + type = JsonTokenType.DOUBLE; + switch (c) { + case ',': + case '}': + case ']': + case ')': + case -1: + state = NumberState.DONE; + break; + default: + if (Character.isWhitespace(c)) { + state = NumberState.DONE; + } else { + state = NumberState.INVALID; + } + break; + } + } else { + state = NumberState.INVALID; + } + break; + default: + } + + switch (state) { + case INVALID: + 
throw new JsonParseException("Invalid JSON number"); + case DONE: + buffer.unread(c); + String lexeme = buffer.substring(start, buffer.getPosition()); + if (type == JsonTokenType.DOUBLE) { + return new JsonToken(JsonTokenType.DOUBLE, Double.parseDouble(lexeme)); + } else { + long value = Long.parseLong(lexeme); + if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) { + return new JsonToken(JsonTokenType.INT64, value); + } else { + return new JsonToken(JsonTokenType.INT32, (int) value); + } + } + default: + } + } + + } + // CHECKSTYLE:ON + + /** + * Reads {@code StringToken} from source. + * + * @return The string token. + */ + // CHECKSTYLE:OFF + private JsonToken scanString(final char quoteCharacter) { + + StringBuilder sb = new StringBuilder(); + + while (true) { + int c = buffer.read(); + switch (c) { + case '\\': + c = buffer.read(); + switch (c) { + case '\'': + sb.append('\''); + break; + case '"': + sb.append('"'); + break; + case '\\': + sb.append('\\'); + break; + case '/': + sb.append('/'); + break; + case 'b': + sb.append('\b'); + break; + case 'f': + sb.append('\f'); + break; + case 'n': + sb.append('\n'); + break; + case 'r': + sb.append('\r'); + break; + case 't': + sb.append('\t'); + break; + case 'u': + int u1 = buffer.read(); + int u2 = buffer.read(); + int u3 = buffer.read(); + int u4 = buffer.read(); + if (u4 != -1) { + String hex = new String(new char[] { (char) u1, (char) u2, (char) u3, (char) u4 }); + sb.append((char) Integer.parseInt(hex, 16)); + } + break; + default: + throw new JsonParseException("Invalid escape sequence in JSON string '\\%c'.", c); + } + break; + + default: + if (c == quoteCharacter) { + return new JsonToken(JsonTokenType.STRING, sb.toString()); + } + if (c != -1) { + sb.append((char) c); + } + } + if (c == -1) { + throw new JsonParseException("End of file in JSON string."); + } + } + } + + private enum NumberState { + SAW_LEADING_MINUS, SAW_LEADING_ZERO, SAW_INTEGER_DIGITS, SAW_DECIMAL_POINT, SAW_FRACTION_DIGITS, 
SAW_EXPONENT_LETTER, SAW_EXPONENT_SIGN, SAW_EXPONENT_DIGITS, SAW_MINUS_I, DONE, INVALID + } + + private enum RegularExpressionState { + IN_PATTERN, IN_ESCAPE_SEQUENCE, IN_OPTIONS, DONE, INVALID + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonToken.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonToken.java new file mode 100644 index 0000000000..293736123e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonToken.java @@ -0,0 +1,86 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import static java.lang.String.*; + +import org.bson.BsonDouble; +import org.bson.json.JsonParseException; +import org.bson.types.Decimal128; + +/** + * JsonToken implementation borrowed from MongoDB + * Inc. licensed under the Apache License, Version 2.0.
          + * + * @author Jeff Yemin + * @author Ross Lawley + * @since 2.2 + */ +class JsonToken { + + private final Object value; + private final JsonTokenType type; + + JsonToken(final JsonTokenType type, final Object value) { + + this.value = value; + this.type = type; + } + + Object getValue() { + return value; + } + + T getValue(final Class clazz) { + + try { + if (Long.class == clazz) { + if (value instanceof Integer integerValue) { + return clazz.cast(integerValue.longValue()); + } else if (value instanceof String stringValue) { + return clazz.cast(Long.valueOf(stringValue)); + } + } else if (Integer.class == clazz) { + if (value instanceof String stringValue) { + return clazz.cast(Integer.valueOf(stringValue)); + } + } else if (Double.class == clazz) { + if (value instanceof String stringValue) { + return clazz.cast(Double.valueOf(stringValue)); + } + } else if (Decimal128.class == clazz) { + if (value instanceof Integer integerValue) { + return clazz.cast(new Decimal128(integerValue)); + } else if (value instanceof Long longValue) { + return clazz.cast(new Decimal128(longValue)); + } else if (value instanceof Double doubleValue) { + return clazz.cast(new BsonDouble(doubleValue).decimal128Value()); + } else if (value instanceof String stringValue) { + return clazz.cast(Decimal128.parse(stringValue)); + } + } + + return clazz.cast(value); + } catch (Exception e) { + throw new JsonParseException(format("Exception converting value '%s' to type %s", value, clazz.getName()), e); + } + } + + public JsonTokenType getType() { + return type; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonTokenType.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonTokenType.java new file mode 100644 index 0000000000..bbdfbc4ae8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/JsonTokenType.java @@ -0,0 +1,107 @@ +/* + * Copyright 2008-2025 the 
original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +/** + * JsonTokenType implementation borrowed from MongoDB + * Inc. licensed under the Apache License, Version 2.0.
          + * + * @author Jeff Yemin + * @author Ross Lawley + * @since 2.2 + */ +enum JsonTokenType { + /** + * An invalid token. + */ + INVALID, + + /** + * A begin array token (a '['). + */ + BEGIN_ARRAY, + + /** + * A begin object token (a '{'). + */ + BEGIN_OBJECT, + + /** + * An end array token (a ']'). + */ + END_ARRAY, + + /** + * A left parenthesis (a '('). + */ + LEFT_PAREN, + + /** + * A right parenthesis (a ')'). + */ + RIGHT_PAREN, + + /** + * An end object token (a '}'). + */ + END_OBJECT, + + /** + * A colon token (a ':'). + */ + COLON, + + /** + * A comma token (a ','). + */ + COMMA, + + /** + * A Double token. + */ + DOUBLE, + + /** + * An Int32 token. + */ + INT32, + + /** + * An Int64 token. + */ + INT64, + + /** + * A regular expression token. + */ + REGULAR_EXPRESSION, + + /** + * A string token. + */ + STRING, + + /** + * An unquoted string token. + */ + UNQUOTED_STRING, + + /** + * An end of file token. + */ + END_OF_FILE +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingContext.java new file mode 100644 index 0000000000..b4fd13b3af --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingContext.java @@ -0,0 +1,178 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import java.util.Map; +import java.util.function.Function; +import java.util.function.Supplier; + +import org.springframework.data.mapping.model.SpELExpressionEvaluator; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.data.util.Lazy; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.Expression; +import org.springframework.expression.ExpressionParser; +import org.springframework.expression.ParseException; +import org.springframework.expression.ParserContext; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; + +/** + * Reusable context for binding parameters to a placeholder or a SpEL expression within a JSON structure.
          + * To be used along with {@link ParameterBindingDocumentCodec#decode(String, ParameterBindingContext)}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + */ +public class ParameterBindingContext { + + private final ValueProvider valueProvider; + private final ValueExpressionEvaluator expressionEvaluator; + + /** + * @param valueProvider + * @param expressionParser + * @param evaluationContext + * @deprecated since 4.4.0, use {@link #ParameterBindingContext(ValueProvider, ExpressionParser, Supplier)} instead. + */ + @Deprecated(since = "4.4.0") + public ParameterBindingContext(ValueProvider valueProvider, SpelExpressionParser expressionParser, + EvaluationContext evaluationContext) { + this(valueProvider, expressionParser, () -> evaluationContext); + } + + /** + * @param valueProvider + * @param expressionEvaluator + * @since 3.1 + * @deprecated since 4.4.0, use {@link #ParameterBindingContext(ValueProvider, ValueExpressionEvaluator)} instead. + */ + @Deprecated(since = "4.4.0") + public ParameterBindingContext(ValueProvider valueProvider, SpELExpressionEvaluator expressionEvaluator) { + this(valueProvider, (ValueExpressionEvaluator) expressionEvaluator); + } + + /** + * @param valueProvider + * @param expressionParser + * @param evaluationContext a {@link Supplier} for {@link Lazy} context retrieval. 
+ * @since 2.2.3 + */ + public ParameterBindingContext(ValueProvider valueProvider, ExpressionParser expressionParser, + Supplier evaluationContext) { + this(valueProvider, new EvaluationContextExpressionEvaluator(valueProvider, unwrap(expressionParser)) { + @Override + public EvaluationContext getEvaluationContext(String expressionString) { + return evaluationContext.get(); + } + }); + } + + private static ExpressionParser unwrap(ExpressionParser expressionParser) { + return new ExpressionParser() { + @Override + public Expression parseExpression(String expressionString) throws ParseException { + return expressionParser.parseExpression(unwrap(expressionString)); + } + + @Override + public Expression parseExpression(String expressionString, ParserContext context) throws ParseException { + return expressionParser.parseExpression(unwrap(expressionString), context); + } + }; + } + + private static String unwrap(String expressionString) { + return expressionString.startsWith("#{") && expressionString.endsWith("}") + ? expressionString.substring(2, expressionString.length() - 1).trim() + : expressionString; + } + + /** + * @param valueProvider + * @param expressionEvaluator + * @since 4.4.0 + */ + public ParameterBindingContext(ValueProvider valueProvider, ValueExpressionEvaluator expressionEvaluator) { + this.valueProvider = valueProvider; + this.expressionEvaluator = expressionEvaluator; + } + + /** + * Create a new {@link ParameterBindingContext} that is capable of expression parsing and can provide a + * {@link EvaluationContext} based on {@link ExpressionDependencies}. 
+ * + * @param valueProvider + * @param expressionParser + * @param contextFunction + * @return + * @since 3.1 + */ + public static ParameterBindingContext forExpressions(ValueProvider valueProvider, ExpressionParser expressionParser, + Function contextFunction) { + + return new ParameterBindingContext(valueProvider, + new EvaluationContextExpressionEvaluator(valueProvider, expressionParser) { + + @Override + public EvaluationContext getEvaluationContext(String expressionString) { + + Expression expression = getParsedExpression(expressionString); + ExpressionDependencies dependencies = ExpressionDependencies.discover(expression); + return contextFunction.apply(dependencies); + } + }); + } + + /** + * Create a new {@link ParameterBindingContext} that is capable of expression parsing. + * + * @param valueProvider + * @param expressionEvaluator + * @return + * @since 4.4.0 + */ + public static ParameterBindingContext forExpressions(ValueProvider valueProvider, + ValueExpressionEvaluator expressionEvaluator) { + + return new ParameterBindingContext(valueProvider, expressionEvaluator); + } + + @Nullable + public Object bindableValueForIndex(int index) { + return valueProvider.getBindableValue(index); + } + + @Nullable + public Object evaluateExpression(String expressionString) { + return expressionEvaluator.evaluate(expressionString); + } + + @Nullable + public Object evaluateExpression(String expressionString, Map variables) { + + if (expressionEvaluator instanceof EvaluationContextExpressionEvaluator expressionEvaluator) { + return expressionEvaluator.evaluateExpression(expressionString, variables); + } + return expressionEvaluator.evaluate(expressionString); + } + + public ValueProvider getValueProvider() { + return valueProvider; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingDocumentCodec.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingDocumentCodec.java new file mode 100644 index 0000000000..ffa226ab69 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingDocumentCodec.java @@ -0,0 +1,412 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import static java.util.Arrays.*; +import static org.bson.assertions.Assertions.*; +import static org.bson.codecs.configuration.CodecRegistries.*; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; + +import org.bson.AbstractBsonReader.State; +import org.bson.BsonBinarySubType; +import org.bson.BsonDocument; +import org.bson.BsonDocumentWriter; +import org.bson.BsonInvalidOperationException; +import org.bson.BsonReader; +import org.bson.BsonType; +import org.bson.BsonValue; +import org.bson.BsonWriter; +import org.bson.Document; +import org.bson.Transformer; +import org.bson.codecs.*; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.json.JsonParseException; + +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mapping.model.ValueExpressionEvaluator; +import org.springframework.data.mongodb.core.mapping.FieldName; 
+import org.springframework.data.spel.EvaluationContextProvider; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; +import org.springframework.util.NumberUtils; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * A {@link Codec} implementation that allows binding parameters to placeholders or SpEL expressions when decoding a + * JSON String.
          + * Modified version of MongoDB + * Inc. DocumentCodec licensed under the Apache License, Version 2.0.
          + * + * @author Jeff Yemin + * @author Ross Lawley + * @author Ralph Schaer + * @author Christoph Strobl + * @author Rocco Lagrotteria + * @since 2.2 + */ +public class ParameterBindingDocumentCodec implements CollectibleCodec { + + private static final String ID_FIELD_NAME = FieldName.ID.name(); + private static final CodecRegistry DEFAULT_REGISTRY = fromProviders( + asList(new ValueCodecProvider(), new BsonValueCodecProvider(), new DocumentCodecProvider())); + private static final BsonTypeClassMap DEFAULT_BSON_TYPE_CLASS_MAP = new BsonTypeClassMap(); + + private final BsonTypeCodecMap bsonTypeCodecMap; + private final CodecRegistry registry; + private final IdGenerator idGenerator; + private final Transformer valueTransformer; + + /** + * Construct a new instance with a default {@code CodecRegistry}. + */ + public ParameterBindingDocumentCodec() { + this(DEFAULT_REGISTRY); + } + + /** + * Construct a new instance with the given registry. + * + * @param registry the registry + */ + public ParameterBindingDocumentCodec(final CodecRegistry registry) { + this(registry, DEFAULT_BSON_TYPE_CLASS_MAP); + } + + /** + * Construct a new instance with the given registry and BSON type class map. + * + * @param registry the registry + * @param bsonTypeClassMap the BSON type class map + */ + public ParameterBindingDocumentCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTypeClassMap) { + this(registry, bsonTypeClassMap, null); + } + + /** + * Construct a new instance with the given registry and BSON type class map. The transformer is applied as a last step + * when decoding values, which allows users of this codec to control the decoding process. For example, a user of this + * class could substitute a value decoded as a Document with an instance of a special purpose class (e.g., one + * representing a DBRef in MongoDB). 
+ * + * @param registry the registry + * @param bsonTypeClassMap the BSON type class map + * @param valueTransformer the value transformer to use as a final step when decoding the value of any field in the + * document + */ + public ParameterBindingDocumentCodec(final CodecRegistry registry, final BsonTypeClassMap bsonTypeClassMap, + final Transformer valueTransformer) { + this.registry = notNull("registry", registry); + this.bsonTypeCodecMap = new BsonTypeCodecMap(notNull("bsonTypeClassMap", bsonTypeClassMap), registry); + this.idGenerator = new ObjectIdGenerator(); + this.valueTransformer = valueTransformer != null ? valueTransformer : new Transformer() { + @Override + public Object transform(final Object value) { + return value; + } + }; + } + + @Override + public boolean documentHasId(final Document document) { + return document.containsKey(ID_FIELD_NAME); + } + + @Override + public BsonValue getDocumentId(final Document document) { + if (!documentHasId(document)) { + throw new IllegalStateException("The document does not contain an _id"); + } + + Object id = document.get(ID_FIELD_NAME); + if (id instanceof BsonValue bsonValue) { + return bsonValue; + } + + BsonDocument idHoldingDocument = new BsonDocument(); + BsonWriter writer = new BsonDocumentWriter(idHoldingDocument); + writer.writeStartDocument(); + writer.writeName(ID_FIELD_NAME); + writeValue(writer, EncoderContext.builder().build(), id); + writer.writeEndDocument(); + return idHoldingDocument.get(ID_FIELD_NAME); + } + + @Override + public Document generateIdIfAbsentFromDocument(final Document document) { + if (!documentHasId(document)) { + document.put(ID_FIELD_NAME, idGenerator.generate()); + } + return document; + } + + @Override + public void encode(final BsonWriter writer, final Document document, final EncoderContext encoderContext) { + writeMap(writer, document, encoderContext); + } + + // Spring Data Customization START + public Document decode(@Nullable String json, Object[] values) { + + 
 return decode(json, new ParameterBindingContext((index) -> values[index], new SpelExpressionParser(), + EvaluationContextProvider.DEFAULT.getEvaluationContext(values))); + } + + public Document decode(@Nullable String json, ParameterBindingContext bindingContext) { + + if (!StringUtils.hasText(json)) { + return new Document(); + } + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, bindingContext); + return this.decode(reader, DecoderContext.builder().build()); + } + + /** + * Determine {@link ExpressionDependencies} from Expressions that are nested in the {@code json} content. Returns + * {@link Optional#empty()} if {@code json} is empty or if it does not contain any SpEL expressions. + * + * @param json + * @param expressionParser + * @return merged {@link ExpressionDependencies} object if expressions were found, otherwise + * {@link ExpressionDependencies#none()}. + * @since 3.1 + */ + public ExpressionDependencies captureExpressionDependencies(@Nullable String json, ValueProvider valueProvider, + ValueExpressionParser expressionParser) { + + if (!StringUtils.hasText(json)) { + return ExpressionDependencies.none(); + } + + DependencyCapturingExpressionEvaluator expressionEvaluator = new DependencyCapturingExpressionEvaluator( + expressionParser); + this.decode(new ParameterBindingJsonReader(json, new ParameterBindingContext(valueProvider, expressionEvaluator)), + DecoderContext.builder().build()); + + return expressionEvaluator.getCapturedDependencies(); + } + + @SuppressWarnings({ "rawtypes", "unchecked" }) + @Override + public Document decode(final BsonReader reader, final DecoderContext decoderContext) { + + if (reader instanceof ParameterBindingJsonReader bindingReader) { + + // check if the reader has actually found something to replace on top level and did so. 
+ // binds just placeholder queries like: `@Query(?0)` + if (bindingReader.currentValue instanceof org.bson.Document document) { + return document; + } else if (bindingReader.currentValue instanceof String stringValue) { + try { + return decode(stringValue, new Object[0]); + } catch (JsonParseException jsonParseException) { + throw new IllegalArgumentException("Expression result is not a valid json document", jsonParseException); + } + } else if (bindingReader.currentValue instanceof Map) { + return new Document((Map) bindingReader.currentValue); + } + } + + Document document = new Document(); + + try { + + reader.readStartDocument(); + + while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { + String fieldName = reader.readName(); + Object value = readValue(reader, decoderContext); + document.put(fieldName, value); + } + + reader.readEndDocument(); + + } catch (JsonParseException | BsonInvalidOperationException e) { + try { + + Object value = readValue(reader, decoderContext); + if (value instanceof Map map) { + if (!map.isEmpty()) { + return new Document((Map) value); + } + } + } catch (Exception ex) { + e.addSuppressed(ex); + throw e; + } + } + + return document; + } + + // Spring Data Customization END + + @Override + public Class getEncoderClass() { + return Document.class; + } + + private void beforeFields(final BsonWriter bsonWriter, final EncoderContext encoderContext, + final Map document) { + if (encoderContext.isEncodingCollectibleDocument() && document.containsKey(ID_FIELD_NAME)) { + bsonWriter.writeName(ID_FIELD_NAME); + writeValue(bsonWriter, encoderContext, document.get(ID_FIELD_NAME)); + } + } + + private boolean skipField(final EncoderContext encoderContext, final String key) { + return encoderContext.isEncodingCollectibleDocument() && key.equals(ID_FIELD_NAME); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private void writeValue(final BsonWriter writer, final EncoderContext encoderContext, final Object value) { + if (value == 
null) { + writer.writeNull(); + } else if (value instanceof Iterable) { + writeIterable(writer, (Iterable) value, encoderContext.getChildContext()); + } else if (value instanceof Map) { + writeMap(writer, (Map) value, encoderContext.getChildContext()); + } else { + Codec codec = registry.get(value.getClass()); + encoderContext.encodeWithChildContext(codec, writer, value); + } + } + + private void writeMap(final BsonWriter writer, final Map map, final EncoderContext encoderContext) { + writer.writeStartDocument(); + + beforeFields(writer, encoderContext, map); + + for (final Map.Entry entry : map.entrySet()) { + if (skipField(encoderContext, entry.getKey())) { + continue; + } + writer.writeName(entry.getKey()); + writeValue(writer, encoderContext, entry.getValue()); + } + writer.writeEndDocument(); + } + + private void writeIterable(final BsonWriter writer, final Iterable list, + final EncoderContext encoderContext) { + writer.writeStartArray(); + for (final Object value : list) { + writeValue(writer, encoderContext, value); + } + writer.writeEndArray(); + } + + private Object readValue(final BsonReader reader, final DecoderContext decoderContext) { + + // Spring Data Customization START + if (reader instanceof ParameterBindingJsonReader bindingReader) { + + // check if the reader has actually found something to replace and did so. 
+ // resets the reader state to move on after the actual value + // returns the replacement value + if (bindingReader.currentValue != null) { + + Object value = bindingReader.currentValue; + + if (ObjectUtils.nullSafeEquals(BsonType.DATE_TIME, bindingReader.getCurrentBsonType()) + && !(value instanceof Date)) { + + if (value instanceof Number numberValue) { + value = new Date(NumberUtils.convertNumberToTargetClass(numberValue, Long.class)); + } else if (value instanceof String stringValue) { + value = new Date(DateTimeFormatter.parse(stringValue)); + } + } + + bindingReader.setState(State.TYPE); + bindingReader.currentValue = null; + return value; + } + } + + // Spring Data Customization END + + BsonType bsonType = reader.getCurrentBsonType(); + if (bsonType == BsonType.NULL) { + reader.readNull(); + return null; + } else if (bsonType == BsonType.ARRAY) { + return readList(reader, decoderContext); + } else if (bsonType == BsonType.BINARY && BsonBinarySubType.isUuid(reader.peekBinarySubType()) + && reader.peekBinarySize() == 16) { + return registry.get(UUID.class).decode(reader, decoderContext); + } + + // Spring Data Customization START + // By default the registry uses DocumentCodec for parsing. + // We need to reroute that to our very own implementation or we'll end up only mapping half the placeholders. 
+ Codec codecToUse = bsonTypeCodecMap.get(bsonType); + if (codecToUse instanceof org.bson.codecs.DocumentCodec) { + codecToUse = this; + } + + return valueTransformer.transform(codecToUse.decode(reader, decoderContext)); + // Spring Data Customization END + } + + private List readList(final BsonReader reader, final DecoderContext decoderContext) { + reader.readStartArray(); + List list = new ArrayList<>(); + while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) { + list.add(readValue(reader, decoderContext)); + } + reader.readEndArray(); + return list; + } + + /** + * @author Christoph Strobl + * @since 3.1 + */ + static class DependencyCapturingExpressionEvaluator implements ValueExpressionEvaluator { + + private static final Object PLACEHOLDER = new Object(); + + private final ValueExpressionParser expressionParser; + private final List dependencies = new ArrayList<>(); + + DependencyCapturingExpressionEvaluator(ValueExpressionParser expressionParser) { + this.expressionParser = expressionParser; + } + + @Nullable + @Override + public T evaluate(String expression) { + + dependencies.add(expressionParser.parse(expression).getExpressionDependencies()); + return (T) PLACEHOLDER; + } + + ExpressionDependencies getCapturedDependencies() { + return ExpressionDependencies.merged(dependencies); + } + + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReader.java new file mode 100644 index 0000000000..8dd42e2427 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReader.java @@ -0,0 +1,1722 @@ +/* + * Copyright 2008-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import static java.lang.String.*; + +import java.text.DateFormat; +import java.text.ParsePosition; +import java.text.SimpleDateFormat; +import java.time.format.DateTimeParseException; +import java.util.Base64; +import java.util.Calendar; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.TimeZone; +import java.util.function.Supplier; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.bson.*; +import org.bson.json.JsonParseException; +import org.bson.types.Decimal128; +import org.bson.types.MaxKey; +import org.bson.types.MinKey; +import org.bson.types.ObjectId; +import org.springframework.data.spel.EvaluationContextProvider; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; +import org.springframework.util.NumberUtils; +import org.springframework.util.ObjectUtils; + +/** + * Reads a JSON and evaluates placehoders and SpEL expressions. Modified version of MongoDB Inc. + * JsonReader licensed under the Apache License, Version 2.0.
          + * + * @author Jeff Yemin + * @author Ross Lawley + * @author Trisha Gee + * @author Robert Guo + * @author Florian Buecklers + * @author Brendon Puntin + * @author Christoph Strobl + * @author Rocco Lagrotteria + * @since 2.2 + */ +public class ParameterBindingJsonReader extends AbstractBsonReader { + + private static final Pattern ENTIRE_QUERY_BINDING_PATTERN = Pattern.compile("^\\?(\\d+)$|^[\\?:][#$]\\{.*\\}$"); + private static final Pattern PARAMETER_BINDING_PATTERN = Pattern.compile("\\?(\\d+)"); + private static final Pattern EXPRESSION_BINDING_PATTERN = Pattern.compile("[\\?:][#$]\\{.*\\}"); + private static final Pattern SPEL_PARAMETER_BINDING_PATTERN = Pattern.compile("('\\?(\\d+)'|\\?(\\d+))"); + + private final ParameterBindingContext bindingContext; + + private final JsonScanner scanner; + private JsonToken pushedToken; + Object currentValue; + + /** + * Constructs a new instance with the given JSON string. + * + * @param json A string representation of a JSON. + */ + public ParameterBindingJsonReader(final String json) { + this(json, new Object[] {}); + } + + // Spring Data Customization START + + /** + * Constructs a new instance with the given JSON string. + * + * @param json A string representation of a JSON. 
+ */ + public ParameterBindingJsonReader(String json, Object[] values) { + + this(json, (index) -> values[index], new SpelExpressionParser(), + EvaluationContextProvider.DEFAULT.getEvaluationContext(values)); + } + + public ParameterBindingJsonReader(String json, ValueProvider accessor, SpelExpressionParser spelExpressionParser, + EvaluationContext evaluationContext) { + + this(json, accessor, spelExpressionParser, () -> evaluationContext); + } + + /** + * @since 2.2.3 + */ + public ParameterBindingJsonReader(String json, ValueProvider accessor, SpelExpressionParser spelExpressionParser, + Supplier evaluationContext) { + + this(json, new ParameterBindingContext(accessor, spelExpressionParser, evaluationContext)); + + } + + public ParameterBindingJsonReader(String json, ParameterBindingContext bindingContext) { + + this.scanner = new JsonScanner(json); + setContext(new Context(null, BsonContextType.TOP_LEVEL)); + + this.bindingContext = bindingContext; + + Matcher matcher = ENTIRE_QUERY_BINDING_PATTERN.matcher(json); + if (matcher.find()) { + BindableValue bindingResult = bindableValueFor(new JsonToken(JsonTokenType.UNQUOTED_STRING, json)); + currentValue = bindingResult.getValue(); + } + } + + // Spring Data Customization END + + @Override + protected BsonBinary doReadBinaryData() { + return (BsonBinary) currentValue; + } + + @Override + protected byte doPeekBinarySubType() { + return doReadBinaryData().getType(); + } + + @Override + protected int doPeekBinarySize() { + return doReadBinaryData().getData().length; + } + + @Override + protected boolean doReadBoolean() { + return (Boolean) currentValue; + } + + // CHECKSTYLE:OFF + @Override + public BsonType readBsonType() { + + if (isClosed()) { + throw new IllegalStateException("This instance has been closed"); + } + if (getState() == State.INITIAL || getState() == State.DONE || getState() == State.SCOPE_DOCUMENT) { + // in JSON the top level value can be of any type so fall through + setState(State.TYPE); + } + if 
(getState() != State.TYPE) { + throwInvalidState("readBSONType", State.TYPE); + } + + if (getContext().getContextType() == BsonContextType.DOCUMENT) { + JsonToken nameToken = popToken(); + switch (nameToken.getType()) { + case STRING: + case UNQUOTED_STRING: + + // Spring Data Customization START + + setCurrentName(bindableValueFor(nameToken).getValue().toString()); + + // Spring Data Customization END + break; + case END_OBJECT: + setState(State.END_OF_DOCUMENT); + return BsonType.END_OF_DOCUMENT; + default: + throw new JsonParseException("JSON reader was expecting a name but found '%s'.", nameToken.getValue()); + } + + JsonToken colonToken = popToken(); + if (colonToken.getType() != JsonTokenType.COLON) { + throw new JsonParseException("JSON reader was expecting ':' but found '%s'.", colonToken.getValue()); + } + } + + JsonToken token = popToken(); + if (getContext().getContextType() == BsonContextType.ARRAY && token.getType() == JsonTokenType.END_ARRAY) { + setState(State.END_OF_ARRAY); + return BsonType.END_OF_DOCUMENT; + } + + // Spring Data Customization START + + boolean noValueFound = false; + BindableValue bindableValue = null; + + switch (token.getType()) { + case BEGIN_ARRAY: + setCurrentBsonType(BsonType.ARRAY); + break; + case BEGIN_OBJECT: + visitExtendedJSON(); + break; + case DOUBLE: + setCurrentBsonType(BsonType.DOUBLE); + currentValue = token.getValue(); + break; + case END_OF_FILE: + setCurrentBsonType(BsonType.END_OF_DOCUMENT); + break; + case INT32: + setCurrentBsonType(BsonType.INT32); + currentValue = token.getValue(); + break; + case INT64: + setCurrentBsonType(BsonType.INT64); + currentValue = token.getValue(); + break; + case REGULAR_EXPRESSION: + + setCurrentBsonType(BsonType.REGULAR_EXPRESSION); + currentValue = bindableValueFor(token).getValue(); + break; + case STRING: + + setCurrentBsonType(BsonType.STRING); + currentValue = bindableValueFor(token).getValue().toString(); + break; + case UNQUOTED_STRING: + + String value = 
token.getValue(String.class); + + if ("false".equals(value) || "true".equals(value)) { + setCurrentBsonType(BsonType.BOOLEAN); + currentValue = Boolean.parseBoolean(value); + } else if ("Infinity".equals(value)) { + setCurrentBsonType(BsonType.DOUBLE); + currentValue = Double.POSITIVE_INFINITY; + } else if ("NaN".equals(value)) { + setCurrentBsonType(BsonType.DOUBLE); + currentValue = Double.NaN; + } else if ("null".equals(value)) { + setCurrentBsonType(BsonType.NULL); + } else if ("undefined".equals(value)) { + setCurrentBsonType(BsonType.UNDEFINED); + } else if ("MinKey".equals(value)) { + visitEmptyConstructor(); + setCurrentBsonType(BsonType.MIN_KEY); + currentValue = new MinKey(); + } else if ("MaxKey".equals(value)) { + visitEmptyConstructor(); + setCurrentBsonType(BsonType.MAX_KEY); + currentValue = new MaxKey(); + } else if ("BinData".equals(value)) { + setCurrentBsonType(BsonType.BINARY); + currentValue = visitBinDataConstructor(); + } else if ("Date".equals(value)) { + currentValue = visitDateTimeConstructorWithOutNew(); + setCurrentBsonType(BsonType.STRING); + } else if ("HexData".equals(value)) { + setCurrentBsonType(BsonType.BINARY); + currentValue = visitHexDataConstructor(); + } else if ("ISODate".equals(value)) { + setCurrentBsonType(BsonType.DATE_TIME); + currentValue = visitISODateTimeConstructor(); + } else if ("NumberInt".equals(value)) { + setCurrentBsonType(BsonType.INT32); + currentValue = visitNumberIntConstructor(); + } else if ("NumberLong".equals(value)) { + setCurrentBsonType(BsonType.INT64); + currentValue = visitNumberLongConstructor(); + } else if ("NumberDecimal".equals(value)) { + setCurrentBsonType(BsonType.DECIMAL128); + currentValue = visitNumberDecimalConstructor(); + } else if ("ObjectId".equals(value)) { + setCurrentBsonType(BsonType.OBJECT_ID); + currentValue = visitObjectIdConstructor(); + } else if ("Timestamp".equals(value)) { + setCurrentBsonType(BsonType.TIMESTAMP); + currentValue = visitTimestampConstructor(); + } else 
if ("RegExp".equals(value)) { + setCurrentBsonType(BsonType.REGULAR_EXPRESSION); + currentValue = visitRegularExpressionConstructor(); + } else if ("DBPointer".equals(value)) { + setCurrentBsonType(BsonType.DB_POINTER); + currentValue = visitDBPointerConstructor(); + } else if ("UUID".equals(value)) { + setCurrentBsonType(BsonType.BINARY); + currentValue = visitUUIDConstructor(); + } else if ("new".equals(value)) { + visitNew(); + } else { + + bindableValue = bindableValueFor(token); + if (bindableValue != null) { + + if (bindableValue.getIndex() != -1) { + setCurrentBsonType(bindableValue.getType()); + } else { + setCurrentBsonType(BsonType.STRING); + } + + currentValue = bindableValue.getValue(); + } else { + noValueFound = true; + } + } + break; + default: + noValueFound = true; + break; + } + + // Spring Data Customization END + + if (noValueFound) { + throw new JsonParseException("JSON reader was expecting a value but found '%s'.", token.getValue()); + } + + if (getContext().getContextType() == BsonContextType.ARRAY + || getContext().getContextType() == BsonContextType.DOCUMENT) { + JsonToken commaToken = popToken(); + if (commaToken.getType() != JsonTokenType.COMMA) { + pushToken(commaToken); + } + } + + switch (getContext().getContextType()) { + case DOCUMENT: + case SCOPE_DOCUMENT: + default: + setState(State.NAME); + break; + case ARRAY: + case JAVASCRIPT_WITH_SCOPE: + case TOP_LEVEL: + setState(State.VALUE); + break; + } + return getCurrentBsonType(); + } + + // Spring Data Customization START + + @Override + public void setState(State newState) { + super.setState(newState); + } + + private BindableValue bindableValueFor(JsonToken token) { + + if (!JsonTokenType.STRING.equals(token.getType()) && !JsonTokenType.UNQUOTED_STRING.equals(token.getType()) + && !JsonTokenType.REGULAR_EXPRESSION.equals(token.getType())) { + return null; + } + + boolean isRegularExpression = token.getType().equals(JsonTokenType.REGULAR_EXPRESSION); + + BindableValue bindableValue 
= new BindableValue(); + String tokenValue = isRegularExpression ? token.getValue(BsonRegularExpression.class).getPattern() + : String.class.cast(token.getValue()); + Matcher matcher = PARAMETER_BINDING_PATTERN.matcher(tokenValue); + + if (token.getType().equals(JsonTokenType.UNQUOTED_STRING)) { + + Matcher regexMatcher = EXPRESSION_BINDING_PATTERN.matcher(tokenValue); + if (regexMatcher.find()) { + + String binding = regexMatcher.group(); + String expression = binding.substring(3, binding.length() - 1); + String expressionString = binding.substring(1); + + Matcher inSpelMatcher = SPEL_PARAMETER_BINDING_PATTERN.matcher(expression); // ?0 '?0' + Map innerSpelVariables = new HashMap<>(); + + while (inSpelMatcher.find()) { + + String group = inSpelMatcher.group(); + int index = computeParameterIndex(group); + Object value = getBindableValueForIndex(index); + String varName = "__QVar" + innerSpelVariables.size(); + expression = expression.replace(group, "#" + varName); + expressionString = expressionString.replace(group, "#" + varName); + if(group.startsWith("'")) { // retain the string semantic + innerSpelVariables.put(varName, nullSafeToString(value)); + } else { + innerSpelVariables.put(varName, value); + } + } + + Object value = evaluateExpression(expressionString, innerSpelVariables); + bindableValue.setValue(value); + bindableValue.setType(bsonTypeForValue(value)); + return bindableValue; + } + + if (matcher.find()) { + + int index = computeParameterIndex(matcher.group()); + Object bindableValueForIndex = getBindableValueForIndex(index); + bindableValue.setValue(bindableValueForIndex); + bindableValue.setType(bsonTypeForValue(bindableValueForIndex)); + return bindableValue; + } + + bindableValue.setValue(tokenValue); + bindableValue.setType(BsonType.STRING); + return bindableValue; + + } + + String computedValue = tokenValue; + + Matcher regexMatcher = EXPRESSION_BINDING_PATTERN.matcher(computedValue); + + while (regexMatcher.find()) { + + String binding = 
regexMatcher.group(); + String expression = binding.substring(3, binding.length() - 1); + String expressionString = binding.substring(1); + + Matcher inSpelMatcher = SPEL_PARAMETER_BINDING_PATTERN.matcher(expression); + Map innerSpelVariables = new HashMap<>(); + + while (inSpelMatcher.find()) { + + String group = inSpelMatcher.group(); + int index = computeParameterIndex(group); + Object value = getBindableValueForIndex(index); + String varName = "__QVar" + innerSpelVariables.size(); + expression = expression.replace(group, "#" + varName); + expressionString = expressionString.replace(group, "#" + varName); + if(group.startsWith("'")) { // retain the string semantic + innerSpelVariables.put(varName, nullSafeToString(value)); + } else { + innerSpelVariables.put(varName, value); + } + } + + computedValue = computedValue.replace(binding, + nullSafeToString(evaluateExpression(expressionString, innerSpelVariables))); + + bindableValue.setValue(computedValue); + bindableValue.setType(BsonType.STRING); + + return bindableValue; + } + + while (matcher.find()) { + + String group = matcher.group(); + int index = computeParameterIndex(group); + computedValue = computedValue.replace(group, nullSafeToString(getBindableValueForIndex(index))); + } + + if (isRegularExpression) { + + BsonRegularExpression originalExpression = token.getValue(BsonRegularExpression.class); + + bindableValue.setValue(new BsonRegularExpression(computedValue, originalExpression.getOptions())); + bindableValue.setType(BsonType.REGULAR_EXPRESSION); + } else { + + bindableValue.setValue(computedValue); + bindableValue.setType(BsonType.STRING); + } + return bindableValue; + } + + private static String nullSafeToString(@Nullable Object value) { + + if (value instanceof Date date) { + return DateTimeFormatter.format(date.getTime()); + } + + return ObjectUtils.nullSafeToString(value); + } + + private static int computeParameterIndex(String parameter) { + return NumberUtils.parseNumber(parameter.replace("?", 
"").replace("'", ""), Integer.class); + } + + private Object getBindableValueForIndex(int index) { + return bindingContext.bindableValueForIndex(index); + } + + private BsonType bsonTypeForValue(Object value) { + + if (value == null) { + return BsonType.NULL; + } + + Class type = value.getClass(); + + if (ClassUtils.isAssignable(String.class, type)) { + + if (((String) value).startsWith("{")) { + return BsonType.DOCUMENT; + } + return BsonType.STRING; + } + if (ClassUtils.isAssignable(Boolean.class, type)) { + return BsonType.BOOLEAN; + } + if (ClassUtils.isAssignable(Document.class, type)) { + return BsonType.DOCUMENT; + } + if (ClassUtils.isAssignable(Double.class, type)) { + return BsonType.DOUBLE; + } + if (ClassUtils.isAssignable(Long.class, type)) { + return BsonType.INT64; + } + if (ClassUtils.isAssignable(Integer.class, type)) { + return BsonType.INT32; + } + if (ClassUtils.isAssignable(Pattern.class, type)) { + return BsonType.REGULAR_EXPRESSION; + } + if (ClassUtils.isAssignable(Iterable.class, type)) { + return BsonType.ARRAY; + } + if (ClassUtils.isAssignable(Map.class, type)) { + return BsonType.DOCUMENT; + } + + return BsonType.UNDEFINED; + } + + @Nullable + private Object evaluateExpression(String expressionString) { + return bindingContext.evaluateExpression(expressionString, Collections.emptyMap()); + } + + @Nullable + private Object evaluateExpression(String expressionString, Map variables) { + return bindingContext.evaluateExpression(expressionString, variables); + } + + // Spring Data Customization END + // CHECKSTYLE:ON + + @Override + public Decimal128 doReadDecimal128() { + return (Decimal128) currentValue; + } + + @Override + protected long doReadDateTime() { + return (Long) currentValue; + } + + @Override + protected double doReadDouble() { + return (Double) currentValue; + } + + @Override + protected void doReadEndArray() { + setContext(getContext().getParentContext()); + + if (getContext().getContextType() == BsonContextType.ARRAY + || 
getContext().getContextType() == BsonContextType.DOCUMENT) { + JsonToken commaToken = popToken(); + if (commaToken.getType() != JsonTokenType.COMMA) { + pushToken(commaToken); + } + } + } + + @Override + protected void doReadEndDocument() { + setContext(getContext().getParentContext()); + if (getContext() != null && getContext().getContextType() == BsonContextType.SCOPE_DOCUMENT) { + setContext(getContext().getParentContext()); // JavaScriptWithScope + verifyToken(JsonTokenType.END_OBJECT); // outermost closing bracket for JavaScriptWithScope + } + + if (getContext() == null) { + throw new JsonParseException("Unexpected end of document."); + } + + if (getContext().getContextType() == BsonContextType.ARRAY + || getContext().getContextType() == BsonContextType.DOCUMENT) { + JsonToken commaToken = popToken(); + if (commaToken.getType() != JsonTokenType.COMMA) { + pushToken(commaToken); + } + } + } + + @Override + protected int doReadInt32() { + return (Integer) currentValue; + } + + @Override + protected long doReadInt64() { + return (Long) currentValue; + } + + @Override + protected String doReadJavaScript() { + return (String) currentValue; + } + + @Override + protected String doReadJavaScriptWithScope() { + return (String) currentValue; + } + + @Override + protected void doReadMaxKey() {} + + @Override + protected void doReadMinKey() {} + + @Override + protected void doReadNull() {} + + @Override + protected ObjectId doReadObjectId() { + return (ObjectId) currentValue; + } + + @Override + protected BsonRegularExpression doReadRegularExpression() { + return (BsonRegularExpression) currentValue; + } + + @Override + protected BsonDbPointer doReadDBPointer() { + return (BsonDbPointer) currentValue; + } + + @Override + protected void doReadStartArray() { + setContext(new Context(getContext(), BsonContextType.ARRAY)); + } + + @Override + protected void doReadStartDocument() { + setContext(new Context(getContext(), BsonContextType.DOCUMENT)); + } + + @Override + protected 
String doReadString() { + return (String) currentValue; + } + + @Override + protected String doReadSymbol() { + return (String) currentValue; + } + + @Override + protected BsonTimestamp doReadTimestamp() { + return (BsonTimestamp) currentValue; + } + + @Override + protected void doReadUndefined() {} + + @Override + protected void doSkipName() {} + + @Override + protected void doSkipValue() { + switch (getCurrentBsonType()) { + case ARRAY: + readStartArray(); + while (readBsonType() != BsonType.END_OF_DOCUMENT) { + skipValue(); + } + readEndArray(); + break; + case BINARY: + readBinaryData(); + break; + case BOOLEAN: + readBoolean(); + break; + case DATE_TIME: + readDateTime(); + break; + case DOCUMENT: + readStartDocument(); + while (readBsonType() != BsonType.END_OF_DOCUMENT) { + skipName(); + skipValue(); + } + readEndDocument(); + break; + case DOUBLE: + readDouble(); + break; + case INT32: + readInt32(); + break; + case INT64: + readInt64(); + break; + case DECIMAL128: + readDecimal128(); + break; + case JAVASCRIPT: + readJavaScript(); + break; + case JAVASCRIPT_WITH_SCOPE: + readJavaScriptWithScope(); + readStartDocument(); + while (readBsonType() != BsonType.END_OF_DOCUMENT) { + skipName(); + skipValue(); + } + readEndDocument(); + break; + case MAX_KEY: + readMaxKey(); + break; + case MIN_KEY: + readMinKey(); + break; + case NULL: + readNull(); + break; + case OBJECT_ID: + readObjectId(); + break; + case REGULAR_EXPRESSION: + readRegularExpression(); + break; + case STRING: + readString(); + break; + case SYMBOL: + readSymbol(); + break; + case TIMESTAMP: + readTimestamp(); + break; + case UNDEFINED: + readUndefined(); + break; + default: + } + } + + private JsonToken popToken() { + if (pushedToken != null) { + JsonToken token = pushedToken; + pushedToken = null; + return token; + } else { + return scanner.nextToken(); + } + } + + private void pushToken(final JsonToken token) { + if (pushedToken == null) { + pushedToken = token; + } else { + throw new 
BsonInvalidOperationException("There is already a pending token."); + } + } + + private void verifyToken(final JsonTokenType expectedType) { + JsonToken token = popToken(); + if (expectedType != token.getType()) { + throw new JsonParseException("JSON reader expected token type '%s' but found '%s'.", expectedType, + token.getValue()); + } + } + + private void verifyToken(final JsonTokenType expectedType, final Object expectedValue) { + JsonToken token = popToken(); + if (expectedType != token.getType()) { + throw new JsonParseException("JSON reader expected token type '%s' but found '%s'.", expectedType, + token.getValue()); + } + if (!expectedValue.equals(token.getValue())) { + throw new JsonParseException("JSON reader expected '%s' but found '%s'.", expectedValue, token.getValue()); + } + } + + private void verifyString(final String expected) { + if (expected == null) { + throw new IllegalArgumentException("Can't be null"); + } + + JsonToken token = popToken(); + JsonTokenType type = token.getType(); + + if ((type != JsonTokenType.STRING && type != JsonTokenType.UNQUOTED_STRING) || !expected.equals(token.getValue())) { + throw new JsonParseException("JSON reader expected '%s' but found '%s'.", expected, token.getValue()); + } + } + + private void visitNew() { + JsonToken typeToken = popToken(); + if (typeToken.getType() != JsonTokenType.UNQUOTED_STRING) { + throw new JsonParseException("JSON reader expected a type name but found '%s'.", typeToken.getValue()); + } + + String value = typeToken.getValue(String.class); + + if ("MinKey".equals(value)) { + visitEmptyConstructor(); + setCurrentBsonType(BsonType.MIN_KEY); + currentValue = new MinKey(); + } else if ("MaxKey".equals(value)) { + visitEmptyConstructor(); + setCurrentBsonType(BsonType.MAX_KEY); + currentValue = new MaxKey(); + } else if ("BinData".equals(value)) { + currentValue = visitBinDataConstructor(); + setCurrentBsonType(BsonType.BINARY); + } else if ("Date".equals(value)) { + currentValue = 
visitDateTimeConstructor(); + setCurrentBsonType(BsonType.DATE_TIME); + } else if ("HexData".equals(value)) { + currentValue = visitHexDataConstructor(); + setCurrentBsonType(BsonType.BINARY); + } else if ("ISODate".equals(value)) { + currentValue = visitISODateTimeConstructor(); + setCurrentBsonType(BsonType.DATE_TIME); + } else if ("NumberInt".equals(value)) { + currentValue = visitNumberIntConstructor(); + setCurrentBsonType(BsonType.INT32); + } else if ("NumberLong".equals(value)) { + currentValue = visitNumberLongConstructor(); + setCurrentBsonType(BsonType.INT64); + } else if ("NumberDecimal".equals(value)) { + currentValue = visitNumberDecimalConstructor(); + setCurrentBsonType(BsonType.DECIMAL128); + } else if ("ObjectId".equals(value)) { + currentValue = visitObjectIdConstructor(); + setCurrentBsonType(BsonType.OBJECT_ID); + } else if ("RegExp".equals(value)) { + currentValue = visitRegularExpressionConstructor(); + setCurrentBsonType(BsonType.REGULAR_EXPRESSION); + } else if ("DBPointer".equals(value)) { + currentValue = visitDBPointerConstructor(); + setCurrentBsonType(BsonType.DB_POINTER); + } else if ("UUID".equals(value)) { + currentValue = visitUUIDConstructor(); + setCurrentBsonType(BsonType.BINARY); + } else { + throw new JsonParseException("JSON reader expected a type name but found '%s'.", value); + } + } + + private void visitExtendedJSON() { + JsonToken nameToken = popToken(); + String value = nameToken.getValue(String.class); + JsonTokenType type = nameToken.getType(); + + if (type == JsonTokenType.STRING || type == JsonTokenType.UNQUOTED_STRING) { + + if ("$binary".equals(value) || "$type".equals(value)) { + currentValue = visitBinDataExtendedJson(value); + if (currentValue != null) { + setCurrentBsonType(BsonType.BINARY); + return; + } + } + if ("$uuid".equals(value)) { + currentValue = visitUuidExtendedJson(); + setCurrentBsonType(BsonType.BINARY); + return; + } + else if ("$regex".equals(value) || "$options".equals(value)) { + currentValue 
= visitRegularExpressionExtendedJson(value); + if (currentValue != null) { + setCurrentBsonType(BsonType.REGULAR_EXPRESSION); + return; + } + } else if ("$code".equals(value)) { + visitJavaScriptExtendedJson(); + return; + } else if ("$date".equals(value)) { + currentValue = visitDateTimeExtendedJson(); + setCurrentBsonType(BsonType.DATE_TIME); + return; + } else if ("$maxKey".equals(value)) { + currentValue = visitMaxKeyExtendedJson(); + setCurrentBsonType(BsonType.MAX_KEY); + return; + } else if ("$minKey".equals(value)) { + currentValue = visitMinKeyExtendedJson(); + setCurrentBsonType(BsonType.MIN_KEY); + return; + } else if ("$oid".equals(value)) { + currentValue = visitObjectIdExtendedJson(); + setCurrentBsonType(BsonType.OBJECT_ID); + return; + } else if ("$regularExpression".equals(value)) { + currentValue = visitNewRegularExpressionExtendedJson(); + setCurrentBsonType(BsonType.REGULAR_EXPRESSION); + return; + } else if ("$symbol".equals(value)) { + currentValue = visitSymbolExtendedJson(); + setCurrentBsonType(BsonType.SYMBOL); + return; + } else if ("$timestamp".equals(value)) { + currentValue = visitTimestampExtendedJson(); + setCurrentBsonType(BsonType.TIMESTAMP); + return; + } else if ("$undefined".equals(value)) { + currentValue = visitUndefinedExtendedJson(); + setCurrentBsonType(BsonType.UNDEFINED); + return; + } else if ("$numberLong".equals(value)) { + currentValue = visitNumberLongExtendedJson(); + setCurrentBsonType(BsonType.INT64); + return; + } else if ("$numberInt".equals(value)) { + currentValue = visitNumberIntExtendedJson(); + setCurrentBsonType(BsonType.INT32); + return; + } else if ("$numberDouble".equals(value)) { + currentValue = visitNumberDoubleExtendedJson(); + setCurrentBsonType(BsonType.DOUBLE); + return; + } else if ("$numberDecimal".equals(value)) { + currentValue = visitNumberDecimalExtendedJson(); + setCurrentBsonType(BsonType.DECIMAL128); + return; + } else if ("$dbPointer".equals(value)) { + currentValue = 
visitDbPointerExtendedJson(); + setCurrentBsonType(BsonType.DB_POINTER); + return; + } + } + + pushToken(nameToken); + setCurrentBsonType(BsonType.DOCUMENT); + } + + private void visitEmptyConstructor() { + JsonToken nextToken = popToken(); + if (nextToken.getType() == JsonTokenType.LEFT_PAREN) { + verifyToken(JsonTokenType.RIGHT_PAREN); + } else { + pushToken(nextToken); + } + } + + private BsonBinary visitBinDataConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken subTypeToken = popToken(); + if (subTypeToken.getType() != JsonTokenType.INT32) { + throw new JsonParseException("JSON reader expected a binary subtype but found '%s'.", subTypeToken.getValue()); + } + verifyToken(JsonTokenType.COMMA); + JsonToken bytesToken = popToken(); + if (bytesToken.getType() != JsonTokenType.UNQUOTED_STRING && bytesToken.getType() != JsonTokenType.STRING) { + throw new JsonParseException("JSON reader expected a string but found '%s'.", bytesToken.getValue()); + } + verifyToken(JsonTokenType.RIGHT_PAREN); + + byte[] bytes = Base64.getDecoder().decode(bytesToken.getValue(String.class)); + return new BsonBinary(subTypeToken.getValue(Integer.class).byteValue(), bytes); + } + + private BsonBinary visitUUIDConstructor() { + this.verifyToken(JsonTokenType.LEFT_PAREN); + String hexString = this.readStringFromExtendedJson().replace("-", ""); + + this.verifyToken(JsonTokenType.RIGHT_PAREN); + return new BsonBinary(BsonBinarySubType.UUID_STANDARD, decodeHex(hexString)); + } + + private BsonRegularExpression visitRegularExpressionConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + String pattern = readStringFromExtendedJson(); + String options = ""; + JsonToken commaToken = popToken(); + if (commaToken.getType() == JsonTokenType.COMMA) { + options = readStringFromExtendedJson(); + } else { + pushToken(commaToken); + } + verifyToken(JsonTokenType.RIGHT_PAREN); + return new BsonRegularExpression(pattern, options); + } + + private ObjectId visitObjectIdConstructor() { + 
verifyToken(JsonTokenType.LEFT_PAREN); + ObjectId objectId = new ObjectId(readStringFromExtendedJson()); + verifyToken(JsonTokenType.RIGHT_PAREN); + return objectId; + } + + private BsonTimestamp visitTimestampConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken timeToken = popToken(); + int time; + if (timeToken.getType() != JsonTokenType.INT32) { + throw new JsonParseException("JSON reader expected an integer but found '%s'.", timeToken.getValue()); + } else { + time = timeToken.getValue(Integer.class); + } + verifyToken(JsonTokenType.COMMA); + JsonToken incrementToken = popToken(); + int increment; + if (incrementToken.getType() != JsonTokenType.INT32) { + throw new JsonParseException("JSON reader expected an integer but found '%s'.", timeToken.getValue()); + } else { + increment = incrementToken.getValue(Integer.class); + } + + verifyToken(JsonTokenType.RIGHT_PAREN); + return new BsonTimestamp(time, increment); + } + + private BsonDbPointer visitDBPointerConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + String namespace = readStringFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + ObjectId id = new ObjectId(readStringFromExtendedJson()); + verifyToken(JsonTokenType.RIGHT_PAREN); + return new BsonDbPointer(namespace, id); + } + + private int visitNumberIntConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken valueToken = popToken(); + int value; + if (valueToken.getType() == JsonTokenType.INT32) { + value = valueToken.getValue(Integer.class); + } else if (valueToken.getType() == JsonTokenType.STRING) { + value = Integer.parseInt(valueToken.getValue(String.class)); + } else { + throw new JsonParseException("JSON reader expected an integer or a string but found '%s'.", + valueToken.getValue()); + } + verifyToken(JsonTokenType.RIGHT_PAREN); + return value; + } + + private long visitNumberLongConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken valueToken = popToken(); + long value; + if 
(valueToken.getType() == JsonTokenType.INT32 || valueToken.getType() == JsonTokenType.INT64) { + value = valueToken.getValue(Long.class); + } else if (valueToken.getType() == JsonTokenType.STRING) { + value = Long.parseLong(valueToken.getValue(String.class)); + } else { + throw new JsonParseException("JSON reader expected an integer or a string but found '%s'.", + valueToken.getValue()); + } + verifyToken(JsonTokenType.RIGHT_PAREN); + return value; + } + + private Decimal128 visitNumberDecimalConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken valueToken = popToken(); + Decimal128 value; + if (valueToken.getType() == JsonTokenType.INT32 || valueToken.getType() == JsonTokenType.INT64 + || valueToken.getType() == JsonTokenType.DOUBLE) { + value = valueToken.getValue(Decimal128.class); + } else if (valueToken.getType() == JsonTokenType.STRING) { + value = Decimal128.parse(valueToken.getValue(String.class)); + } else { + throw new JsonParseException("JSON reader expected a number or a string but found '%s'.", valueToken.getValue()); + } + verifyToken(JsonTokenType.RIGHT_PAREN); + return value; + } + + private long visitISODateTimeConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + + JsonToken token = popToken(); + if (token.getType() == JsonTokenType.RIGHT_PAREN) { + return new Date().getTime(); + } else if (token.getType() != JsonTokenType.STRING) { + throw new JsonParseException("JSON reader expected a string but found '%s'.", token.getValue()); + } + + verifyToken(JsonTokenType.RIGHT_PAREN); + + String dateTimeString = token.getValue(String.class); + + try { + return DateTimeFormatter.parse(dateTimeString); + } catch (DateTimeParseException e) { + throw new JsonParseException("Failed to parse string as a date: " + dateTimeString, e); + } + } + + private BsonBinary visitHexDataConstructor() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken subTypeToken = popToken(); + if (subTypeToken.getType() != JsonTokenType.INT32) { + throw new 
JsonParseException("JSON reader expected a binary subtype but found '%s'.", subTypeToken.getValue()); + } + verifyToken(JsonTokenType.COMMA); + String hex = readStringFromExtendedJson(); + verifyToken(JsonTokenType.RIGHT_PAREN); + + if ((hex.length() & 1) != 0) { + hex = "0" + hex; + } + + for (final BsonBinarySubType subType : BsonBinarySubType.values()) { + if (subType.getValue() == subTypeToken.getValue(Integer.class)) { + return new BsonBinary(subType, decodeHex(hex)); + } + } + return new BsonBinary(decodeHex(hex)); + } + + private long visitDateTimeConstructor() { + DateFormat format = new SimpleDateFormat("EEE MMM dd yyyy HH:mm:ss z", Locale.ENGLISH); + + verifyToken(JsonTokenType.LEFT_PAREN); + + JsonToken token = popToken(); + if (token.getType() == JsonTokenType.RIGHT_PAREN) { + return new Date().getTime(); + } else if (token.getType() == JsonTokenType.STRING) { + verifyToken(JsonTokenType.RIGHT_PAREN); + String s = token.getValue(String.class); + ParsePosition pos = new ParsePosition(0); + Date dateTime = format.parse(s, pos); + if (dateTime != null && pos.getIndex() == s.length()) { + return dateTime.getTime(); + } else { + throw new JsonParseException( + "JSON reader expected a date in 'EEE MMM dd yyyy HH:mm:ss z' format but found '%s'.", s); + } + + } else if (token.getType() == JsonTokenType.INT32 || token.getType() == JsonTokenType.INT64) { + long[] values = new long[7]; + int pos = 0; + while (true) { + if (pos < values.length) { + values[pos++] = token.getValue(Long.class); + } + token = popToken(); + if (token.getType() == JsonTokenType.RIGHT_PAREN) { + break; + } + if (token.getType() != JsonTokenType.COMMA) { + throw new JsonParseException("JSON reader expected a ',' or a ')' but found '%s'.", token.getValue()); + } + token = popToken(); + if (token.getType() != JsonTokenType.INT32 && token.getType() != JsonTokenType.INT64) { + throw new JsonParseException("JSON reader expected an integer but found '%s'.", token.getValue()); + } + } + if (pos 
== 1) { + return values[0]; + } else if (pos < 3 || pos > 7) { + throw new JsonParseException("JSON reader expected 1 or 3-7 integers but found %d.", pos); + } + + Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC")); + calendar.set(Calendar.YEAR, (int) values[0]); + calendar.set(Calendar.MONTH, (int) values[1]); + calendar.set(Calendar.DAY_OF_MONTH, (int) values[2]); + calendar.set(Calendar.HOUR_OF_DAY, (int) values[3]); + calendar.set(Calendar.MINUTE, (int) values[4]); + calendar.set(Calendar.SECOND, (int) values[5]); + calendar.set(Calendar.MILLISECOND, (int) values[6]); + return calendar.getTimeInMillis(); + } else { + throw new JsonParseException("JSON reader expected an integer or a string but found '%s'.", token.getValue()); + } + } + + private String visitDateTimeConstructorWithOutNew() { + verifyToken(JsonTokenType.LEFT_PAREN); + JsonToken token = popToken(); + if (token.getType() != JsonTokenType.RIGHT_PAREN) { + while (token.getType() != JsonTokenType.END_OF_FILE) { + token = popToken(); + if (token.getType() == JsonTokenType.RIGHT_PAREN) { + break; + } + } + if (token.getType() != JsonTokenType.RIGHT_PAREN) { + throw new JsonParseException("JSON reader expected a ')' but found '%s'.", token.getValue()); + } + } + + DateFormat df = new SimpleDateFormat("EEE MMM dd yyyy HH:mm:ss z", Locale.ENGLISH); + return df.format(new Date()); + } + + private BsonBinary visitBinDataExtendedJson(final String firstKey) { + + Mark mark = new Mark(); + + verifyToken(JsonTokenType.COLON); + + if (firstKey.equals("$binary")) { + JsonToken nextToken = popToken(); + if (nextToken.getType() == JsonTokenType.BEGIN_OBJECT) { + JsonToken nameToken = popToken(); + String firstNestedKey = nameToken.getValue(String.class); + byte[] data; + byte type; + if (firstNestedKey.equals("base64")) { + verifyToken(JsonTokenType.COLON); + data = Base64.getDecoder().decode(readStringFromExtendedJson()); + verifyToken(JsonTokenType.COMMA); + verifyString("subType"); + 
verifyToken(JsonTokenType.COLON); + type = readBinarySubtypeFromExtendedJson(); + } else if (firstNestedKey.equals("subType")) { + verifyToken(JsonTokenType.COLON); + type = readBinarySubtypeFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("base64"); + verifyToken(JsonTokenType.COLON); + data = Base64.getDecoder().decode(readStringFromExtendedJson()); + } else { + throw new JsonParseException("Unexpected key for $binary: " + firstNestedKey); + } + verifyToken(JsonTokenType.END_OBJECT); + verifyToken(JsonTokenType.END_OBJECT); + return new BsonBinary(type, data); + } else { + mark.reset(); + return visitLegacyBinaryExtendedJson(firstKey); + } + } else { + mark.reset(); + return visitLegacyBinaryExtendedJson(firstKey); + } + } + + private BsonBinary visitLegacyBinaryExtendedJson(final String firstKey) { + + Mark mark = new Mark(); + + try { + verifyToken(JsonTokenType.COLON); + + byte[] data; + byte type; + + if (firstKey.equals("$binary")) { + data = Base64.getDecoder().decode(readStringFromExtendedJson()); + verifyToken(JsonTokenType.COMMA); + verifyString("$type"); + verifyToken(JsonTokenType.COLON); + type = readBinarySubtypeFromExtendedJson(); + } else { + type = readBinarySubtypeFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("$binary"); + verifyToken(JsonTokenType.COLON); + data = Base64.getDecoder().decode(readStringFromExtendedJson()); + } + verifyToken(JsonTokenType.END_OBJECT); + + return new BsonBinary(type, data); + } catch (JsonParseException e) { + mark.reset(); + return null; + } catch (NumberFormatException e) { + mark.reset(); + return null; + } + } + + private byte readBinarySubtypeFromExtendedJson() { + JsonToken subTypeToken = popToken(); + if (subTypeToken.getType() != JsonTokenType.STRING && subTypeToken.getType() != JsonTokenType.INT32) { + throw new JsonParseException("JSON reader expected a string or number but found '%s'.", subTypeToken.getValue()); + } + + if (subTypeToken.getType() == 
JsonTokenType.STRING) { + return (byte) Integer.parseInt(subTypeToken.getValue(String.class), 16); + } else { + return subTypeToken.getValue(Integer.class).byteValue(); + } + } + + private long visitDateTimeExtendedJson() { + long value; + verifyToken(JsonTokenType.COLON); + JsonToken valueToken = popToken(); + if (valueToken.getType() == JsonTokenType.BEGIN_OBJECT) { + JsonToken nameToken = popToken(); + String name = nameToken.getValue(String.class); + if (!name.equals("$numberLong")) { + throw new JsonParseException( + String.format("JSON reader expected $numberLong within $date, but found %s", name)); + } + value = visitNumberLongExtendedJson(); + verifyToken(JsonTokenType.END_OBJECT); + } else { + if (valueToken.getType() == JsonTokenType.INT32 || valueToken.getType() == JsonTokenType.INT64) { + value = valueToken.getValue(Long.class); + } else if (valueToken.getType() == JsonTokenType.STRING + || valueToken.getType() == JsonTokenType.UNQUOTED_STRING) { + + // Spring Data Customization START + + Object dt = bindableValueFor(valueToken).getValue(); + if (dt instanceof Date date) { + value = date.getTime(); + } else if (dt instanceof Number numberValue) { + value = NumberUtils.convertNumberToTargetClass(numberValue, Long.class); + } else { + try { + value = DateTimeFormatter.parse(dt.toString()); + } catch (IllegalArgumentException e) { + throw new JsonParseException(String.format("Failed to parse string '%s' as a date", dt), e); + } + } + + // Spring Data Customization END + } else { + throw new JsonParseException("JSON reader expected an integer or string but found '%s'.", + valueToken.getValue()); + } + verifyToken(JsonTokenType.END_OBJECT); + } + return value; + } + + private MaxKey visitMaxKeyExtendedJson() { + verifyToken(JsonTokenType.COLON); + verifyToken(JsonTokenType.INT32, 1); + verifyToken(JsonTokenType.END_OBJECT); + return new MaxKey(); + } + + private MinKey visitMinKeyExtendedJson() { + verifyToken(JsonTokenType.COLON); + 
verifyToken(JsonTokenType.INT32, 1); + verifyToken(JsonTokenType.END_OBJECT); + return new MinKey(); + } + + private ObjectId visitObjectIdExtendedJson() { + verifyToken(JsonTokenType.COLON); + ObjectId objectId = new ObjectId(readStringFromExtendedJson()); + verifyToken(JsonTokenType.END_OBJECT); + return objectId; + } + + private BsonRegularExpression visitNewRegularExpressionExtendedJson() { + verifyToken(JsonTokenType.COLON); + verifyToken(JsonTokenType.BEGIN_OBJECT); + + String pattern; + String options = ""; + + String firstKey = readStringFromExtendedJson(); + if (firstKey.equals("pattern")) { + verifyToken(JsonTokenType.COLON); + pattern = readStringFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("options"); + verifyToken(JsonTokenType.COLON); + options = readStringFromExtendedJson(); + } else if (firstKey.equals("options")) { + verifyToken(JsonTokenType.COLON); + options = readStringFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("pattern"); + verifyToken(JsonTokenType.COLON); + pattern = readStringFromExtendedJson(); + } else { + throw new JsonParseException("Expected 't' and 'i' fields in $timestamp document but found " + firstKey); + } + + verifyToken(JsonTokenType.END_OBJECT); + verifyToken(JsonTokenType.END_OBJECT); + return new BsonRegularExpression(pattern, options); + } + + private BsonRegularExpression visitRegularExpressionExtendedJson(final String firstKey) { + Mark extendedJsonMark = new Mark(); + + try { + verifyToken(JsonTokenType.COLON); + + String pattern; + String options = ""; + if (firstKey.equals("$regex")) { + pattern = readStringFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("$options"); + verifyToken(JsonTokenType.COLON); + options = readStringFromExtendedJson(); + } else { + options = readStringFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("$regex"); + verifyToken(JsonTokenType.COLON); + pattern = readStringFromExtendedJson(); + } + 
verifyToken(JsonTokenType.END_OBJECT); + return new BsonRegularExpression(pattern, options); + } catch (JsonParseException e) { + extendedJsonMark.reset(); + return null; + } + } + + private String readStringFromExtendedJson() { + JsonToken patternToken = popToken(); + + // Spring Data Customization START + + if (patternToken.getType() == JsonTokenType.STRING || patternToken.getType() == JsonTokenType.UNQUOTED_STRING) { + Object value = bindableValueFor(patternToken).getValue(); + return value != null ? value.toString() : null; + } + + throw new JsonParseException("JSON reader expected a string but found '%s'.", patternToken.getValue()); + + // Spring Data Customization END + } + + private String visitSymbolExtendedJson() { + verifyToken(JsonTokenType.COLON); + String symbol = readStringFromExtendedJson(); + verifyToken(JsonTokenType.END_OBJECT); + return symbol; + } + + private BsonTimestamp visitTimestampExtendedJson() { + verifyToken(JsonTokenType.COLON); + verifyToken(JsonTokenType.BEGIN_OBJECT); + + int time; + int increment; + + String firstKey = readStringFromExtendedJson(); + if (firstKey.equals("t")) { + verifyToken(JsonTokenType.COLON); + time = readIntFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("i"); + verifyToken(JsonTokenType.COLON); + increment = readIntFromExtendedJson(); + } else if (firstKey.equals("i")) { + verifyToken(JsonTokenType.COLON); + increment = readIntFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("t"); + verifyToken(JsonTokenType.COLON); + time = readIntFromExtendedJson(); + } else { + throw new JsonParseException("Expected 't' and 'i' fields in $timestamp document but found " + firstKey); + } + + verifyToken(JsonTokenType.END_OBJECT); + verifyToken(JsonTokenType.END_OBJECT); + return new BsonTimestamp(time, increment); + } + + private int readIntFromExtendedJson() { + JsonToken nextToken = popToken(); + int value; + if (nextToken.getType() == JsonTokenType.INT32) { + value = 
nextToken.getValue(Integer.class); + } else if (nextToken.getType() == JsonTokenType.INT64) { + value = nextToken.getValue(Long.class).intValue(); + } else { + throw new JsonParseException("JSON reader expected an integer but found '%s'.", nextToken.getValue()); + } + return value; + } + + private BsonBinary visitUuidExtendedJson() { + verifyToken(JsonTokenType.COLON); + String hexString = this.readStringFromExtendedJson().replace("-", ""); + verifyToken(JsonTokenType.END_OBJECT); + try { + return new BsonBinary(BsonBinarySubType.UUID_STANDARD, decodeHex(hexString)); + } catch (IllegalArgumentException e) { + throw new JsonParseException(e); + } + } + + private void visitJavaScriptExtendedJson() { + verifyToken(JsonTokenType.COLON); + String code = readStringFromExtendedJson(); + JsonToken nextToken = popToken(); + switch (nextToken.getType()) { + case COMMA: + verifyString("$scope"); + verifyToken(JsonTokenType.COLON); + setState(State.VALUE); + currentValue = code; + setCurrentBsonType(BsonType.JAVASCRIPT_WITH_SCOPE); + setContext(new Context(getContext(), BsonContextType.SCOPE_DOCUMENT)); + break; + case END_OBJECT: + currentValue = code; + setCurrentBsonType(BsonType.JAVASCRIPT); + break; + default: + throw new JsonParseException("JSON reader expected ',' or '}' but found '%s'.", nextToken); + } + } + + private BsonUndefined visitUndefinedExtendedJson() { + verifyToken(JsonTokenType.COLON); + JsonToken valueToken = popToken(); + if (!valueToken.getValue(String.class).equals("true")) { + throw new JsonParseException("JSON reader requires $undefined to have the value of true but found '%s'.", + valueToken.getValue()); + } + verifyToken(JsonTokenType.END_OBJECT); + return new BsonUndefined(); + } + + private Long visitNumberLongExtendedJson() { + verifyToken(JsonTokenType.COLON); + Long value; + String longAsString = readStringFromExtendedJson(); + try { + value = Long.valueOf(longAsString); + } catch (NumberFormatException e) { + throw new JsonParseException( + 
format("Exception converting value '%s' to type %s", longAsString, Long.class.getName()), e); + } + verifyToken(JsonTokenType.END_OBJECT); + return value; + } + + private Integer visitNumberIntExtendedJson() { + verifyToken(JsonTokenType.COLON); + Integer value; + String intAsString = readStringFromExtendedJson(); + try { + value = Integer.valueOf(intAsString); + } catch (NumberFormatException e) { + throw new JsonParseException( + format("Exception converting value '%s' to type %s", intAsString, Integer.class.getName()), e); + } + verifyToken(JsonTokenType.END_OBJECT); + return value; + } + + private Double visitNumberDoubleExtendedJson() { + verifyToken(JsonTokenType.COLON); + Double value; + String doubleAsString = readStringFromExtendedJson(); + try { + value = Double.valueOf(doubleAsString); + } catch (NumberFormatException e) { + throw new JsonParseException( + format("Exception converting value '%s' to type %s", doubleAsString, Double.class.getName()), e); + } + verifyToken(JsonTokenType.END_OBJECT); + return value; + } + + private Decimal128 visitNumberDecimalExtendedJson() { + verifyToken(JsonTokenType.COLON); + Decimal128 value; + String decimal128AsString = readStringFromExtendedJson(); + try { + value = Decimal128.parse(decimal128AsString); + } catch (NumberFormatException e) { + throw new JsonParseException( + format("Exception converting value '%s' to type %s", decimal128AsString, Decimal128.class.getName()), e); + } + verifyToken(JsonTokenType.END_OBJECT); + return value; + } + + private BsonDbPointer visitDbPointerExtendedJson() { + verifyToken(JsonTokenType.COLON); + verifyToken(JsonTokenType.BEGIN_OBJECT); + + String ref; + ObjectId oid; + + String firstKey = readStringFromExtendedJson(); + if (firstKey.equals("$ref")) { + verifyToken(JsonTokenType.COLON); + ref = readStringFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("$id"); + oid = readDbPointerIdFromExtendedJson(); + verifyToken(JsonTokenType.END_OBJECT); + } else if 
(firstKey.equals("$id")) { + oid = readDbPointerIdFromExtendedJson(); + verifyToken(JsonTokenType.COMMA); + verifyString("$ref"); + verifyToken(JsonTokenType.COLON); + ref = readStringFromExtendedJson(); + + } else { + throw new JsonParseException("Expected $ref and $id fields in $dbPointer document but found " + firstKey); + } + verifyToken(JsonTokenType.END_OBJECT); + return new BsonDbPointer(ref, oid); + } + + private ObjectId readDbPointerIdFromExtendedJson() { + ObjectId oid; + verifyToken(JsonTokenType.COLON); + verifyToken(JsonTokenType.BEGIN_OBJECT); + verifyToken(JsonTokenType.STRING, "$oid"); + oid = visitObjectIdExtendedJson(); + return oid; + } + + @Override + public BsonReaderMark getMark() { + return new Mark(); + } + + @Override + protected Context getContext() { + return (Context) super.getContext(); + } + + protected class Mark extends AbstractBsonReader.Mark { + private final JsonToken pushedToken; + private final Object currentValue; + private final int position; + + protected Mark() { + super(); + pushedToken = ParameterBindingJsonReader.this.pushedToken; + currentValue = ParameterBindingJsonReader.this.currentValue; + position = ParameterBindingJsonReader.this.scanner.getBufferPosition(); + } + + public void reset() { + super.reset(); + ParameterBindingJsonReader.this.pushedToken = pushedToken; + ParameterBindingJsonReader.this.currentValue = currentValue; + ParameterBindingJsonReader.this.scanner.setBufferPosition(position); + ParameterBindingJsonReader.this.setContext(new Context(getParentContext(), getContextType())); + } + } + + protected class Context extends AbstractBsonReader.Context { + protected Context(final AbstractBsonReader.Context parentContext, final BsonContextType contextType) { + super(parentContext, contextType); + } + + protected Context getParentContext() { + return (Context) super.getParentContext(); + } + + protected BsonContextType getContextType() { + return super.getContextType(); + } + } + + private static byte[] 
decodeHex(final String hex) { + if (hex.length() % 2 != 0) { + throw new IllegalArgumentException("A hex string must contain an even number of characters: " + hex); + } + + byte[] out = new byte[hex.length() / 2]; + + for (int i = 0; i < hex.length(); i += 2) { + int high = Character.digit(hex.charAt(i), 16); + int low = Character.digit(hex.charAt(i + 1), 16); + if (high == -1 || low == -1) { + throw new IllegalArgumentException("A hex string can only contain the characters 0-9, A-F, a-f: " + hex); + } + + out[i / 2] = (byte) (high * 16 + low); + } + + return out; + } + + // Spring Data Customization START + + static class BindableValue { + + private BsonType type; + private Object value; + private int index; + + BindableValue() {} + + BsonType getType() { + return type; + } + + void setType(BsonType type) { + this.type = type; + } + + Object getValue() { + return value; + } + + void setValue(Object value) { + this.value = value; + } + + int getIndex() { + return index; + } + + void setIndex(int index) { + this.index = index; + } + } + + // Spring Data Customization END +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ValueProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ValueProvider.java new file mode 100644 index 0000000000..8f1d23885d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/ValueProvider.java @@ -0,0 +1,36 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import org.springframework.lang.Nullable; + +/** + * A value provider to retrieve bindable values by their parameter index. + * + * @author Christoph Strobl + * @since 2.2 + */ +@FunctionalInterface +public interface ValueProvider { + + /** + * @param index parameter index to use. + * @return can be {@literal null}. + * @throws RuntimeException if the requested element does not exist. + */ + @Nullable + Object getBindableValue(int index); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/package-info.java new file mode 100644 index 0000000000..8a86b3522b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/json/package-info.java @@ -0,0 +1,5 @@ +/** + * MongoDB driver-specific utility classes for Json conversion. + */ +@org.springframework.lang.NonNullApi +package org.springframework.data.mongodb.util.json; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java new file mode 100644 index 0000000000..9fa66b3b2b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java @@ -0,0 +1,66 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.spel; + +import java.util.function.Supplier; + +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.Expression; +import org.springframework.expression.ParserContext; +import org.springframework.expression.common.LiteralExpression; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Internal utility class for dealing with {@link Expression} and potential ones. + * + * @author Christoph Strobl + * @since 3.3 + */ +public final class ExpressionUtils { + + private static final SpelExpressionParser PARSER = new SpelExpressionParser(); + + /** + * Returns a SpEL {@link Expression} if the given {@link String} is actually an expression that does not evaluate to a + * {@link LiteralExpression} (indicating that no subsequent evaluation is necessary). + * + * @param potentialExpression can be {@literal null} + * @return can be {@literal null}. + */ + @Nullable + public static Expression detectExpression(@Nullable String potentialExpression) { + + if (!StringUtils.hasText(potentialExpression)) { + return null; + } + + Expression expression = PARSER.parseExpression(potentialExpression, ParserContext.TEMPLATE_EXPRESSION); + return expression instanceof LiteralExpression ? 
null : expression; + } + + @Nullable + public static Object evaluate(String value, Supplier evaluationContext) { + + Expression expression = detectExpression(value); + if (expression == null) { + return value; + } + + return expression.getValue(evaluationContext.get(), Object.class); + } +} diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/BulkOperationsExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/BulkOperationsExtensions.kt new file mode 100644 index 0000000000..4c8545ee8c --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/BulkOperationsExtensions.kt @@ -0,0 +1,52 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import org.springframework.data.mongodb.core.query.Query +import org.springframework.data.mongodb.core.query.Update +import org.springframework.data.mongodb.core.query.UpdateDefinition +import org.springframework.data.util.Pair.of + +/** + * Extension for [BulkOperations.updateMulti] that converts a list of [kotlin.Pair] to list of [org.springframework.data.util.Pair]. 
+ * + * @author 2tsumo-hitori + * @since 4.5 + */ +fun BulkOperations.updateMulti(kotlinPairs: List>): BulkOperations = + updateMulti(kotlinPairs.toSpringPairs()) + +/** + * Extension for [BulkOperations.upsert] that converts a list of [kotlin.Pair] to list of [org.springframework.data.util.Pair]. + * + * @author 2tsumo-hitori + * @since 4.5 + */ +fun BulkOperations.upsert(kotlinPairs: List>): BulkOperations = + upsert(kotlinPairs.toSpringPairs()) + +/** + * Extension for [BulkOperations.updateOne] that converts a [kotlin.Pair] to [org.springframework.data.util.Pair]. + * + * @author 2tsumo-hitori + * @since 4.5 + */ +fun BulkOperations.updateOne(kotlinPairs: List>): BulkOperations = + updateOne(kotlinPairs.toSpringPairs()) + +private fun List>.toSpringPairs(): List> { + return map { (first, second) -> of(first, second) } +} diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensions.kt index 44e906ef87..c995624f38 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensions.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,6 @@ */ package org.springframework.data.mongodb.core -import kotlin.reflect.KClass - -/** - * Extension for [ExecutableAggregationOperation.aggregateAndReturn] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @author Mark Paluch - * @since 2.0 - */ -fun ExecutableAggregationOperation.aggregateAndReturn(entityClass: KClass): ExecutableAggregationOperation.ExecutableAggregation = - aggregateAndReturn(entityClass.java) - /** * Extension for [ExecutableAggregationOperation.aggregateAndReturn] leveraging reified type parameters. * @@ -35,4 +23,4 @@ fun ExecutableAggregationOperation.aggregateAndReturn(entityClass: KCl * @since 2.0 */ inline fun ExecutableAggregationOperation.aggregateAndReturn(): ExecutableAggregationOperation.ExecutableAggregation = - aggregateAndReturn(T::class.java) \ No newline at end of file + aggregateAndReturn(T::class.java) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensions.kt index 83585111fa..cc0d3c7486 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensions.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,9 @@ */ package org.springframework.data.mongodb.core -import kotlin.reflect.KClass - -/** - * Extension for [ExecutableFindOperation.query] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @author Mark Paluch - * @since 2.0 - */ -fun ExecutableFindOperation.query(entityClass: KClass): ExecutableFindOperation.ExecutableFind = - query(entityClass.java) +import org.springframework.data.mapping.toDotPath +import kotlin.reflect.KProperty +import kotlin.reflect.KProperty1 /** * Extension for [ExecutableFindOperation.query] leveraging reified type parameters. @@ -37,32 +29,40 @@ fun ExecutableFindOperation.query(entityClass: KClass): ExecutableF inline fun ExecutableFindOperation.query(): ExecutableFindOperation.ExecutableFind = query(T::class.java) - /** - * Extension for [ExecutableFindOperation.FindWithProjection. as] providing a [KClass] based variant. + * Extension for [ExecutableFindOperation.query] for a type-safe projection of distinct values. * - * @author Sebastien Deleuze * @author Mark Paluch - * @since 2.0 + * @since 3.0 */ -fun ExecutableFindOperation.FindWithProjection.asType(resultType: KClass): ExecutableFindOperation.FindWithQuery = - `as`(resultType.java) +inline fun ExecutableFindOperation.distinct(field : KProperty1): ExecutableFindOperation.TerminatingDistinct = + query(T::class.java).distinct(field.name) /** - * Extension for [ExecutableFindOperation.FindWithProjection. as] leveraging reified type parameters. + * Extension for [ExecutableFindOperation.FindWithProjection.as] leveraging reified type parameters. 
* * @author Sebastien Deleuze * @author Mark Paluch * @since 2.0 */ -inline fun ExecutableFindOperation.FindWithProjection.asType(): ExecutableFindOperation.FindWithQuery = +inline fun ExecutableFindOperation.FindWithProjection<*>.asType(): ExecutableFindOperation.FindWithQuery = `as`(T::class.java) /** - * Extension for [ExecutableFindOperation.DistinctWithProjection. as] providing a [KClass] based variant. + * Extension for [ExecutableFindOperation.DistinctWithProjection.as] leveraging reified type parameters. * * @author Christoph Strobl + * @author Mark Paluch * @since 2.1 */ -fun ExecutableFindOperation.DistinctWithProjection.asType(resultType: KClass): ExecutableFindOperation.TerminatingDistinct = - `as`(resultType.java); +inline fun ExecutableFindOperation.DistinctWithProjection.asType(): ExecutableFindOperation.TerminatingDistinct = + `as`(T::class.java) + +/** + * Extension for [ExecutableFindOperation.FindDistinct.distinct] leveraging KProperty. + * + * @author Mark Paluch + * @since 3.0 + */ +fun ExecutableFindOperation.FindDistinct.distinct(key: KProperty<*>): ExecutableFindOperation.TerminatingDistinct = + distinct(key.toDotPath()) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensions.kt index 5ea288d64f..153cc03381 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensions.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,6 @@ */ package org.springframework.data.mongodb.core -import kotlin.reflect.KClass - -/** - * Extension for [ExecutableInsertOperation.insert] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @author Mark Paluch - * @since 2.0 - */ -fun ExecutableInsertOperation.insert(entityClass: KClass): ExecutableInsertOperation.ExecutableInsert = - insert(entityClass.java) - /** * Extension for [ExecutableInsertOperation.insert] leveraging reified type parameters. * @@ -35,4 +23,4 @@ fun ExecutableInsertOperation.insert(entityClass: KClass): Executab * @since 2.0 */ inline fun ExecutableInsertOperation.insert(): ExecutableInsertOperation.ExecutableInsert = - insert(T::class.java) \ No newline at end of file + insert(T::class.java) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableMapReduceOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableMapReduceOperationExtensions.kt new file mode 100644 index 0000000000..fb5551c363 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableMapReduceOperationExtensions.kt @@ -0,0 +1,34 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +/** + * Extension for [ExecutableMapReduceOperation.mapReduce] leveraging reified type parameters. + * + * @author Christoph Strobl + * @since 2.1 + */ +inline fun ExecutableMapReduceOperation.mapReduce(): ExecutableMapReduceOperation.MapReduceWithMapFunction = + mapReduce(T::class.java) + +/** + * Extension for [ExecutableMapReduceOperation.MapReduceWithProjection.as] leveraging reified type parameters. + * + * @author Christoph Strobl + * @since 2.1 + */ +inline fun ExecutableMapReduceOperation.MapReduceWithProjection<*>.asType(): ExecutableMapReduceOperation.MapReduceWithQuery = + `as`(T::class.java) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensions.kt index 08235dea90..d91edb5150 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensions.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,6 @@ */ package org.springframework.data.mongodb.core -import kotlin.reflect.KClass - -/** - * Extension for [ExecutableRemoveOperation.remove] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @author Mark Paluch - * @since 2.0 - */ -fun ExecutableRemoveOperation.remove(entityClass: KClass): ExecutableRemoveOperation.ExecutableRemove = - remove(entityClass.java) - /** * Extension for [ExecutableRemoveOperation.remove] leveraging reified type parameters. * @@ -35,4 +23,4 @@ fun ExecutableRemoveOperation.remove(entityClass: KClass): Executab * @since 2.0 */ inline fun ExecutableRemoveOperation.remove(): ExecutableRemoveOperation.ExecutableRemove = - remove(T::class.java) \ No newline at end of file + remove(T::class.java) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensions.kt index 771b943868..26c9e6b796 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensions.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,6 @@ */ package org.springframework.data.mongodb.core -import kotlin.reflect.KClass - -/** - * Extension for [ExecutableUpdateOperation.update] providing a [KClass] based variant. - * - * @author Christoph Strobl - * @since 2.0 - */ -fun ExecutableUpdateOperation.update(entityClass: KClass): ExecutableUpdateOperation.ExecutableUpdate = - update(entityClass.java) - /** * Extension for [ExecutableUpdateOperation.update] leveraging reified type parameters. * @@ -33,4 +22,4 @@ fun ExecutableUpdateOperation.update(entityClass: KClass): Executab * @since 2.0 */ inline fun ExecutableUpdateOperation.update(): ExecutableUpdateOperation.ExecutableUpdate = - update(T::class.java) \ No newline at end of file + update(T::class.java) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensions.kt index 03b1ca7341..25af7bd18b 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensions.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,25 +24,12 @@ import org.springframework.data.mongodb.core.BulkOperations.BulkMode import org.springframework.data.mongodb.core.aggregation.Aggregation import org.springframework.data.mongodb.core.aggregation.AggregationResults import org.springframework.data.mongodb.core.index.IndexOperations -import org.springframework.data.mongodb.core.mapreduce.GroupBy -import org.springframework.data.mongodb.core.mapreduce.GroupByResults import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions import org.springframework.data.mongodb.core.mapreduce.MapReduceResults -import org.springframework.data.mongodb.core.query.Criteria import org.springframework.data.mongodb.core.query.NearQuery import org.springframework.data.mongodb.core.query.Query -import org.springframework.data.mongodb.core.query.Update -import org.springframework.data.util.CloseableIterator -import kotlin.reflect.KClass - -/** - * Extension for [MongoOperations.getCollectionName] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun MongoOperations.getCollectionName(entityClass: KClass): String = - getCollectionName(entityClass.java) +import org.springframework.data.mongodb.core.query.UpdateDefinition +import java.util.stream.Stream /** * Extension for [MongoOperations.getCollectionName] leveraging reified type parameters. 
@@ -68,8 +55,8 @@ inline fun MongoOperations.execute(action: CollectionCallback< * @author Sebastien Deleuze * @since 2.0 */ -inline fun MongoOperations.stream(query: Query): CloseableIterator = - stream(query, T::class.java) +inline fun MongoOperations.stream(query: Query): Stream = + stream(query, T::class.java) /** * Extension for [MongoOperations.stream] leveraging reified type parameters. @@ -77,19 +64,12 @@ inline fun MongoOperations.stream(query: Query): CloseableIter * @author Sebastien Deleuze * @since 2.0 */ -inline fun MongoOperations.stream(query: Query, collectionName: String? = null): CloseableIterator = - if (collectionName != null) stream(query, T::class.java, collectionName) - else stream(query, T::class.java) - -/** - * Extension for [MongoOperations.createCollection] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun MongoOperations.createCollection(entityClass: KClass, collectionOptions: CollectionOptions? = null): MongoCollection = - if (collectionOptions != null) createCollection(entityClass.java, collectionOptions) - else createCollection(entityClass.java) +inline fun MongoOperations.stream( + query: Query, + collectionName: String? = null +): Stream = + if (collectionName != null) stream(query, T::class.java, collectionName) + else stream(query, T::class.java) /** * Extension for [MongoOperations.createCollection] leveraging reified type parameters. @@ -102,15 +82,6 @@ inline fun MongoOperations.createCollection( if (collectionOptions != null) createCollection(T::class.java, collectionOptions) else createCollection(T::class.java) -/** - * Extension for [MongoOperations.collectionExists] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun MongoOperations.collectionExists(entityClass: KClass): Boolean = - collectionExists(entityClass.java) - /** * Extension for [MongoOperations.collectionExists] leveraging reified type parameters. 
* @@ -120,16 +91,6 @@ fun MongoOperations.collectionExists(entityClass: KClass): Boolean inline fun MongoOperations.collectionExists(): Boolean = collectionExists(T::class.java) -/** - * Extension for [MongoOperations.dropCollection] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun MongoOperations.dropCollection(entityClass: KClass) { - dropCollection(entityClass.java) -} - /** * Extension for [MongoOperations.dropCollection] leveraging reified type parameters. * @@ -140,15 +101,6 @@ inline fun MongoOperations.dropCollection() { dropCollection(T::class.java) } -/** - * Extension for [MongoOperations.indexOps] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun MongoOperations.indexOps(entityClass: KClass): IndexOperations = - indexOps(entityClass.java) - /** * Extension for [MongoOperations.indexOps] leveraging reified type parameters. * @@ -158,16 +110,6 @@ fun MongoOperations.indexOps(entityClass: KClass): IndexOperations inline fun MongoOperations.indexOps(): IndexOperations = indexOps(T::class.java) -/** - * Extension for [MongoOperations.bulkOps] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun MongoOperations.bulkOps(bulkMode: BulkMode, entityClass: KClass, collectionName: String? = null): BulkOperations = - if (collectionName != null) bulkOps(bulkMode, entityClass.java, collectionName) - else bulkOps(bulkMode, entityClass.java) - /** * Extension for [MongoOperations.bulkOps] leveraging reified type parameters. * @@ -188,32 +130,14 @@ inline fun MongoOperations.bulkOps(bulkMode: BulkMode, collect inline fun MongoOperations.findAll(collectionName: String? = null): List = if (collectionName != null) findAll(T::class.java, collectionName) else findAll(T::class.java) -/** - * Extension for [MongoOperations.group] leveraging reified type parameters. 
- * - * @author Sebastien Deleuze - * @since 2.0 - */ -inline fun MongoOperations.group(inputCollectionName: String, groupBy: GroupBy): GroupByResults = - group(inputCollectionName, groupBy, T::class.java) - -/** - * Extension for [MongoOperations.group] leveraging reified type parameters. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -inline fun MongoOperations.group(criteria: Criteria, inputCollectionName: String, groupBy: GroupBy): GroupByResults = - group(criteria, inputCollectionName, groupBy, T::class.java) - /** * Extension for [MongoOperations.aggregate] leveraging reified type parameters. * - * @author Sebastien Deleuze - * @since 2.0 + * @author Mark Paluch + * @since 3.2 */ -inline fun MongoOperations.aggregate(aggregation: Aggregation, inputType: KClass<*>): AggregationResults = - aggregate(aggregation, inputType.java, O::class.java) +inline fun MongoOperations.aggregate(aggregation: Aggregation): AggregationResults = + aggregate(aggregation, I::class.java, O::class.java) /** * Extension for [MongoOperations.aggregate] leveraging reified type parameters. @@ -221,17 +145,20 @@ inline fun MongoOperations.aggregate(aggregation: Aggregation, * @author Sebastien Deleuze * @since 2.0 */ -inline fun MongoOperations.aggregate(aggregation: Aggregation, collectionName: String): AggregationResults = - aggregate(aggregation, collectionName, O::class.java) +inline fun MongoOperations.aggregate( + aggregation: Aggregation, + collectionName: String +): AggregationResults = + aggregate(aggregation, collectionName, O::class.java) /** * Extension for [MongoOperations.aggregateStream] leveraging reified type parameters. 
* - * @author Sebastien Deleuze - * @since 2.0 + * @author Mark Paluch + * @since 3.2 */ -inline fun MongoOperations.aggregateStream(aggregation: Aggregation, inputType: KClass<*>): CloseableIterator = - aggregateStream(aggregation, inputType.java, O::class.java) +inline fun MongoOperations.aggregateStream(aggregation: Aggregation): Stream = + aggregateStream(aggregation, I::class.java, O::class.java) /** * Extension for [MongoOperations.aggregateStream] leveraging reified type parameters. @@ -239,8 +166,11 @@ inline fun MongoOperations.aggregateStream(aggregation: Aggreg * @author Sebastien Deleuze * @since 2.0 */ -inline fun MongoOperations.aggregateStream(aggregation: Aggregation, collectionName: String): CloseableIterator = - aggregateStream(aggregation, collectionName, O::class.java) +inline fun MongoOperations.aggregateStream( + aggregation: Aggregation, + collectionName: String +): Stream = + aggregateStream(aggregation, collectionName, O::class.java) /** * Extension for [MongoOperations.mapReduce] leveraging reified type parameters. @@ -268,6 +198,8 @@ inline fun MongoOperations.mapReduce(query: Query, collectionN * @author Sebastien Deleuze * @since 2.0 */ +@Suppress("DEPRECATION") +@Deprecated("Since 2.2, the `geoNear` command has been removed in MongoDB Server 4.2.0. Use Aggregations with `Aggregation.geoNear(NearQuery, String)` instead.", replaceWith = ReplaceWith("aggregate()")) inline fun MongoOperations.geoNear(near: NearQuery, collectionName: String? = null): GeoResults = if (collectionName != null) geoNear(near, T::class.java, collectionName) else geoNear(near, T::class.java) @@ -281,16 +213,6 @@ inline fun MongoOperations.geoNear(near: NearQuery, collection inline fun MongoOperations.findOne(query: Query, collectionName: String? = null): T? = if (collectionName != null) findOne(query, T::class.java, collectionName) else findOne(query, T::class.java) -/** - * Extension for [MongoOperations.exists] providing a [KClass] based variant. 
- * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun MongoOperations.exists(query: Query, entityClass: KClass, collectionName: String? = null): Boolean = - if (collectionName != null) exists(query, entityClass.java, collectionName) - else exists(query, entityClass.java) - /** * Extension for [MongoOperations.exists] leveraging reified type parameters. * @@ -322,33 +244,6 @@ inline fun MongoOperations.findById(id: Any, collectionName: S if (collectionName != null) findById(id, T::class.java, collectionName) else findById(id, T::class.java) -/** - * Extension for [MongoOperations.findDistinct] leveraging reified type parameters. - * - * @author Christoph Strobl - * @since 2.1 - */ -inline fun MongoOperations.findDistinct(field: String, entityClass: KClass<*>): List = - findDistinct(field, entityClass.java, T::class.java); - -/** - * Extension for [MongoOperations.findDistinct] leveraging reified type parameters. - * - * @author Christoph Strobl - * @since 2.1 - */ -inline fun MongoOperations.findDistinct(query: Query, field: String, entityClass: KClass<*>): List = - findDistinct(query, field, entityClass.java, T::class.java) - -/** - * Extension for [MongoOperations.findDistinct] leveraging reified type parameters. - * - * @author Christoph Strobl - * @since 2.1 - */ -inline fun MongoOperations.findDistinct(query: Query, field: String, collectionName: String, entityClass: KClass<*>): List = - findDistinct(query, field, collectionName, entityClass.java, T::class.java) - /** * Extension for [MongoOperations.findDistinct] leveraging reified type parameters. * @@ -366,7 +261,7 @@ inline fun MongoOperations.findDistinct(query * @author Sebastien Deleuze * @since 2.0 */ -inline fun MongoOperations.findAndModify(query: Query, update: Update, options: FindAndModifyOptions, collectionName: String? = null): T? = +inline fun MongoOperations.findAndModify(query: Query, update: UpdateDefinition, options: FindAndModifyOptions, collectionName: String? = null): T? 
= if (collectionName != null) findAndModify(query, update, options, T::class.java, collectionName) else findAndModify(query, update, options, T::class.java) @@ -380,16 +275,6 @@ inline fun MongoOperations.findAndRemove(query: Query, collect if (collectionName != null) findAndRemove(query, T::class.java, collectionName) else findAndRemove(query, T::class.java) -/** - * Extension for [MongoOperations.count] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun MongoOperations.count(query: Query = Query(), entityClass: KClass, collectionName: String? = null): Long = - if (collectionName != null) count(query, entityClass.java, collectionName) - else count(query, entityClass.java) - /** * Extension for [MongoOperations.count] leveraging reified type parameters. * @@ -401,24 +286,13 @@ inline fun MongoOperations.count(query: Query = Query(), colle if (collectionName != null) count(query, T::class.java, collectionName) else count(query, T::class.java) /** - * Extension for [MongoOperations.insert] providing a [KClass] based variant. + * Extension for [MongoOperations.insert] leveraging reified type parameters. * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun MongoOperations.insert(batchToSave: Collection, entityClass: KClass) { - insert(batchToSave, entityClass.java) -} - -/** - * Extension for [MongoOperations.upsert] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 + * @author Mark Paluch + * @since 2.2 */ -fun MongoOperations.upsert(query: Query, update: Update, entityClass: KClass, collectionName: String? 
= null): UpdateResult = - if (collectionName != null) upsert(query, update, entityClass.java, collectionName) - else upsert(query, update, entityClass.java) +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun MongoOperations.insert(batchToSave: Collection): Collection = insert(batchToSave, T::class.java) /** * Extension for [MongoOperations.upsert] leveraging reified type parameters. @@ -427,20 +301,10 @@ fun MongoOperations.upsert(query: Query, update: Update, entityClass: * @since 2.0 */ @Suppress("EXTENSION_SHADOWED_BY_MEMBER") -inline fun MongoOperations.upsert(query: Query, update: Update, collectionName: String? = null): UpdateResult = +inline fun MongoOperations.upsert(query: Query, update: UpdateDefinition, collectionName: String? = null): UpdateResult = if (collectionName != null) upsert(query, update, T::class.java, collectionName) else upsert(query, update, T::class.java) -/** - * Extension for [MongoOperations.updateFirst] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun MongoOperations.updateFirst(query: Query, update: Update, entityClass: KClass, collectionName: String? = null): UpdateResult = - if (collectionName != null) updateFirst(query, update, entityClass.java, collectionName) - else updateFirst(query, update, entityClass.java) - /** * Extension for [MongoOperations.updateFirst] leveraging reified type parameters. * @@ -448,20 +312,10 @@ fun MongoOperations.updateFirst(query: Query, update: Update, entityCl * @since 2.0 */ @Suppress("EXTENSION_SHADOWED_BY_MEMBER") -inline fun MongoOperations.updateFirst(query: Query, update: Update, collectionName: String? = null): UpdateResult = +inline fun MongoOperations.updateFirst(query: Query, update: UpdateDefinition, collectionName: String? 
= null): UpdateResult = if (collectionName != null) updateFirst(query, update, T::class.java, collectionName) else updateFirst(query, update, T::class.java) -/** - * Extension for [MongoOperations.updateMulti] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun MongoOperations.updateMulti(query: Query, update: Update, entityClass: KClass, collectionName: String? = null): UpdateResult = - if (collectionName != null) updateMulti(query, update, entityClass.java, collectionName) - else updateMulti(query, update, entityClass.java) - /** * Extension for [MongoOperations.updateMulti] leveraging reified type parameters. * @@ -469,20 +323,10 @@ fun MongoOperations.updateMulti(query: Query, update: Update, entityCl * @since 2.0 */ @Suppress("EXTENSION_SHADOWED_BY_MEMBER") -inline fun MongoOperations.updateMulti(query: Query, update: Update, collectionName: String? = null): UpdateResult = +inline fun MongoOperations.updateMulti(query: Query, update: UpdateDefinition, collectionName: String? = null): UpdateResult = if (collectionName != null) updateMulti(query, update, T::class.java, collectionName) else updateMulti(query, update, T::class.java) -/** - * Extension for [MongoOperations.remove] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun MongoOperations.remove(query: Query, entityClass: KClass, collectionName: String? = null): DeleteResult = - if (collectionName != null) remove(query, entityClass.java, collectionName) - else remove(query, entityClass.java) - /** * Extension for [MongoOperations.remove] leveraging reified type parameters. 
* diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensions.kt index cedf4775fb..e59c5786bc 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensions.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,22 +15,23 @@ */ package org.springframework.data.mongodb.core -import kotlin.reflect.KClass +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.reactive.asFlow /** - * Extension for [ExecutableAggregationOperation.aggregateAndReturn] providing a [KClass] based variant. + * Extension for [ExecutableAggregationOperation.aggregateAndReturn] leveraging reified type parameters. * * @author Mark Paluch * @since 2.0 */ -fun ReactiveAggregationOperation.aggregateAndReturn(entityClass: KClass): ReactiveAggregationOperation.ReactiveAggregation = - aggregateAndReturn(entityClass.java) +inline fun ReactiveAggregationOperation.aggregateAndReturn(): ReactiveAggregationOperation.ReactiveAggregation = + aggregateAndReturn(T::class.java) /** - * Extension for [ExecutableAggregationOperation.aggregateAndReturn] leveraging reified type parameters. 
+ * Coroutines [Flow] variant of [ReactiveAggregationOperation.TerminatingAggregationOperation.all]. * - * @author Mark Paluch - * @since 2.0 + * @author Sebastien Deleuze + * @since 2.2 */ -inline fun ReactiveAggregationOperation.aggregateAndReturn(): ReactiveAggregationOperation.ReactiveAggregation = - aggregateAndReturn(T::class.java) \ No newline at end of file +fun ReactiveAggregationOperation.TerminatingAggregationOperation.flow(): Flow = + all().asFlow() diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationExtensions.kt new file mode 100644 index 0000000000..d589c32285 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationExtensions.kt @@ -0,0 +1,48 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.reactive.asFlow + +/** + * Extension for [RactiveChangeStreamOperation.changeStream] leveraging reified type parameters. 
+ * + * @author Christoph Strobl + * @since 2.2 + */ +inline fun ReactiveChangeStreamOperation.changeStream(): ReactiveChangeStreamOperation.ReactiveChangeStream = + changeStream(T::class.java) + +/** + * Extension for [ReactiveChangeStreamOperation.ChangeStreamWithFilterAndProjection.as] leveraging reified type parameters. + * + * @author Christoph Strobl + * @since 2.2 + */ +inline fun ReactiveChangeStreamOperation.ChangeStreamWithFilterAndProjection<*>.asType(): ReactiveChangeStreamOperation.ChangeStreamWithFilterAndProjection = + `as`(T::class.java) + +/** + * Coroutines [Flow] variant of [ReactiveChangeStreamOperation.TerminatingChangeStream.listen]. + * + * @author Christoph Strobl + * @author Sebastien Deleuze + * @since 2.2 + */ +fun ReactiveChangeStreamOperation.TerminatingChangeStream.flow(): Flow> = + listen().asFlow() + diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensions.kt index 37531f4ae3..da1cb7d333 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensions.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,14 @@ */ package org.springframework.data.mongodb.core -import kotlin.reflect.KClass - -/** - * Extension for [ReactiveFindOperation.query] providing a [KClass] based variant. - * - * @author Mark Paluch - * @since 2.0 - */ -fun ReactiveFindOperation.query(entityClass: KClass): ReactiveFindOperation.ReactiveFind = - query(entityClass.java) +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.reactive.asFlow +import kotlinx.coroutines.reactive.awaitFirstOrNull +import kotlinx.coroutines.reactive.awaitSingle +import org.springframework.data.geo.GeoResult +import org.springframework.data.mapping.toDotPath +import kotlin.reflect.KProperty +import kotlin.reflect.KProperty1 /** * Extension for [ReactiveFindOperation.query] leveraging reified type parameters. @@ -36,13 +34,13 @@ inline fun ReactiveFindOperation.query(): ReactiveFindOperatio query(T::class.java) /** - * Extension for [ReactiveFindOperation.FindWithProjection.as] providing a [KClass] based variant. + * Extension for [ReactiveFindOperation.query] for a type-safe projection of distinct values. * * @author Mark Paluch - * @since 2.0 + * @since 3.0 */ -fun ReactiveFindOperation.FindWithProjection.asType(resultType: KClass): ReactiveFindOperation.FindWithQuery = - `as`(resultType.java) +inline fun ReactiveFindOperation.distinct(field : KProperty1): ReactiveFindOperation.TerminatingDistinct = + query(T::class.java).distinct(field.name) /** * Extension for [ReactiveFindOperation.FindWithProjection.as] leveraging reified type parameters. 
@@ -50,14 +48,110 @@ fun ReactiveFindOperation.FindWithProjection.asType(resultType: KCl * @author Mark Paluch * @since 2.0 */ -inline fun ReactiveFindOperation.FindWithProjection.asType(): ReactiveFindOperation.FindWithQuery = +inline fun ReactiveFindOperation.FindWithProjection<*>.asType(): ReactiveFindOperation.FindWithQuery = `as`(T::class.java) /** - * Extension for [ExecutableFindOperation.DistinctWithProjection. as] providing a [KClass] based variant. + * Extension for [ReactiveFindOperation.DistinctWithProjection.as] leveraging reified type parameters. * * @author Christoph Strobl * @since 2.1 */ -fun ReactiveFindOperation.DistinctWithProjection.asType(resultType: KClass): ReactiveFindOperation.TerminatingDistinct = - `as`(resultType.java); +inline fun ReactiveFindOperation.DistinctWithProjection.asType(): ReactiveFindOperation.TerminatingDistinct = + `as`(T::class.java) + +/** + * Extension for [ReactiveFindOperation.FindDistinct.distinct] leveraging KProperty. + * + * @author Mark Paluch + * @since 3.0 + */ +fun ReactiveFindOperation.FindDistinct.distinct(key: KProperty<*>): ReactiveFindOperation.TerminatingDistinct = + distinct(key.toDotPath()) + +/** + * Non-nullable Coroutines variant of [ReactiveFindOperation.TerminatingFind.one]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend inline fun ReactiveFindOperation.TerminatingFind.awaitOne(): T = + one().awaitSingle() + +/** + * Nullable Coroutines variant of [ReactiveFindOperation.TerminatingFind.one]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend inline fun ReactiveFindOperation.TerminatingFind.awaitOneOrNull(): T? = + one().awaitFirstOrNull() + +/** + * Non-nullable Coroutines variant of [ReactiveFindOperation.TerminatingFind.first]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend inline fun ReactiveFindOperation.TerminatingFind.awaitFirst(): T = + first().awaitSingle() + +/** + * Nullable Coroutines variant of [ReactiveFindOperation.TerminatingFind.first]. 
+ * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend inline fun ReactiveFindOperation.TerminatingFind.awaitFirstOrNull(): T? = + first().awaitFirstOrNull() + +/** + * Coroutines variant of [ReactiveFindOperation.TerminatingFind.count]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend fun ReactiveFindOperation.TerminatingFind.awaitCount(): Long = + count().awaitSingle() + +/** + * Coroutines variant of [ReactiveFindOperation.TerminatingFind.exists]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend fun ReactiveFindOperation.TerminatingFind.awaitExists(): Boolean = + exists().awaitSingle() + +/** + * Coroutines [Flow] variant of [ReactiveFindOperation.TerminatingFind.all]. + * + * @author Sebastien Deleuze + */ +fun ReactiveFindOperation.TerminatingFind.flow(): Flow = + all().asFlow() + +/** + * Coroutines [Flow] variant of [ReactiveFindOperation.TerminatingFind.tail]. + * + * @author Sebastien Deleuze + */ +fun ReactiveFindOperation.TerminatingFind.tailAsFlow(): Flow = + tail().asFlow() + +/** + * Coroutines [Flow] variant of [ReactiveFindOperation.TerminatingFindNear.all]. + * + * @author Sebastien Deleuze + */ +fun ReactiveFindOperation.TerminatingFindNear.flow(): Flow> = + all().asFlow() + +/** + * Coroutines [Flow] variant of [ReactiveFindOperation.TerminatingDistinct.all]. 
+ * + * @author Sebastien Deleuze + * @since 2.2 + */ +fun ReactiveFindOperation.TerminatingDistinct.flow(): Flow = + all().asFlow() diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensions.kt index ce37488eb4..2ed40aa074 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensions.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,22 +15,34 @@ */ package org.springframework.data.mongodb.core -import kotlin.reflect.KClass +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.reactive.asFlow +import kotlinx.coroutines.reactive.awaitSingle /** - * Extension for [ReactiveInsertOperation.insert] providing a [KClass] based variant. + * Extension for [ReactiveInsertOperation.insert] leveraging reified type parameters. * * @author Mark Paluch * @since 2.0 */ -fun ReactiveInsertOperation.insert(entityClass: KClass): ReactiveInsertOperation.ReactiveInsert = - insert(entityClass.java) +inline fun ReactiveInsertOperation.insert(): ReactiveInsertOperation.ReactiveInsert = + insert(T::class.java) /** - * Extension for [ReactiveInsertOperation.insert] leveraging reified type parameters. 
+ * Coroutines variant of [ReactiveInsertOperation.TerminatingInsert.one]. * - * @author Mark Paluch - * @since 2.0 + * @author Sebastien Deleuze + * @since 2.2 */ -inline fun ReactiveInsertOperation.insert(): ReactiveInsertOperation.ReactiveInsert = - insert(T::class.java) \ No newline at end of file +suspend inline fun ReactiveInsertOperation.TerminatingInsert.oneAndAwait(o: T): T = + one(o).awaitSingle() + + +/** + * Coroutines [Flow] variant of [ReactiveInsertOperation.TerminatingInsert.all]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +fun ReactiveInsertOperation.TerminatingInsert.flow(objects: Collection): Flow = + all(objects).asFlow() diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveMapReduceOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveMapReduceOperationExtensions.kt new file mode 100644 index 0000000000..7fc4678100 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveMapReduceOperationExtensions.kt @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.reactive.asFlow + +/** + * Extension for [ReactiveMapReduceOperation.mapReduce] leveraging reified type parameters. 
+ * + * @author Christoph Strobl + * @since 2.1 + */ +inline fun ReactiveMapReduceOperation.mapReduce(): ReactiveMapReduceOperation.MapReduceWithMapFunction = + mapReduce(T::class.java) + +/** + * Extension for [ReactiveMapReduceOperation.MapReduceWithProjection.as] leveraging reified type parameters. + * + * @author Christoph Strobl + * @since 2.1 + */ +inline fun ReactiveMapReduceOperation.MapReduceWithProjection<*>.asType(): ReactiveMapReduceOperation.MapReduceWithQuery = + `as`(T::class.java) + + +/** + * Coroutines [Flow] variant of [ReactiveMapReduceOperation.TerminatingMapReduce.all]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +fun ReactiveMapReduceOperation.TerminatingMapReduce.flow(): Flow = + all().asFlow() diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensions.kt index 832f873496..fda959be64 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensions.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,22 +20,14 @@ import com.mongodb.client.result.UpdateResult import com.mongodb.reactivestreams.client.MongoCollection import org.bson.Document import org.springframework.data.geo.GeoResult +import org.springframework.data.mongodb.core.aggregation.Aggregation +import org.springframework.data.mongodb.core.aggregation.TypedAggregation import org.springframework.data.mongodb.core.index.ReactiveIndexOperations import org.springframework.data.mongodb.core.query.NearQuery import org.springframework.data.mongodb.core.query.Query -import org.springframework.data.mongodb.core.query.Update +import org.springframework.data.mongodb.core.query.UpdateDefinition import reactor.core.publisher.Flux import reactor.core.publisher.Mono -import kotlin.reflect.KClass - -/** - * Extension for [ReactiveMongoOperations.indexOps] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun ReactiveMongoOperations.indexOps(entityClass: KClass): ReactiveIndexOperations = - indexOps(entityClass.java) /** * Extension for [ReactiveMongoOperations.indexOps] leveraging reified type parameters. @@ -55,15 +47,6 @@ inline fun ReactiveMongoOperations.indexOps(): ReactiveIndexOp inline fun ReactiveMongoOperations.execute(action: ReactiveCollectionCallback): Flux = execute(T::class.java, action) -/** - * Extension for [ReactiveMongoOperations.createCollection] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun ReactiveMongoOperations.createCollection(entityClass: KClass, collectionOptions: CollectionOptions? 
= null): Mono> = - if (collectionOptions != null) createCollection(entityClass.java, collectionOptions) else createCollection(entityClass.java) - /** * Extension for [ReactiveMongoOperations.createCollection] leveraging reified type parameters. * @@ -73,15 +56,6 @@ fun ReactiveMongoOperations.createCollection(entityClass: KClass, c inline fun ReactiveMongoOperations.createCollection(collectionOptions: CollectionOptions? = null): Mono> = if (collectionOptions != null) createCollection(T::class.java, collectionOptions) else createCollection(T::class.java) -/** - * Extension for [ReactiveMongoOperations.collectionExists] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun ReactiveMongoOperations.collectionExists(entityClass: KClass): Mono = - collectionExists(entityClass.java) - /** * Extension for [ReactiveMongoOperations.collectionExists] leveraging reified type parameters. * @@ -91,15 +65,6 @@ fun ReactiveMongoOperations.collectionExists(entityClass: KClass): inline fun ReactiveMongoOperations.collectionExists(): Mono = collectionExists(T::class.java) -/** - * Extension for [ReactiveMongoOperations.dropCollection] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun ReactiveMongoOperations.dropCollection(entityClass: KClass): Mono = - dropCollection(entityClass.java) - /** * Extension for [ReactiveMongoOperations.dropCollection] leveraging reified type parameters. * @@ -127,15 +92,6 @@ inline fun ReactiveMongoOperations.findAll(collectionName: Str inline fun ReactiveMongoOperations.findOne(query: Query, collectionName: String? = null): Mono = if (collectionName != null) findOne(query, T::class.java, collectionName) else findOne(query, T::class.java) -/** - * Extension for [ReactiveMongoOperations.exists] providing a [KClass] based variant. 
- * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun ReactiveMongoOperations.exists(query: Query, entityClass: KClass, collectionName: String? = null): Mono = - if (collectionName != null) exists(query, entityClass.java, collectionName) else exists(query, entityClass.java) - /** * Extension for [ReactiveMongoOperations.exists] leveraging reified type parameters. * @@ -168,39 +124,58 @@ inline fun ReactiveMongoOperations.findById(id: Any, collectio * Extension for [ReactiveMongoOperations.findDistinct] leveraging reified type parameters. * * @author Christoph Strobl + * @author Mark Paluch * @since 2.1 */ -inline fun ReactiveMongoOperations.findDistinct(field: String, entityClass: KClass<*>): Flux = - findDistinct(field, entityClass.java, T::class.java); +inline fun ReactiveMongoOperations.findDistinct(query: Query, field: String, collectionName: String? = null): Flux = + if (collectionName != null) findDistinct(query, field, collectionName, E::class.java, T::class.java) + else findDistinct(query, field, E::class.java, T::class.java) + /** - * Extension for [ReactiveMongoOperations.findDistinct] leveraging reified type parameters. + * Extension for [ReactiveMongoOperations.aggregate] leveraging reified type parameters. * - * @author Christoph Strobl - * @since 2.1 + * @author Wonwoo Lee + * @since 3.1.4 */ -inline fun ReactiveMongoOperations.findDistinct(query: Query, field: String, entityClass: KClass<*>): Flux = - findDistinct(query, field, entityClass.java, T::class.java) +inline fun ReactiveMongoOperations.aggregate( + aggregation: TypedAggregation<*>, + collectionName: String +): Flux = + this.aggregate(aggregation, collectionName, O::class.java) /** - * Extension for [ReactiveMongoOperations.findDistinct] leveraging reified type parameters. + * Extension for [ReactiveMongoOperations.aggregate] leveraging reified type parameters. 
* - * @author Christoph Strobl - * @since 2.1 + * @author Wonwoo Lee + * @since 3.1.4 */ -inline fun ReactiveMongoOperations.findDistinct(query: Query, field: String, collectionName: String, entityClass: KClass<*>): Flux = - findDistinct(query, field, collectionName, entityClass.java, T::class.java) +inline fun ReactiveMongoOperations.aggregate(aggregation: TypedAggregation<*>): Flux = + this.aggregate(aggregation, O::class.java) /** - * Extension for [ReactiveMongoOperations.findDistinct] leveraging reified type parameters. + * Extension for [ReactiveMongoOperations.aggregate] leveraging reified type parameters. * - * @author Christoph Strobl + * @author Wonwoo Lee * @author Mark Paluch - * @since 2.1 + * @since 3.1.4 */ -inline fun ReactiveMongoOperations.findDistinct(query: Query, field: String, collectionName: String? = null): Flux = - if (collectionName != null) findDistinct(query, field, collectionName, E::class.java, T::class.java) - else findDistinct(query, field, E::class.java, T::class.java) +inline fun ReactiveMongoOperations.aggregate( + aggregation: Aggregation +): Flux = + this.aggregate(aggregation, I::class.java, O::class.java) + +/** + * Extension for [ReactiveMongoOperations.aggregate] leveraging reified type parameters. + * + * @author Wonwoo Lee + * @since 3.1.4 + */ +inline fun ReactiveMongoOperations.aggregate( + aggregation: Aggregation, + collectionName: String +): Flux = + this.aggregate(aggregation, collectionName, O::class.java) /** * Extension for [ReactiveMongoOperations.geoNear] leveraging reified type parameters. @@ -208,6 +183,8 @@ inline fun ReactiveMongoOperations.findDistin * @author Sebastien Deleuze * @since 2.0 */ +@Suppress("DEPRECATION") +@Deprecated("Since 2.2, the `geoNear` command has been removed in MongoDB Server 4.2.0. Use Aggregations with `Aggregation.geoNear(NearQuery, String)` instead.", replaceWith = ReplaceWith("aggregate()")) inline fun ReactiveMongoOperations.geoNear(near: NearQuery, collectionName: String? 
= null): Flux> = if (collectionName != null) geoNear(near, T::class.java, collectionName) else geoNear(near, T::class.java) @@ -217,7 +194,7 @@ inline fun ReactiveMongoOperations.geoNear(near: NearQuery, co * @author Sebastien Deleuze * @since 2.0 */ -inline fun ReactiveMongoOperations.findAndModify(query: Query, update: Update, options: FindAndModifyOptions, collectionName: String? = null): Mono = +inline fun ReactiveMongoOperations.findAndModify(query: Query, update: UpdateDefinition, options: FindAndModifyOptions, collectionName: String? = null): Mono = if (collectionName != null) findAndModify(query, update, options, T::class.java, collectionName) else findAndModify(query, update, options, T::class.java) /** @@ -230,16 +207,6 @@ inline fun ReactiveMongoOperations.findAndRemove(query: Query, if (collectionName != null) findAndRemove(query, T::class.java, collectionName) else findAndRemove(query, T::class.java) -/** - * Extension for [ReactiveMongoOperations.count] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun ReactiveMongoOperations.count(query: Query = Query(), entityClass: KClass, collectionName: String? = null): Mono = - if (collectionName != null) count(query, entityClass.java, collectionName) - else count(query, entityClass.java) - /** * Extension for [ReactiveMongoOperations.count] leveraging reified type parameters. * @@ -252,31 +219,13 @@ inline fun ReactiveMongoOperations.count(query: Query = Query( else count(query, T::class.java) /** - * Extension for [ReactiveMongoOperations.insert] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun ReactiveMongoOperations.insert(batchToSave: Collection, entityClass: KClass): Flux = - insert(batchToSave, entityClass.java) - -/** - * Extension for [ReactiveMongoOperations.insertAll] providing a [KClass] based variant. 
- * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun ReactiveMongoOperations.insertAll(batchToSave: Mono>, entityClass: KClass): Flux = - insertAll(batchToSave, entityClass.java) - -/** - * Extension for [ReactiveMongoOperations.upsert] providing a [KClass] based variant. + * Extension for [ReactiveMongoOperations.insert] leveraging reified type parameters. * - * @author Sebastien Deleuze - * @since 2.0 + * @author Mark Paluch + * @since 2.2 */ -fun ReactiveMongoOperations.upsert(query: Query, update: Update, entityClass: KClass, collectionName: String? = null): Mono = - if (collectionName != null) upsert(query, update, entityClass.java, collectionName) else upsert(query, update, entityClass.java) +@Suppress("EXTENSION_SHADOWED_BY_MEMBER") +inline fun ReactiveMongoOperations.insert(batchToSave: Collection): Flux = insert(batchToSave, T::class.java) /** * Extension for [ReactiveMongoOperations.upsert] leveraging reified type parameters. @@ -285,20 +234,10 @@ fun ReactiveMongoOperations.upsert(query: Query, update: Update, entit * @since 2.0 */ @Suppress("EXTENSION_SHADOWED_BY_MEMBER") -inline fun ReactiveMongoOperations.upsert(query: Query, update: Update, collectionName: String? = null): Mono = +inline fun ReactiveMongoOperations.upsert(query: Query, update: UpdateDefinition, collectionName: String? = null): Mono = if (collectionName != null) upsert(query, update, T::class.java, collectionName) else upsert(query, update, T::class.java) -/** - * Extension for [ReactiveMongoOperations.updateFirst] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun ReactiveMongoOperations.updateFirst(query: Query, update: Update, entityClass: KClass, collectionName: String? = null): Mono = - if (collectionName != null) updateFirst(query, update, entityClass.java, collectionName) - else updateFirst(query, update, entityClass.java) - /** * Extension for [ReactiveMongoOperations.updateFirst] leveraging reified type parameters. 
* @@ -306,20 +245,10 @@ fun ReactiveMongoOperations.updateFirst(query: Query, update: Update, * @since 2.0 */ @Suppress("EXTENSION_SHADOWED_BY_MEMBER") -inline fun ReactiveMongoOperations.updateFirst(query: Query, update: Update, collectionName: String? = null): Mono = +inline fun ReactiveMongoOperations.updateFirst(query: Query, update: UpdateDefinition, collectionName: String? = null): Mono = if (collectionName != null) updateFirst(query, update, T::class.java, collectionName) else updateFirst(query, update, T::class.java) -/** - * Extension for [ReactiveMongoOperations.updateMulti] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun ReactiveMongoOperations.updateMulti(query: Query, update: Update, entityClass: KClass, collectionName: String? = null): Mono = - if (collectionName != null) updateMulti(query, update, entityClass.java, collectionName) - else updateMulti(query, update, entityClass.java) - /** * Extension for [ReactiveMongoOperations.updateMulti] leveraging reified type parameters. * @@ -327,20 +256,10 @@ fun ReactiveMongoOperations.updateMulti(query: Query, update: Update, * @since 2.0 */ @Suppress("EXTENSION_SHADOWED_BY_MEMBER") -inline fun ReactiveMongoOperations.updateMulti(query: Query, update: Update, collectionName: String? = null): Mono = +inline fun ReactiveMongoOperations.updateMulti(query: Query, update: UpdateDefinition, collectionName: String? = null): Mono = if (collectionName != null) updateMulti(query, update, T::class.java, collectionName) else updateMulti(query, update, T::class.java) -/** - * Extension for [ReactiveMongoOperations.remove] providing a [KClass] based variant. - * - * @author Sebastien Deleuze - * @since 2.0 - */ -fun ReactiveMongoOperations.remove(query: Query, entityClass: KClass, collectionName: String? 
= null): Mono = - if (collectionName != null) remove(query, entityClass.java, collectionName) - else remove(query, entityClass.java) - /** * Extension for [ReactiveMongoOperations.remove] leveraging reified type parameters. * diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensions.kt index 0398a26d01..5cf7ccf514 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensions.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,22 +15,35 @@ */ package org.springframework.data.mongodb.core -import kotlin.reflect.KClass +import com.mongodb.client.result.DeleteResult +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.reactive.asFlow +import kotlinx.coroutines.reactive.awaitSingle /** - * Extension for [ReactiveRemoveOperation.remove] providing a [KClass] based variant. + * Extension for [ReactiveRemoveOperation.remove] leveraging reified type parameters. 
* * @author Mark Paluch * @since 2.0 */ -fun ReactiveRemoveOperation.remove(entityClass: KClass): ReactiveRemoveOperation.ReactiveRemove = - remove(entityClass.java) +inline fun ReactiveRemoveOperation.remove(): ReactiveRemoveOperation.ReactiveRemove = + remove(T::class.java) /** - * Extension for [ReactiveRemoveOperation.remove] leveraging reified type parameters. + * Coroutines variant of [ReactiveRemoveOperation.TerminatingRemove.all]. * - * @author Mark Paluch - * @since 2.0 + * @author Sebastien Deleuze + * @since 2.2 */ -inline fun ReactiveRemoveOperation.remove(): ReactiveRemoveOperation.ReactiveRemove = - remove(T::class.java) \ No newline at end of file +suspend fun ReactiveRemoveOperation.TerminatingRemove.allAndAwait(): DeleteResult = + all().awaitSingle() + + +/** + * Coroutines [Flow] variant of [ReactiveRemoveOperation.TerminatingRemove.findAndRemove]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +fun ReactiveRemoveOperation.TerminatingRemove.findAndRemoveAsFlow(): Flow = + findAndRemove().asFlow() diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensions.kt index 4cb2f9fa8e..0c603dcaed 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensions.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,22 +15,86 @@ */ package org.springframework.data.mongodb.core -import kotlin.reflect.KClass +import com.mongodb.client.result.UpdateResult +import kotlinx.coroutines.reactive.awaitFirstOrNull +import kotlinx.coroutines.reactive.awaitSingle /** - * Extension for [ReactiveUpdateOperation.update] providing a [KClass] based variant. + * Extension for [ReactiveUpdateOperation.update] leveraging reified type parameters. * * @author Mark Paluch * @since 2.0 */ -fun ReactiveUpdateOperation.update(entityClass: KClass): ReactiveUpdateOperation.ReactiveUpdate = - update(entityClass.java) +inline fun ReactiveUpdateOperation.update(): ReactiveUpdateOperation.ReactiveUpdate = + update(T::class.java) /** - * Extension for [ReactiveUpdateOperation.update] leveraging reified type parameters. + * Non-nullable Coroutines variant of [ReactiveUpdateOperation.TerminatingFindAndModify.findModifyAndAwait]. * - * @author Mark Paluch - * @since 2.0 + * @author Sebastien Deleuze + * @since 2.2 */ -inline fun ReactiveUpdateOperation.update(): ReactiveUpdateOperation.ReactiveUpdate = - update(T::class.java) \ No newline at end of file +suspend fun ReactiveUpdateOperation.TerminatingFindAndModify.findModifyAndAwait(): T = + findAndModify().awaitSingle() + +/** + * Nullable Coroutines variant of [ReactiveUpdateOperation.TerminatingFindAndModify.findModifyAndAwait]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend fun ReactiveUpdateOperation.TerminatingFindAndModify.findModifyAndAwaitOrNull(): T? = + findAndModify().awaitFirstOrNull() + +/** + * Non-nullable Coroutines variant of [ReactiveUpdateOperation.TerminatingFindAndReplace.findAndReplace]. 
+ * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend fun ReactiveUpdateOperation.TerminatingFindAndReplace.findReplaceAndAwait(): T = + findAndReplace().awaitSingle() + +/** + * Nullable Coroutines variant of [ReactiveUpdateOperation.TerminatingFindAndReplace.findAndReplace]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend fun ReactiveUpdateOperation.TerminatingFindAndReplace.findReplaceAndAwaitOrNull(): T? = + findAndReplace().awaitFirstOrNull() + +/** + * Coroutines variant of [ReactiveUpdateOperation.TerminatingUpdate.all]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend fun ReactiveUpdateOperation.TerminatingUpdate.allAndAwait(): UpdateResult = + all().awaitSingle() + +/** + * Coroutines variant of [ReactiveUpdateOperation.TerminatingUpdate.first]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend fun ReactiveUpdateOperation.TerminatingUpdate.firstAndAwait(): UpdateResult = + first().awaitSingle() + +/** + * Coroutines variant of [ReactiveUpdateOperation.TerminatingUpdate.upsert]. + * + * @author Sebastien Deleuze + * @since 2.2 + */ +suspend fun ReactiveUpdateOperation.TerminatingUpdate.upsertAndAwait(): UpdateResult = upsert().awaitSingle() + +/** + * Extension for [ReactiveUpdateOperation.FindAndReplaceWithProjection.as] leveraging reified type parameters. 
+ * + * @author Sebastien Deleuze + * @since 2.2 + */ +inline fun ReactiveUpdateOperation.FindAndReplaceWithProjection<*>.asType(): ReactiveUpdateOperation.FindAndReplaceWithOptions = + `as`(T::class.java) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensions.kt index 0022d5c916..f4744d402a 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensions.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,16 @@ */ package org.springframework.data.mongodb.core.query +import org.springframework.data.mapping.toDotPath +import kotlin.reflect.KProperty + /** - * Extension for [Criteria.is] providing an `isEqualTo` alias since `in` is a reserved keyword in Kotlin. + * Extension for [Criteria.is] providing an `isEqualTo` alias since `is` is a reserved keyword in Kotlin. * * @author Sebastien Deleuze * @since 2.0 */ -fun Criteria.isEqualTo(o: Any?) : Criteria = `is`(o) +fun Criteria.isEqualTo(o: Any?): Criteria = `is`(o) /** * Extension for [Criteria.in] providing an `inValues` alias since `in` is a reserved keyword in Kotlin. @@ -29,7 +32,7 @@ fun Criteria.isEqualTo(o: Any?) 
: Criteria = `is`(o) * @author Sebastien Deleuze * @since 2.0 */ -fun Criteria.inValues(c: Collection) : Criteria = `in`(c) +fun Criteria.inValues(c: Collection): Criteria = `in`(c) /** * Extension for [Criteria.in] providing an `inValues` alias since `in` is a reserved keyword in Kotlin. @@ -37,4 +40,20 @@ fun Criteria.inValues(c: Collection) : Criteria = `in`(c) * @author Sebastien Deleuze * @since 2.0 */ -fun Criteria.inValues(vararg o: Any?) : Criteria = `in`(*o) +fun Criteria.inValues(vararg o: Any?): Criteria = `in`(*o) + +/** + * Creates a Criteria using a KProperty as key. + * Supports nested field names with [KPropertyPath]. + * @author Tjeu Kayim + * @since 2.2 + */ +fun where(key: KProperty<*>): Criteria = Criteria.where(key.toDotPath()) + +/** + * Add new key to the criteria chain using a KProperty. + * Supports nested field names with [KPropertyPath]. + * @author Tjeu Kayim + * @since 2.2 + */ +infix fun Criteria.and(key: KProperty<*>): Criteria = and(key.toDotPath()) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt new file mode 100644 index 0000000000..43cf02ea4c --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt @@ -0,0 +1,416 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query + +import org.bson.BsonRegularExpression +import org.springframework.data.geo.Circle +import org.springframework.data.geo.Point +import org.springframework.data.geo.Shape +import org.springframework.data.mapping.toDotPath +import org.springframework.data.mongodb.core.geo.GeoJson +import org.springframework.data.mongodb.core.schema.JsonSchemaObject +import java.util.regex.Pattern +import kotlin.reflect.KProperty + +/** + * Creates a criterion using equality. + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.isEqualTo + */ +infix fun KProperty.isEqualTo(value: T) = + Criteria(this.toDotPath()).isEqualTo(value) + +/** + * Creates a criterion using the $ne operator. + * + * See [MongoDB Query operator: $ne](https://docs.mongodb.com/manual/reference/operator/query/ne/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.ne + */ +infix fun KProperty.ne(value: T): Criteria = + Criteria(this.toDotPath()).ne(value) + +/** + * Creates a criterion using the $lt operator. + * + * See [MongoDB Query operator: $lt](https://docs.mongodb.com/manual/reference/operator/query/lt/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.lt + */ +infix fun KProperty.lt(value: Any): Criteria = + Criteria(this.toDotPath()).lt(value) + +/** + * Creates a criterion using the $lte operator. + * + * See [MongoDB Query operator: $lte](https://docs.mongodb.com/manual/reference/operator/query/lte/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.lte + */ +infix fun KProperty.lte(value: Any): Criteria = + Criteria(this.toDotPath()).lte(value) + +/** + * Creates a criterion using the $gt operator. 
+ * + * See [MongoDB Query operator: $gt](https://docs.mongodb.com/manual/reference/operator/query/gt/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.gt + */ +infix fun KProperty.gt(value: Any): Criteria = + Criteria(this.toDotPath()).gt(value) + +/** + * Creates a criterion using the $gte operator. + * + * See [MongoDB Query operator: $gte](https://docs.mongodb.com/manual/reference/operator/query/gte/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.gte + */ +infix fun KProperty.gte(value: Any): Criteria = + Criteria(this.toDotPath()).gte(value) + +/** + * Creates a criterion using the $in operator. + * + * See [MongoDB Query operator: $in](https://docs.mongodb.com/manual/reference/operator/query/in/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.inValues + */ +fun KProperty.inValues(vararg o: Any): Criteria = + Criteria(this.toDotPath()).`in`(*o) + +/** + * Creates a criterion using the $in operator. + * + * See [MongoDB Query operator: $in](https://docs.mongodb.com/manual/reference/operator/query/in/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.inValues + */ +infix fun KProperty.inValues(value: Collection): Criteria = + Criteria(this.toDotPath()).`in`(value) + +/** + * Creates a criterion using the $nin operator. + * + * See [MongoDB Query operator: $nin](https://docs.mongodb.com/manual/reference/operator/query/nin/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.nin + */ +fun KProperty.nin(vararg o: Any): Criteria = + Criteria(this.toDotPath()).nin(*o) + +/** + * Creates a criterion using the $nin operator. + * + * See [MongoDB Query operator: $nin](https://docs.mongodb.com/manual/reference/operator/query/nin/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.nin + */ +infix fun KProperty.nin(value: Collection): Criteria = + Criteria(this.toDotPath()).nin(value) + +/** + * Creates a criterion using the $mod operator. 
+ * + * See [MongoDB Query operator: $mod](https://docs.mongodb.com/manual/reference/operator/query/mod/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.mod + */ +fun KProperty.mod(value: Number, remainder: Number): Criteria = + Criteria(this.toDotPath()).mod(value, remainder) + +/** + * Creates a criterion using the $all operator. + * + * See [MongoDB Query operator: $all](https://docs.mongodb.com/manual/reference/operator/query/all/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.all + */ +fun KProperty<*>.all(vararg o: Any): Criteria = + Criteria(this.toDotPath()).all(*o) + +/** + * Creates a criterion using the $all operator. + * + * See [MongoDB Query operator: $all](https://docs.mongodb.com/manual/reference/operator/query/all/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.all + */ +infix fun KProperty<*>.all(value: Collection<*>): Criteria = + Criteria(this.toDotPath()).all(value) + +/** + * Creates a criterion using the $size operator. + * + * See [MongoDB Query operator: $size](https://docs.mongodb.com/manual/reference/operator/query/size/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.size + */ +infix fun KProperty<*>.size(s: Int): Criteria = + Criteria(this.toDotPath()).size(s) + +/** + * Creates a criterion using the $exists operator. + * + * See [MongoDB Query operator: $exists](https://docs.mongodb.com/manual/reference/operator/query/exists/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.exists + */ +infix fun KProperty<*>.exists(b: Boolean): Criteria = + Criteria(this.toDotPath()).exists(b) + +/** + * Creates a criterion using the $type operator. + * + * See [MongoDB Query operator: $type](https://docs.mongodb.com/manual/reference/operator/query/type/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.type + */ +infix fun KProperty<*>.type(t: Int): Criteria = + Criteria(this.toDotPath()).type(t) + +/** + * Creates a criterion using the $type operator. 
+ * + * See [MongoDB Query operator: $type](https://docs.mongodb.com/manual/reference/operator/query/type/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.type + */ +infix fun KProperty<*>.type(t: Collection): Criteria = + Criteria(this.toDotPath()).type(*t.toTypedArray()) + +/** + * Creates a criterion using the $type operator. + * + * See [MongoDB Query operator: $type](https://docs.mongodb.com/manual/reference/operator/query/type/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.type + */ +fun KProperty<*>.type(vararg t: JsonSchemaObject.Type): Criteria = + Criteria(this.toDotPath()).type(*t) + +/** + * Creates a criterion using the $not meta operator which affects the clause directly following + * + * See [MongoDB Query operator: $not](https://docs.mongodb.com/manual/reference/operator/query/not/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.not + */ +fun KProperty<*>.not(): Criteria = + Criteria(this.toDotPath()).not() + +/** + * Creates a criterion using a $regex operator. + * + * See [MongoDB Query operator: $regex](https://docs.mongodb.com/manual/reference/operator/query/regex/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.regex + */ +infix fun KProperty.regex(re: String): Criteria = + Criteria(this.toDotPath()).regex(re, null) + +/** + * Creates a criterion using a $regex and $options operator. + * + * See [MongoDB Query operator: $regex](https://docs.mongodb.com/manual/reference/operator/query/regex/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.regex + */ +fun KProperty.regex(re: String, options: String?): Criteria = + Criteria(this.toDotPath()).regex(re, options) + +/** + * Syntactical sugar for [isEqualTo] making obvious that we create a regex predicate. 
+ * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.regex + */ +infix fun KProperty.regex(re: Regex): Criteria = + Criteria(this.toDotPath()).regex(re.toPattern()) + +/** + * Syntactical sugar for [isEqualTo] making obvious that we create a regex predicate. + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.regex + */ +infix fun KProperty.regex(re: Pattern): Criteria = + Criteria(this.toDotPath()).regex(re) + +/** + * Syntactical sugar for [isEqualTo] making obvious that we create a regex predicate. + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.regex + */ +infix fun KProperty.regex(re: BsonRegularExpression): Criteria = + Criteria(this.toDotPath()).regex(re) + +/** + * Creates a geospatial criterion using a $geoWithin $centerSphere operation. This is only available for + * Mongo 2.4 and higher. + * + * See [MongoDB Query operator: + * $geoWithin](https://docs.mongodb.com/manual/reference/operator/query/geoWithin/) + * + * See [MongoDB Query operator: + * $centerSphere](https://docs.mongodb.com/manual/reference/operator/query/centerSphere/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.withinSphere + */ +infix fun KProperty>.withinSphere(circle: Circle): Criteria = + Criteria(this.toDotPath()).withinSphere(circle) + +/** + * Creates a geospatial criterion using a $geoWithin operation. + * + * See [MongoDB Query operator: + * $geoWithin](https://docs.mongodb.com/manual/reference/operator/query/geoWithin/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.within + */ +infix fun KProperty>.within(shape: Shape): Criteria = + Criteria(this.toDotPath()).within(shape) + +/** + * Creates a geospatial criterion using a $near operation. 
+ * + * See [MongoDB Query operator: $near](https://docs.mongodb.com/manual/reference/operator/query/near/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.near + */ +infix fun KProperty>.near(point: Point): Criteria = + Criteria(this.toDotPath()).near(point) + +/** + * Creates a geospatial criterion using a $nearSphere operation. This is only available for Mongo 1.7 and + * higher. + * + * See [MongoDB Query operator: + * $nearSphere](https://docs.mongodb.com/manual/reference/operator/query/nearSphere/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.nearSphere + */ +infix fun KProperty>.nearSphere(point: Point): Criteria = + Criteria(this.toDotPath()).nearSphere(point) + +/** + * Creates criterion using `$geoIntersects` operator which matches intersections of the given `geoJson` + * structure and the documents one. Requires MongoDB 2.4 or better. + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.intersects + */ +infix fun KProperty>.intersects(geoJson: GeoJson<*>): Criteria = + Criteria(this.toDotPath()).intersects(geoJson) + +/** + * Creates a geo-spatial criterion using a $maxDistance operation, for use with $near + * + * See [MongoDB Query operator: + * $maxDistance](https://docs.mongodb.com/manual/reference/operator/query/maxDistance/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.maxDistance + */ +infix fun KProperty>.maxDistance(d: Double): Criteria = + Criteria(this.toDotPath()).maxDistance(d) + +/** + * Creates a geospatial criterion using a $minDistance operation, for use with $near or + * $nearSphere. 
+ * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.minDistance + */ +infix fun KProperty>.minDistance(d: Double): Criteria = + Criteria(this.toDotPath()).minDistance(d) + +/** + * Creates a geo-spatial criterion using a $maxDistance operation, for use with $near + * + * See [MongoDB Query operator: + * $maxDistance](https://docs.mongodb.com/manual/reference/operator/query/maxDistance/) + * @author Sangyong Choi + * @since 3.2.5 + * @see Criteria.maxDistance + */ +infix fun Criteria.maxDistance(d: Double): Criteria = + this.maxDistance(d) + +/** + * Creates a geospatial criterion using a $minDistance operation, for use with $near or + * $nearSphere. + * @author Sangyong Choi + * @since 3.2.5 + * @see Criteria.minDistance + */ +infix fun Criteria.minDistance(d: Double): Criteria = + this.minDistance(d) + +/** + * Creates a criterion using the $elemMatch operator + * + * See [MongoDB Query operator: + * $elemMatch](https://docs.mongodb.com/manual/reference/operator/query/elemMatch/) + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.elemMatch + */ +infix fun KProperty<*>.elemMatch(c: Criteria): Criteria = + Criteria(this.toDotPath()).elemMatch(c) + +/** + * Use [Criteria.BitwiseCriteriaOperators] as gateway to create a criterion using one of the + * [bitwise operators](https://docs.mongodb.com/manual/reference/operator/query-bitwise/) like + * `$bitsAllClear`. 
+ * + * Example: + * ``` + * bits { allClear(123) } + * ``` + * @author Tjeu Kayim + * @since 2.2 + * @see Criteria.bits + */ +infix fun KProperty<*>.bits(bitwiseCriteria: Criteria.BitwiseCriteriaOperators.() -> Criteria) = + Criteria(this.toDotPath()).bits().let(bitwiseCriteria) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedUpdateExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedUpdateExtensions.kt new file mode 100644 index 0000000000..d132482f65 --- /dev/null +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedUpdateExtensions.kt @@ -0,0 +1,228 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query + +import org.springframework.data.mapping.toDotPath +import org.springframework.data.mongodb.core.query.Update.Position +import kotlin.reflect.KProperty + +/** + * Static factory method to create an Update using the provided key + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.update + */ +fun update(key: KProperty, value: T?) = + Update.update(key.toDotPath(), value) + +/** + * Update using the {@literal $set} update modifier + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.set + */ +fun Update.set(key: KProperty, value: T?) 
= + set(key.toDotPath(), value) + +/** + * Update using the {@literal $setOnInsert} update modifier + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.setOnInsert + */ +fun Update.setOnInsert(key: KProperty, value: T?) = + setOnInsert(key.toDotPath(), value) + +/** + * Update using the {@literal $unset} update modifier + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.unset + */ +fun Update.unset(key: KProperty) = + unset(key.toDotPath()) + +/** + * Update using the {@literal $inc} update modifier + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.inc + */ +fun Update.inc(key: KProperty, inc: Number) = + inc(key.toDotPath(), inc) + +fun Update.inc(key: KProperty) = + inc(key.toDotPath()) + +/** + * Update using the {@literal $push} update modifier + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.push + */ +fun Update.push(key: KProperty>, value: T?) = + push(key.toDotPath(), value) + +/** + * Update using {@code $push} modifier.
          + * Allows creation of {@code $push} command for single or multiple (using {@code $each}) values as well as using + * {@code $position}. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.push + */ +fun Update.push(key: KProperty) = + push(key.toDotPath()) + +/** + * Update using {@code $addToSet} modifier.
          + * Allows creation of {@code $push} command for single or multiple (using {@code $each}) values * {@code $position}. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.addToSet + */ +fun Update.addToSet(key: KProperty) = + addToSet(key.toDotPath()) + +/** + * Update using the {@literal $addToSet} update modifier + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.addToSet + */ +fun Update.addToSet(key: KProperty>, value: T?) = + addToSet(key.toDotPath(), value) + +/** + * Update using the {@literal $pop} update modifier + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.pop + */ +fun Update.pop(key: KProperty, pos: Position) = + pop(key.toDotPath(), pos) + +/** + * Update using the {@literal $pull} update modifier + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.pull + */ +fun Update.pull(key: KProperty, value: Any) = + pull(key.toDotPath(), value) + +/** + * Update using the {@literal $pullAll} update modifier + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.pullAll + */ +fun Update.pullAll(key: KProperty>, values: Array) = + pullAll(key.toDotPath(), values) + +/** + * Update given key to current date using {@literal $currentDate} modifier. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.currentDate + */ +fun Update.currentDate(key: KProperty) = + currentDate(key.toDotPath()) + +/** + * Update given key to current date using {@literal $currentDate : { $type : "timestamp" }} modifier. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.currentTimestamp + */ +fun Update.currentTimestamp(key: KProperty) = + currentTimestamp(key.toDotPath()) + +/** + * Multiply the value of given key by the given number. 
+ * + * @author Pawel Matysek + * @since 4.4 + * @see Update.multiply + */ +fun Update.multiply(key: KProperty, multiplier: Number) = + multiply(key.toDotPath(), multiplier) + +/** + * Update given key to the {@code value} if the {@code value} is greater than the current value of the field. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.max + */ +fun Update.max(key: KProperty, value: T) = + max(key.toDotPath(), value) + +/** + * Update given key to the {@code value} if the {@code value} is less than the current value of the field. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.min + */ +fun Update.min(key: KProperty, value: T) = + min(key.toDotPath(), value) + +/** + * The operator supports bitwise {@code and}, bitwise {@code or}, and bitwise {@code xor} operations. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.bitwise + */ +fun Update.bitwise(key: KProperty) = + bitwise(key.toDotPath()) + +/** + * Filter elements in an array that match the given criteria for update. {@code expression} is used directly with the + * driver without further type or field mapping. + * + * @author Pawel Matysek + * @since 4.4 + * @see Update.filterArray + */ +fun Update.filterArray(identifier: KProperty, expression: Any) = + filterArray(identifier.toDotPath(), expression) + +/** + * Determine if a given {@code key} will be touched on execution. 
+ * + * @author Pawel Matysek + * @since 4.4 + * @see Update.modifies + */ +fun Update.modifies(key: KProperty) = + modifies(key.toDotPath()) + diff --git a/spring-data-mongodb/src/main/resources/META-INF/services/javax.enterprise.inject.spi.Extension b/spring-data-mongodb/src/main/resources/META-INF/services/jakarta.enterprise.inject.spi.Extension similarity index 100% rename from spring-data-mongodb/src/main/resources/META-INF/services/javax.enterprise.inject.spi.Extension rename to spring-data-mongodb/src/main/resources/META-INF/services/jakarta.enterprise.inject.spi.Extension diff --git a/spring-data-mongodb/src/main/resources/META-INF/spring.schemas b/spring-data-mongodb/src/main/resources/META-INF/spring.schemas index 473e052a19..57920f7449 100644 --- a/spring-data-mongodb/src/main/resources/META-INF/spring.schemas +++ b/spring-data-mongodb/src/main/resources/META-INF/spring.schemas @@ -9,4 +9,24 @@ http\://www.springframework.org/schema/data/mongo/spring-mongo-1.8.xsd=org/sprin http\://www.springframework.org/schema/data/mongo/spring-mongo-1.10.xsd=org/springframework/data/mongodb/config/spring-mongo-1.10.xsd http\://www.springframework.org/schema/data/mongo/spring-mongo-1.10.2.xsd=org/springframework/data/mongodb/config/spring-mongo-1.10.2.xsd http\://www.springframework.org/schema/data/mongo/spring-mongo-2.0.xsd=org/springframework/data/mongodb/config/spring-mongo-2.0.xsd -http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-2.0.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo-2.2.xsd=org/springframework/data/mongodb/config/spring-mongo-2.0.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo-3.0.xsd=org/springframework/data/mongodb/config/spring-mongo-3.0.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo-3.3.xsd=org/springframework/data/mongodb/config/spring-mongo-3.3.xsd 
+http\://www.springframework.org/schema/data/mongo/spring-mongo-4.0.xsd=org/springframework/data/mongodb/config/spring-mongo-4.0.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-4.0.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.0.xsd=org/springframework/data/mongodb/config/spring-mongo-1.0.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.1.xsd=org/springframework/data/mongodb/config/spring-mongo-1.1.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.2.xsd=org/springframework/data/mongodb/config/spring-mongo-1.2.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.3.xsd=org/springframework/data/mongodb/config/spring-mongo-1.3.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.4.xsd=org/springframework/data/mongodb/config/spring-mongo-1.4.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.5.xsd=org/springframework/data/mongodb/config/spring-mongo-1.5.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.7.xsd=org/springframework/data/mongodb/config/spring-mongo-1.7.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.8.xsd=org/springframework/data/mongodb/config/spring-mongo-1.8.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.10.xsd=org/springframework/data/mongodb/config/spring-mongo-1.10.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-1.10.2.xsd=org/springframework/data/mongodb/config/spring-mongo-1.10.2.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-2.0.xsd=org/springframework/data/mongodb/config/spring-mongo-2.0.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-2.2.xsd=org/springframework/data/mongodb/config/spring-mongo-2.2.xsd 
+https\://www.springframework.org/schema/data/mongo/spring-mongo-3.0.xsd=org/springframework/data/mongodb/config/spring-mongo-3.0.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-3.3.xsd=org/springframework/data/mongodb/config/spring-mongo-3.3.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-4.0.xsd=org/springframework/data/mongodb/config/spring-mongo-4.0.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-4.0.xsd diff --git a/spring-data-mongodb/src/main/resources/META-INF/spring/aot.factories b/spring-data-mongodb/src/main/resources/META-INF/spring/aot.factories new file mode 100644 index 0000000000..0130a33d7c --- /dev/null +++ b/spring-data-mongodb/src/main/resources/META-INF/spring/aot.factories @@ -0,0 +1,6 @@ +org.springframework.aot.hint.RuntimeHintsRegistrar=\ + org.springframework.data.mongodb.aot.MongoRuntimeHints,\ + org.springframework.data.mongodb.repository.aot.RepositoryRuntimeHints + +org.springframework.beans.factory.aot.BeanRegistrationAotProcessor=\ + org.springframework.data.mongodb.aot.MongoManagedTypesBeanRegistrationAotProcessor diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.0.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.0.xsd index 8a6693dbea..a0d95f2425 100644 --- a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.0.xsd +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.0.xsd @@ -13,7 +13,7 @@ + schemaLocation="https://www.springframework.org/schema/data/repository/spring-repository-1.0.xsd" /> @@ -318,8 +318,8 @@ The Mongo driver options - - @@ -348,14 +348,14 @@ The host to connect to a MongoDB server. Default is localhost The comma delimited list of host:port entries to use for replica set/pairs. 
]]> - + @@ -363,22 +363,22 @@ The number of connections allowed per host. Will block if run out. Default is @@ -395,18 +395,18 @@ The socket timeout. 0 is default and infinite. The keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false. ]]> - + @@ -430,14 +430,14 @@ This controls timeout for write operations in milliseconds. The 'wtimeout' opti This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false. ]]> - + - + @@ -466,4 +466,4 @@ This controls if the driver is allowed to read from secondaries or slaves. Defa - \ No newline at end of file + diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.1.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.1.xsd index c8c1d7c1bc..dd7287dab3 100644 --- a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.1.xsd +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.1.xsd @@ -13,7 +13,7 @@ + schemaLocation="https://www.springframework.org/schema/data/repository/spring-repository.xsd" /> @@ -316,8 +316,8 @@ The Mongo driver options - - @@ -346,14 +346,14 @@ The host to connect to a MongoDB server. Default is localhost The comma delimited list of host:port entries to use for replica set/pairs. ]]> - + @@ -361,22 +361,22 @@ The number of connections allowed per host. Will block if run out. Default is @@ -393,18 +393,18 @@ The socket timeout. 0 is default and infinite. The keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false. ]]> - + @@ -428,14 +428,14 @@ This controls timeout for write operations in milliseconds. The 'wtimeout' opti This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false. 
]]> - + - + @@ -464,4 +464,4 @@ This controls if the driver is allowed to read from secondaries or slaves. Defa - \ No newline at end of file + diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.10.2.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.10.2.xsd index 431f5ab688..6c0fcc67c3 100644 --- a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.10.2.xsd +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.10.2.xsd @@ -13,7 +13,7 @@ + schemaLocation="https://www.springframework.org/schema/data/repository/spring-repository.xsd" /> @@ -531,7 +531,7 @@ This controls whether or not to fsync. The 'fsync' option to the getlasterror c diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.10.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.10.xsd index 4f158fb340..dde5cd0a91 100644 --- a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.10.xsd +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.10.xsd @@ -13,7 +13,7 @@ + schemaLocation="https://www.springframework.org/schema/data/repository/spring-repository.xsd" /> @@ -531,7 +531,7 @@ This controls whether or not to fsync. 
The 'fsync' option to the getlasterror c diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.2.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.2.xsd index 9d23805609..cf995f23cd 100644 --- a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.2.xsd +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.2.xsd @@ -13,7 +13,7 @@ + schemaLocation="https://www.springframework.org/schema/data/repository/spring-repository.xsd" /> @@ -230,7 +230,7 @@ The name of the Mongo object that determines what server to monitor. (by default - + @@ -331,8 +331,8 @@ The Mongo driver options - - @@ -361,14 +361,14 @@ The host to connect to a MongoDB server. Default is localhost The comma delimited list of host:port entries to use for replica set/pairs. ]]> - + @@ -376,22 +376,22 @@ The number of connections allowed per host. Will block if run out. Default is @@ -408,18 +408,18 @@ The socket timeout. 0 is default and infinite. The keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false. ]]> - + @@ -443,14 +443,14 @@ This controls timeout for write operations in milliseconds. The 'wtimeout' opti This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false. ]]> - + - + @@ -479,4 +479,4 @@ This controls if the driver is allowed to read from secondaries or slaves. 
Defa - \ No newline at end of file + diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.3.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.3.xsd index a3f3e68ade..c1b6b1df34 100644 --- a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.3.xsd +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.3.xsd @@ -13,7 +13,7 @@ + schemaLocation="https://www.springframework.org/schema/data/repository/spring-repository.xsd" /> @@ -234,7 +234,7 @@ The name of the Mongo object that determines what server to monitor. (by default - + @@ -346,8 +346,8 @@ The Mongo driver options - - @@ -376,14 +376,14 @@ The host to connect to a MongoDB server. Default is localhost The comma delimited list of host:port entries to use for replica set/pairs. ]]> - + @@ -391,22 +391,22 @@ The number of connections allowed per host. Will block if run out. Default is @@ -423,18 +423,18 @@ The socket timeout. 0 is default and infinite. The keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false. ]]> - + @@ -458,14 +458,14 @@ This controls timeout for write operations in milliseconds. The 'wtimeout' opti This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false. ]]> - + - + @@ -557,7 +557,7 @@ The reference to a Mongoconverter instance. 
- + - \ No newline at end of file + diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.4.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.4.xsd index c036f6b506..78f1dabe14 100644 --- a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.4.xsd +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.4.xsd @@ -13,7 +13,7 @@ + schemaLocation="https://www.springframework.org/schema/data/repository/spring-repository.xsd" /> @@ -241,7 +241,7 @@ The name of the Mongo object that determines what server to monitor. (by default - + @@ -300,7 +300,7 @@ The name of the Mongo object that determines what server to monitor. (by default - + @@ -364,8 +364,8 @@ The Mongo driver options - - @@ -394,14 +394,14 @@ The host to connect to a MongoDB server. Default is localhost The comma delimited list of host:port entries to use for replica set/pairs. ]]> - + @@ -409,22 +409,22 @@ The number of connections allowed per host. Will block if run out. Default is @@ -441,18 +441,18 @@ The socket timeout. 0 is default and infinite. The keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false. ]]> - + @@ -476,14 +476,14 @@ This controls timeout for write operations in milliseconds. The 'wtimeout' opti This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false. 
]]> - + - + - + - \ No newline at end of file + diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.5.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.5.xsd index 68524f8f8f..0b67ea4db8 100644 --- a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.5.xsd +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-1.5.xsd @@ -13,7 +13,7 @@ + schemaLocation="https://www.springframework.org/schema/data/repository/spring-repository.xsd" /> @@ -248,7 +248,7 @@ The name of the Mongo object that determines what server to monitor. (by default - + @@ -318,7 +318,7 @@ The name of the Mongo object that determines what server to monitor. (by default - + @@ -382,8 +382,8 @@ The Mongo driver options - - @@ -412,14 +412,14 @@ The host to connect to a MongoDB server. Default is localhost The comma delimited list of host:port entries to use for replica set/pairs. ]]> - + @@ -427,22 +427,22 @@ The number of connections allowed per host. Will block if run out. Default is @@ -459,18 +459,18 @@ The socket timeout. 0 is default and infinite. The keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false. ]]> - + @@ -494,14 +494,14 @@ This controls timeout for write operations in milliseconds. The 'wtimeout' opti This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false. ]]> - + - + - + + schemaLocation="https://www.springframework.org/schema/data/repository/spring-repository.xsd" /> @@ -27,7 +27,7 @@ Deprecated since 1.7 - use mongo-client instead. Defines a Mongo instance used f - + - + @@ -261,7 +261,7 @@ The name of the Mongo object that determines what server to monitor. (by default - + @@ -331,7 +331,7 @@ The name of the Mongo object that determines what server to monitor. 
(by default - + @@ -354,7 +354,7 @@ The name of the Mongo object that determines what server to monitor. (by default - + @@ -384,7 +384,7 @@ The name of the Mongo object that determines what server to monitor. (by default @@ -410,8 +410,8 @@ The Mongo driver options - - @@ -440,19 +440,19 @@ The host to connect to a MongoDB server. Default is localhost The comma delimited list of host:port entries to use for replica set/pairs. ]]> - + @@ -460,22 +460,22 @@ The number of connections allowed per host. Will block if run out. Default is @@ -492,18 +492,18 @@ The socket timeout. 0 is default and infinite. The keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false. ]]> - + @@ -527,14 +527,14 @@ This controls timeout for write operations in milliseconds. The 'wtimeout' opti This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false. ]]> - + - + - + @@ -606,11 +606,11 @@ The comma delimited list of username:password@database entries to use for authen - + @@ -626,11 +626,11 @@ The MongoClient description. The minimum number of connections per host. ]]> - + @@ -638,36 +638,36 @@ The number of connections allowed per host. Will block if run out. Default is @@ -684,7 +684,7 @@ The socket timeout. 0 is default and infinite. The keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false. ]]> - + - + - + @@ -732,7 +732,7 @@ The connect timeout for connections used for the cluster heartbeat. The socket timeout for connections used for the cluster heartbeat. ]]> - + - + @@ -838,7 +838,7 @@ The reference to a Mongoconverter instance. - + + schemaLocation="https://www.springframework.org/schema/data/repository/spring-repository.xsd" /> @@ -27,7 +27,7 @@ Deprecated since 1.7 - use mongo-client instead. Defines a Mongo instance used f - + - + @@ -337,7 +337,7 @@ The name of the Mongo object that determines what server to monitor. 
(by default - + @@ -360,7 +360,7 @@ The name of the Mongo object that determines what server to monitor. (by default - + @@ -416,8 +416,8 @@ The Mongo driver options - - @@ -446,7 +446,7 @@ The host to connect to a MongoDB server. Default is localhost The comma delimited list of host:port entries to use for replica set/pairs. ]]> - + @@ -466,22 +466,22 @@ The number of connections allowed per host. Will block if run out. Default is @@ -498,18 +498,18 @@ The socket timeout. 0 is default and infinite. The keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false. ]]> - + @@ -533,14 +533,14 @@ This controls timeout for write operations in milliseconds. The 'wtimeout' opti This controls whether or not to fsync. The 'fsync' option to the getlasterror command. Defaults to false. ]]> - + - + - + - + - + @@ -644,36 +644,36 @@ The number of connections allowed per host. Will block if run out. Default is @@ -690,7 +690,7 @@ The socket timeout. 0 is default and infinite. The keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false. ]]> - + - + - + @@ -738,7 +738,7 @@ The connect timeout for connections used for the cluster heartbeat. The socket timeout for connections used for the cluster heartbeat. 
]]> - + - + + schemaLocation="https://www.springframework.org/schema/data/repository/spring-repository.xsd" /> diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-2.2.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-2.2.xsd new file mode 100644 index 0000000000..8364fe70ea --- /dev/null +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-2.2.xsd @@ -0,0 +1,708 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object + + + + + + + + + + + + + + The reference to a MongoTemplate. Will default to 'mongoTemplate'. + + + + + + + Enables creation of indexes for queries that get derived from the method name + and thus reference domain class properties. Defaults to false. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a DbFactory. + + + + + + + + + + + + The reference to a MongoTypeMapper to be used by this MappingMongoConverter. + + + + + + + The reference to a MappingContext. Will default to 'mappingContext'. + + + + + + + Disables JSR-303 validation on MongoDB documents before they are saved. By default it is set to false. + + + + + + + + + + Enables abbreviating the field names for domain class properties to the + first character of their camel case names, e.g. fooBar -> fb. Defaults to false. + + + + + + + + + + The reference to a FieldNamingStrategy. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A reference to a custom converter. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a DbFactory. + + + + + + + + + + + + The WriteConcern that will be the default value used when asking the MongoDbFactory for a DB object + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a DbFactory. + + + + + + + + + + + + + + + + diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.0.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.0.xsd new file mode 100644 index 0000000000..b482282b40 --- /dev/null +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.0.xsd @@ -0,0 +1,867 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The WriteConcern that will be the default value used when asking the MongoDatabaseFactory for a DB object + + + + + + + + + + + + + + The reference to a MongoTemplate. Will default to 'mongoTemplate'. + + + + + + + Enables creation of indexes for queries that get derived from the method name + and thus reference domain class properties. Defaults to false. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. + + + + + + + + + + + + The reference to a MongoTypeMapper to be used by this MappingMongoConverter. + + + + + + + The reference to a MappingContext. Will default to 'mappingContext'. + + + + + + + Disables JSR-303 validation on MongoDB documents before they are saved. By default it is set to false. + + + + + + + + + + Enables abbreviating the field names for domain class properties to the + first character of their camel case names, e.g. fooBar -> fb. Defaults to false. + + + + + + + + + + The reference to a FieldNamingStrategy. + + + + + + + Enable/Disable index creation for annotated properties/entities. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A reference to a custom converter. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. 
+ + + + + + + + + + + + The WriteConcern that will be the default value used when asking the MongoDatabaseFactory for a DB object + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. + + + + + + + + + + + + + + + + diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.3.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.3.xsd new file mode 100644 index 0000000000..3b6d0e9db1 --- /dev/null +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.3.xsd @@ -0,0 +1,895 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The WriteConcern that will be the default value used when asking the MongoDatabaseFactory for a DB object + + + + + + + + + + + + + + The reference to a MongoTemplate. Will default to 'mongoTemplate'. + + + + + + + Enables creation of indexes for queries that get derived from the method name + and thus reference domain class properties. Defaults to false. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. + + + + + + + + + + + + The reference to a MongoTypeMapper to be used by this MappingMongoConverter. + + + + + + + The reference to a MappingContext. Will default to 'mappingContext'. + + + + + + + Disables JSR-303 validation on MongoDB documents before they are saved. By default it is set to false. + + + + + + + + + + Enables abbreviating the field names for domain class properties to the + first character of their camel case names, e.g. fooBar -> fb. Defaults to false. + + + + + + + + + + The reference to a FieldNamingStrategy. + + + + + + + Enable/Disable index creation for annotated properties/entities. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A reference to a custom converter. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. + + + + + + + + + + + + The WriteConcern that will be the default value used when asking the MongoDatabaseFactory for a DB object + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. 
+ + + + + + + + + + + + + + + + diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-4.0.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-4.0.xsd new file mode 100644 index 0000000000..1bdaa38450 --- /dev/null +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-4.0.xsd @@ -0,0 +1,907 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The WriteConcern that will be the default value used when asking the MongoDatabaseFactory for a DB object + + + + + + + + + + + + + + The reference to a MongoTemplate. Will default to 'mongoTemplate'. + + + + + + + Enables creation of indexes for queries that get derived from the method name + and thus reference domain class properties. Defaults to false. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. + + + + + + + + + + + + The reference to a MongoTypeMapper to be used by this MappingMongoConverter. + + + + + + + The reference to a MappingContext. Will default to 'mappingContext'. + + + + + + + Disables JSR-303 validation on MongoDB documents before they are saved. By default it is set to false. + + + + + + + + + + Enables abbreviating the field names for domain class properties to the + first character of their camel case names, e.g. fooBar -> fb. Defaults to false. + + + + + + + + + + The reference to a FieldNamingStrategy. + + + + + + + Enable/Disable index creation for annotated properties/entities. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A reference to a custom converter. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. + + + + + + + + + + + + The WriteConcern that will be the default value used when asking the MongoDatabaseFactory for a DB object + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. + + + + + + + + + + + + + + + + diff --git a/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackage.java b/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackage.java index b1cc426c08..ce458132d9 100644 --- a/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackage.java +++ b/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackage.java @@ -1,11 +1,12 @@ + /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,11 +14,15 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + +import java.util.Collections; +import java.util.Set; + import org.springframework.context.annotation.Configuration; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration; /** * Sample configuration class in default package. @@ -25,23 +30,20 @@ * @author Oliver Gierke */ @Configuration -public class ConfigClassInDefaultPackage extends AbstractMongoConfiguration { +public class ConfigClassInDefaultPackage extends MongoClientClosingTestConfiguration { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.config.AbstractMongoConfiguration#getDatabaseName() - */ @Override protected String getDatabaseName() { return "default"; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.config.AbstractMongoConfiguration#mongo() - */ @Override public MongoClient mongoClient() { - return new MongoClient(); + return MongoClients.create(); + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); } } diff --git a/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackageUnitTests.java b/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackageUnitTests.java index 9340cbf790..7fa6c358f3 100644 --- a/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackageUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/ConfigClassInDefaultPackageUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import org.junit.Test; + +import org.junit.jupiter.api.Test; + import org.springframework.context.annotation.AnnotationConfigApplicationContext; /** diff --git a/spring-data-mongodb/src/test/java/example/first/First.java b/spring-data-mongodb/src/test/java/example/first/First.java index 9889220261..04e50fa206 100644 --- a/spring-data-mongodb/src/test/java/example/first/First.java +++ b/spring-data-mongodb/src/test/java/example/first/First.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/example/second/Second.java b/spring-data-mongodb/src/test/java/example/second/Second.java index 7181f42c28..446501de87 100644 --- a/spring-data-mongodb/src/test/java/example/second/Second.java +++ b/spring-data-mongodb/src/test/java/example/second/Second.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/CapturingTransactionOptionsResolver.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/CapturingTransactionOptionsResolver.java new file mode 100644 index 0000000000..0448ad936c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/CapturingTransactionOptionsResolver.java @@ -0,0 +1,64 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.assertj.core.api.Assertions; +import org.assertj.core.api.ListAssert; +import org.springframework.lang.Nullable; +import org.springframework.util.CollectionUtils; + +/** + * @author Christoph Strobl + */ +public class CapturingTransactionOptionsResolver implements MongoTransactionOptionsResolver { + + private final MongoTransactionOptionsResolver delegateResolver; + private final List capturedOptions = new ArrayList<>(10); + + public CapturingTransactionOptionsResolver(MongoTransactionOptionsResolver delegateResolver) { + this.delegateResolver = delegateResolver; + } + + @Nullable + @Override + public String getLabelPrefix() { + return delegateResolver.getLabelPrefix(); + } + + @Override + public MongoTransactionOptions convert(Map source) { + + MongoTransactionOptions options = delegateResolver.convert(source); + capturedOptions.add(options); + return options; + } + + public void clear() { + capturedOptions.clear(); + } + + public List getCapturedOptions() { + return capturedOptions; + } + + public MongoTransactionOptions getLastCapturedOption() { + return CollectionUtils.lastElement(capturedOptions); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolverUnitTests.java new file mode 100644 index 0000000000..2724fb1605 
--- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolverUnitTests.java @@ -0,0 +1,134 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Set; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.springframework.transaction.interceptor.DefaultTransactionAttribute; +import org.springframework.transaction.interceptor.TransactionAttribute; +import org.springframework.transaction.support.DefaultTransactionDefinition; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; + +/** + * Unit tests for {@link DefaultMongoTransactionOptionsResolver}. 
+ * + * @author Yan Kardziyaka + * @author Christoph Strobl + */ +class DefaultMongoTransactionOptionsResolverUnitTests { + + @ParameterizedTest + @ValueSource(strings = { "mongo:maxCommitTime=-PT5S", "mongo:readConcern=invalidValue", + "mongo:readPreference=invalidValue", "mongo:writeConcern=invalidValue", "mongo:invalidPreference=jedi", + "mongo:readConcern", "mongo:readConcern:local", "mongo:readConcern=" }) + void shouldThrowExceptionOnInvalidAttribute(String label) { + + TransactionAttribute attribute = transactionAttribute(label); + + assertThatThrownBy(() -> DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) // + .isInstanceOf(IllegalArgumentException.class); + } + + @Test // GH-1628 + public void shouldReturnEmptyOptionsIfNotTransactionAttribute() { + + DefaultTransactionDefinition definition = new DefaultTransactionDefinition(); + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(definition)) + .isSameAs(MongoTransactionOptions.NONE); + } + + @Test // GH-1628 + public void shouldReturnEmptyOptionsIfNoLabelsProvided() { + + TransactionAttribute attribute = new DefaultTransactionAttribute(); + + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) + .isSameAs(MongoTransactionOptions.NONE); + } + + @Test // GH-1628 + public void shouldIgnoreNonMongoOptions() { + + TransactionAttribute attribute = transactionAttribute("jpa:ignore"); + + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) + .isSameAs(MongoTransactionOptions.NONE); + } + + @Test // GH-1628 + public void shouldReturnMergedOptionsIfLabelsContainMaxCommitTime() { + + TransactionAttribute attribute = transactionAttribute("mongo:maxCommitTime=PT5S"); + + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) + .returns(5L, from(options -> options.getMaxCommitTime().toSeconds())) // + .returns(null, from(MongoTransactionOptions::getReadConcern)) // + .returns(null, 
from(MongoTransactionOptions::getReadPreference)) // + .returns(null, from(MongoTransactionOptions::getWriteConcern)); + } + + @Test // GH-1628 + public void shouldReturnReadConcernWhenPresent() { + + TransactionAttribute attribute = transactionAttribute("mongo:readConcern=majority"); + + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) + .returns(null, from(TransactionMetadata::getMaxCommitTime)) // + .returns(ReadConcern.MAJORITY, from(MongoTransactionOptions::getReadConcern)) // + .returns(null, from(MongoTransactionOptions::getReadPreference)) // + .returns(null, from(MongoTransactionOptions::getWriteConcern)); + } + + @Test // GH-1628 + public void shouldReturnMergedOptionsIfLabelsContainReadPreference() { + + TransactionAttribute attribute = transactionAttribute("mongo:readPreference=primaryPreferred"); + + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) + .returns(null, from(TransactionMetadata::getMaxCommitTime)) // + .returns(null, from(MongoTransactionOptions::getReadConcern)) // + .returns(ReadPreference.primaryPreferred(), from(MongoTransactionOptions::getReadPreference)) // + .returns(null, from(MongoTransactionOptions::getWriteConcern)); + } + + @Test // GH-1628 + public void shouldReturnMergedOptionsIfLabelsContainWriteConcern() { + + TransactionAttribute attribute = transactionAttribute("mongo:writeConcern=w3"); + + assertThat(DefaultMongoTransactionOptionsResolver.INSTANCE.resolve(attribute)) + .returns(null, from(TransactionMetadata::getMaxCommitTime)) // + .returns(null, from(MongoTransactionOptions::getReadConcern)) // + .returns(null, from(MongoTransactionOptions::getReadPreference)) // + .returns(WriteConcern.W3, from(MongoTransactionOptions::getWriteConcern)); + + } + + private static TransactionAttribute transactionAttribute(String... 
labels) { + + DefaultTransactionAttribute attribute = new DefaultTransactionAttribute(); + attribute.setLabels(Set.of(labels)); + return attribute; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/DependencyTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/DependencyTests.java index b6e0d1f799..adcf9eb293 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/DependencyTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/DependencyTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,11 +16,12 @@ package org.springframework.data.mongodb; import static de.schauderhaft.degraph.check.JCheck.*; -import static org.junit.Assert.*; +import static org.hamcrest.MatcherAssert.*; import de.schauderhaft.degraph.configuration.NamedPattern; -import org.junit.Test; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; /** * Tests package dependency constraints. 
@@ -28,10 +29,11 @@ * @author Jens Schauder * @author Oliver Gierke */ -public class DependencyTests { +@Disabled("Needs to be tansitioned to ArchUnit") +class DependencyTests { @Test - public void noInternalPackageCycles() { + void noInternalPackageCycles() { assertThat(classpath() // .noJars() // @@ -43,7 +45,7 @@ public void noInternalPackageCycles() { } @Test - public void onlyConfigMayUseRepository() { + void onlyConfigMayUseRepository() { assertThat(classpath() // .including("org.springframework.data.**") // @@ -60,7 +62,7 @@ public void onlyConfigMayUseRepository() { } @Test - public void commonsInternaly() { + void commonsInternaly() { assertThat(classpath() // .noJars() // diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java new file mode 100644 index 0000000000..db1ab68269 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java @@ -0,0 +1,310 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import jakarta.transaction.Status; +import jakarta.transaction.UserTransaction; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.jta.JtaTransactionManager; +import org.springframework.transaction.support.TransactionCallbackWithoutResult; +import org.springframework.transaction.support.TransactionSynchronizationManager; +import org.springframework.transaction.support.TransactionTemplate; + +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoDatabase; +import com.mongodb.session.ServerSession; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +class MongoDatabaseUtilsUnitTests { + + @Mock ClientSession session; + @Mock ServerSession serverSession; + @Mock MongoDatabaseFactory dbFactory; + @Mock MongoDatabase db; + + @Mock UserTransaction userTransaction; + + @AfterEach + void verifyTransactionSynchronizationManagerState() { + + assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty()).isTrue(); + assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isFalse(); + assertThat(TransactionSynchronizationManager.getCurrentTransactionName()).isNull(); + assertThat(TransactionSynchronizationManager.isCurrentTransactionReadOnly()).isFalse(); + assertThat(TransactionSynchronizationManager.getCurrentTransactionIsolationLevel()).isNull(); + assertThat(TransactionSynchronizationManager.isActualTransactionActive()).isFalse(); + } + + @Test // DATAMONGO-2130 + void isTransactionActiveShouldDetectTxViaFactory() { + + 
when(dbFactory.isTransactionActive()).thenReturn(true); + + assertThat(MongoDatabaseUtils.isTransactionActive(dbFactory)).isTrue(); + } + + @Test // DATAMONGO-2130 + void isTransactionActiveShouldReturnFalseIfNoTxActive() { + + when(dbFactory.isTransactionActive()).thenReturn(false); + + assertThat(MongoDatabaseUtils.isTransactionActive(dbFactory)).isFalse(); + } + + @Test // DATAMONGO-2130 + void isTransactionActiveShouldLookupTxForActiveTransactionSynchronizationViaTxManager() { + + when(dbFactory.getSession(any())).thenReturn(session); + when(session.getServerSession()).thenReturn(serverSession); + when(session.hasActiveTransaction()).thenReturn(true); + when(serverSession.isClosed()).thenReturn(false); + + when(dbFactory.isTransactionActive()).thenReturn(false); + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + assertThat(MongoDatabaseUtils.isTransactionActive(dbFactory)).isTrue(); + } + }); + } + + @Test // DATAMONGO-1920 + void shouldNotStartSessionWhenNoTransactionOngoing() { + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + + verify(dbFactory, never()).getSession(any()); + verify(dbFactory, never()).withSession(any(ClientSession.class)); + } + + @Test // GH-3760 + void shouldJustReturnDatabaseIfSessionSynchronizationDisabled() throws Exception { + + when(dbFactory.getMongoDatabase()).thenReturn(db); + + JtaTransactionManager txManager = new JtaTransactionManager(userTransaction); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + 
MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.NEVER); + + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isFalse(); + } + }); + + verify(userTransaction).getStatus(); + verifyNoMoreInteractions(userTransaction); + verifyNoInteractions(session); + } + + @Test // DATAMONGO-1920 + void shouldParticipateInOngoingJtaTransactionWithCommitWhenSessionSychronizationIsAny() throws Exception { + + when(dbFactory.getSession(any())).thenReturn(session); + when(dbFactory.withSession(session)).thenReturn(dbFactory); + when(dbFactory.getMongoDatabase()).thenReturn(db); + when(session.getServerSession()).thenReturn(serverSession); + when(session.hasActiveTransaction()).thenReturn(true); + when(serverSession.isClosed()).thenReturn(false); + + when(userTransaction.getStatus()).thenReturn(Status.STATUS_NO_TRANSACTION, Status.STATUS_ACTIVE, + Status.STATUS_ACTIVE); + + JtaTransactionManager txManager = new JtaTransactionManager(userTransaction); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isTrue(); + assertThat(transactionStatus.isNewTransaction()).isTrue(); + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isFalse(); + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.ALWAYS); + + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isTrue(); + } + }); + + verify(userTransaction).begin(); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void shouldParticipateInOngoingJtaTransactionWithRollbackWhenSessionSychronizationIsAny() throws Exception { + + when(dbFactory.getSession(any())).thenReturn(session); + 
when(dbFactory.withSession(session)).thenReturn(dbFactory); + when(dbFactory.getMongoDatabase()).thenReturn(db); + when(session.getServerSession()).thenReturn(serverSession); + when(session.hasActiveTransaction()).thenReturn(true); + when(serverSession.isClosed()).thenReturn(false); + + when(userTransaction.getStatus()).thenReturn(Status.STATUS_NO_TRANSACTION, Status.STATUS_ACTIVE, + Status.STATUS_ACTIVE); + + JtaTransactionManager txManager = new JtaTransactionManager(userTransaction); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isTrue(); + assertThat(transactionStatus.isNewTransaction()).isTrue(); + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isFalse(); + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.ALWAYS); + + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isTrue(); + + transactionStatus.setRollbackOnly(); + } + }); + + verify(userTransaction).rollback(); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void shouldNotParticipateInOngoingJtaTransactionWithRollbackWhenSessionSychronizationIsNative() throws Exception { + + when(userTransaction.getStatus()).thenReturn(Status.STATUS_NO_TRANSACTION, Status.STATUS_ACTIVE, + Status.STATUS_ACTIVE); + + JtaTransactionManager txManager = new JtaTransactionManager(userTransaction); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isTrue(); + 
assertThat(transactionStatus.isNewTransaction()).isTrue(); + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isFalse(); + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isFalse(); + + transactionStatus.setRollbackOnly(); + } + }); + + verify(userTransaction).rollback(); + + verify(session, never()).startTransaction(); + verify(session, never()).abortTransaction(); + verify(session, never()).close(); + } + + @Test // DATAMONGO-1920 + void shouldParticipateInOngoingMongoTransactionWhenSessionSychronizationIsNative() { + + when(dbFactory.getSession(any())).thenReturn(session); + when(dbFactory.withSession(session)).thenReturn(dbFactory); + when(dbFactory.getMongoDatabase()).thenReturn(db); + when(session.getServerSession()).thenReturn(serverSession); + when(serverSession.isClosed()).thenReturn(false); + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isTrue(); + assertThat(transactionStatus.isNewTransaction()).isTrue(); + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isTrue(); + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + + transactionStatus.setRollbackOnly(); + } + }); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void shouldParticipateInOngoingMongoTransactionWhenSessionSynchronizationIsAny() { + + when(dbFactory.getSession(any())).thenReturn(session); + when(dbFactory.withSession(session)).thenReturn(dbFactory); + 
when(dbFactory.getMongoDatabase()).thenReturn(db); + when(session.getServerSession()).thenReturn(serverSession); + when(serverSession.isClosed()).thenReturn(false); + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isTrue(); + assertThat(transactionStatus.isNewTransaction()).isTrue(); + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isTrue(); + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.ALWAYS); + + transactionStatus.setRollbackOnly(); + } + }); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionManagerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionManagerUnitTests.java new file mode 100644 index 0000000000..db1993e63d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionManagerUnitTests.java @@ -0,0 +1,336 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.mongodb.core.MongoExceptionTranslator; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.UnexpectedRollbackException; +import org.springframework.transaction.support.DefaultTransactionDefinition; +import org.springframework.transaction.support.TransactionCallbackWithoutResult; +import org.springframework.transaction.support.TransactionSynchronizationManager; +import org.springframework.transaction.support.TransactionTemplate; + +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoDatabase; +import com.mongodb.session.ServerSession; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class MongoTransactionManagerUnitTests { + + @Mock ClientSession session; + @Mock ClientSession session2; + @Mock ServerSession serverSession; + @Mock MongoDatabaseFactory dbFactory; + @Mock MongoDatabaseFactory dbFactory2; + @Mock MongoDatabase db; + @Mock MongoDatabase db2; + + @BeforeEach + void setUp() { + + when(dbFactory.getSession(any())).thenReturn(session, session2); + when(dbFactory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + when(dbFactory2.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + when(dbFactory.withSession(session)).thenReturn(dbFactory); + 
when(dbFactory.getMongoDatabase()).thenReturn(db); + when(session.getServerSession()).thenReturn(serverSession); + } + + @AfterEach + void verifyTransactionSynchronizationManager() { + + assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty()).isTrue(); + assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isFalse(); + } + + @Test // DATAMONGO-1920 + void triggerCommitCorrectly() { + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + verify(dbFactory).withSession(eq(session)); + + txManager.commit(txStatus); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void participateInOnGoingTransactionWithCommit() { + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus status) { + + template.execute(db -> { + db.drop(); + return null; + }); + } + }); + + verify(dbFactory, times(2)).withSession(eq(session)); + + txManager.commit(txStatus); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void participateInOnGoingTransactionWithRollbackOnly() { + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionStatus txStatus = 
txManager.getTransaction(new DefaultTransactionDefinition()); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus status) { + + template.execute(db -> { + db.drop(); + return null; + }); + + status.setRollbackOnly(); + } + }); + + verify(dbFactory, times(2)).withSession(eq(session)); + + assertThatExceptionOfType(UnexpectedRollbackException.class).isThrownBy(() -> txManager.commit(txStatus)); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void triggerRollbackCorrectly() { + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + verify(dbFactory).withSession(eq(session)); + + txManager.rollback(txStatus); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void suspendTransactionWhilePropagationNotSupported() { + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_NOT_SUPPORTED); + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void 
doInTransactionWithoutResult(TransactionStatus status) { + + template.execute(db -> { + db.drop(); + return null; + }); + } + }); + + template.execute(MongoDatabase::listCollections); + txManager.commit(txStatus); + + verify(session).startTransaction(); + verify(session2, never()).startTransaction(); + + verify(dbFactory, times(2)).withSession(eq(session)); + verify(dbFactory, never()).withSession(eq(session2)); + + verify(db, times(2)).drop(); + verify(db).listCollections(); + + verify(session).close(); + verify(session2, never()).close(); + } + + @Test // DATAMONGO-1920 + void suspendTransactionWhilePropagationRequiresNew() { + + when(dbFactory.withSession(session2)).thenReturn(dbFactory2); + when(dbFactory2.getMongoDatabase()).thenReturn(db2); + when(session2.getServerSession()).thenReturn(serverSession); + when(serverSession.isClosed()).thenReturn(false); + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus status) { + + template.execute(db -> { + db.drop(); + return null; + }); + } + }); + + template.execute(MongoDatabase::listCollections); + txManager.commit(txStatus); + + verify(session).startTransaction(); + verify(session2).startTransaction(); + + verify(dbFactory, times(2)).withSession(eq(session)); + verify(dbFactory).withSession(eq(session2)); + + verify(db).drop(); + verify(db2).drop(); + verify(db).listCollections(); + + verify(session).close(); + verify(session2).close(); + } + + @Test // DATAMONGO-1920 + void 
readonlyShouldInitiateASessionStartAndCommitTransaction() { + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + + DefaultTransactionDefinition readonlyTxDefinition = new DefaultTransactionDefinition(); + readonlyTxDefinition.setReadOnly(true); + + TransactionStatus txStatus = txManager.getTransaction(readonlyTxDefinition); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + verify(dbFactory).withSession(eq(session)); + + txManager.commit(txStatus); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-1920 + void readonlyShouldInitiateASessionStartAndRollbackTransaction() { + + MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); + + DefaultTransactionDefinition readonlyTxDefinition = new DefaultTransactionDefinition(); + readonlyTxDefinition.setReadOnly(true); + + TransactionStatus txStatus = txManager.getTransaction(readonlyTxDefinition); + + MongoTemplate template = new MongoTemplate(dbFactory); + + template.execute(db -> { + db.drop(); + return null; + }); + + verify(dbFactory).withSession(eq(session)); + + txManager.rollback(txStatus); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionOptionsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionOptionsUnitTests.java new file mode 100644 index 0000000000..44692348a0 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionOptionsUnitTests.java @@ -0,0 +1,118 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; + +import java.time.Duration; +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.Test; +import org.springframework.lang.Nullable; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.TransactionOptions; +import com.mongodb.WriteConcern; + +/** + * Unit tests for {@link MongoTransactionOptions}. + * + * @author Christoph Strobl + */ +class MongoTransactionOptionsUnitTests { + + private static final TransactionOptions NATIVE_OPTIONS = TransactionOptions.builder() // + .maxCommitTime(1L, TimeUnit.SECONDS) // + .readConcern(ReadConcern.SNAPSHOT) // + .readPreference(ReadPreference.secondaryPreferred()) // + .writeConcern(WriteConcern.W3) // + .build(); + + @Test // GH-1628 + void wrapsNativeDriverTransactionOptions() { + + assertThat(MongoTransactionOptions.of(NATIVE_OPTIONS)) + .returns(NATIVE_OPTIONS.getMaxCommitTime(TimeUnit.SECONDS), options -> options.getMaxCommitTime().toSeconds()) + .returns(NATIVE_OPTIONS.getReadConcern(), MongoTransactionOptions::getReadConcern) + .returns(NATIVE_OPTIONS.getReadPreference(), MongoTransactionOptions::getReadPreference) + .returns(NATIVE_OPTIONS.getWriteConcern(), MongoTransactionOptions::getWriteConcern) + .returns(NATIVE_OPTIONS, MongoTransactionOptions::toDriverOptions); + } + + @Test // GH-1628 + void mergeNoneWithDefaultsUsesDefaults() { + + assertThat(MongoTransactionOptions.NONE.mergeWith(MongoTransactionOptions.of(NATIVE_OPTIONS))) + 
.returns(NATIVE_OPTIONS.getMaxCommitTime(TimeUnit.SECONDS), options -> options.getMaxCommitTime().toSeconds()) + .returns(NATIVE_OPTIONS.getReadConcern(), MongoTransactionOptions::getReadConcern) + .returns(NATIVE_OPTIONS.getReadPreference(), MongoTransactionOptions::getReadPreference) + .returns(NATIVE_OPTIONS.getWriteConcern(), MongoTransactionOptions::getWriteConcern) + .returns(NATIVE_OPTIONS, MongoTransactionOptions::toDriverOptions); + } + + @Test // GH-1628 + void mergeExistingOptionsWithNoneUsesOptions() { + + MongoTransactionOptions source = MongoTransactionOptions.of(NATIVE_OPTIONS); + assertThat(source.mergeWith(MongoTransactionOptions.NONE)).isSameAs(source); + } + + @Test // GH-1628 + void mergeExistingOptionsWithUsesFirstNonNullValue() { + + MongoTransactionOptions source = MongoTransactionOptions + .of(TransactionOptions.builder().writeConcern(WriteConcern.UNACKNOWLEDGED).build()); + + assertThat(source.mergeWith(MongoTransactionOptions.of(NATIVE_OPTIONS))) + .returns(NATIVE_OPTIONS.getMaxCommitTime(TimeUnit.SECONDS), options -> options.getMaxCommitTime().toSeconds()) + .returns(NATIVE_OPTIONS.getReadConcern(), MongoTransactionOptions::getReadConcern) + .returns(NATIVE_OPTIONS.getReadPreference(), MongoTransactionOptions::getReadPreference) + .returns(source.getWriteConcern(), MongoTransactionOptions::getWriteConcern); + } + + @Test // GH-1628 + void testEquals() { + + assertThat(MongoTransactionOptions.NONE) // + .isSameAs(MongoTransactionOptions.NONE) // + .isNotEqualTo(new MongoTransactionOptions() { + @Nullable + @Override + public Duration getMaxCommitTime() { + return null; + } + + @Nullable + @Override + public ReadConcern getReadConcern() { + return null; + } + + @Nullable + @Override + public ReadPreference getReadPreference() { + return null; + } + + @Nullable + @Override + public WriteConcern getWriteConcern() { + return null; + } + }); + } +} diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java new file mode 100644 index 0000000000..64331704c6 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java @@ -0,0 +1,146 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.transaction.reactive.TransactionSynchronizationManager; +import org.springframework.transaction.reactive.TransactionalOperator; +import org.springframework.transaction.support.DefaultTransactionDefinition; + +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoDatabase; +import com.mongodb.session.ServerSession; + +/** + * Unit tests for {@link ReactiveMongoDatabaseUtils}. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @author Mathieu Ouellet + */ +@ExtendWith(MockitoExtension.class) +class ReactiveMongoDatabaseUtilsUnitTests { + + @Mock ClientSession session; + @Mock ServerSession serverSession; + @Mock ReactiveMongoDatabaseFactory databaseFactory; + @Mock MongoDatabase db; + + @Test // DATAMONGO-2265 + void isTransactionActiveShouldDetectTxViaFactory() { + + when(databaseFactory.isTransactionActive()).thenReturn(true); + + ReactiveMongoDatabaseUtils.isTransactionActive(databaseFactory) // + .as(StepVerifier::create) // + .expectNext(true).verifyComplete(); + } + + @Test // DATAMONGO-2265 + void isTransactionActiveShouldReturnFalseIfNoTxActive() { + + when(databaseFactory.isTransactionActive()).thenReturn(false); + + ReactiveMongoDatabaseUtils.isTransactionActive(databaseFactory) // + .as(StepVerifier::create) // + .expectNext(false).verifyComplete(); + } + + @Test // DATAMONGO-2265 + void isTransactionActiveShouldLookupTxForActiveTransactionSynchronizationViaTxManager() { + + when(session.getServerSession()).thenReturn(serverSession); + when(session.hasActiveTransaction()).thenReturn(true); + when(databaseFactory.getSession(any())).thenReturn(Mono.just(session)); + when(databaseFactory.isTransactionActive()).thenReturn(false); + when(session.commitTransaction()).thenReturn(Mono.empty()); + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + TransactionalOperator operator = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + operator.execute(tx -> { + + return ReactiveMongoDatabaseUtils.isTransactionActive(databaseFactory); + }).as(StepVerifier::create).expectNext(true).verifyComplete(); + } + + @Test // GH-3760 + void shouldJustReturnDatabaseIfSessionSynchronizationDisabled() { + + when(databaseFactory.getMongoDatabase()).thenReturn(Mono.just(db)); + + ReactiveMongoDatabaseUtils.getDatabase(databaseFactory, SessionSynchronization.NEVER) // 
+ .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + verify(databaseFactory, never()).getSession(any()); + verify(databaseFactory, never()).withSession(any(ClientSession.class)); + } + + @Test // DATAMONGO-2265 + void shouldNotStartSessionWhenNoTransactionOngoing() { + + when(databaseFactory.getMongoDatabase()).thenReturn(Mono.just(db)); + + ReactiveMongoDatabaseUtils.getDatabase(databaseFactory, SessionSynchronization.ON_ACTUAL_TRANSACTION) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + verify(databaseFactory, never()).getSession(any()); + verify(databaseFactory, never()).withSession(any(ClientSession.class)); + } + + @Test // DATAMONGO-2265 + void shouldParticipateInOngoingMongoTransactionWhenSessionSychronizationIsNative() { + + when(session.getServerSession()).thenReturn(serverSession); + when(databaseFactory.getSession(any())).thenReturn(Mono.just(session)); + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + when(session.abortTransaction()).thenReturn(Mono.empty()); + + TransactionalOperator operator = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + operator.execute(tx -> { + + return TransactionSynchronizationManager.forCurrentTransaction().doOnNext(synchronizationManager -> { + + assertThat(synchronizationManager.isSynchronizationActive()).isTrue(); + assertThat(tx.isNewTransaction()).isTrue(); + + assertThat(synchronizationManager.hasResource(databaseFactory)).isTrue(); + + }).then(Mono.fromRunnable(tx::setRollbackOnly)); + }).as(StepVerifier::create).verifyComplete(); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoTransactionManagerUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoTransactionManagerUnitTests.java new file mode 100644 index 0000000000..9dbb2d550d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoTransactionManagerUnitTests.java @@ -0,0 +1,252 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.reactive.TransactionalOperator; +import org.springframework.transaction.support.DefaultTransactionDefinition; + +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoDatabase; +import com.mongodb.session.ServerSession; + +/** + * Unit tests for {@link ReactiveMongoTransactionManager}. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + * @author Mathieu Ouellet + */ +@ExtendWith(MockitoExtension.class) +class ReactiveMongoTransactionManagerUnitTests { + + @Mock ClientSession session; + @Mock ClientSession session2; + @Mock ServerSession serverSession; + @Mock ReactiveMongoDatabaseFactory databaseFactory; + @Mock ReactiveMongoDatabaseFactory databaseFactory2; + @Mock MongoDatabase db; + @Mock MongoDatabase db2; + + @BeforeEach + void setUp() { + when(databaseFactory.getSession(any())).thenReturn(Mono.just(session), Mono.just(session2)); + when(databaseFactory.withSession(session)).thenReturn(databaseFactory); + when(databaseFactory.getMongoDatabase()).thenReturn(Mono.just(db)); + when(session.getServerSession()).thenReturn(serverSession); + } + + @Test // DATAMONGO-2265 + void triggerCommitCorrectly() { + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory); + when(session.commitTransaction()).thenReturn(Mono.empty()); + + TransactionalOperator operator = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + template.execute(db -> { + db.drop(); + return Mono.empty(); + + }).as(operator::transactional) // + .as(StepVerifier::create) // + .verifyComplete(); + + verify(databaseFactory).withSession(eq(session)); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + + verify(session).close(); + } + + @Test // DATAMONGO-2265 + void participateInOnGoingTransactionWithCommit() { + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory); + when(session.commitTransaction()).thenReturn(Mono.empty()); + + TransactionalOperator operator = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + template.execute(db -> { + db.drop(); 
+ return Mono.empty(); + }).as(StepVerifier::create).verifyComplete(); + + template.execute(db -> { + db.drop(); + return Mono.empty(); + }).as(operator::transactional) // + .as(StepVerifier::create) // + .verifyComplete(); + + verify(databaseFactory, times(1)).withSession(eq(session)); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-2265 + void participateInOnGoingTransactionWithRollbackOnly() { + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory); + when(session.abortTransaction()).thenReturn(Mono.empty()); + + TransactionalOperator operator = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + operator.execute(tx -> { + + return template.execute(db -> { + db.drop(); + tx.setRollbackOnly(); + return Mono.empty(); + }); + }).as(StepVerifier::create).verifyComplete(); + + verify(databaseFactory, times(1)).withSession(eq(session)); + + verify(session).startTransaction(); + verify(session).abortTransaction(); + verify(session).close(); + } + + @Test // DATAMONGO-2265 + void suspendTransactionWhilePropagationNotSupported() { + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory); + when(session.commitTransaction()).thenReturn(Mono.empty()); + + TransactionalOperator outer = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + DefaultTransactionDefinition definition = new DefaultTransactionDefinition(); + definition.setPropagationBehavior(TransactionDefinition.PROPAGATION_NOT_SUPPORTED); + TransactionalOperator inner = TransactionalOperator.create(txManager, definition); + + outer.execute(tx1 -> { + + return template.execute(db -> { + + db.drop(); + + return inner.execute(tx2 -> { + 
return template.execute(db2 -> { + db2.drop(); + return Mono.empty(); + }); + }); + }); + }).as(StepVerifier::create).verifyComplete(); + + verify(session).startTransaction(); + verify(session2, never()).startTransaction(); + + verify(databaseFactory, times(1)).withSession(eq(session)); + verify(databaseFactory, never()).withSession(eq(session2)); + + verify(db, times(2)).drop(); + + verify(session2, never()).close(); + } + + @Test // DATAMONGO-2265 + void suspendTransactionWhilePropagationRequiresNew() { + + when(databaseFactory.withSession(session2)).thenReturn(databaseFactory2); + when(databaseFactory2.getMongoDatabase()).thenReturn(Mono.just(db2)); + when(session2.getServerSession()).thenReturn(serverSession); + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory); + when(session.commitTransaction()).thenReturn(Mono.empty()); + when(session2.commitTransaction()).thenReturn(Mono.empty()); + + TransactionalOperator outer = TransactionalOperator.create(txManager, new DefaultTransactionDefinition()); + + DefaultTransactionDefinition definition = new DefaultTransactionDefinition(); + definition.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); + TransactionalOperator inner = TransactionalOperator.create(txManager, definition); + + outer.execute(tx1 -> { + + return template.execute(db -> { + + db.drop(); + + return inner.execute(tx2 -> { + return template.execute(db2 -> { + db2.drop(); + return Mono.empty(); + }); + }); + }); + }).as(StepVerifier::create).verifyComplete(); + + verify(session).startTransaction(); + verify(session2).startTransaction(); + + verify(databaseFactory, times(1)).withSession(eq(session)); + verify(databaseFactory).withSession(eq(session2)); + + verify(db).drop(); + verify(db2).drop(); + + verify(session).close(); + verify(session2).close(); + } + + @Test // DATAMONGO-2265 + void 
readonlyShouldInitiateASessionStartAndCommitTransaction() { + + ReactiveMongoTransactionManager txManager = new ReactiveMongoTransactionManager(databaseFactory); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(databaseFactory); + when(session.commitTransaction()).thenReturn(Mono.empty()); + + DefaultTransactionDefinition readonlyTxDefinition = new DefaultTransactionDefinition(); + readonlyTxDefinition.setReadOnly(true); + TransactionalOperator operator = TransactionalOperator.create(txManager, readonlyTxDefinition); + + template.execute(db -> { + db.drop(); + return Mono.empty(); + + }).as(operator::transactional) // + .as(StepVerifier::create) // + .verifyComplete(); + + verify(databaseFactory).withSession(eq(session)); + + verify(session).startTransaction(); + verify(session).commitTransaction(); + verify(session).close(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveTransactionIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveTransactionIntegrationTests.java new file mode 100644 index 0000000000..a6135939de --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveTransactionIntegrationTests.java @@ -0,0 +1,613 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import static java.util.UUID.*; +import static org.assertj.core.api.Assertions.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.time.Duration; +import java.util.Collections; +import java.util.Objects; +import java.util.Set; + +import org.bson.types.ObjectId; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledIfSystemProperty; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junitpioneer.jupiter.SetSystemProperty; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.support.GenericApplicationContext; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.transaction.TransactionSystemException; +import org.springframework.transaction.annotation.EnableTransactionManagement; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.transaction.reactive.TransactionalOperator; +import 
org.springframework.transaction.support.DefaultTransactionDefinition; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadConcernLevel; +import com.mongodb.WriteConcern; +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Integration tests for reactive transaction management. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Yan Kardziyaka + */ +@ExtendWith(MongoClientExtension.class) +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") +@EnableIfReplicaSetAvailable +@DisabledIfSystemProperty(named = "user.name", matches = "jenkins") +@SetSystemProperty(key = "tx.read.concern", value = "local") +public class ReactiveTransactionIntegrationTests { + + private static final String DATABASE = "rxtx-test"; + + static @Client MongoClient mongoClient; + static GenericApplicationContext context; + + PersonService personService; + ReactiveMongoOperations operations; + ReactiveTransactionOptionsTestService transactionOptionsTestService; + CapturingTransactionOptionsResolver transactionOptionsResolver; + + @BeforeAll + public static void init() { + context = new AnnotationConfigApplicationContext(TestMongoConfig.class, PersonService.class); + } + + @AfterAll + public static void after() { + context.close(); + } + + @BeforeEach + public void setUp() { + + personService = context.getBean(PersonService.class); + operations = context.getBean(ReactiveMongoOperations.class); + transactionOptionsTestService = context.getBean(ReactiveTransactionOptionsTestService.class); + transactionOptionsResolver = context.getBean(CapturingTransactionOptionsResolver.class); + transactionOptionsResolver.clear(); // clean out left overs from dirty context + + try (MongoClient client = MongoTestUtils.reactiveClient()) { + + Flux.merge( // + MongoTestUtils.createOrReplaceCollection(DATABASE, operations.getCollectionName(Person.class), client), + MongoTestUtils.createOrReplaceCollection(DATABASE, operations.getCollectionName(EventLog.class), client) // + 
).then().as(StepVerifier::create).thenAwait(Duration.ofMillis(100)).verifyComplete(); + } + } + + @Test // DATAMONGO-2265 + public void shouldRollbackAfterException() { + + personService.savePersonErrors(new Person(null, "Walter", "White")) // + .as(StepVerifier::create) // + .verifyError(RuntimeException.class); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void shouldRollbackAfterExceptionOfTxAnnotatedMethod() { + + personService.declarativeSavePersonErrors(new Person(null, "Walter", "White")) // + .as(StepVerifier::create) // + .verifyError(RuntimeException.class); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void commitShouldPersistTxEntries() { + + personService.savePerson(new Person(null, "Walter", "White")) // + .as(StepVerifier::create) // + .thenAwait(Duration.ofMillis(100)) + .expectNextCount(1) // + .verifyComplete(); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void commitShouldPersistTxEntriesOfTxAnnotatedMethod() { + + personService.declarativeSavePerson(new Person(null, "Walter", "White")) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void commitShouldPersistTxEntriesAcrossCollections() { + + personService.saveWithLogs(new Person(null, "Walter", "White")) // + .then() // + .as(StepVerifier::create) // + .verifyComplete(); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + + operations.count(new Query(), EventLog.class) // + 
.as(StepVerifier::create) // + .expectNext(4L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void rollbackShouldAbortAcrossCollections() { + + personService.saveWithErrorLogs(new Person(null, "Walter", "White")) // + .then() // + .as(StepVerifier::create) // + .verifyError(); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + + operations.count(new Query(), EventLog.class) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void countShouldWorkInsideTransaction() { + + personService.countDuringTx(new Person(null, "Walter", "White")) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void emitMultipleElementsDuringTransaction() { + + personService.saveWithLogs(new Person(null, "Walter", "White")) // + .as(StepVerifier::create) // + .expectNextCount(4L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2265 + public void errorAfterTxShouldNotAffectPreviousStep() { + + personService.savePerson(new Person(null, "Walter", "White")) // + .delayElement(Duration.ofMillis(10)) // + .then(Mono.error(new RuntimeException("my big bad evil error"))).as(StepVerifier::create) // + .expectError() // + .verify(); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void shouldThrowTransactionSystemExceptionOnTransactionWithInvalidMaxCommitTime() { + + Person person = new Person(ObjectId.get(), randomUUID().toString(), randomUUID().toString()); + transactionOptionsTestService.saveWithInvalidMaxCommitTime(person) // + .as(StepVerifier::create) // + .verifyError(TransactionSystemException.class); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void 
shouldCommitOnTransactionWithinMaxCommitTime() { + + Person person = new Person(ObjectId.get(), randomUUID().toString(), randomUUID().toString()); + transactionOptionsTestService.saveWithinMaxCommitTime(person) // + .as(StepVerifier::create) // + .expectNext(person) // + .verifyComplete(); + + assertThat(transactionOptionsResolver.getLastCapturedOption()).returns(Duration.ofMinutes(1), + MongoTransactionOptions::getMaxCommitTime); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void shouldThrowInvalidDataAccessApiUsageExceptionOnTransactionWithAvailableReadConcern() { + transactionOptionsTestService.availableReadConcernFind(randomUUID().toString()) // + .as(StepVerifier::create) // + .verifyError(InvalidDataAccessApiUsageException.class); + } + + @Test // GH-1628 + public void shouldThrowTransactionSystemExceptionOnTransactionWithInvalidReadConcern() { + transactionOptionsTestService.invalidReadConcernFind(randomUUID().toString()) // + .as(StepVerifier::create) // + .verifyError(TransactionSystemException.class); + } + + @Test // GH-1628 + public void shouldReadTransactionOptionFromSystemProperty() { + + transactionOptionsTestService.environmentReadConcernFind(randomUUID().toString()).then().as(StepVerifier::create) + .verifyComplete(); + + assertThat(transactionOptionsResolver.getLastCapturedOption()).returns( + new ReadConcern(ReadConcernLevel.fromString(System.getProperty("tx.read.concern"))), + MongoTransactionOptions::getReadConcern); + } + + @Test // GH-1628 + public void shouldNotThrowOnTransactionWithMajorityReadConcern() { + transactionOptionsTestService.majorityReadConcernFind(randomUUID().toString()) // + .as(StepVerifier::create) // + .expectNextCount(0L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void shouldThrowUncategorizedMongoDbExceptionOnTransactionWithPrimaryPreferredReadPreference() { + 
transactionOptionsTestService.findFromPrimaryPreferredReplica(randomUUID().toString()) // + .as(StepVerifier::create) // + .verifyError(UncategorizedMongoDbException.class); + } + + @Test // GH-1628 + public void shouldThrowTransactionSystemExceptionOnTransactionWithInvalidReadPreference() { + transactionOptionsTestService.findFromInvalidReplica(randomUUID().toString()) // + .as(StepVerifier::create) // + .verifyError(TransactionSystemException.class); + } + + @Test // GH-1628 + public void shouldNotThrowOnTransactionWithPrimaryReadPreference() { + transactionOptionsTestService.findFromPrimaryReplica(randomUUID().toString()) // + .as(StepVerifier::create) // + .expectNextCount(0L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void shouldThrowTransactionSystemExceptionOnTransactionWithUnacknowledgedWriteConcern() { + + Person person = new Person(ObjectId.get(), randomUUID().toString(), randomUUID().toString()); + transactionOptionsTestService.unacknowledgedWriteConcernSave(person) // + .as(StepVerifier::create) // + .verifyError(TransactionSystemException.class); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create).expectNext(0L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void shouldThrowTransactionSystemExceptionOnTransactionWithInvalidWriteConcern() { + + Person person = new Person(ObjectId.get(), randomUUID().toString(), randomUUID().toString()); + transactionOptionsTestService.invalidWriteConcernSave(person) // + .as(StepVerifier::create) // + .verifyError(TransactionSystemException.class); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // GH-1628 + public void shouldCommitOnTransactionWithAcknowledgedWriteConcern() { + + Person person = new Person(ObjectId.get(), randomUUID().toString(), randomUUID().toString()); + transactionOptionsTestService.acknowledgedWriteConcernSave(person) // + .as(StepVerifier::create) // + 
.expectNext(person) // + .verifyComplete(); + + assertThat(transactionOptionsResolver.getLastCapturedOption()).returns(WriteConcern.ACKNOWLEDGED, + MongoTransactionOptions::getWriteConcern); + + operations.count(new Query(), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Configuration + @EnableTransactionManagement + static class TestMongoConfig extends AbstractReactiveMongoConfiguration { + + @Override + public MongoClient reactiveMongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return DATABASE; + } + + @Bean + CapturingTransactionOptionsResolver txOptionsResolver() { + return new CapturingTransactionOptionsResolver(MongoTransactionOptionsResolver.defaultResolver()); + } + + @Bean + public ReactiveMongoTransactionManager txManager(ReactiveMongoDatabaseFactory factory, + MongoTransactionOptionsResolver txOptionsResolver) { + return new ReactiveMongoTransactionManager(factory, txOptionsResolver, MongoTransactionOptions.NONE); + } + + @Bean + public ReactiveTransactionOptionsTestService transactionOptionsTestService( + ReactiveMongoOperations operations) { + return new ReactiveTransactionOptionsTestService<>(operations, Person.class); + } + + @Override + protected Set> getInitialEntitySet() { + return Collections.singleton(Person.class); + } + } + + static class PersonService { + + final ReactiveMongoOperations operations; + final ReactiveMongoTransactionManager manager; + + PersonService(ReactiveMongoOperations operations, ReactiveMongoTransactionManager manager) { + + this.operations = operations; + this.manager = manager; + } + + public Mono savePersonErrors(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return operations.save(person) // + . 
flatMap(it -> Mono.error(new RuntimeException("poof"))) // + .as(transactionalOperator::transactional); + } + + public Mono savePerson(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return operations.save(person) // + .flatMap(Mono::just) // + .as(transactionalOperator::transactional); + } + + public Mono countDuringTx(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return operations.save(person) // + .then(operations.count(new Query(), Person.class)) // + .as(transactionalOperator::transactional); + } + + public Flux saveWithLogs(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return Flux.concat(operations.save(new EventLog(new ObjectId(), "beforeConvert")), // + operations.save(new EventLog(new ObjectId(), "afterConvert")), // + operations.save(new EventLog(new ObjectId(), "beforeInsert")), // + operations.save(person), // + operations.save(new EventLog(new ObjectId(), "afterInsert"))) // + .thenMany(operations.query(EventLog.class).all()) // + .as(transactionalOperator::transactional); + } + + public Flux saveWithErrorLogs(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return Flux.concat(operations.save(new EventLog(new ObjectId(), "beforeConvert")), // + operations.save(new EventLog(new ObjectId(), "afterConvert")), // + operations.save(new EventLog(new ObjectId(), "beforeInsert")), // + operations.save(person), // + operations.save(new EventLog(new ObjectId(), "afterInsert"))) // + . 
flatMap(it -> Mono.error(new RuntimeException("poof"))) // + .as(transactionalOperator::transactional); + } + + @Transactional(transactionManager = "txManager") + public Flux declarativeSavePerson(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return transactionalOperator.execute(reactiveTransaction -> { + return operations.save(person); + }); + } + + @Transactional(transactionManager = "txManager") + public Flux declarativeSavePersonErrors(Person person) { + + TransactionalOperator transactionalOperator = TransactionalOperator.create(manager, + new DefaultTransactionDefinition()); + + return transactionalOperator.execute(reactiveTransaction -> { + + return operations.save(person) // + . flatMap(it -> Mono.error(new RuntimeException("poof"))); + }); + } + } + + @Document("person-rx") + static class Person { + + ObjectId id; + String firstname, lastname; + + Person(ObjectId id, String firstname, String lastname) { + this.id = id; + this.firstname = firstname; + this.lastname = lastname; + } + + public ObjectId getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public void setId(ObjectId id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(lastname, person.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname); + } + + public String toString() { + return 
"ReactiveTransactionIntegrationTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ", lastname=" + this.getLastname() + ")"; + } + } + + static class EventLog { + + ObjectId id; + String action; + + public EventLog(ObjectId id, String action) { + this.id = id; + this.action = action; + } + + public ObjectId getId() { + return this.id; + } + + public String getAction() { + return this.action; + } + + public void setId(ObjectId id) { + this.id = id; + } + + public void setAction(String action) { + this.action = action; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + EventLog eventLog = (EventLog) o; + return Objects.equals(id, eventLog.id) && Objects.equals(action, eventLog.action); + } + + @Override + public int hashCode() { + return Objects.hash(id, action); + } + + public String toString() { + return "ReactiveTransactionIntegrationTests.EventLog(id=" + this.getId() + ", action=" + this.getAction() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveTransactionOptionsTestService.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveTransactionOptionsTestService.java new file mode 100644 index 0000000000..98280b287a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveTransactionOptionsTestService.java @@ -0,0 +1,101 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import reactor.core.publisher.Mono; + +import java.util.function.Function; + +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.transaction.annotation.Transactional; + +/** + * Helper class for integration tests of {@link Transactional#label()} MongoDb options in reactive context. + * + * @param root document type + * @author Yan Kardziyaka + * @see org.springframework.data.mongodb.core.TransactionOptionsTestService + */ +public class ReactiveTransactionOptionsTestService { + private final Function> findByIdFunction; + + private final Function> saveFunction; + + public ReactiveTransactionOptionsTestService(ReactiveMongoOperations operations, Class entityClass) { + this.findByIdFunction = id -> operations.findById(id, entityClass); + this.saveFunction = operations::save; + } + + @Transactional(transactionManager = "txManager", label = { "mongo:maxCommitTime=-PT6H3M" }) + public Mono saveWithInvalidMaxCommitTime(T entity) { + return saveFunction.apply(entity); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:maxCommitTime=PT1M" }) + public Mono saveWithinMaxCommitTime(T entity) { + return saveFunction.apply(entity); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readConcern=available" }) + public Mono availableReadConcernFind(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readConcern=invalid" }) + public Mono 
invalidReadConcernFind(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readConcern=${tx.read.concern}" }) + public Mono environmentReadConcernFind(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readConcern=majority" }) + public Mono majorityReadConcernFind(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readPreference=primaryPreferred" }) + public Mono findFromPrimaryPreferredReplica(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readPreference=invalid" }) + public Mono findFromInvalidReplica(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:readPreference=primary" }) + public Mono findFromPrimaryReplica(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:writeConcern=unacknowledged" }) + public Mono unacknowledgedWriteConcernSave(T entity) { + return saveFunction.apply(entity); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:writeConcern=invalid" }) + public Mono invalidWriteConcernSave(T entity) { + return saveFunction.apply(entity); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:writeConcern=acknowledged" }) + public Mono acknowledgedWriteConcernSave(T entity) { + return saveFunction.apply(entity); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/SessionAwareMethodInterceptorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/SessionAwareMethodInterceptorUnitTests.java new file mode 100644 index 0000000000..0027fd89a4 --- /dev/null +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/SessionAwareMethodInterceptorUnitTests.java @@ -0,0 +1,184 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.data.mongodb.SessionAwareMethodInterceptor.MethodCache; +import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.util.ClassUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; + +/** + * Unit tests for {@link SessionAwareMethodInterceptor}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +public class SessionAwareMethodInterceptorUnitTests { + + @Mock ClientSession session; + @Mock MongoCollection targetCollection; + @Mock MongoDatabase targetDatabase; + + MongoCollection collection; + MongoDatabase database; + + @BeforeEach + public void setUp() { + + collection = createProxyInstance(session, targetCollection, MongoCollection.class); + database = createProxyInstance(session, targetDatabase, MongoDatabase.class); + } + + @Test // DATAMONGO-1880 + public void proxyFactoryOnCollectionDelegatesToMethodWithSession() { + + collection.find(); + + verify(targetCollection).find(eq(session)); + } + + @Test // DATAMONGO-1880 + public void proxyFactoryOnCollectionWithSessionInArgumentListProceedsWithExecution() { + + ClientSession yetAnotherSession = mock(ClientSession.class); + collection.find(yetAnotherSession); + + verify(targetCollection).find(eq(yetAnotherSession)); + } + + @Test // DATAMONGO-1880 + public void proxyFactoryOnDatabaseDelegatesToMethodWithSession() { + + database.drop(); + + verify(targetDatabase).drop(eq(session)); + } + + @Test // DATAMONGO-1880 + public void proxyFactoryOnDatabaseWithSessionInArgumentListProceedsWithExecution() { + + ClientSession yetAnotherSession = mock(ClientSession.class); + database.drop(yetAnotherSession); + + verify(targetDatabase).drop(eq(yetAnotherSession)); + } + + @Test // DATAMONGO-1880 + public void justMoveOnIfNoOverloadWithSessionAvailable() { + + collection.getReadPreference(); + + verify(targetCollection).getReadPreference(); + } + + @Test // DATAMONGO-1880 + public void usesCacheForMethodLookup() { + + MethodCache cache = (MethodCache) ReflectionTestUtils.getField(SessionAwareMethodInterceptor.class, "METHOD_CACHE"); + Method countMethod = ClassUtils.getMethod(MongoCollection.class, "countDocuments"); + + assertThat(cache.contains(countMethod, MongoCollection.class)).isFalse(); + + collection.countDocuments(); + + 
assertThat(cache.contains(countMethod, MongoCollection.class)).isTrue(); + } + + @Test // DATAMONGO-1880 + public void cachesNullForMethodsThatDoNotHaveASessionOverload() { + + MethodCache cache = (MethodCache) ReflectionTestUtils.getField(SessionAwareMethodInterceptor.class, "METHOD_CACHE"); + Method readConcernMethod = ClassUtils.getMethod(MongoCollection.class, "getReadConcern"); + + assertThat(cache.contains(readConcernMethod, MongoCollection.class)).isFalse(); + + collection.getReadConcern(); + + collection.getReadConcern(); + + assertThat(cache.contains(readConcernMethod, MongoCollection.class)).isTrue(); + assertThat(cache.lookup(readConcernMethod, MongoCollection.class, ClientSession.class)).isEmpty(); + } + + @Test // DATAMONGO-1880 + public void proxiesNewDbInstanceReturnedByMethod() { + + MongoDatabase otherDb = mock(MongoDatabase.class); + when(targetDatabase.withCodecRegistry(any())).thenReturn(otherDb); + + MongoDatabase target = database.withCodecRegistry(MongoClientSettings.getDefaultCodecRegistry()); + assertThat(target).isInstanceOf(Proxy.class).isNotSameAs(database).isNotSameAs(targetDatabase); + + target.drop(); + + verify(otherDb).drop(eq(session)); + } + + @Test // DATAMONGO-1880 + public void proxiesNewCollectionInstanceReturnedByMethod() { + + MongoCollection otherCollection = mock(MongoCollection.class); + when(targetCollection.withCodecRegistry(any())).thenReturn(otherCollection); + + MongoCollection target = collection.withCodecRegistry(MongoClientSettings.getDefaultCodecRegistry()); + assertThat(target).isInstanceOf(Proxy.class).isNotSameAs(collection).isNotSameAs(targetCollection); + + target.drop(); + + verify(otherCollection).drop(eq(session)); + } + + private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) { + return createProxyInstance(session, database, MongoDatabase.class); + } + + private MongoCollection proxyCollection(com.mongodb.session.ClientSession session, MongoCollection 
collection) { + return createProxyInstance(session, collection, MongoCollection.class); + } + + private T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class targetType) { + + ProxyFactory factory = new ProxyFactory(); + factory.setTarget(target); + factory.setInterfaces(targetType); + factory.setOpaque(true); + + factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class, + this::proxyDatabase, MongoCollection.class, this::proxyCollection)); + + return targetType.cast(factory.getProxy()); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/SpringDataMongoDBTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/SpringDataMongoDBTests.java new file mode 100644 index 0000000000..09b8a428fa --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/SpringDataMongoDBTests.java @@ -0,0 +1,36 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * @author Christoph Strobl + */ +class SpringDataMongoDBTests { + + @Test // DATAMONGO-2427 + void driverInformationHoldsSpringDataHint() { + assertThat(SpringDataMongoDB.driverInformation().getDriverNames()).contains("spring-data"); + } + + @Test // DATAMONGO-2427 + void versionIsDetectedFromPackage() { + assertThat(SpringDataMongoDB.version()).isNotNull(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessorUnitTests.java new file mode 100644 index 0000000000..c900e20a3b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessorUnitTests.java @@ -0,0 +1,64 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.aot; + +import static org.assertj.core.api.Assertions.*; + +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.springframework.aot.generate.ClassNameGenerator; +import org.springframework.aot.generate.DefaultGenerationContext; +import org.springframework.aot.generate.GenerationContext; +import org.springframework.aot.generate.InMemoryGeneratedFiles; +import org.springframework.aot.hint.predicate.RuntimeHintsPredicates; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.javapoet.ClassName; + +/** + * Unit tests for {@link LazyLoadingProxyAotProcessor}. + * + * @author Christoph Strobl + */ +class LazyLoadingProxyAotProcessorUnitTests { + + @Test // GH-4351 + void registersProxyForLazyDbRefCorrectlyWhenTypeIsCollectionInterface() { + + GenerationContext ctx = new DefaultGenerationContext(new ClassNameGenerator(ClassName.get(this.getClass())), + new InMemoryGeneratedFiles()); + + new LazyLoadingProxyAotProcessor().registerLazyLoadingProxyIfNeeded(A.class, ctx); + + assertThat(ctx.getRuntimeHints()) + .satisfies(RuntimeHintsPredicates.proxies().forInterfaces(java.util.Collection.class, + org.springframework.data.mongodb.core.convert.LazyLoadingProxy.class, java.util.List.class, + org.springframework.aop.SpringProxy.class, org.springframework.aop.framework.Advised.class, + org.springframework.core.DecoratingProxy.class)::test); + } + + static class A { + + String id; + + @DBRef(lazy = true) // + List listRef; + } + + static class B { + String id; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/aot/MongoRuntimeHintsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/aot/MongoRuntimeHintsUnitTests.java new file mode 100644 index 0000000000..0f9ecb911e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/aot/MongoRuntimeHintsUnitTests.java @@ -0,0 +1,129 @@ +/* + * 
Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.aot.hint.MemberCategory.*; +import static org.springframework.aot.hint.predicate.RuntimeHintsPredicates.*; + +import java.util.function.Predicate; + +import org.junit.jupiter.api.Test; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.TypeReference; +import org.springframework.data.mongodb.test.util.ClassPathExclusions; + +import com.mongodb.MongoClientSettings; +import com.mongodb.ServerAddress; +import com.mongodb.UnixServerAddress; +import com.mongodb.client.MapReduceIterable; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.reactivestreams.client.MapReducePublisher; + +/** + * Unit Tests for {@link MongoRuntimeHints}. 
+ * + * @author Christoph Strobl + */ +@SuppressWarnings("deprecation") +class MongoRuntimeHintsUnitTests { + + @Test // GH-4578 + @ClassPathExclusions(packages = { "com.mongodb.client", "com.mongodb.reactivestreams.client" }) + void shouldRegisterGeneralCompatibilityHints() { + + RuntimeHints runtimeHints = new RuntimeHints(); + + new MongoRuntimeHints().registerHints(runtimeHints, this.getClass().getClassLoader()); + + Predicate expected = reflection().onType(MongoClientSettings.class) + .withMemberCategory(INVOKE_PUBLIC_METHODS) + .and(reflection().onType(MongoClientSettings.Builder.class).withMemberCategory(INVOKE_PUBLIC_METHODS)) + .and(reflection().onType(IndexOptions.class).withMemberCategory(INVOKE_PUBLIC_METHODS)) + .and(reflection().onType(ServerAddress.class).withMemberCategory(INVOKE_PUBLIC_METHODS)) + .and(reflection().onType(UnixServerAddress.class).withMemberCategory(INVOKE_PUBLIC_METHODS)) + .and(reflection().onType(TypeReference.of("com.mongodb.connection.StreamFactoryFactory")) + .withMemberCategory(INTROSPECT_PUBLIC_METHODS)); + + assertThat(runtimeHints).matches(expected); + } + + @Test // GH-4578 + @ClassPathExclusions(packages = { "com.mongodb.reactivestreams.client" }) + void shouldRegisterSyncCompatibilityHintsIfPresent() { + + RuntimeHints runtimeHints = new RuntimeHints(); + + new MongoRuntimeHints().registerHints(runtimeHints, this.getClass().getClassLoader()); + + Predicate expected = reflection().onType(MapReduceIterable.class) + .withMemberCategory(INVOKE_PUBLIC_METHODS) + .and(reflection().onType(TypeReference.of("com.mongodb.client.internal.MapReduceIterableImpl")) + .withMemberCategory(INVOKE_PUBLIC_METHODS)); + + assertThat(runtimeHints).matches(expected); + } + + @Test // GH-4578 + @ClassPathExclusions(packages = { "com.mongodb.client" }) + void shouldNotRegisterSyncCompatibilityHintsIfClientNotPresent() { + + RuntimeHints runtimeHints = new RuntimeHints(); + + new MongoRuntimeHints().registerHints(runtimeHints, 
this.getClass().getClassLoader()); + + Predicate expected = reflection().onType(TypeReference.of("com.mongodb.client.MapReduceIterable")) + .withMemberCategory(INVOKE_PUBLIC_METHODS).negate() + .and(reflection().onType(TypeReference.of("com.mongodb.client.internal.MapReduceIterableImpl")) + .withMemberCategory(INVOKE_PUBLIC_METHODS).negate()); + + assertThat(runtimeHints).matches(expected); + } + + @Test // GH-4578 + @ClassPathExclusions(packages = { "com.mongodb.client" }) + void shouldRegisterReactiveCompatibilityHintsIfPresent() { + + RuntimeHints runtimeHints = new RuntimeHints(); + + new MongoRuntimeHints().registerHints(runtimeHints, this.getClass().getClassLoader()); + + Predicate expected = reflection().onType(MapReducePublisher.class) + .withMemberCategory(INVOKE_PUBLIC_METHODS) + .and(reflection().onType(TypeReference.of("com.mongodb.reactivestreams.client.internal.MapReducePublisherImpl")) + .withMemberCategory(INVOKE_PUBLIC_METHODS)); + + assertThat(runtimeHints).matches(expected); + } + + @Test // GH-4578 + @ClassPathExclusions(packages = { "com.mongodb.reactivestreams.client" }) + void shouldNotRegisterReactiveCompatibilityHintsIfClientNotPresent() { + + RuntimeHints runtimeHints = new RuntimeHints(); + + new MongoRuntimeHints().registerHints(runtimeHints, this.getClass().getClassLoader()); + + Predicate expected = reflection() + .onType(TypeReference.of("com.mongodb.reactivestreams.client.MapReducePublisher")) + .withMemberCategory(INVOKE_PUBLIC_METHODS).negate() + .and(reflection().onType(TypeReference.of("com.mongodb.reactivestreams.client.internal.MapReducePublisherImpl")) + .withMemberCategory(INVOKE_PUBLIC_METHODS).negate()); + + assertThat(runtimeHints).matches(expected); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/classloading/HidingClassLoader.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/classloading/HidingClassLoader.java new file mode 100644 index 
0000000000..d809101f73 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/classloading/HidingClassLoader.java @@ -0,0 +1,105 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.classloading; + +import java.net.URLClassLoader; +import java.util.Arrays; +import java.util.Collection; +import java.util.stream.Collectors; + +import org.springframework.instrument.classloading.ShadowingClassLoader; +import org.springframework.util.Assert; + +/** + * is intended for testing code that depends on the presence/absence of certain classes. Classes can be: + *
            + *
+ * <ul>
+ * <li>shadowed: reloaded by this classloader no matter if they are loaded already by the SystemClassLoader</li>
+ * <li>hidden: not loaded by this classloader no matter if they are loaded already by the SystemClassLoader. Trying to
+ * load these classes results in a {@link ClassNotFoundException}</li>
+ * <li>all other classes get loaded by the SystemClassLoader</li>
+ * </ul>
          + * + * @author Jens Schauder + * @author Oliver Gierke + * @author Christoph Strobl + */ +public class HidingClassLoader extends ShadowingClassLoader { + + private final Collection hidden; + + public HidingClassLoader(String... hidden) { + this(Arrays.asList(hidden)); + } + + public HidingClassLoader(Collection hidden) { + + super(URLClassLoader.getSystemClassLoader(), false); + + this.hidden = hidden; + } + + /** + * Creates a new {@link HidingClassLoader} with the packages of the given classes hidden. + * + * @param packages must not be {@literal null}. + * @return + */ + public static HidingClassLoader hide(Class... packages) { + + Assert.notNull(packages, "Packages must not be null"); + + return new HidingClassLoader(Arrays.stream(packages)// + .map(it -> it.getPackage().getName())// + .collect(Collectors.toList())); + } + + public static HidingClassLoader hideTypes(Class... types) { + + Assert.notNull(types, "Types must not be null!"); + + return new HidingClassLoader(Arrays.stream(types)// + .map(it -> it.getName())// + .collect(Collectors.toList())); + } + + @Override + public Class loadClass(String name) throws ClassNotFoundException { + + Class loaded = super.loadClass(name); + checkIfHidden(loaded); + return loaded; + } + + @Override + protected boolean isEligibleForShadowing(String className) { + return isExcluded(className); + } + + @Override + protected Class findClass(String name) throws ClassNotFoundException { + + Class loaded = super.findClass(name); + checkIfHidden(loaded); + return loaded; + } + + private void checkIfHidden(Class type) throws ClassNotFoundException { + + if (hidden.stream().anyMatch(it -> type.getName().startsWith(it))) { + throw new ClassNotFoundException(); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java index 775cec8a10..b7f945f2a3 
100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,38 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Collections; +import java.util.Set; import org.bson.Document; -import org.junit.After; -import org.junit.Before; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.dao.DataAccessException; import org.springframework.data.mongodb.core.CollectionCallback; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.Mongo; -import 
com.mongodb.MongoClient; import com.mongodb.MongoException; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; /** * @author Oliver Gierke */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith(SpringExtension.class) @ContextConfiguration public abstract class AbstractIntegrationTests { @Configuration - static class TestConfig extends AbstractMongoConfiguration { + static class TestConfig extends MongoClientClosingTestConfiguration { @Override protected String getDatabaseName() { @@ -52,14 +55,24 @@ protected String getDatabaseName() { @Override public MongoClient mongoClient() { - return new MongoClient(); + return MongoTestUtils.client(); + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } + + @Override + protected boolean autoIndexCreation() { + return true; } } @Autowired MongoOperations operations; - @Before - @After + @BeforeEach + @AfterEach public void cleanUp() { for (String collectionName : operations.getCollectionNames()) { @@ -68,7 +81,7 @@ public void cleanUp() { @Override public Void doInCollection(MongoCollection collection) throws MongoException, DataAccessException { collection.deleteMany(new Document()); - assertThat(collection.find().iterator().hasNext(), is(false)); + assertThat(collection.find().iterator().hasNext()).isFalse(); return null; } }); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractMongoConfigurationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractMongoConfigurationUnitTests.java index 39df34926c..b16cb6961a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractMongoConfigurationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractMongoConfigurationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. 
+ * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,7 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import example.first.First; import example.second.Second; @@ -26,27 +25,30 @@ import java.util.Collections; import java.util.Set; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; + +import org.mockito.Mockito; import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.support.AbstractApplicationContext; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoManagedTypes; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.convert.MongoTypeMapper; -import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.expression.spel.support.StandardEvaluationContext; 
+import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.spel.EvaluationContextProvider; +import org.springframework.data.spel.ExtensionAwareEvaluationContextProvider; import org.springframework.test.util.ReflectionTestUtils; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** - * Unit tests for {@link AbstractMongoConfiguration}. + * Unit tests for {@link AbstractMongoClientConfiguration}. * * @author Oliver Gierke * @author Thomas Darimont @@ -54,15 +56,14 @@ */ public class AbstractMongoConfigurationUnitTests { - @Rule public ExpectedException exception = ExpectedException.none(); - @Test // DATAMONGO-496 public void usesConfigClassPackageAsBaseMappingPackage() throws ClassNotFoundException { - AbstractMongoConfiguration configuration = new SampleMongoConfiguration(); - assertThat(configuration.getMappingBasePackage(), is(SampleMongoConfiguration.class.getPackage().getName())); - assertThat(configuration.getInitialEntitySet(), hasSize(2)); - assertThat(configuration.getInitialEntitySet(), hasItem(Entity.class)); + AbstractMongoClientConfiguration configuration = new SampleMongoConfiguration(); + assertThat(configuration.getMappingBasePackages()) + .containsExactly(SampleMongoConfiguration.class.getPackage().getName()); + assertThat(configuration.getInitialEntitySet()).hasSize(2); + assertThat(configuration.getInitialEntitySet()).contains(Entity.class); } @Test // DATAMONGO-496 @@ -82,10 +83,9 @@ public void containsMongoDbFactoryButNoMongoBean() { AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class); - assertThat(context.getBean(MongoDbFactory.class), is(notNullValue())); + assertThat(context.getBean(MongoDatabaseFactory.class)).isNotNull(); + assertThatExceptionOfType(NoSuchBeanDefinitionException.class).isThrownBy(() -> context.getBean(MongoClient.class)); - exception.expect(NoSuchBeanDefinitionException.class); - 
context.getBean(MongoClient.class); context.close(); } @@ -93,11 +93,12 @@ public void containsMongoDbFactoryButNoMongoBean() { public void returnsUninitializedMappingContext() throws Exception { SampleMongoConfiguration configuration = new SampleMongoConfiguration(); - MongoMappingContext context = configuration.mongoMappingContext(); + MongoMappingContext context = configuration.mongoMappingContext(configuration.customConversions(), + MongoManagedTypes.from(Entity.class)); - assertThat(context.getPersistentEntities(), is(emptyIterable())); + assertThat(context.getPersistentEntities()).isEmpty(); context.initialize(); - assertThat(context.getPersistentEntities(), is(not(emptyIterable()))); + assertThat(context.getPersistentEntities()).isNotEmpty(); } @Test // DATAMONGO-717 @@ -105,10 +106,11 @@ public void lifecycleCallbacksAreInvokedInAppropriateOrder() { AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class); MongoMappingContext mappingContext = context.getBean(MongoMappingContext.class); - BasicMongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(Entity.class); - StandardEvaluationContext spElContext = (StandardEvaluationContext) ReflectionTestUtils.getField(entity, "context"); + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(Entity.class); + EvaluationContextProvider provider = (EvaluationContextProvider) ReflectionTestUtils.getField(entity, + "evaluationContextProvider"); - assertThat(spElContext.getBeanResolver(), is(notNullValue())); + assertThat(provider).isInstanceOf(ExtensionAwareEvaluationContextProvider.class); context.close(); } @@ -119,8 +121,8 @@ public void shouldBeAbleToConfigureCustomTypeMapperViaJavaConfig() { MongoTypeMapper typeMapper = context.getBean(CustomMongoTypeMapper.class); MappingMongoConverter mmc = context.getBean(MappingMongoConverter.class); - assertThat(mmc, is(notNullValue())); - assertThat(mmc.getTypeMapper(), is(typeMapper)); + 
assertThat(mmc).isNotNull(); + assertThat(mmc.getTypeMapper()).isEqualTo(typeMapper); context.close(); } @@ -131,25 +133,25 @@ public void allowsMultipleEntityBasePackages() throws ClassNotFoundException { ConfigurationWithMultipleBasePackages config = new ConfigurationWithMultipleBasePackages(); Set> entities = config.getInitialEntitySet(); - assertThat(entities, hasSize(2)); - assertThat(entities, hasItems(First.class, Second.class)); + assertThat(entities).hasSize(2); + assertThat(entities).contains(First.class, Second.class); } private static void assertScanningDisabled(final String value) throws ClassNotFoundException { - AbstractMongoConfiguration configuration = new SampleMongoConfiguration() { + AbstractMongoClientConfiguration configuration = new SampleMongoConfiguration() { @Override protected Collection getMappingBasePackages() { return Collections.singleton(value); } }; - assertThat(configuration.getMappingBasePackages(), hasItem(value)); - assertThat(configuration.getInitialEntitySet(), hasSize(0)); + assertThat(configuration.getMappingBasePackages()).contains(value); + assertThat(configuration.getInitialEntitySet()).hasSize(0); } @Configuration - static class SampleMongoConfiguration extends AbstractMongoConfiguration { + static class SampleMongoConfiguration extends AbstractMongoClientConfiguration { @Override protected String getDatabaseName() { @@ -158,14 +160,13 @@ protected String getDatabaseName() { @Override public MongoClient mongoClient() { - return new MongoClient(); + return Mockito.mock(MongoClient.class); } - @Bean @Override - public MappingMongoConverter mappingMongoConverter() throws Exception { - - MappingMongoConverter converter = super.mappingMongoConverter(); + public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory, + MongoCustomConversions customConversions, MongoMappingContext mappingContext) { + MappingMongoConverter converter = super.mappingMongoConverter(databaseFactory, customConversions, 
mappingContext); converter.setTypeMapper(typeMapper()); return converter; @@ -175,9 +176,10 @@ public MappingMongoConverter mappingMongoConverter() throws Exception { public MongoTypeMapper typeMapper() { return new CustomMongoTypeMapper(); } + } - static class ConfigurationWithMultipleBasePackages extends AbstractMongoConfiguration { + static class ConfigurationWithMultipleBasePackages extends AbstractMongoClientConfiguration { @Override protected String getDatabaseName() { @@ -186,7 +188,7 @@ protected String getDatabaseName() { @Override public MongoClient mongoClient() { - return new MongoClient(); + return Mockito.mock(MongoClient.class); } @Override diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationIntegrationTests.java index bddce19c32..2fcb44a6e2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,8 @@ import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mockito; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Configuration; @@ -29,7 +31,6 @@ import org.springframework.test.context.junit4.SpringRunner; import com.mongodb.reactivestreams.client.MongoClient; -import com.mongodb.reactivestreams.client.MongoClients; /** * Integration tests for {@link AbstractReactiveMongoConfiguration}. @@ -56,7 +57,7 @@ static class ReactiveConfiguration extends AbstractReactiveMongoConfiguration { @Override public MongoClient reactiveMongoClient() { - return MongoClients.create(); + return Mockito.mock(MongoClient.class); } @Override diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationUnitTests.java index 1a3510be9f..6c80842556 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfigurationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,37 +15,38 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; + +import example.first.First; +import example.second.Second; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Set; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.support.AbstractApplicationContext; +import org.springframework.data.mongodb.MongoManagedTypes; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.convert.MongoTypeMapper; -import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.expression.spel.support.StandardEvaluationContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import 
org.springframework.data.spel.EvaluationContextProvider; +import org.springframework.data.spel.ExtensionAwareEvaluationContextProvider; import org.springframework.test.util.ReflectionTestUtils; -import com.mongodb.Mongo; import com.mongodb.reactivestreams.client.MongoClient; -import com.mongodb.reactivestreams.client.MongoClients; - -import example.first.First; -import example.second.Second; /** * Unit tests for {@link AbstractReactiveMongoConfiguration}. @@ -54,15 +55,13 @@ */ public class AbstractReactiveMongoConfigurationUnitTests { - @Rule public ExpectedException exception = ExpectedException.none(); - @Test // DATAMONGO-1444 public void usesConfigClassPackageAsBaseMappingPackage() throws ClassNotFoundException { AbstractReactiveMongoConfiguration configuration = new SampleMongoConfiguration(); - assertThat(configuration.getMappingBasePackages(), hasItem(SampleMongoConfiguration.class.getPackage().getName())); - assertThat(configuration.getInitialEntitySet(), hasSize(2)); - assertThat(configuration.getInitialEntitySet(), hasItem(Entity.class)); + assertThat(configuration.getMappingBasePackages()).contains(SampleMongoConfiguration.class.getPackage().getName()); + assertThat(configuration.getInitialEntitySet()).hasSize(2); + assertThat(configuration.getInitialEntitySet()).contains(Entity.class); } @Test // DATAMONGO-1444 @@ -82,10 +81,10 @@ public void containsMongoDbFactoryButNoMongoBean() { AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class); - assertThat(context.getBean(SimpleReactiveMongoDatabaseFactory.class), is(notNullValue())); + assertThat(context.getBean(SimpleReactiveMongoDatabaseFactory.class)).isNotNull(); + assertThatExceptionOfType(NoSuchBeanDefinitionException.class) + .isThrownBy(() -> context.getBean(com.mongodb.client.MongoClient.class)); - exception.expect(NoSuchBeanDefinitionException.class); - context.getBean(Mongo.class); context.close(); } @@ -93,11 +92,12 @@ public void 
containsMongoDbFactoryButNoMongoBean() { public void returnsUninitializedMappingContext() throws Exception { SampleMongoConfiguration configuration = new SampleMongoConfiguration(); - MongoMappingContext context = configuration.mongoMappingContext(); + MongoMappingContext context = configuration.mongoMappingContext(configuration.customConversions(), + MongoManagedTypes.from(Entity.class)); - assertThat(context.getPersistentEntities(), is(emptyIterable())); + assertThat(context.getPersistentEntities()).isEmpty(); context.initialize(); - assertThat(context.getPersistentEntities(), is(not(emptyIterable()))); + assertThat(context.getPersistentEntities()).isNotEmpty(); } @Test // DATAMONGO-1444 @@ -105,10 +105,11 @@ public void lifecycleCallbacksAreInvokedInAppropriateOrder() { AbstractApplicationContext context = new AnnotationConfigApplicationContext(SampleMongoConfiguration.class); MongoMappingContext mappingContext = context.getBean(MongoMappingContext.class); - BasicMongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(Entity.class); - StandardEvaluationContext spElContext = (StandardEvaluationContext) ReflectionTestUtils.getField(entity, "context"); + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(Entity.class); + EvaluationContextProvider provider = (EvaluationContextProvider) ReflectionTestUtils.getField(entity, + "evaluationContextProvider"); - assertThat(spElContext.getBeanResolver(), is(notNullValue())); + assertThat(provider).isInstanceOf(ExtensionAwareEvaluationContextProvider.class); context.close(); } @@ -119,8 +120,8 @@ public void shouldBeAbleToConfigureCustomTypeMapperViaJavaConfig() { MongoTypeMapper typeMapper = context.getBean(CustomMongoTypeMapper.class); MappingMongoConverter mmc = context.getBean(MappingMongoConverter.class); - assertThat(mmc, is(notNullValue())); - assertThat(mmc.getTypeMapper(), is(typeMapper)); + assertThat(mmc).isNotNull(); + assertThat(mmc.getTypeMapper()).isEqualTo(typeMapper); 
context.close(); } @@ -131,8 +132,8 @@ public void allowsMultipleEntityBasePackages() throws ClassNotFoundException { ConfigurationWithMultipleBasePackages config = new ConfigurationWithMultipleBasePackages(); Set> entities = config.getInitialEntitySet(); - assertThat(entities, hasSize(2)); - assertThat(entities, hasItems(First.class, Second.class)); + assertThat(entities).hasSize(2); + assertThat(entities).contains(First.class, Second.class); } private static void assertScanningDisabled(final String value) throws ClassNotFoundException { @@ -144,8 +145,8 @@ protected Collection getMappingBasePackages() { } }; - assertThat(configuration.getMappingBasePackages(), hasItem(value)); - assertThat(configuration.getInitialEntitySet(), hasSize(0)); + assertThat(configuration.getMappingBasePackages()).contains(value); + assertThat(configuration.getInitialEntitySet()).hasSize(0); } @Configuration @@ -158,14 +159,14 @@ protected String getDatabaseName() { @Override public MongoClient reactiveMongoClient() { - return MongoClients.create(); + return Mockito.mock(MongoClient.class); } - @Bean @Override - public MappingMongoConverter mappingMongoConverter() throws Exception { + public MappingMongoConverter mappingMongoConverter(ReactiveMongoDatabaseFactory databaseFactory, + MongoCustomConversions customConversions, MongoMappingContext mappingContext) { - MappingMongoConverter converter = super.mappingMongoConverter(); + MappingMongoConverter converter = super.mappingMongoConverter(databaseFactory, customConversions, mappingContext); converter.setTypeMapper(typeMapper()); return converter; @@ -186,7 +187,7 @@ protected String getDatabaseName() { @Override public MongoClient reactiveMongoClient() { - return MongoClients.create(); + return Mockito.mock(MongoClient.class); } @Override diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingIntegrationTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingIntegrationTests.java index a276be7227..ee411eb7c7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,27 +15,30 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; + +import java.util.Date; + +import org.junit.jupiter.api.Test; -import org.joda.time.DateTime; -import org.junit.Test; import org.springframework.context.support.AbstractApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.data.annotation.CreatedDate; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.LastModifiedDate; +import org.springframework.data.mapping.callback.EntityCallbacks; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; /** * Integration test for the auditing support. 
* * @author Oliver Gierke + * @author Mark Paluch */ public class AuditingIntegrationTests { - @Test // DATAMONGO-577, DATAMONGO-800, DATAMONGO-883 + @Test // DATAMONGO-577, DATAMONGO-800, DATAMONGO-883, DATAMONGO-2261 public void enablesAuditingAndSetsPropertiesAccordingly() throws Exception { AbstractApplicationContext context = new ClassPathXmlApplicationContext("auditing.xml", getClass()); @@ -43,31 +46,32 @@ public void enablesAuditingAndSetsPropertiesAccordingly() throws Exception { MongoMappingContext mappingContext = context.getBean(MongoMappingContext.class); mappingContext.getPersistentEntity(Entity.class); + EntityCallbacks callbacks = EntityCallbacks.create(context); + Entity entity = new Entity(); - BeforeConvertEvent event = new BeforeConvertEvent(entity, "collection-1"); - context.publishEvent(event); + entity = callbacks.callback(BeforeConvertCallback.class, entity, "collection-1"); - assertThat(entity.created, is(notNullValue())); - assertThat(entity.modified, is(entity.created)); + assertThat(entity.created).isNotNull(); + assertThat(entity.modified).isEqualTo(entity.created); Thread.sleep(10); entity.id = 1L; - event = new BeforeConvertEvent(entity, "collection-1"); - context.publishEvent(event); - assertThat(entity.created, is(notNullValue())); - assertThat(entity.modified, is(not(entity.created))); + entity = callbacks.callback(BeforeConvertCallback.class, entity, "collection-1"); + + assertThat(entity.created).isNotNull(); + assertThat(entity.modified).isNotEqualTo(entity.created); context.close(); } class Entity { @Id Long id; - @CreatedDate DateTime created; - DateTime modified; + @CreatedDate Date created; + Date modified; @LastModifiedDate - public DateTime getModified() { + public Date getModified() { return modified; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingViaJavaConfigRepositoriesTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingViaJavaConfigRepositoriesTests.java index aa3f1d7f50..c3122d2850 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingViaJavaConfigRepositoriesTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AuditingViaJavaConfigRepositoriesTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,57 +15,84 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; import java.util.Optional; +import java.util.Set; +import java.util.function.Function; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import 
org.springframework.core.ResolvableType; +import org.springframework.data.annotation.Version; import org.springframework.data.domain.AuditorAware; +import org.springframework.data.mapping.callback.EntityCallback; import org.springframework.data.mongodb.core.AuditablePerson; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback; import org.springframework.data.mongodb.repository.MongoRepository; import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.stereotype.Repository; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.util.ReflectionTestUtils; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * Integration tests for auditing via Java config. 
* * @author Thomas Darimont * @author Oliver Gierke + * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) @ContextConfiguration -public class AuditingViaJavaConfigRepositoriesTests { +class AuditingViaJavaConfigRepositoriesTests { + + static @Client MongoClient mongoClient; @Autowired AuditablePersonRepository auditablePersonRepository; @Autowired AuditorAware auditorAware; + @Autowired MongoMappingContext context; + @Autowired MongoOperations operations; + AuditablePerson auditor; @Configuration @EnableMongoAuditing(auditorAwareRef = "auditorProvider") - @EnableMongoRepositories(basePackageClasses = AuditablePersonRepository.class, considerNestedRepositories = true) - static class Config extends AbstractMongoConfiguration { + @EnableMongoRepositories(basePackageClasses = AuditablePersonRepository.class, considerNestedRepositories = true, + includeFilters = @Filter(type = FilterType.ASSIGNABLE_TYPE, classes = AuditablePersonRepository.class)) + static class Config extends AbstractMongoClientConfiguration { @Override protected String getDatabaseName() { + return "database"; } @Override public MongoClient mongoClient() { - return new MongoClient(); + return mongoClient; } @Bean @@ -73,16 +100,22 @@ public MongoClient mongoClient() { public AuditorAware auditorProvider() { return mock(AuditorAware.class); } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return new HashSet<>( + Arrays.asList(AuditablePerson.class, VersionedAuditablePerson.class, SimpleVersionedAuditablePerson.class)); + } } - @Before - public void setup() { + @BeforeEach + void setup() { auditablePersonRepository.deleteAll(); this.auditor = auditablePersonRepository.save(new AuditablePerson("auditor")); } @Test // DATAMONGO-792, DATAMONGO-883 - public void basicAuditing() { + void basicAuditing() { doReturn(Optional.of(this.auditor)).when(this.auditorAware).getCurrentAuditor(); @@ 
-90,25 +123,110 @@ public void basicAuditing() { AuditablePerson createdBy = savedUser.getCreatedBy(); - assertThat(createdBy, is(notNullValue())); - assertThat(createdBy.getFirstname(), is(this.auditor.getFirstname())); - assertThat(savedUser.getCreatedAt(), is(notNullValue())); + assertThat(createdBy).isNotNull(); + assertThat(createdBy.getFirstname()).isEqualTo(this.auditor.getFirstname()); + assertThat(savedUser.getCreatedAt()).isNotNull(); } @Test // DATAMONGO-843 @SuppressWarnings("resource") - public void auditingUsesFallbackMappingContextIfNoneConfiguredWithRepositories() { + void auditingUsesFallbackMappingContextIfNoneConfiguredWithRepositories() { new AnnotationConfigApplicationContext(SimpleConfigWithRepositories.class); } @Test // DATAMONGO-843 @SuppressWarnings("resource") - public void auditingUsesFallbackMappingContextIfNoneConfigured() { + void auditingUsesFallbackMappingContextIfNoneConfigured() { new AnnotationConfigApplicationContext(SimpleConfig.class); } + @Test // DATAMONGO-2139 + void auditingWorksForVersionedEntityWithWrapperVersion() { + + verifyAuditingViaVersionProperty(new VersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + auditablePersonRepository::save, // + null, 0L, 1L); + } + + @Test // DATAMONGO-2179 + void auditingWorksForVersionedEntityBatchWithWrapperVersion() { + + verifyAuditingViaVersionProperty(new VersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + s -> auditablePersonRepository.saveAll(Collections.singletonList(s)).get(0), // + null, 0L, 1L); + } + + @Test // DATAMONGO-2139 + void auditingWorksForVersionedEntityWithSimpleVersion() { + + verifyAuditingViaVersionProperty(new SimpleVersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + auditablePersonRepository::save, // + 0L, 1L, 2L); + } + + @Test // DATAMONGO-2139 + void auditingWorksForVersionedEntityWithWrapperVersionOnTemplate() { + + 
verifyAuditingViaVersionProperty(new VersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + operations::save, // + null, 0L, 1L); + } + + @Test // DATAMONGO-2139 + void auditingWorksForVersionedEntityWithSimpleVersionOnTemplate() { + + verifyAuditingViaVersionProperty(new SimpleVersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + operations::save, // + 0L, 1L, 2L); + } + + @Test // DATAMONGO-2586 + void auditingShouldOnlyRegisterImperativeAuditingCallback() { + + Object callbacks = ReflectionTestUtils.getField(operations, "entityCallbacks"); + Object callbackDiscoverer = ReflectionTestUtils.getField(callbacks, "callbackDiscoverer"); + List> actualCallbacks = ReflectionTestUtils.invokeMethod(callbackDiscoverer, "getEntityCallbacks", + AuditablePerson.class, ResolvableType.forClass(EntityCallback.class)); + + assertThat(actualCallbacks) // + .hasAtLeastOneElementOfType(AuditingEntityCallback.class) // + .doesNotHaveAnyElementsOfTypes(ReactiveAuditingEntityCallback.class); + } + + private void verifyAuditingViaVersionProperty(T instance, + Function versionExtractor, Function createdDateExtractor, Function persister, + Object... 
expectedValues) { + + MongoPersistentEntity entity = context.getRequiredPersistentEntity(instance.getClass()); + + assertThat(versionExtractor.apply(instance)).isEqualTo(expectedValues[0]); + assertThat(createdDateExtractor.apply(instance)).isNull(); + assertThat(entity.isNew(instance)).isTrue(); + + instance = persister.apply(instance); + + assertThat(versionExtractor.apply(instance)).isEqualTo(expectedValues[1]); + assertThat(createdDateExtractor.apply(instance)).isNotNull(); + assertThat(entity.isNew(instance)).isFalse(); + + instance = persister.apply(instance); + + assertThat(versionExtractor.apply(instance)).isEqualTo(expectedValues[2]); + assertThat(entity.isNew(instance)).isFalse(); + } + @Repository - static interface AuditablePersonRepository extends MongoRepository {} + interface AuditablePersonRepository extends MongoRepository {} @Configuration @EnableMongoRepositories @@ -116,16 +234,29 @@ static class SimpleConfigWithRepositories extends SimpleConfig {} @Configuration @EnableMongoAuditing - static class SimpleConfig extends AbstractMongoConfiguration { + static class SimpleConfig extends MongoClientClosingTestConfiguration { @Override public MongoClient mongoClient() { - return new MongoClient(); + return MongoTestUtils.client(); } @Override protected String getDatabaseName() { return "database"; } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } + } + + static class VersionedAuditablePerson extends AuditablePerson { + @Version Long version; + } + + static class SimpleVersionedAuditablePerson extends AuditablePerson { + @Version long version; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/CustomMongoTypeMapper.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/CustomMongoTypeMapper.java index 471656c51e..0d89487955 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/CustomMongoTypeMapper.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/CustomMongoTypeMapper.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/GeoJsonConfigurationIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/GeoJsonConfigurationIntegrationTests.java index ceacb7bdcd..be96469878 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/GeoJsonConfigurationIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/GeoJsonConfigurationIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,24 +15,24 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.data.mongodb.core.geo.GeoJsonModule; import org.springframework.data.web.config.EnableSpringDataWebSupport; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration tests for {@link GeoJsonConfiguration}. 
* * @author Oliver Gierke */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class GeoJsonConfigurationIntegrationTests { @@ -44,6 +44,6 @@ static class Config {} @Test // DATAMONGO-1181 public void picksUpGeoJsonModuleConfigurationByDefault() { - assertThat(geoJsonModule, is(notNullValue())); + assertThat(geoJsonModule).isNotNull(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserIntegrationTests.java index ead2bde44a..11143da832 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,13 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Collections; import java.util.Set; import org.bson.Document; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanReference; import org.springframework.beans.factory.parsing.BeanDefinitionParsingException; @@ -40,6 +37,7 @@ import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoTypeMapper; import org.springframework.data.mongodb.core.mapping.Account; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.repository.Person; import org.springframework.stereotype.Component; @@ -50,80 +48,92 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Ryan Tenney + * @author Tomasz Forys */ public class MappingMongoConverterParserIntegrationTests { - @Rule public ExpectedException exception = ExpectedException.none(); - - DefaultListableBeanFactory factory; + private DefaultListableBeanFactory factory; @Test // DATAMONGO-243 - public void allowsDbFactoryRefAttribute() { + void allowsDbFactoryRefAttribute() { loadValidConfiguration(); factory.getBeanDefinition("converter"); factory.getBean("converter"); } + @Test // GH-4275 + void defaultsToFalseForAutoIndexCreation() { + + loadValidConfiguration(); + MongoMappingContext mongoMappingContext = 
factory.getBean("converter.mongoMappingContext", + MongoMappingContext.class); + assertThat(mongoMappingContext.isAutoIndexCreation()).isFalse(); + } + + @Test // GH-4275 + void allowsToOverrideAutoIndexCreation() { + + loadValidConfiguration(); + MongoMappingContext mongoMappingContext = factory.getBean("autoIndexCreationConverter.mongoMappingContext", + MongoMappingContext.class); + assertThat(mongoMappingContext.isAutoIndexCreation()).isTrue(); + } + @Test // DATAMONGO-725 - public void hasCustomTypeMapper() { + void hasCustomTypeMapper() { loadValidConfiguration(); MappingMongoConverter converter = factory.getBean("converter", MappingMongoConverter.class); MongoTypeMapper customMongoTypeMapper = factory.getBean(CustomMongoTypeMapper.class); - assertThat(converter.getTypeMapper(), is(customMongoTypeMapper)); + assertThat(converter.getTypeMapper()).isEqualTo(customMongoTypeMapper); } @Test // DATAMONGO-301 - public void scansForConverterAndSetsUpCustomConversionsAccordingly() { + void scansForConverterAndSetsUpCustomConversionsAccordingly() { loadValidConfiguration(); CustomConversions conversions = factory.getBean(CustomConversions.class); - assertThat(conversions.hasCustomWriteTarget(Person.class), is(true)); - assertThat(conversions.hasCustomWriteTarget(Account.class), is(true)); + assertThat(conversions.hasCustomWriteTarget(Person.class)).isTrue(); + assertThat(conversions.hasCustomWriteTarget(Account.class)).isTrue(); } @Test // DATAMONGO-607 - public void activatesAbbreviatingPropertiesCorrectly() { + void activatesAbbreviatingPropertiesCorrectly() { loadValidConfiguration(); BeanDefinition definition = factory.getBeanDefinition("abbreviatingConverter.mongoMappingContext"); Object value = definition.getPropertyValues().getPropertyValue("fieldNamingStrategy").getValue(); - assertThat(value, is(instanceOf(BeanDefinition.class))); + assertThat(value).isInstanceOf(BeanDefinition.class); BeanDefinition strategy = (BeanDefinition) value; - 
assertThat(strategy.getBeanClassName(), is(CamelCaseAbbreviatingFieldNamingStrategy.class.getName())); + assertThat(strategy.getBeanClassName()).isEqualTo(CamelCaseAbbreviatingFieldNamingStrategy.class.getName()); } @Test // DATAMONGO-866 - public void rejectsInvalidFieldNamingStrategyConfiguration() { - - exception.expect(BeanDefinitionParsingException.class); - exception.expectMessage("abbreviation"); - exception.expectMessage("field-naming-strategy-ref"); + void rejectsInvalidFieldNamingStrategyConfiguration() { BeanDefinitionRegistry factory = new DefaultListableBeanFactory(); XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory); - reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-invalid.xml")); + + assertThatExceptionOfType(BeanDefinitionParsingException.class) + .isThrownBy(() -> reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-invalid.xml"))) + .withMessageContaining("abbreviation").withMessageContaining("field-naming-strategy-ref"); } @Test // DATAMONGO-892 - public void shouldThrowBeanDefinitionParsingExceptionIfConverterDefinedAsNestedBean() { - - exception.expect(BeanDefinitionParsingException.class); - exception.expectMessage("Mongo Converter must not be defined as nested bean."); - - loadNestedBeanConfiguration(); + void shouldThrowBeanDefinitionParsingExceptionIfConverterDefinedAsNestedBean() { + assertThatExceptionOfType(BeanDefinitionParsingException.class).isThrownBy(this::loadNestedBeanConfiguration); } @Test // DATAMONGO-925, DATAMONGO-928 - public void shouldSupportCustomFieldNamingStrategy() { + void shouldSupportCustomFieldNamingStrategy() { assertStrategyReferenceSetFor("mappingConverterWithCustomFieldNamingStrategy"); } @Test // DATAMONGO-925, DATAMONGO-928 - public void shouldNotFailLoadingConfigIfAbbreviationIsDisabledAndStrategySet() { + void shouldNotFailLoadingConfigIfAbbreviationIsDisabledAndStrategySet() { 
assertStrategyReferenceSetFor("mappingConverterWithCustomFieldNamingStrategyAndAbbreviationDisabled"); } @@ -137,13 +147,15 @@ private void loadNestedBeanConfiguration() { private void loadConfiguration(String configLocation) { factory = new DefaultListableBeanFactory(); + factory.setAllowBeanDefinitionOverriding(false); XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory); reader.loadBeanDefinitions(new ClassPathResource(configLocation)); } private static void assertStrategyReferenceSetFor(String beanId) { - BeanDefinitionRegistry factory = new DefaultListableBeanFactory(); + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.setAllowBeanDefinitionOverriding(false); XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory); reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-custom-fieldnamingstrategy.xml")); @@ -151,7 +163,7 @@ private static void assertStrategyReferenceSetFor(String beanId) { BeanReference value = (BeanReference) definition.getPropertyValues().getPropertyValue("fieldNamingStrategy") .getValue(); - assertThat(value.getBeanName(), is("customFieldNamingStrategy")); + assertThat(value.getBeanName()).isEqualTo("customFieldNamingStrategy"); } @Component diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserValidationIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserValidationIntegrationTests.java index 0c0056df1e..6cdd99cb3b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserValidationIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MappingMongoConverterParserValidationIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,11 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.junit.Before; import org.junit.Test; + import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.beans.factory.support.BeanDefinitionReader; import org.springframework.beans.factory.support.DefaultListableBeanFactory; @@ -50,14 +50,14 @@ public void setUp() { public void validatingEventListenerCreatedWithDefaultConfig() { reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-default.xml")); - assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER_BEAN_NAME), is(not(nullValue()))); + assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER_BEAN_NAME)).isNotNull(); } @Test // DATAMONGO-36 public void validatingEventListenerCreatedWhenValidationEnabled() { reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-validation-enabled.xml")); - assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER_BEAN_NAME), is(not(nullValue()))); + assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER_BEAN_NAME)).isNotNull(); } @Test(expected = NoSuchBeanDefinitionException.class) // DATAMONGO-36 @@ -71,6 +71,6 @@ public void validatingEventListenersIsNotCreatedWhenDisabled() { public void validatingEventListenerCreatedWithCustomTypeMapperConfig() { reader.loadBeanDefinitions(new ClassPathResource("namespace/converter-custom-typeMapper.xml")); - 
assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER_BEAN_NAME), is(not(nullValue()))); + assertThat(factory.getBean(BeanNames.VALIDATING_EVENT_LISTENER_BEAN_NAME)).isNotNull(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoAuditingRegistrarUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoAuditingRegistrarUnitTests.java index bd59fe65ee..7a711707fd 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoAuditingRegistrarUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoAuditingRegistrarUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,36 @@ */ package org.springframework.data.mongodb.config; -import org.junit.Test; -import org.junit.runner.RunWith; +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.core.type.AnnotationMetadata; /** - * Unit tests for {@link JpaAuditingRegistrar}. + * Unit tests for {@link MongoAuditingRegistrar}. 
* * @author Oliver Gierke */ -@RunWith(MockitoJUnitRunner.class) -public class MongoAuditingRegistrarUnitTests { +@ExtendWith(MockitoExtension.class) +class MongoAuditingRegistrarUnitTests { - MongoAuditingRegistrar registrar = new MongoAuditingRegistrar(); + private MongoAuditingRegistrar registrar = new MongoAuditingRegistrar(); @Mock AnnotationMetadata metadata; @Mock BeanDefinitionRegistry registry; - @Test(expected = IllegalArgumentException.class) // DATAMONGO-792 - public void rejectsNullAnnotationMetadata() { - registrar.registerBeanDefinitions(null, registry); + @Test // DATAMONGO-792 + void rejectsNullAnnotationMetadata() { + assertThatIllegalArgumentException().isThrownBy(() -> registrar.registerBeanDefinitions(null, registry)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-792 - public void rejectsNullBeanDefinitionRegistry() { - registrar.registerBeanDefinitions(metadata, null); + @Test // DATAMONGO-792 + void rejectsNullBeanDefinitionRegistry() { + assertThatIllegalArgumentException().isThrownBy(() -> registrar.registerBeanDefinitions(metadata, null)); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java new file mode 100644 index 0000000000..f83e0ec76b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java @@ -0,0 +1,164 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.test.util.ReflectionTestUtils.*; + +import java.util.Collections; +import java.util.concurrent.TimeUnit; + +import org.bson.UuidRepresentation; +import org.junit.Test; +import org.junit.runner.RunWith; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.data.mongodb.core.MongoClientFactoryBean; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +import com.mongodb.ConnectionString; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoCredential; +import com.mongodb.ServerAddress; +import com.mongodb.ServerApiVersion; +import com.mongodb.connection.ClusterType; + +/** + * Integration tests for the MongoDB namespace. 
+ * + * @author Christoph Strobl + */ +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration +public class MongoClientNamespaceTests { + + @Autowired ApplicationContext ctx; + + @Test // DATAMONGO-2384 + public void clientWithJustHostAndPort() { + + assertThat(ctx.containsBean("client-with-just-host-port")).isTrue(); + MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-just-host-port", MongoClientFactoryBean.class); + + assertThat(getField(factoryBean, "host")).isEqualTo("127.0.0.1"); + assertThat(getField(factoryBean, "port")).isEqualTo(27017); + assertThat(getField(factoryBean, "connectionString")).isNull(); + assertThat(getField(factoryBean, "credential")).isNull(); + assertThat(getField(factoryBean, "replicaSet")).isNull(); + assertThat(getField(factoryBean, "mongoClientSettings")).isNull(); + } + + @Test // DATAMONGO-2384 + public void clientWithConnectionString() { + + assertThat(ctx.containsBean("client-with-connection-string")).isTrue(); + MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-connection-string", MongoClientFactoryBean.class); + + assertThat(getField(factoryBean, "host")).isNull(); + assertThat(getField(factoryBean, "port")).isNull(); + assertThat(getField(factoryBean, "connectionString")) + .isEqualTo(new ConnectionString("mongodb://127.0.0.1:27017/?replicaSet=rs0")); + assertThat(getField(factoryBean, "credential")).isNull(); + assertThat(getField(factoryBean, "replicaSet")).isNull(); + assertThat(getField(factoryBean, "mongoClientSettings")).isNull(); + } + + @Test // DATAMONGO-2384 + public void clientWithReplicaSet() { + + assertThat(ctx.containsBean("client-with-replica-set")).isTrue(); + MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-replica-set", MongoClientFactoryBean.class); + + assertThat(getField(factoryBean, "host")).isNull(); + assertThat(getField(factoryBean, "port")).isNull(); + assertThat(getField(factoryBean, "connectionString")).isNull(); + assertThat(getField(factoryBean, 
"credential")).isNull(); + assertThat(getField(factoryBean, "replicaSet")).isEqualTo("rs0"); + assertThat(getField(factoryBean, "mongoClientSettings")).isNull(); + } + + @Test // DATAMONGO-2384 + public void clientWithCredential() { + + assertThat(ctx.containsBean("client-with-auth")).isTrue(); + MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-auth", MongoClientFactoryBean.class); + + assertThat(getField(factoryBean, "host")).isNull(); + assertThat(getField(factoryBean, "port")).isNull(); + assertThat(getField(factoryBean, "connectionString")).isNull(); + assertThat(getField(factoryBean, "credential")).isEqualTo( + Collections.singletonList(MongoCredential.createPlainCredential("jon", "snow", "warg".toCharArray()))); + assertThat(getField(factoryBean, "replicaSet")).isNull(); + assertThat(getField(factoryBean, "mongoClientSettings")).isNull(); + } + + @Test // DATAMONGO-2384 + public void clientWithClusterSettings() { + + assertThat(ctx.containsBean("client-with-cluster-settings")).isTrue(); + MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-cluster-settings", MongoClientFactoryBean.class); + + MongoClientSettings settings = (MongoClientSettings) getField(factoryBean, "mongoClientSettings"); + + assertThat(settings.getClusterSettings().getRequiredClusterType()).isEqualTo(ClusterType.REPLICA_SET); + assertThat(settings.getClusterSettings().getServerSelectionTimeout(TimeUnit.MILLISECONDS)).isEqualTo(10); + assertThat(settings.getClusterSettings().getLocalThreshold(TimeUnit.MILLISECONDS)).isEqualTo(5); + assertThat(settings.getClusterSettings().getHosts()).contains(new ServerAddress("localhost", 27018), + new ServerAddress("localhost", 27019), new ServerAddress("localhost", 27020)); + } + + @Test // DATAMONGO-2384 + public void clientWithConnectionPoolSettings() { + + assertThat(ctx.containsBean("client-with-connection-pool-settings")).isTrue(); + MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-connection-pool-settings", + 
MongoClientFactoryBean.class); + + MongoClientSettings settings = (MongoClientSettings) getField(factoryBean, "mongoClientSettings"); + + assertThat(settings.getConnectionPoolSettings().getMaxConnectionLifeTime(TimeUnit.MILLISECONDS)).isEqualTo(10); + assertThat(settings.getConnectionPoolSettings().getMinSize()).isEqualTo(10); + assertThat(settings.getConnectionPoolSettings().getMaxSize()).isEqualTo(20); + assertThat(settings.getConnectionPoolSettings().getMaintenanceFrequency(TimeUnit.MILLISECONDS)).isEqualTo(10); + assertThat(settings.getConnectionPoolSettings().getMaintenanceInitialDelay(TimeUnit.MILLISECONDS)).isEqualTo(11); + assertThat(settings.getConnectionPoolSettings().getMaxConnectionIdleTime(TimeUnit.MILLISECONDS)).isEqualTo(30); + assertThat(settings.getConnectionPoolSettings().getMaxWaitTime(TimeUnit.MILLISECONDS)).isEqualTo(15); + } + + @Test // DATAMONGO-2427 + public void clientWithUUidSettings() { + + assertThat(ctx.containsBean("client-with-uuid-settings")).isTrue(); + MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-uuid-settings", MongoClientFactoryBean.class); + + MongoClientSettings settings = (MongoClientSettings) getField(factoryBean, "mongoClientSettings"); + assertThat(settings.getUuidRepresentation()).isEqualTo(UuidRepresentation.STANDARD); + } + + @Test // GH-3820 + public void clientWithServerVersion() { + + assertThat(ctx.containsBean("client-with-server-api-settings")).isTrue(); + MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-server-api-settings", MongoClientFactoryBean.class); + + MongoClientSettings settings = (MongoClientSettings) getField(factoryBean, "mongoClientSettings"); + assertThat(settings.getServerApi()).isNotNull().satisfies(it -> { + assertThat(it.getVersion()).isEqualTo(ServerApiVersion.V1); + }); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientParserIntegrationTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientParserIntegrationTests.java index 8e6b82f28f..4b3bb25a9f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientParserIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientParserIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,26 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.collection.IsIterableContainingInOrder.*; -import static org.hamcrest.core.Is.*; -import static org.hamcrest.core.IsInstanceOf.*; -import static org.junit.Assert.*; -import static org.junit.Assume.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Before; -import org.junit.Test; +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.support.BeanDefinitionReader; import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.beans.factory.xml.XmlBeanDefinitionReader; import org.springframework.context.support.AbstractApplicationContext; import org.springframework.context.support.GenericApplicationContext; import org.springframework.core.io.ClassPathResource; -import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.test.util.ReflectionTestUtils; 
-import com.mongodb.MongoClient; +import com.mongodb.MongoClientSettings; import com.mongodb.MongoCredential; import com.mongodb.ReadPreference; +import com.mongodb.ServerAddress; import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; /** * Integration tests for {@link MongoClientParser}. @@ -47,7 +47,7 @@ public class MongoClientParserIntegrationTests { DefaultListableBeanFactory factory; BeanDefinitionReader reader; - @Before + @BeforeEach public void setUp() { this.factory = new DefaultListableBeanFactory(); @@ -59,25 +59,21 @@ public void createsMongoClientCorrectlyWhenGivenHostAndPort() { reader.loadBeanDefinitions(new ClassPathResource("namespace/mongoClient-bean.xml")); - assertThat(factory.getBean("mongo-client-with-host-and-port"), instanceOf(MongoClient.class)); + assertThat(factory.getBean("mongo-client-with-host-and-port")).isInstanceOf(MongoClient.class); } - @Test // DATAMONGO-1158 + @Test // DATAMONGO-1158, DATAMONGO-2199 public void createsMongoClientWithOptionsCorrectly() { reader.loadBeanDefinitions(new ClassPathResource("namespace/mongoClient-bean.xml")); - AbstractApplicationContext context = new GenericApplicationContext(factory); - context.refresh(); - - try { - MongoClient client = context.getBean("mongo-client-with-options-for-write-concern-and-read-preference", - MongoClient.class); + try (AbstractApplicationContext context = new GenericApplicationContext(factory)) { + context.refresh(); - assertThat(client.getReadPreference(), is(ReadPreference.secondary())); - assertThat(client.getWriteConcern(), is(WriteConcern.NORMAL)); - } finally { - context.close(); + MongoClientSettings settings = extractClientSettingsFromBean(context, + "mongo-client-with-options-for-write-concern-and-read-preference"); + assertThat(settings.getReadPreference()).isEqualTo(ReadPreference.secondary()); + assertThat(settings.getWriteConcern()).isEqualTo(WriteConcern.UNACKNOWLEDGED); } } @@ -86,16 +82,12 @@ public void 
createsMongoClientWithDefaultsCorrectly() { reader.loadBeanDefinitions(new ClassPathResource("namespace/mongoClient-bean.xml")); - AbstractApplicationContext context = new GenericApplicationContext(factory); - context.refresh(); + try (AbstractApplicationContext context = new GenericApplicationContext(factory)) { - try { - MongoClient client = context.getBean("mongoClient", MongoClient.class); + context.refresh(); - assertThat(client.getAddress().getHost(), is("127.0.0.1")); - assertThat(client.getAddress().getPort(), is(27017)); - } finally { - context.close(); + MongoClient client = context.getBean("mongoClient", MongoClient.class); + assertThat(client.getClusterDescription().getClusterSettings().getHosts()).containsExactly(new ServerAddress()); } } @@ -104,16 +96,14 @@ public void createsMongoClientWithCredentialsCorrectly() { reader.loadBeanDefinitions(new ClassPathResource("namespace/mongoClient-bean.xml")); - AbstractApplicationContext context = new GenericApplicationContext(factory); - context.refresh(); + try (AbstractApplicationContext context = new GenericApplicationContext(factory)) { + + context.refresh(); - try { - MongoClient client = context.getBean("mongo-client-with-credentials", MongoClient.class); + MongoClientSettings settings = extractClientSettingsFromBean(context, "mongo-client-with-credentials"); - assertThat(client.getCredentialsList(), - contains(MongoCredential.createPlainCredential("jon", "snow", "warg".toCharArray()))); - } finally { - context.close(); + assertThat(settings.getCredential()) + .isEqualTo(MongoCredential.createPlainCredential("jon", "snow", "warg".toCharArray())); } } @@ -122,15 +112,20 @@ public void createsMongoClientWithServerSelectionTimeoutCorrectly() { reader.loadBeanDefinitions(new ClassPathResource("namespace/mongoClient-bean.xml")); - AbstractApplicationContext context = new GenericApplicationContext(factory); - context.refresh(); + try (AbstractApplicationContext context = new GenericApplicationContext(factory)) 
{ + context.refresh(); - try { - - MongoClient client = context.getBean("mongo-client-with-server-selection-timeout", MongoClient.class); - assertThat(client.getMongoClientOptions().getServerSelectionTimeout(), is((Object) 100)); - } finally { - context.close(); + MongoClientSettings settings = extractClientSettingsFromBean(context, + "mongo-client-with-server-selection-timeout"); + assertThat(settings.getClusterSettings().getServerSelectionTimeout(TimeUnit.MILLISECONDS)).isEqualTo(100); } } + + private MongoClientSettings extractClientSettingsFromBean(AbstractApplicationContext context, String beanName) { + return extractClientSettings(context.getBean(beanName, MongoClient.class)); + } + + private MongoClientSettings extractClientSettings(MongoClient client) { + return (MongoClientSettings) ReflectionTestUtils.getField(client, "settings"); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditorUnitTests.java index 787275dabb..92a7e0036d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,7 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.collection.IsIterableContainingInOrder.*; -import static org.hamcrest.core.IsNull.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; @@ -34,6 +32,7 @@ * Unit tests for {@link MongoCredentialPropertyEditor}. * * @author Christoph Strobl + * @author Stephen Tyler Conrad */ public class MongoCredentialPropertyEditorUnitTests { @@ -54,6 +53,10 @@ public class MongoCredentialPropertyEditorUnitTests { static final String USER_4_ENCODED_PWD; static final String USER_4_DB = "targaryen"; + static final String USER_5_NAME = "lyanna"; + static final String USER_5_PWD = "random?password"; + static final String USER_5_DB = "mormont"; + static final String USER_1_AUTH_STRING = USER_1_NAME + ":" + USER_1_PWD + "@" + USER_1_DB; static final String USER_1_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM = USER_1_AUTH_STRING + "?uri.authMechanism=PLAIN"; @@ -66,6 +69,13 @@ public class MongoCredentialPropertyEditorUnitTests { static final String USER_4_AUTH_STRING; + static final String USER_5_AUTH_STRING = USER_5_NAME + ":" + USER_5_PWD + "@" + USER_5_DB; + static final String USER_5_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM = USER_5_AUTH_STRING + "?uri.authMechanism=PLAIN"; + static final String USER_5_AUTH_STRING_WITH_QUERY_ARGS = USER_5_AUTH_STRING + "?uri.authMechanism=PLAIN&foo=&bar"; + + static final String SCRAM_SHA_256_AUTH_STRING = USER_1_NAME + ":" + USER_1_PWD + "@" + USER_1_DB + + "?uri.authMechanism=SCRAM-SHA-256"; + static final MongoCredential USER_1_CREDENTIALS = 
MongoCredential.createCredential(USER_1_NAME, USER_1_DB, USER_1_PWD.toCharArray()); static final MongoCredential USER_1_CREDENTIALS_PLAIN_AUTH = MongoCredential.createPlainCredential(USER_1_NAME, @@ -73,14 +83,20 @@ public class MongoCredentialPropertyEditorUnitTests { static final MongoCredential USER_2_CREDENTIALS = MongoCredential.createCredential(USER_2_NAME, USER_2_DB, USER_2_PWD.toCharArray()); - static final MongoCredential USER_2_CREDENTIALS_CR_AUTH = MongoCredential.createMongoCRCredential(USER_2_NAME, - USER_2_DB, USER_2_PWD.toCharArray()); static final MongoCredential USER_3_CREDENTIALS_X509_AUTH = MongoCredential.createMongoX509Credential(USER_3_NAME); static final MongoCredential USER_4_CREDENTIALS = MongoCredential.createCredential(USER_4_PLAIN_NAME, USER_4_DB, USER_4_PLAIN_PWD.toCharArray()); + static final MongoCredential USER_5_CREDENTIALS = MongoCredential.createCredential(USER_5_NAME, USER_5_DB, + USER_5_PWD.toCharArray()); + static final MongoCredential USER_5_CREDENTIALS_PLAIN_AUTH = MongoCredential.createPlainCredential(USER_5_NAME, + USER_5_DB, USER_5_PWD.toCharArray()); + + static final MongoCredential SCRAM_SHA_256_CREDENTIALS = MongoCredential.createScramSha256Credential(USER_1_NAME, + USER_1_DB, USER_1_PWD.toCharArray()); + MongoCredentialPropertyEditor editor; static { @@ -108,7 +124,7 @@ public void shouldReturnNullValueForNullText() { editor.setAsText(null); - assertThat(editor.getValue(), nullValue()); + assertThat(getValue()).isNull(); } @Test // DATAMONGO-1158 @@ -116,17 +132,18 @@ public void shouldReturnNullValueForEmptyText() { editor.setAsText(" "); - assertThat(editor.getValue(), nullValue()); + assertThat(getValue()).isNull(); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1158 + @Test // DATAMONGO-1158 public void shouldThrowExceptionForMalformatedCredentialsString() { - editor.setAsText("tyrion"); + assertThatIllegalArgumentException().isThrownBy(() -> editor.setAsText("tyrion")); } - @Test(expected = 
IllegalArgumentException.class) // DATAMONGO-1158 + @Test // DATAMONGO-1158 public void shouldThrowExceptionForMalformatedAuthMechanism() { - editor.setAsText(USER_2_AUTH_STRING + "?uri.authMechanism=Targaryen"); + assertThatIllegalArgumentException() + .isThrownBy(() -> editor.setAsText(USER_2_AUTH_STRING + "?uri.authMechanism=Targaryen")); } @Test // DATAMONGO-1158 @@ -135,7 +152,7 @@ public void shouldReturnCredentialsValueCorrectlyWhenGivenSingleUserNamePassword editor.setAsText(USER_1_AUTH_STRING); - assertThat((List) editor.getValue(), contains(USER_1_CREDENTIALS)); + assertThat(getValue()).contains(USER_1_CREDENTIALS); } @Test // DATAMONGO-1158 @@ -144,7 +161,7 @@ public void shouldReturnCredentialsValueCorrectlyWhenGivenSingleUserNamePassword editor.setAsText(USER_1_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM); - assertThat((List) editor.getValue(), contains(USER_1_CREDENTIALS_PLAIN_AUTH)); + assertThat(getValue()).contains(USER_1_CREDENTIALS_PLAIN_AUTH); } @Test // DATAMONGO-1158 @@ -154,38 +171,27 @@ public void shouldReturnCredentialsValueCorrectlyWhenGivenMultipleUserNamePasswo editor .setAsText(StringUtils.collectionToCommaDelimitedString(Arrays.asList(USER_1_AUTH_STRING, USER_2_AUTH_STRING))); - assertThat((List) editor.getValue(), contains(USER_1_CREDENTIALS, USER_2_CREDENTIALS)); - } - - @Test // DATAMONGO-1158 - @SuppressWarnings("unchecked") - public void shouldReturnCredentialsValueCorrectlyWhenGivenMultipleUserNamePasswordStringWithDatabaseAndAuthOptions() { - - editor.setAsText(StringUtils.collectionToCommaDelimitedString(Arrays.asList( - USER_1_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM, USER_2_AUTH_STRING_WITH_MONGODB_CR_AUTH_MECHANISM))); - - assertThat((List) editor.getValue(), - contains(USER_1_CREDENTIALS_PLAIN_AUTH, USER_2_CREDENTIALS_CR_AUTH)); + assertThat(getValue()).contains(USER_1_CREDENTIALS, USER_2_CREDENTIALS); } @Test // DATAMONGO-1158 @SuppressWarnings("unchecked") public void 
shouldReturnCredentialsValueCorrectlyWhenGivenMultipleUserNamePasswordStringWithDatabaseAndMixedOptions() { - editor.setAsText(StringUtils.collectionToCommaDelimitedString(Arrays.asList( - USER_1_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM, USER_2_AUTH_STRING))); + editor.setAsText(StringUtils.collectionToCommaDelimitedString( + Arrays.asList(USER_1_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM, USER_2_AUTH_STRING))); - assertThat((List) editor.getValue(), contains(USER_1_CREDENTIALS_PLAIN_AUTH, USER_2_CREDENTIALS)); + assertThat(getValue()).contains(USER_1_CREDENTIALS_PLAIN_AUTH, USER_2_CREDENTIALS); } @Test // DATAMONGO-1257 @SuppressWarnings("unchecked") public void shouldReturnCredentialsValueCorrectlyWhenGivenMultipleQuotedUserNamePasswordStringWithDatabaseAndNoOptions() { - editor.setAsText(StringUtils.collectionToCommaDelimitedString(Arrays.asList("'" + USER_1_AUTH_STRING + "'", "'" - + USER_2_AUTH_STRING + "'"))); + editor.setAsText(StringUtils.collectionToCommaDelimitedString( + Arrays.asList("'" + USER_1_AUTH_STRING + "'", "'" + USER_2_AUTH_STRING + "'"))); - assertThat((List) editor.getValue(), contains(USER_1_CREDENTIALS, USER_2_CREDENTIALS)); + assertThat(getValue()).contains(USER_1_CREDENTIALS, USER_2_CREDENTIALS); } @Test // DATAMONGO-1257 @@ -194,7 +200,7 @@ public void shouldReturnCredentialsValueCorrectlyWhenGivenSingleQuotedUserNamePa editor.setAsText("'" + USER_1_AUTH_STRING + "'"); - assertThat((List) editor.getValue(), contains(USER_1_CREDENTIALS)); + assertThat(getValue()).contains(USER_1_CREDENTIALS); } @Test // DATAMONGO-1257 @@ -203,7 +209,7 @@ public void shouldReturnX509CredentialsCorrectly() { editor.setAsText(USER_3_AUTH_STRING_WITH_X509_AUTH_MECHANISM); - assertThat((List) editor.getValue(), contains(USER_3_CREDENTIALS_X509_AUTH)); + assertThat(getValue()).contains(USER_3_CREDENTIALS_X509_AUTH); } @Test // DATAMONGO-1257 @@ -212,7 +218,7 @@ public void shouldReturnX509CredentialsCorrectlyWhenNoDbSpecified() { 
editor.setAsText("tyrion?uri.authMechanism=MONGODB-X509"); - assertThat((List) editor.getValue(), contains(MongoCredential.createMongoX509Credential("tyrion"))); + assertThat(getValue()).contains(MongoCredential.createMongoX509Credential("tyrion")); } @Test(expected = IllegalArgumentException.class) // DATAMONGO-1257 @@ -220,7 +226,7 @@ public void shouldThrowExceptionWhenNoDbSpecifiedForMongodbCR() { editor.setAsText("tyrion?uri.authMechanism=MONGODB-CR"); - editor.getValue(); + getValue(); } @Test(expected = IllegalArgumentException.class) // DATAMONGO-1257 @@ -228,15 +234,52 @@ public void shouldThrowExceptionWhenDbIsEmptyForMongodbCR() { editor.setAsText("tyrion@?uri.authMechanism=MONGODB-CR"); - editor.getValue(); + getValue(); } @Test // DATAMONGO-1317 @SuppressWarnings("unchecked") - public void encodedUserNameAndPasswrodShouldBeDecoded() throws UnsupportedEncodingException { + public void encodedUserNameAndPasswordShouldBeDecoded() { editor.setAsText(USER_4_AUTH_STRING); - assertThat((List) editor.getValue(), contains(USER_4_CREDENTIALS)); + assertThat(getValue()).contains(USER_4_CREDENTIALS); + } + + @Test // DATAMONGO-2016 + @SuppressWarnings("unchecked") + public void passwordWithQuestionMarkShouldNotBeInterpretedAsOptionString() { + + editor.setAsText(USER_5_AUTH_STRING); + + assertThat(getValue()).contains(USER_5_CREDENTIALS); + } + + @Test // DATAMONGO-2016 + @SuppressWarnings("unchecked") + public void passwordWithQuestionMarkShouldNotBreakParsingOfOptionString() { + + editor.setAsText(USER_5_AUTH_STRING_WITH_PLAIN_AUTH_MECHANISM); + + assertThat(getValue()).contains(USER_5_CREDENTIALS_PLAIN_AUTH); + } + + @Test // DATAMONGO-2051 + public void shouldReturnScramSha256Credentials() { + + editor.setAsText(SCRAM_SHA_256_AUTH_STRING); + + assertThat(getValue()).contains(SCRAM_SHA_256_CREDENTIALS); + } + + @Test // DATAMONGO-2016 + @SuppressWarnings("unchecked") + public void failsGracefullyOnEmptyQueryArgument() { + 
assertThatIllegalArgumentException().isThrownBy(() -> editor.setAsText(USER_5_AUTH_STRING_WITH_QUERY_ARGS)); + } + + @SuppressWarnings("unchecked") + private List getValue() { + return (List) editor.getValue(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests.java index c9a8411b84..a283666c05 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,25 +15,25 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataAccessResourceFailureException; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** - * Integration tests for {@link MongoDbFactory}. + * Integration tests for {@link MongoDatabaseFactory}. 
* * @author Thomas Risberg * @author Oliver Gierke */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class MongoDbFactoryNoDatabaseRunningTests { @@ -41,11 +41,12 @@ public class MongoDbFactoryNoDatabaseRunningTests { @Test // DATAMONGO-139 public void startsUpWithoutADatabaseRunning() { - assertThat(mongoTemplate.getClass().getName(), is("org.springframework.data.mongodb.core.MongoTemplate")); + assertThat(mongoTemplate.getClass().getName()).isEqualTo("org.springframework.data.mongodb.core.MongoTemplate"); } - @Test(expected = DataAccessResourceFailureException.class) + @Test public void failsDataAccessWithoutADatabaseRunning() { - mongoTemplate.getCollectionNames(); + assertThatExceptionOfType(DataAccessResourceFailureException.class) + .isThrownBy(() -> mongoTemplate.getCollectionNames()); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryParserIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryParserIntegrationTests.java index e8d04b2bb6..3ebdf61ae4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryParserIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoDbFactoryParserIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,29 +15,27 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.ConstructorArgumentValues; import org.springframework.beans.factory.config.ConstructorArgumentValues.ValueHolder; -import org.springframework.beans.factory.parsing.BeanDefinitionParsingException; import org.springframework.beans.factory.support.BeanDefinitionReader; import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.beans.factory.xml.XmlBeanDefinitionReader; import org.springframework.context.support.AbstractApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.core.io.ClassPathResource; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.test.util.ReflectionTestUtils; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientURI; -import com.mongodb.MongoURI; +import com.mongodb.ConnectionString; import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoDatabase; /** @@ -52,33 +50,39 @@ 
public class MongoDbFactoryParserIntegrationTests { DefaultListableBeanFactory factory; BeanDefinitionReader reader; - @Before + @BeforeEach public void setUp() { factory = new DefaultListableBeanFactory(); reader = new XmlBeanDefinitionReader(factory); } - @Test + @Test // DATAMONGO-2199 public void testWriteConcern() throws Exception { - SimpleMongoDbFactory dbFactory = new SimpleMongoDbFactory(new MongoClient("localhost"), "database"); - dbFactory.setWriteConcern(WriteConcern.SAFE); - dbFactory.getDb(); + try (MongoClient client = MongoTestUtils.client()) { + SimpleMongoClientDatabaseFactory dbFactory = new SimpleMongoClientDatabaseFactory(client, "database"); + dbFactory.setWriteConcern(WriteConcern.ACKNOWLEDGED); + dbFactory.getMongoDatabase(); - assertThat(ReflectionTestUtils.getField(dbFactory, "writeConcern"), is((Object) WriteConcern.SAFE)); + assertThat(ReflectionTestUtils.getField(dbFactory, "writeConcern")).isEqualTo(WriteConcern.ACKNOWLEDGED); + } } - @Test + @Test // DATAMONGO-2199 public void parsesWriteConcern() { + ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("namespace/db-factory-bean.xml"); - assertWriteConcern(ctx, WriteConcern.SAFE); + assertWriteConcern(ctx, WriteConcern.ACKNOWLEDGED); + ctx.close(); } - @Test + @Test // DATAMONGO-2199 public void parsesCustomWriteConcern() { + ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext( "namespace/db-factory-bean-custom-write-concern.xml"); assertWriteConcern(ctx, new WriteConcern("rack1")); + ctx.close(); } @Test // DATAMONGO-331 @@ -86,20 +90,21 @@ public void readsReplicasWriteConcernCorrectly() { AbstractApplicationContext ctx = new ClassPathXmlApplicationContext( "namespace/db-factory-bean-custom-write-concern.xml"); - MongoDbFactory factory = ctx.getBean("second", MongoDbFactory.class); - MongoDatabase db = factory.getDb(); - - assertThat(db.getWriteConcern(), is(WriteConcern.REPLICAS_SAFE)); + MongoDatabaseFactory factory = ctx.getBean("second", 
MongoDatabaseFactory.class); ctx.close(); + + MongoDatabase db = factory.getMongoDatabase(); + assertThat(db.getWriteConcern()).isEqualTo(WriteConcern.W2); } // This test will fail since equals in WriteConcern uses == for _w and not .equals public void testWriteConcernEquality() { + String s1 = new String("rack1"); String s2 = new String("rack1"); WriteConcern wc1 = new WriteConcern(s1); WriteConcern wc2 = new WriteConcern(s2); - assertThat(wc1, is(wc2)); + assertThat(wc1).isEqualTo(wc2); } @Test @@ -116,13 +121,15 @@ public void setsUpMongoDbFactoryUsingAMongoUriWithoutCredentials() { BeanDefinition definition = factory.getBeanDefinition("mongoDbFactory"); ConstructorArgumentValues constructorArguments = definition.getConstructorArgumentValues(); - assertThat(constructorArguments.getArgumentCount(), is(1)); - ValueHolder argument = constructorArguments.getArgumentValue(0, MongoURI.class); - assertThat(argument, is(notNullValue())); + assertThat(constructorArguments.getArgumentCount()).isOne(); + ValueHolder argument = constructorArguments.getArgumentValue(0, ConnectionString.class); + assertThat(argument).isNotNull(); + + MongoDatabaseFactory dbFactory = factory.getBean("mongoDbFactory", MongoDatabaseFactory.class); + MongoDatabase db = dbFactory.getMongoDatabase(); + assertThat(db.getName()).isEqualTo("database"); - MongoDbFactory dbFactory = factory.getBean("mongoDbFactory", MongoDbFactory.class); - MongoDatabase db = dbFactory.getDb(); - assertThat(db.getName(), is("database")); + factory.destroyBean(dbFactory); } @Test // DATAMONGO-1218 @@ -132,9 +139,9 @@ public void setsUpMongoDbFactoryUsingAMongoClientUri() { BeanDefinition definition = factory.getBeanDefinition("mongoDbFactory"); ConstructorArgumentValues constructorArguments = definition.getConstructorArgumentValues(); - assertThat(constructorArguments.getArgumentCount(), is(1)); - ValueHolder argument = constructorArguments.getArgumentValue(0, MongoClientURI.class); - assertThat(argument, 
is(notNullValue())); + assertThat(constructorArguments.getArgumentCount()).isOne(); + ValueHolder argument = constructorArguments.getArgumentValue(0, ConnectionString.class); + assertThat(argument).isNotNull(); } @Test // DATAMONGO-1293 @@ -144,9 +151,9 @@ public void setsUpClientUriWithId() { BeanDefinition definition = factory.getBeanDefinition("testMongo"); ConstructorArgumentValues constructorArguments = definition.getConstructorArgumentValues(); - assertThat(constructorArguments.getArgumentCount(), is(1)); - ValueHolder argument = constructorArguments.getArgumentValue(0, MongoClientURI.class); - assertThat(argument, is(notNullValue())); + assertThat(constructorArguments.getArgumentCount()).isOne(); + ValueHolder argument = constructorArguments.getArgumentValue(0, ConnectionString.class); + assertThat(argument).isNotNull(); } @Test // DATAMONGO-1293 @@ -156,25 +163,45 @@ public void setsUpUriWithId() { BeanDefinition definition = factory.getBeanDefinition("testMongo"); ConstructorArgumentValues constructorArguments = definition.getConstructorArgumentValues(); - assertThat(constructorArguments.getArgumentCount(), is(1)); - ValueHolder argument = constructorArguments.getArgumentValue(0, MongoClientURI.class); - assertThat(argument, is(notNullValue())); + assertThat(constructorArguments.getArgumentCount()).isOne(); + ValueHolder argument = constructorArguments.getArgumentValue(0, ConnectionString.class); + assertThat(argument).isNotNull(); + } + + @Test // DATAMONGO-2384 + public void usesConnectionStringToCreateClientClient() { + + ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("namespace/db-factory-bean.xml"); + + MongoDatabaseFactory dbFactory = ctx.getBean("with-connection-string", MongoDatabaseFactory.class); + ctx.close(); + + assertThat(dbFactory).isInstanceOf(SimpleMongoClientDatabaseFactory.class); + assertThat(ReflectionTestUtils.getField(dbFactory, "mongoClient")) + .isInstanceOf(com.mongodb.client.MongoClient.class); + } + + 
@Test // DATAMONGO-2384 + public void usesMongoClientClientRef() { + + ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("namespace/db-factory-bean.xml"); + + MongoDatabaseFactory dbFactory = ctx.getBean("with-mongo-client-client-ref", MongoDatabaseFactory.class); + assertThat(dbFactory).isInstanceOf(SimpleMongoClientDatabaseFactory.class); + assertThat(ReflectionTestUtils.getField(dbFactory, "mongoClient")) + .isInstanceOf(com.mongodb.client.MongoClient.class); } private static void assertWriteConcern(ClassPathXmlApplicationContext ctx, WriteConcern expectedWriteConcern) { - SimpleMongoDbFactory dbFactory = ctx.getBean("first", SimpleMongoDbFactory.class); - MongoDatabase db = dbFactory.getDb(); - assertThat(db.getName(), is("db")); + SimpleMongoClientDatabaseFactory dbFactory = ctx.getBean("first", SimpleMongoClientDatabaseFactory.class); + MongoDatabase db = dbFactory.getMongoDatabase(); + assertThat(db.getName()).isEqualTo("db"); WriteConcern configuredConcern = (WriteConcern) ReflectionTestUtils.getField(dbFactory, "writeConcern"); - MyWriteConcern myDbFactoryWriteConcern = new MyWriteConcern(configuredConcern); - MyWriteConcern myDbWriteConcern = new MyWriteConcern(db.getWriteConcern()); - MyWriteConcern myExpectedWriteConcern = new MyWriteConcern(expectedWriteConcern); - - assertThat(myDbFactoryWriteConcern, is(myExpectedWriteConcern)); - assertThat(myDbWriteConcern, is(myExpectedWriteConcern)); - assertThat(myDbWriteConcern, is(myDbFactoryWriteConcern)); + assertThat(configuredConcern).isEqualTo(expectedWriteConcern); + assertThat(db.getWriteConcern()).isEqualTo(expectedWriteConcern); + assertThat(db.getWriteConcern()).isEqualTo(expectedWriteConcern); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests.java index 61d5b704f0..d7689c8e2a 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,37 +16,36 @@ package org.springframework.data.mongodb.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.net.InetAddress; import java.util.ArrayList; import java.util.List; -import com.mongodb.MongoClient; import org.bson.Document; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.data.mongodb.core.MongoClientFactoryBean; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.util.ReflectionTestUtils; -import com.mongodb.Mongo; +import com.mongodb.MongoClientSettings; import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; /** - * * @author Mark Pollack * @author Oliver Gierke * @author Thomas Darimont * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) 
+@RunWith(SpringRunner.class) @ContextConfiguration public class MongoNamespaceReplicaSetTests { @@ -56,34 +55,36 @@ public class MongoNamespaceReplicaSetTests { @SuppressWarnings("unchecked") public void testParsingMongoWithReplicaSets() throws Exception { - assertTrue(ctx.containsBean("replicaSetMongo")); + assertThat(ctx.containsBean("replicaSetMongo")).isTrue(); MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&replicaSetMongo"); - List replicaSetSeeds = (List) ReflectionTestUtils.getField(mfb, "replicaSetSeeds"); + MongoClientSettings settings = (MongoClientSettings) ReflectionTestUtils.getField(mfb, "mongoClientSettings"); + List replicaSetSeeds = settings.getClusterSettings().getHosts(); - assertThat(replicaSetSeeds, is(notNullValue())); - assertThat(replicaSetSeeds, hasItems(new ServerAddress(InetAddress.getByName("127.0.0.1"), 10001), - new ServerAddress(InetAddress.getByName("localhost"), 10002))); + assertThat(replicaSetSeeds).isNotNull(); + assertThat(replicaSetSeeds).contains(new ServerAddress(InetAddress.getByName("127.0.0.1"), 10001), + new ServerAddress(InetAddress.getByName("localhost"), 10002)); } @Test @SuppressWarnings("unchecked") public void testParsingWithPropertyPlaceHolder() throws Exception { - assertTrue(ctx.containsBean("manyReplicaSetMongo")); + assertThat(ctx.containsBean("manyReplicaSetMongo")).isTrue(); MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&manyReplicaSetMongo"); - List replicaSetSeeds = (List) ReflectionTestUtils.getField(mfb, "replicaSetSeeds"); + MongoClientSettings settings = (MongoClientSettings) ReflectionTestUtils.getField(mfb, "mongoClientSettings"); + List replicaSetSeeds = settings.getClusterSettings().getHosts(); - assertThat(replicaSetSeeds, is(notNullValue())); - assertThat(replicaSetSeeds, hasSize(3)); + assertThat(replicaSetSeeds).isNotNull(); + assertThat(replicaSetSeeds).hasSize(3); List ports = new ArrayList(); for (ServerAddress replicaSetSeed : replicaSetSeeds) { 
ports.add(replicaSetSeed.getPort()); } - assertThat(ports, hasItems(27017, 27018, 27019)); + assertThat(ports).contains(27017, 27018, 27019); } @Test @@ -91,15 +92,15 @@ public void testParsingWithPropertyPlaceHolder() throws Exception { public void testMongoWithReplicaSets() { MongoClient mongo = ctx.getBean(MongoClient.class); - assertEquals(2, mongo.getAllAddress().size()); - List servers = mongo.getAllAddress(); - assertEquals("127.0.0.1", servers.get(0).getHost()); - assertEquals("localhost", servers.get(1).getHost()); - assertEquals(10001, servers.get(0).getPort()); - assertEquals(10002, servers.get(1).getPort()); + assertThat(mongo.getClusterDescription().getClusterSettings().getHosts()).isEqualTo(2); + List servers = mongo.getClusterDescription().getClusterSettings().getHosts(); + assertThat(servers.get(0).getHost()).isEqualTo("127.0.0.1"); + assertThat(servers.get(1).getHost()).isEqualTo("localhost"); + assertThat(servers.get(0).getPort()).isEqualTo(10001); + assertThat(servers.get(1).getPort()).isEqualTo(10002); MongoTemplate template = new MongoTemplate(mongo, "admin"); Document result = template.executeCommand("{replSetGetStatus : 1}"); - assertEquals("blort", result.get("set").toString()); + assertThat(result.get("set").toString()).isEqualTo("blort"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceTests.java index 0f722f011c..a3b1a16a85 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoNamespaceTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,32 @@ */ package org.springframework.data.mongodb.config; -import static org.junit.Assert.*; -import static org.junit.Assume.*; -import static org.springframework.data.mongodb.util.MongoClientVersion.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.test.util.ReflectionTestUtils.*; import javax.net.ssl.SSLSocketFactory; -import org.junit.BeforeClass; +import java.util.function.Supplier; + +import com.mongodb.client.MongoCollection; +import com.mongodb.client.gridfs.GridFSBucket; +import com.mongodb.client.gridfs.model.GridFSFile; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; -import org.springframework.data.authentication.UserCredentials; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.MongoClientFactoryBean; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.gridfs.GridFsOperations; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientOptions; -import com.mongodb.MongoOptions; +import com.mongodb.MongoClientSettings; +import 
com.mongodb.ServerAddress; import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; /** * Integration tests for the MongoDB namespace. @@ -50,7 +51,7 @@ * @author Thomas Darimont * @author Christoph Strobl */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class MongoNamespaceTests { @@ -59,181 +60,174 @@ public class MongoNamespaceTests { @Test public void testMongoSingleton() throws Exception { - assertTrue(ctx.containsBean("noAttrMongo")); + assertThat(ctx.containsBean("noAttrMongo")).isTrue(); MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&noAttrMongo"); - assertNull(getField(mfb, "host")); - assertNull(getField(mfb, "port")); + assertThat(getField(mfb, "host")).isNull(); + assertThat(getField(mfb, "port")).isNull(); } @Test public void testMongoSingletonWithAttributes() throws Exception { - assertTrue(ctx.containsBean("defaultMongo")); + assertThat(ctx.containsBean("defaultMongo")).isTrue(); MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&defaultMongo"); String host = (String) getField(mfb, "host"); Integer port = (Integer) getField(mfb, "port"); - assertEquals("localhost", host); - assertEquals(new Integer(27017), port); + assertThat(host).isEqualTo("localhost"); + assertThat(port).isEqualTo(new Integer(27017)); - MongoClientOptions options = (MongoClientOptions) getField(mfb, "mongoClientOptions"); - assertFalse("By default socketFactory should not be a SSLSocketFactory", - options.getSocketFactory() instanceof SSLSocketFactory); + MongoClientSettings options = (MongoClientSettings) getField(mfb, "mongoClientSettings"); + assertThat(options).isNull(); } @Test // DATAMONGO-764 public void testMongoSingletonWithSslEnabled() throws Exception { - assertTrue(ctx.containsBean("mongoSsl")); + assertThat(ctx.containsBean("mongoSsl")).isTrue(); MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoSsl"); - MongoClientOptions options = 
(MongoClientOptions) getField(mfb, "mongoClientOptions"); - assertTrue("socketFactory should be a SSLSocketFactory", options.getSocketFactory() instanceof SSLSocketFactory); + MongoClientSettings options = (MongoClientSettings) getField(mfb, "mongoClientSettings"); + assertThat(options.getSslSettings().getContext().getSocketFactory() instanceof SSLSocketFactory) + .as("socketFactory should be a SSLSocketFactory").isTrue(); } @Test // DATAMONGO-1490 public void testMongoClientSingletonWithSslEnabled() { - assertTrue(ctx.containsBean("mongoClientSsl")); + assertThat(ctx.containsBean("mongoClientSsl")).isTrue(); MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoClientSsl"); - MongoClientOptions options = (MongoClientOptions) getField(mfb, "mongoClientOptions"); - assertTrue("socketFactory should be a SSLSocketFactory", options.getSocketFactory() instanceof SSLSocketFactory); + MongoClientSettings options = (MongoClientSettings) getField(mfb, "mongoClientSettings"); + assertThat(options.getSslSettings().getContext().getSocketFactory() instanceof SSLSocketFactory) + .as("socketFactory should be a SSLSocketFactory").isTrue(); } @Test // DATAMONGO-764 public void testMongoSingletonWithSslEnabledAndCustomSslSocketFactory() throws Exception { - assertTrue(ctx.containsBean("mongoSslWithCustomSslFactory")); + assertThat(ctx.containsBean("mongoSslWithCustomSslFactory")).isTrue(); MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoSslWithCustomSslFactory"); - SSLSocketFactory customSslSocketFactory = ctx.getBean("customSslSocketFactory", SSLSocketFactory.class); - MongoClientOptions options = (MongoClientOptions) getField(mfb, "mongoClientOptions"); + MongoClientSettings options = (MongoClientSettings) getField(mfb, "mongoClientSettings"); - assertTrue("socketFactory should be a SSLSocketFactory", options.getSocketFactory() instanceof SSLSocketFactory); - assertSame(customSslSocketFactory, options.getSocketFactory()); + 
assertThat(options.getSslSettings().getContext().getSocketFactory() instanceof SSLSocketFactory) + .as("socketFactory should be a SSLSocketFactory").isTrue(); + assertThat(options.getSslSettings().getContext().getProvider().getName()).isEqualTo("SunJSSE"); } @Test public void testSecondMongoDbFactory() { - assertTrue(ctx.containsBean("secondMongoDbFactory")); - MongoDbFactory dbf = (MongoDbFactory) ctx.getBean("secondMongoDbFactory"); + assertThat(ctx.containsBean("secondMongoDbFactory")).isTrue(); + MongoDatabaseFactory dbf = (MongoDatabaseFactory) ctx.getBean("secondMongoDbFactory"); MongoClient mongo = (MongoClient) getField(dbf, "mongoClient"); - assertEquals("127.0.0.1", mongo.getAddress().getHost()); - assertEquals(27017, mongo.getAddress().getPort()); - assertEquals("database", getField(dbf, "databaseName")); + assertThat(mongo.getClusterDescription().getClusterSettings().getHosts()).containsExactly(new ServerAddress()); + assertThat(getField(dbf, "databaseName")).isEqualTo("database"); } @Test // DATAMONGO-789 public void testThirdMongoDbFactory() { - assertTrue(ctx.containsBean("thirdMongoDbFactory")); + assertThat(ctx.containsBean("thirdMongoDbFactory")).isTrue(); - MongoDbFactory dbf = (MongoDbFactory) ctx.getBean("thirdMongoDbFactory"); + MongoDatabaseFactory dbf = (MongoDatabaseFactory) ctx.getBean("thirdMongoDbFactory"); MongoClient mongo = (MongoClient) getField(dbf, "mongoClient"); - assertEquals("127.0.0.1", mongo.getAddress().getHost()); - assertEquals(27017, mongo.getAddress().getPort()); - assertEquals("database", getField(dbf, "databaseName")); + assertThat(mongo.getClusterDescription().getClusterSettings().getHosts()).containsExactly(new ServerAddress()); + assertThat(getField(dbf, "databaseName")).isEqualTo("database"); } @Test // DATAMONGO-140 public void testMongoTemplateFactory() { - assertTrue(ctx.containsBean("mongoTemplate")); + assertThat(ctx.containsBean("mongoTemplate")).isTrue(); MongoOperations operations = (MongoOperations) 
ctx.getBean("mongoTemplate"); - MongoDbFactory dbf = (MongoDbFactory) getField(operations, "mongoDbFactory"); - assertEquals("database", getField(dbf, "databaseName")); + MongoDatabaseFactory dbf = (MongoDatabaseFactory) getField(operations, "mongoDbFactory"); + assertThat(getField(dbf, "databaseName")).isEqualTo("database"); MongoConverter converter = (MongoConverter) getField(operations, "mongoConverter"); - assertNotNull(converter); + assertThat(converter).isNotNull(); } @Test // DATAMONGO-140 public void testSecondMongoTemplateFactory() { - assertTrue(ctx.containsBean("anotherMongoTemplate")); + assertThat(ctx.containsBean("anotherMongoTemplate")).isTrue(); MongoOperations operations = (MongoOperations) ctx.getBean("anotherMongoTemplate"); - MongoDbFactory dbf = (MongoDbFactory) getField(operations, "mongoDbFactory"); - assertEquals("database", getField(dbf, "databaseName")); + MongoDatabaseFactory dbf = (MongoDatabaseFactory) getField(operations, "mongoDbFactory"); + assertThat(getField(dbf, "databaseName")).isEqualTo("database"); WriteConcern writeConcern = (WriteConcern) getField(operations, "writeConcern"); - assertEquals(WriteConcern.SAFE, writeConcern); + assertThat(writeConcern).isEqualTo(WriteConcern.ACKNOWLEDGED); } @Test // DATAMONGO-628 public void testGridFsTemplateFactory() { - assertTrue(ctx.containsBean("gridFsTemplate")); + assertThat(ctx.containsBean("gridFsTemplate")).isTrue(); GridFsOperations operations = (GridFsOperations) ctx.getBean("gridFsTemplate"); - MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory"); - assertEquals("database", getField(dbf, "databaseName")); + Supplier gridFSBucketSupplier = (Supplier) getField(operations, "bucketSupplier"); + GridFSBucket gfsBucket = gridFSBucketSupplier.get(); + assertThat(gfsBucket.getBucketName()).isEqualTo("fs"); // fs is the default + + MongoCollection filesCollection = (MongoCollection) getField(gfsBucket, "filesCollection"); + 
assertThat(filesCollection.getNamespace().getDatabaseName()).isEqualTo("database"); MongoConverter converter = (MongoConverter) getField(operations, "converter"); - assertNotNull(converter); + assertThat(converter).isNotNull(); } @Test // DATAMONGO-628 public void testSecondGridFsTemplateFactory() { - assertTrue(ctx.containsBean("secondGridFsTemplate")); + assertThat(ctx.containsBean("secondGridFsTemplate")).isTrue(); GridFsOperations operations = (GridFsOperations) ctx.getBean("secondGridFsTemplate"); - MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory"); - assertEquals("database", getField(dbf, "databaseName")); - assertEquals(null, getField(operations, "bucket")); + Supplier gridFSBucketSupplier = (Supplier) getField(operations, "bucketSupplier"); + GridFSBucket gfsBucket = gridFSBucketSupplier.get(); + assertThat(gfsBucket.getBucketName()).isEqualTo("fs"); // fs is the default + + MongoCollection filesCollection = (MongoCollection) getField(gfsBucket, "filesCollection"); + assertThat(filesCollection.getNamespace().getDatabaseName()).isEqualTo("database"); MongoConverter converter = (MongoConverter) getField(operations, "converter"); - assertNotNull(converter); + assertThat(converter).isNotNull(); } @Test // DATAMONGO-823 public void testThirdGridFsTemplateFactory() { - assertTrue(ctx.containsBean("thirdGridFsTemplate")); + assertThat(ctx.containsBean("thirdGridFsTemplate")).isTrue(); GridFsOperations operations = (GridFsOperations) ctx.getBean("thirdGridFsTemplate"); - MongoDbFactory dbf = (MongoDbFactory) getField(operations, "dbFactory"); - assertEquals("database", getField(dbf, "databaseName")); - assertEquals("bucketString", getField(operations, "bucket")); + Supplier gridFSBucketSupplier = (Supplier) getField(operations, "bucketSupplier"); + GridFSBucket gfsBucket = gridFSBucketSupplier.get(); + assertThat(gfsBucket.getBucketName()).isEqualTo("bucketString"); // fs is the default + + MongoCollection filesCollection = (MongoCollection) 
getField(gfsBucket, "filesCollection"); + assertThat(filesCollection.getNamespace().getDatabaseName()).isEqualTo("database"); MongoConverter converter = (MongoConverter) getField(operations, "converter"); - assertNotNull(converter); + assertThat(converter).isNotNull(); } @Test - @SuppressWarnings("deprecation") - public void testMongoSingletonWithPropertyPlaceHolders() throws Exception { + public void testMongoSingletonWithPropertyPlaceHolders() { - assertTrue(ctx.containsBean("mongoClient")); + assertThat(ctx.containsBean("mongoClient")).isTrue(); MongoClientFactoryBean mfb = (MongoClientFactoryBean) ctx.getBean("&mongoClient"); String host = (String) getField(mfb, "host"); Integer port = (Integer) getField(mfb, "port"); - assertEquals("127.0.0.1", host); - assertEquals(new Integer(27017), port); - - MongoClient mongo = mfb.getObject(); - MongoClientOptions mongoOpts = mongo.getMongoClientOptions(); - - assertEquals(8, mongoOpts.getConnectionsPerHost()); - assertEquals(1000, mongoOpts.getConnectTimeout()); - assertEquals(1500, mongoOpts.getMaxWaitTime()); - - assertEquals(1500, mongoOpts.getSocketTimeout()); - assertEquals(4, mongoOpts.getThreadsAllowedToBlockForConnectionMultiplier()); - - // TODO: check the damned defaults -// assertEquals("w", mongoOpts.getWriteConcern().getW()); -// assertEquals(0, mongoOpts.getWriteConcern().getWtimeout()); -// assertEquals(true, mongoOpts.getWriteConcern().fsync()); + assertThat(host).isEqualTo("127.0.0.1"); + assertThat(port).isEqualTo(new Integer(27017)); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoParserIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoParserIntegrationTests.java index 1f24865998..2ae67e583b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoParserIntegrationTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoParserIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,14 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.List; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + import org.springframework.beans.PropertyValue; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionReader; @@ -31,10 +32,10 @@ import org.springframework.context.support.GenericApplicationContext; import org.springframework.core.io.ClassPathResource; -import com.mongodb.Mongo; +import com.mongodb.client.MongoClient; /** - * Integration tests for {@link MongoParser}. + * Integration tests for {@link MongoClientParser}. 
* * @author Oliver Gierke */ @@ -43,7 +44,7 @@ public class MongoParserIntegrationTests { DefaultListableBeanFactory factory; BeanDefinitionReader reader; - @Before + @BeforeEach public void setUp() { this.factory = new DefaultListableBeanFactory(); @@ -51,6 +52,7 @@ public void setUp() { } @Test + @Disabled public void readsMongoAttributesCorrectly() { reader.loadBeanDefinitions(new ClassPathResource("namespace/mongo-bean.xml")); @@ -58,10 +60,10 @@ public void readsMongoAttributesCorrectly() { List values = definition.getPropertyValues().getPropertyValueList(); - assertThat(values.get(2).getValue(), instanceOf(BeanDefinition.class)); + assertThat(values.get(2).getValue()).isInstanceOf(BeanDefinition.class); BeanDefinition x = (BeanDefinition) values.get(2).getValue(); - assertThat(x.getPropertyValues().getPropertyValueList(), hasItem(new PropertyValue("writeConcern", "SAFE"))); + assertThat(x.getPropertyValues().getPropertyValueList()).contains(new PropertyValue("writeConcern", "SAFE")); factory.getBean("mongoClient"); } @@ -74,7 +76,7 @@ public void readsServerAddressesCorrectly() { AbstractApplicationContext context = new GenericApplicationContext(factory); context.refresh(); - assertThat(context.getBean("mongo2", Mongo.class), is(notNullValue())); + assertThat(context.getBean("mongo2", MongoClient.class)).isNotNull(); context.close(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MyWriteConcern.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MyWriteConcern.java deleted file mode 100644 index 725a9cd26b..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MyWriteConcern.java +++ /dev/null @@ -1,57 +0,0 @@ -package org.springframework.data.mongodb.config; - -import com.mongodb.WriteConcern; - -public class MyWriteConcern { - - public MyWriteConcern(WriteConcern wc) { - this._w = wc.getWObject(); - this._fsync = wc.getFsync(); - this._j = 
wc.getJ(); - this._wtimeout = wc.getWtimeout(); - } - - Object _w = 0; - int _wtimeout = 0; - boolean _fsync = false; - boolean _j = false; - boolean _continueOnErrorForInsert = false; - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (_continueOnErrorForInsert ? 1231-2018 : 1237); - result = prime * result + (_fsync ? 1231-2018 : 1237); - result = prime * result + (_j ? 1231-2018 : 1237); - result = prime * result + ((_w == null) ? 0 : _w.hashCode()); - result = prime * result + _wtimeout; - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - MyWriteConcern other = (MyWriteConcern) obj; - if (_continueOnErrorForInsert != other._continueOnErrorForInsert) - return false; - if (_fsync != other._fsync) - return false; - if (_j != other._j) - return false; - if (_w == null) { - if (other._w != null) - return false; - } else if (!_w.equals(other._w)) - return false; - if (_wtimeout != other._wtimeout) - return false; - return true; - } - -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReactiveAuditingTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReactiveAuditingTests.java new file mode 100644 index 0000000000..fc47fd0572 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReactiveAuditingTests.java @@ -0,0 +1,209 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import static org.assertj.core.api.Assertions.*; + +import org.springframework.core.ResolvableType; +import org.springframework.data.mapping.callback.EntityCallback; +import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback; +import org.springframework.data.mongodb.test.util.ReactiveMongoClientClosingTestConfiguration; +import org.springframework.test.util.ReflectionTestUtils; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan.Filter; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.data.annotation.Version; +import org.springframework.data.domain.ReactiveAuditorAware; +import org.springframework.data.mongodb.core.AuditablePerson; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; 
+import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.repository.ReactiveMongoRepository; +import org.springframework.data.mongodb.repository.config.EnableReactiveMongoRepositories; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Integration test for the auditing support via {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate}. + * + * @author Mark Paluch + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@ContextConfiguration +class ReactiveAuditingTests { + + static @Client MongoClient mongoClient; + + @Autowired ReactiveAuditablePersonRepository auditablePersonRepository; + @Autowired MongoMappingContext context; + @Autowired ReactiveMongoOperations operations; + + @Configuration + @EnableReactiveMongoAuditing + @EnableReactiveMongoRepositories(basePackageClasses = ReactiveAuditingTests.class, considerNestedRepositories = true, + includeFilters = @Filter(type = FilterType.ASSIGNABLE_TYPE, classes = ReactiveAuditablePersonRepository.class)) + static class Config extends ReactiveMongoClientClosingTestConfiguration { + + @Override + protected String getDatabaseName() { + return "database"; + } + + @Override + public MongoClient reactiveMongoClient() { + return mongoClient; + } + + @Override + protected Set> getInitialEntitySet() { + return new HashSet<>( + Arrays.asList(AuditablePerson.class, VersionedAuditablePerson.class, SimpleVersionedAuditablePerson.class)); + } + + @Bean + public ReactiveAuditorAware auditorProvider() { + + AuditablePerson person = new AuditablePerson("some-person"); + person.setId("foo"); + + return () -> Mono.just(person); + } + } + + @Test // 
DATAMONGO-2139, DATAMONGO-2150, DATAMONGO-2586 + void auditingWorksForVersionedEntityWithWrapperVersion() { + + verifyAuditingViaVersionProperty(new VersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + auditablePersonRepository::save, // + null, 0L, 1L); + } + + @Test // DATAMONGO-2179 + void auditingWorksForVersionedEntityBatchWithWrapperVersion() { + + verifyAuditingViaVersionProperty(new VersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + s -> auditablePersonRepository.saveAll(Collections.singletonList(s)).next(), // + null, 0L, 1L); + } + + @Test // DATAMONGO-2139, DATAMONGO-2150, DATAMONGO-2586 + void auditingWorksForVersionedEntityWithSimpleVersion() { + + verifyAuditingViaVersionProperty(new SimpleVersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + auditablePersonRepository::save, // + 0L, 1L, 2L); + } + + @Test // DATAMONGO-2139, DATAMONGO-2150, DATAMONGO-2586 + void auditingWorksForVersionedEntityWithWrapperVersionOnTemplate() { + + verifyAuditingViaVersionProperty(new VersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + operations::save, // + null, 0L, 1L); + } + + @Test // DATAMONGO-2139, DATAMONGO-2150, DATAMONGO-2586 + void auditingWorksForVersionedEntityWithSimpleVersionOnTemplate() { + verifyAuditingViaVersionProperty(new SimpleVersionedAuditablePerson(), // + it -> it.version, // + AuditablePerson::getCreatedAt, // + operations::save, // + 0L, 1L, 2L); + } + + @Test // DATAMONGO-2586 + void auditingShouldOnlyRegisterReactiveAuditingCallback() { + + Object callbacks = ReflectionTestUtils.getField(operations, "entityCallbacks"); + Object callbackDiscoverer = ReflectionTestUtils.getField(callbacks, "callbackDiscoverer"); + List> actualCallbacks = ReflectionTestUtils.invokeMethod(callbackDiscoverer, "getEntityCallbacks", + AuditablePerson.class, ResolvableType.forClass(EntityCallback.class)); + + 
assertThat(actualCallbacks) // + .hasAtLeastOneElementOfType(ReactiveAuditingEntityCallback.class) // + .doesNotHaveAnyElementsOfTypes(AuditingEntityCallback.class); + } + + private void verifyAuditingViaVersionProperty(T instance, + Function versionExtractor, Function createdDateExtractor, Function> persister, + Object... expectedValues) { + + AtomicReference instanceHolder = new AtomicReference<>(instance); + MongoPersistentEntity entity = context.getRequiredPersistentEntity(instance.getClass()); + + assertThat(versionExtractor.apply(instance)).isEqualTo(expectedValues[0]); + assertThat(createdDateExtractor.apply(instance)).isNull(); + assertThat(entity.isNew(instance)).isTrue(); + + persister.apply(instanceHolder.get()) // + .as(StepVerifier::create).consumeNextWith(actual -> { + + instanceHolder.set(actual); + + assertThat(versionExtractor.apply(actual)).isEqualTo(expectedValues[1]); + assertThat(createdDateExtractor.apply(instance)).isNotNull(); + assertThat(entity.isNew(actual)).isFalse(); + }).verifyComplete(); + + persister.apply(instanceHolder.get()) // + .as(StepVerifier::create).consumeNextWith(actual -> { + + instanceHolder.set(actual); + + assertThat(versionExtractor.apply(actual)).isEqualTo(expectedValues[2]); + assertThat(entity.isNew(actual)).isFalse(); + }).verifyComplete(); + } + + interface ReactiveAuditablePersonRepository extends ReactiveMongoRepository {} + + static class VersionedAuditablePerson extends AuditablePerson { + @Version Long version; + } + + static class SimpleVersionedAuditablePerson extends AuditablePerson { + @Version long version; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditorUnitTests.java index 4f19637ad5..d9280a416d 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,10 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import com.mongodb.ReadPreference; @@ -32,11 +29,9 @@ */ public class ReadPreferencePropertyEditorUnitTests { - @Rule public ExpectedException expectedException = ExpectedException.none(); - ReadPreferencePropertyEditor editor; - @Before + @BeforeEach public void setUp() { editor = new ReadPreferencePropertyEditor(); } @@ -44,11 +39,8 @@ public void setUp() { @Test // DATAMONGO-1158 public void shouldThrowExceptionOnUndefinedPreferenceString() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("ReadPreference"); - expectedException.expectMessage("foo"); - - editor.setAsText("foo"); + assertThatIllegalArgumentException().isThrownBy(() -> editor.setAsText("foo")).withMessageContaining("foo") + .withMessageContaining("ReadPreference"); } @Test // DATAMONGO-1158 
@@ -56,7 +48,7 @@ public void shouldAllowUsageNativePreferenceStrings() { editor.setAsText("secondary"); - assertThat(editor.getValue(), is((Object) ReadPreference.secondary())); + assertThat(editor.getValue()).isEqualTo((Object) ReadPreference.secondary()); } @Test // DATAMONGO-1158 @@ -64,6 +56,6 @@ public void shouldAllowUsageOfUppcaseEnumStringsForPreferences() { editor.setAsText("NEAREST"); - assertThat(editor.getValue(), is((Object) ReadPreference.nearest())); + assertThat(editor.getValue()).isEqualTo((Object) ReadPreference.nearest()); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditorUnitTests.java index 140b93720a..38210b3f33 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,17 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.Arrays; import java.util.Collection; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.EnabledIfSystemProperty; import com.mongodb.ServerAddress; @@ -39,16 +37,14 @@ */ public class ServerAddressPropertyEditorUnitTests { - @Rule public ExpectedException expectedException = ExpectedException.none(); - ServerAddressPropertyEditor editor; - @Before + @BeforeEach public void setUp() { editor = new ServerAddressPropertyEditor(); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-454, DATAMONGO-1062 + @Test // DATAMONGO-454, DATAMONGO-1062 public void rejectsAddressConfigWithoutASingleParsableAndResolvableServerAddress() { String unknownHost1 = "gugu.nonexistant.example.org"; @@ -56,10 +52,12 @@ public void rejectsAddressConfigWithoutASingleParsableAndResolvableServerAddress assertUnresolveableHostnames(unknownHost1, unknownHost2); - editor.setAsText(unknownHost1 + "," + unknownHost2); + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> editor.setAsText(unknownHost1 + "," + unknownHost2)); } @Test // DATAMONGO-454 + @EnabledIfSystemProperty(named = "user.name", matches = "jenkins") public void skipsUnparsableAddressIfAtLeastOneIsParsable() 
throws UnknownHostException { editor.setAsText("foo, localhost"); @@ -77,7 +75,7 @@ public void handlesEmptyAddressAsParseError() throws UnknownHostException { public void interpretEmptyStringAsNull() { editor.setAsText(""); - assertNull(editor.getValue()); + assertThat(editor.getValue()).isNull(); } @Test // DATAMONGO-808 @@ -124,12 +122,11 @@ public void handleIPv6HostaddressLoopbackLongWithBrackets() throws UnknownHostEx * We can't tell whether the last part of the hostAddress represents a port or not. */ @Test // DATAMONGO-808 - public void shouldFailToHandleAmbiguousIPv6HostaddressLongWithoutPortAndWithoutBrackets() throws UnknownHostException { - - expectedException.expect(IllegalArgumentException.class); + public void shouldFailToHandleAmbiguousIPv6HostaddressLongWithoutPortAndWithoutBrackets() { String hostAddress = "0000:0000:0000:0000:0000:0000:0000:128"; - editor.setAsText(hostAddress); + + assertThatIllegalArgumentException().isThrownBy(() -> editor.setAsText(hostAddress)); } @Test // DATAMONGO-808 @@ -159,13 +156,13 @@ private static void assertSingleAddressOfLocalhost(Object result) throws Unknown private static void assertSingleAddressWithPort(String hostAddress, Integer port, Object result) throws UnknownHostException { - assertThat(result, is(instanceOf(ServerAddress[].class))); + assertThat(result).isInstanceOf(ServerAddress[].class); Collection addresses = Arrays.asList((ServerAddress[]) result); - assertThat(addresses, hasSize(1)); + assertThat(addresses).hasSize(1); if (port == null) { - assertThat(addresses, hasItem(new ServerAddress(InetAddress.getByName(hostAddress)))); + assertThat(addresses).contains(new ServerAddress(InetAddress.getByName(hostAddress))); } else { - assertThat(addresses, hasItem(new ServerAddress(InetAddress.getByName(hostAddress), port))); + assertThat(addresses).contains(new ServerAddress(InetAddress.getByName(hostAddress), port)); } } @@ -173,9 +170,9 @@ private void assertUnresolveableHostnames(String... 
hostnames) { for (String hostname : hostnames) { try { - InetAddress.getByName(hostname); - Assert.fail("Supposedly unresolveable hostname '" + hostname + "' can be resolved."); - } catch (UnknownHostException expected) { + InetAddress.getByName(hostname).isReachable(1500); + fail("Supposedly unresolveable hostname '" + hostname + "' can be resolved."); + } catch (IOException expected) { // ok } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/StringToWriteConcernConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/StringToWriteConcernConverterUnitTests.java index ecbef6c6a0..7f400c80e0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/StringToWriteConcernConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/StringToWriteConcernConverterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,9 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; import com.mongodb.WriteConcern; @@ -26,18 +25,19 @@ * Unit tests for {@link StringToWriteConcernConverter}. 
* * @author Oliver Gierke + * @author Christoph Strobl */ public class StringToWriteConcernConverterUnitTests { StringToWriteConcernConverter converter = new StringToWriteConcernConverter(); - @Test + @Test // DATAMONGO-2199 public void createsWellKnownConstantsCorrectly() { - assertThat(converter.convert("SAFE"), is(WriteConcern.SAFE)); + assertThat(converter.convert("ACKNOWLEDGED")).isEqualTo(WriteConcern.ACKNOWLEDGED); } @Test public void createsWriteConcernForUnknownValue() { - assertThat(converter.convert("-1"), is(new WriteConcern("-1"))); + assertThat(converter.convert("-1")).isEqualTo(new WriteConcern("-1")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditorUnitTests.java index b0452d40c2..2f9732a33a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,10 @@ */ package org.springframework.data.mongodb.config; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import com.mongodb.WriteConcern; @@ -27,27 +26,28 @@ * Unit tests for {@link WriteConcernPropertyEditor}. * * @author Oliver Gierke + * @author Christoph Strobl */ public class WriteConcernPropertyEditorUnitTests { WriteConcernPropertyEditor editor; - @Before + @BeforeEach public void setUp() { editor = new WriteConcernPropertyEditor(); } - @Test + @Test // DATAMONGO-2199 public void createsWriteConcernForWellKnownConstants() { - editor.setAsText("SAFE"); - assertThat(editor.getValue(), is((Object) WriteConcern.SAFE)); + editor.setAsText("JOURNALED"); + assertThat(editor.getValue()).isEqualTo(WriteConcern.JOURNALED); } @Test public void createsWriteConcernForUnknownConstants() { editor.setAsText("-1"); - assertThat(editor.getValue(), is((Object) new WriteConcern("-1"))); + assertThat(editor.getValue()).isEqualTo(new WriteConcern("-1")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/AuditablePerson.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/AuditablePerson.java index d716fccd97..b7bc73a728 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/AuditablePerson.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/AuditablePerson.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. 
+ * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ChangeStreamOptionsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ChangeStreamOptionsUnitTests.java new file mode 100644 index 0000000000..a4192df40a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ChangeStreamOptionsUnitTests.java @@ -0,0 +1,56 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.BsonDocument; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ChangeStreamOptions}. 
+ * + * @author Mark Paluch + */ +public class ChangeStreamOptionsUnitTests { + + @Test // DATAMONGO-2258 + public void shouldReportResumeAfter() { + + ChangeStreamOptions options = ChangeStreamOptions.builder().resumeAfter(new BsonDocument()).build(); + + assertThat(options.isResumeAfter()).isTrue(); + assertThat(options.isStartAfter()).isFalse(); + } + + @Test // DATAMONGO-2258 + public void shouldReportStartAfter() { + + ChangeStreamOptions options = ChangeStreamOptions.builder().startAfter(new BsonDocument()).build(); + + assertThat(options.isResumeAfter()).isFalse(); + assertThat(options.isStartAfter()).isTrue(); + } + + @Test // DATAMONGO-2258 + public void shouldNotReportResumeStartAfter() { + + ChangeStreamOptions options = ChangeStreamOptions.empty(); + + assertThat(options.isResumeAfter()).isFalse(); + assertThat(options.isStartAfter()).isFalse(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ClientSessionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ClientSessionTests.java new file mode 100644 index 0000000000..7fb1cddafb --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ClientSessionTests.java @@ -0,0 +1,273 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.Objects; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.data.mongodb.test.util.ReplSetClient; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoClient; + +/** + * Integration tests for {@link ClientSession} through {@link MongoTemplate#withSession(ClientSession)}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith({ MongoClientExtension.class }) +@EnableIfReplicaSetAvailable +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") +class ClientSessionTests { + + private static final String DB_NAME = "client-session-tests"; + private static final String COLLECTION_NAME = "test"; + private static final String REF_COLLECTION_NAME = "test-with-ref"; + + private static @ReplSetClient MongoClient mongoClient; + + private MongoTemplate template; + + @BeforeEach + void setUp() { + + MongoTestUtils.createOrReplaceCollection(DB_NAME, COLLECTION_NAME, mongoClient); + + template = new MongoTemplate(mongoClient, DB_NAME); + template.getDb().getCollection(COLLECTION_NAME).insertOne(new Document("_id", "id-1").append("value", "spring")); + } + + @Test // DATAMONGO-1880 + void shouldApplyClientSession() { + + ClientSession session = mongoClient.startSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + + assertThat(session.getOperationTime()).isNull(); + + Document doc = template.withSession(() -> session) + .execute(action -> action.findOne(new Query(), Document.class, "test")); + + assertThat(doc).isNotNull(); + assertThat(session.getOperationTime()).isNotNull(); + assertThat(session.getServerSession().isClosed()).isFalse(); + + session.close(); + } + + @Test // DATAMONGO-2241 + void shouldReuseConfiguredInfrastructure() { + + ClientSession session = mongoClient.startSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + + MappingMongoConverter source = MappingMongoConverter.class.cast(template.getConverter()); + MappingMongoConverter sessionTemplateConverter = MappingMongoConverter.class + .cast(template.withSession(() -> session).execute(MongoOperations::getConverter)); + + assertThat(sessionTemplateConverter.getMappingContext()).isSameAs(source.getMappingContext()); + assertThat(ReflectionTestUtils.getField(sessionTemplateConverter, "conversions")) + 
.isSameAs(ReflectionTestUtils.getField(source, "conversions")); + assertThat(ReflectionTestUtils.getField(sessionTemplateConverter, "instantiators")) + .isSameAs(ReflectionTestUtils.getField(source, "instantiators")); + } + + @Test // DATAMONGO-1920 + void withCommittedTransaction() { + + ClientSession session = mongoClient.startSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + + assertThat(session.getOperationTime()).isNull(); + + session.startTransaction(); + + SomeDoc saved = template.withSession(() -> session).execute(action -> { + + SomeDoc doc = new SomeDoc("id-2", "value2"); + action.insert(doc); + return doc; + }); + + session.commitTransaction(); + session.close(); + + assertThat(saved).isNotNull(); + assertThat(session.getOperationTime()).isNotNull(); + + assertThat(template.exists(query(where("id").is(saved.getId())), SomeDoc.class)).isTrue(); + } + + @Test // DATAMONGO-1920 + void withAbortedTransaction() { + + ClientSession session = mongoClient.startSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + + assertThat(session.getOperationTime()).isNull(); + + session.startTransaction(); + + SomeDoc saved = template.withSession(() -> session).execute(action -> { + + SomeDoc doc = new SomeDoc("id-2", "value2"); + action.insert(doc); + return doc; + }); + + session.abortTransaction(); + session.close(); + + assertThat(saved).isNotNull(); + assertThat(session.getOperationTime()).isNotNull(); + + assertThat(template.exists(query(where("id").is(saved.getId())), SomeDoc.class)).isFalse(); + } + + @Test // DATAMONGO-2490 + void shouldBeAbleToReadDbRefDuringTransaction() { + + SomeDoc ref = new SomeDoc("ref-1", "da value"); + WithDbRef source = new WithDbRef("source-1", "da source", ref); + + ClientSession session = mongoClient.startSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + + assertThat(session.getOperationTime()).isNull(); + + session.startTransaction(); + + WithDbRef saved = 
template.withSession(() -> session).execute(action -> { + + template.save(ref); + template.save(source); + + return template.findOne(query(where("id").is(source.id)), WithDbRef.class); + }); + + assertThat(saved.getSomeDocRef()).isEqualTo(ref); + + session.abortTransaction(); + } + + @org.springframework.data.mongodb.core.mapping.Document(COLLECTION_NAME) + static class SomeDoc { + + @Id String id; + String value; + + SomeDoc(String id, String value) { + + this.id = id; + this.value = value; + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SomeDoc someDoc = (SomeDoc) o; + return Objects.equals(id, someDoc.id) && Objects.equals(value, someDoc.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "ClientSessionTests.SomeDoc(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(REF_COLLECTION_NAME) + static class WithDbRef { + + @Id String id; + String value; + @DBRef SomeDoc someDocRef; + + WithDbRef(String id, String value, SomeDoc someDocRef) { + this.id = id; + this.value = value; + this.someDocRef = someDocRef; + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public SomeDoc getSomeDocRef() { + return this.someDocRef; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + public void setSomeDocRef(SomeDoc someDocRef) { + this.someDocRef = someDocRef; + } + + public String toString() { + return "ClientSessionTests.WithDbRef(id=" + this.getId() + 
", value=" + this.getValue() + ", someDocRef=" + + this.getSomeDocRef() + ")"; + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CloseableIterableCursorAdapterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CloseableIterableCursorAdapterUnitTests.java index 328fd36048..a912b04adc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CloseableIterableCursorAdapterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CloseableIterableCursorAdapterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,16 @@ */ package org.springframework.data.mongodb.core; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.mongodb.core.MongoTemplate.CloseableIterableCursorAdapter; import org.springframework.data.mongodb.core.MongoTemplate.DocumentCallback; @@ -35,40 +37,39 @@ * * @author Oliver Gierke */ 
-@RunWith(MockitoJUnitRunner.class) -public class CloseableIterableCursorAdapterUnitTests { +@ExtendWith(MockitoExtension.class) +class CloseableIterableCursorAdapterUnitTests { @Mock PersistenceExceptionTranslator exceptionTranslator; @Mock DocumentCallback callback; - MongoCursor cursor; - CloseableIterator adapter; - - @Before - public void setUp() { + private MongoCursor cursor; + private CloseableIterator adapter; - this.cursor = doThrow(IllegalArgumentException.class).when(mock(MongoCursor.class)); - this.adapter = new CloseableIterableCursorAdapter(cursor, exceptionTranslator, callback); + @BeforeEach + void setUp() { + this.cursor = mock(MongoCursor.class); + this.adapter = new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, callback); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1276 - public void propagatesOriginalExceptionFromAdapterDotNext() { + @Test // DATAMONGO-1276 + void propagatesOriginalExceptionFromAdapterDotNext() { - cursor.next(); - adapter.next(); + doThrow(IllegalArgumentException.class).when(cursor).next(); + assertThatIllegalArgumentException().isThrownBy(() -> adapter.next()); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1276 - public void propagatesOriginalExceptionFromAdapterDotHasNext() { + @Test // DATAMONGO-1276 + void propagatesOriginalExceptionFromAdapterDotHasNext() { - cursor.hasNext(); - adapter.hasNext(); + doThrow(IllegalArgumentException.class).when(cursor).hasNext(); + assertThatIllegalArgumentException().isThrownBy(() -> adapter.hasNext()); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1276 - public void propagatesOriginalExceptionFromAdapterDotClose() { + @Test // DATAMONGO-1276 + void propagatesOriginalExceptionFromAdapterDotClose() { - cursor.close(); - adapter.close(); + doThrow(IllegalArgumentException.class).when(cursor).close(); + assertThatIllegalArgumentException().isThrownBy(() -> adapter.close()); } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollationUnitTests.java index 81d6425984..0d5e22202c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,7 +20,7 @@ import java.util.Locale; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Collation.Alternate; import org.springframework.data.mongodb.core.query.Collation.CaseFirst; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollectionOptionsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollectionOptionsUnitTests.java new file mode 100644 index 0000000000..9de0863cd2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollectionOptionsUnitTests.java @@ -0,0 +1,180 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.springframework.data.mongodb.core.CollectionOptions.EncryptedFieldsOptions; +import static org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions; +import static org.springframework.data.mongodb.core.CollectionOptions.emitChangedRevisions; +import static org.springframework.data.mongodb.core.CollectionOptions.empty; +import static org.springframework.data.mongodb.core.CollectionOptions.encryptedCollection; +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.int32; +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.queryable; + +import java.util.List; + +import org.bson.BsonNull; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.QueryCharacteristics; +import org.springframework.data.mongodb.core.validation.Validator; + +/** + * @author Christoph Strobl + */ +class CollectionOptionsUnitTests { + + @Test // GH-4210 + void emptyEquals() { + assertThat(empty()).isEqualTo(empty()); + } + + @Test // GH-4210 + void collectionProperties() { + assertThat(empty().maxDocuments(10).size(1).disableValidation()) + .isEqualTo(empty().maxDocuments(10).size(1).disableValidation()); + } + + @Test 
// GH-4210 + void changedRevisionsEquals() { + assertThat(emitChangedRevisions()).isNotEqualTo(empty()).isEqualTo(emitChangedRevisions()); + } + + @Test // GH-4210 + void cappedEquals() { + assertThat(empty().capped()).isNotEqualTo(empty()).isEqualTo(empty().capped()); + } + + @Test // GH-4210 + void collationEquals() { + + assertThat(empty().collation(Collation.of("en_US"))) // + .isEqualTo(empty().collation(Collation.of("en_US"))) // + .isNotEqualTo(empty()) // + .isNotEqualTo(empty().collation(Collation.of("de_AT"))); + } + + @Test // GH-4210 + void timeSeriesEquals() { + + assertThat(empty().timeSeries(TimeSeriesOptions.timeSeries("tf"))) // + .isEqualTo(empty().timeSeries(TimeSeriesOptions.timeSeries("tf"))) // + .isNotEqualTo(empty()) // + .isNotEqualTo(empty().timeSeries(TimeSeriesOptions.timeSeries("other"))); + } + + @Test // GH-4210 + void validatorEquals() { + + assertThat(empty().validator(Validator.document(new Document("one", "two")))) // + .isEqualTo(empty().validator(Validator.document(new Document("one", "two")))) // + .isNotEqualTo(empty()) // + .isNotEqualTo(empty().validator(Validator.document(new Document("three", "four")))) + .isNotEqualTo(empty().validator(Validator.document(new Document("one", "two"))).moderateValidation()); + } + + @Test // GH-4185 + @SuppressWarnings("unchecked") + void queryableEncryptionOptionsFromSchemaRenderCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder() + .property(JsonSchemaProperty.object("spring") + .properties(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int32("data")), List.of()))) + .property(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("mongodb")), List.of())).build(); + + EncryptedFieldsOptions encryptionOptions = EncryptedFieldsOptions.fromSchema(schema); + + assertThat(encryptionOptions.toDocument().get("fields", List.class)).hasSize(2) + .contains(new Document("path", "mongodb").append("bsonType", "long").append("queries", List.of()) + .append("keyId", 
BsonNull.VALUE)) + .contains(new Document("path", "spring.data").append("bsonType", "int").append("queries", List.of()) + .append("keyId", BsonNull.VALUE)); + } + + @Test // GH-4185 + @SuppressWarnings("unchecked") + void queryableEncryptionPropertiesOverrideByPath() { + + CollectionOptions collectionOptions = encryptedCollection(options -> options // + .queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int32("spring"))) + .queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("data"))) + + // override first with data type long + .queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("spring")))); + + assertThat(collectionOptions.getEncryptedFieldsOptions()).map(EncryptedFieldsOptions::toDocument) + .hasValueSatisfying(it -> { + assertThat(it.get("fields", List.class)).hasSize(2).contains(new Document("path", "spring") + .append("bsonType", "long").append("queries", List.of()).append("keyId", BsonNull.VALUE)); + }); + } + + @Test // GH-4185 + @SuppressWarnings("unchecked") + void queryableEncryptionPropertiesOverridesPathFromSchema() { + + EncryptedFieldsOptions encryptionOptions = EncryptedFieldsOptions.fromSchema(MongoJsonSchema.builder() + .property(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int32("spring")), List.of())) + .property(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("data")), List.of())).build()); + + // override spring from schema with data type long + CollectionOptions collectionOptions = CollectionOptions.encryptedCollection( + encryptionOptions.queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("spring")))); + + assertThat(collectionOptions.getEncryptedFieldsOptions()).map(EncryptedFieldsOptions::toDocument) + .hasValueSatisfying(it -> { + assertThat(it.get("fields", List.class)).hasSize(2).contains(new Document("path", "spring") + .append("bsonType", "long").append("queries", List.of()).append("keyId", BsonNull.VALUE)); + }); + } + + @Test // GH-4185 + void 
encryptionOptionsAreImmutable() { + + EncryptedFieldsOptions source = EncryptedFieldsOptions + .fromProperties(List.of(queryable(int32("spring.data"), List.of(QueryCharacteristics.range().min(1))))); + + assertThat(source.queryable(queryable(int32("mongodb"), List.of(QueryCharacteristics.range().min(1))))) + .isNotSameAs(source).satisfies(it -> { + assertThat(it.toDocument().get("fields", List.class)).hasSize(2); + }); + + assertThat(source.toDocument().get("fields", List.class)).hasSize(1); + } + + @Test // GH-4185 + @SuppressWarnings("unchecked") + void queryableEncryptionPropertiesOverridesNestedPathFromSchema() { + + EncryptedFieldsOptions encryptionOptions = EncryptedFieldsOptions.fromSchema(MongoJsonSchema.builder() + .property(JsonSchemaProperty.object("spring") + .properties(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int32("data")), List.of()))) + .property(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("mongodb")), List.of())).build()); + + // override spring from schema with data type long + CollectionOptions collectionOptions = CollectionOptions.encryptedCollection( + encryptionOptions.queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("spring.data")))); + + assertThat(collectionOptions.getEncryptedFieldsOptions()).map(EncryptedFieldsOptions::toDocument) + .hasValueSatisfying(it -> { + assertThat(it.get("fields", List.class)).hasSize(2).contains(new Document("path", "spring.data") + .append("bsonType", "long").append("queries", List.of()).append("keyId", BsonNull.VALUE)); + }); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CountQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CountQueryUnitTests.java new file mode 100644 index 0000000000..5be870a295 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CountQueryUnitTests.java @@ -0,0 +1,193 @@ +/* + * Copyright 2019-2025 the 
original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Unit tests for {@link CountQuery}. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + */ +class CountQueryUnitTests { + + private QueryMapper mapper; + private MongoMappingContext context; + private MappingMongoConverter converter; + + @BeforeEach + void setUp() { + + this.context = new MongoMappingContext(); + + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); + this.converter.afterPropertiesSet(); + + this.mapper = new QueryMapper(converter); + } + + @Test // DATAMONGO-2059 + void nearToGeoWithinWithoutDistance() { + + Query source = query(where("location").near(new Point(-73.99171, 40.738868))); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document + .parse("{\"location\": {\"$geoWithin\": {\"$center\": [[-73.99171, 40.738868], 1.7976931348623157E308]}}}")); + } + + @Test // DATAMONGO-2059 + void nearAndExisting$and() { + + Query source = query(where("location").near(new Point(-73.99171, 40.738868)).minDistance(0.01)) + .addCriteria(new Criteria().andOperator(where("foo").is("bar"))); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document.parse("{\"$and\":[" // + + "{\"foo\":\"bar\"}" // + + "{\"$nor\":[{\"location\":{\"$geoWithin\":{\"$center\":[ [ -73.99171, 40.738868 ], 0.01]}}}]},"// + + " {\"location\":{\"$geoWithin\":{\"$center\":[ [ -73.99171, 40.738868 ], 1.7976931348623157E308]}}},"// + + "]}")); + } + + @Test // DATAMONGO-2059 + void nearSphereToGeoWithinWithoutDistance() { + + Query source = query(where("location").nearSphere(new Point(-73.99171, 40.738868))); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"location\": {\"$geoWithin\": {\"$centerSphere\": [[-73.99171, 40.738868], 1.7976931348623157E308]}}}")); + } + + @Test // DATAMONGO-2059 + void nearToGeoWithinWithMaxDistance() { + + Query source = query(where("location").near(new Point(-73.99171, 
40.738868)).maxDistance(10)); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo( + org.bson.Document.parse("{\"location\": {\"$geoWithin\": {\"$center\": [[-73.99171, 40.738868], 10.0]}}}")); + } + + @Test // DATAMONGO-2059 + void nearSphereToGeoWithinWithMaxDistance() { + + Query source = query(where("location").nearSphere(new Point(-73.99171, 40.738868)).maxDistance(10)); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document + .parse("{\"location\": {\"$geoWithin\": {\"$centerSphere\": [[-73.99171, 40.738868], 10.0]}}}")); + } + + @Test // DATAMONGO-2059 + void nearToGeoWithinWithMinDistance() { + + Query source = query(where("location").near(new Point(-73.99171, 40.738868)).minDistance(0.01)); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"$and\":[{\"$nor\":[{\"location\":{\"$geoWithin\":{\"$center\":[ [ -73.99171, 40.738868 ], 0.01]}}}]}," + + " {\"location\":{\"$geoWithin\":{\"$center\":[ [ -73.99171, 40.738868 ], 1.7976931348623157E308]}}}]}")); + } + + @Test // DATAMONGO-2059 + void nearToGeoWithinWithMaxDistanceAndCombinedWithOtherCriteria() { + + Query source = query( + where("name").is("food").and("location").near(new Point(-73.99171, 40.738868)).maxDistance(10)); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document + .parse("{\"name\": \"food\", \"location\": {\"$geoWithin\": {\"$center\": [[-73.99171, 40.738868], 10.0]}}}")); + } + + @Test // DATAMONGO-2059 + void nearToGeoWithinWithMinDistanceOrCombinedWithOtherCriteria() { + + Query source = query(new Criteria().orOperator(where("name").is("food"), + where("location").near(new Point(-73.99171, 40.738868)).minDistance(0.01))); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"$or\" : 
[ { \"name\": \"food\" }, {\"$and\":[{\"$nor\":[{\"location\":{\"$geoWithin\":{\"$center\":[ [ -73.99171, 40.738868 ], 0.01]}}}]},{\"location\":{\"$geoWithin\":{\"$center\":[ [ -73.99171, 40.738868 ], 1.7976931348623157E308]}}}]} ]}")); + } + + @Test // DATAMONGO-2059 + void nearToGeoWithinWithMaxDistanceOrCombinedWithOtherCriteria() { + + Query source = query(new Criteria().orOperator(where("name").is("food"), + where("location").near(new Point(-73.99171, 40.738868)).maxDistance(10))); + org.bson.Document target = postProcessQueryForCount(source); + + assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"$or\" : [ { \"name\": \"food\" }, {\"location\": {\"$geoWithin\": {\"$center\": [[-73.99171, 40.738868], 10.0]}}} ]}")); + } + + @Test // GH-4004 + void nearToGeoWithinWithMaxDistanceUsingGeoJsonSource() { + + Query source = query(new Criteria().orOperator(where("name").is("food"), + where("location").near(new GeoJsonPoint(-73.99171, 40.738868)).maxDistance(10))); + + org.bson.Document target = postProcessQueryForCount(source); + assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"$or\" : [ { \"name\": \"food\" }, {\"location\": {\"$geoWithin\": {\"$center\": [[-73.99171, 40.738868], 10.0]}}} ]}")); + } + + @Test // GH-4004 + void nearSphereToGeoWithinWithoutMaxDistanceUsingGeoJsonSource() { + + Query source = query(new Criteria().orOperator(where("name").is("food"), + where("location").nearSphere(new GeoJsonPoint(-73.99171, 40.738868)))); + + org.bson.Document target = postProcessQueryForCount(source); + assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"$or\" : [ { \"name\": \"food\" }, {\"location\": {\"$geoWithin\": {\"$centerSphere\": [[-73.99171, 40.738868], 1.7976931348623157E308]}}} ]}")); + } + + @Test // GH-4004 + void nearSphereToGeoWithinWithMaxDistanceUsingGeoJsonSource() { + + Query source = query(new Criteria().orOperator(where("name").is("food"), where("location") + .nearSphere(new GeoJsonPoint(-73.99171, 
40.738868)).maxDistance/*in meters for geojson*/(10d))); + + org.bson.Document target = postProcessQueryForCount(source); + assertThat(target).isEqualTo(org.bson.Document.parse( + "{\"$or\" : [ { \"name\": \"food\" }, {\"location\": {\"$geoWithin\": {\"$centerSphere\": [[-73.99171, 40.738868], 1.567855942887398E-6]}}} ]}")); + } + + private org.bson.Document postProcessQueryForCount(Query source) { + + org.bson.Document intermediate = mapper.getMappedObject(source.getQueryObject(), (MongoPersistentEntity) null); + return CountQuery.of(intermediate).toQueryDocument(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsIntegrationTests.java index 3ad96d7e20..f0e7eb67b0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,44 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.domain.Sort.Direction.DESC; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Optional; +import java.util.stream.Stream; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.BulkOperationException; import org.springframework.data.mongodb.core.BulkOperations.BulkMode; import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import 
org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; import org.springframework.data.util.Pair; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import com.mongodb.MongoBulkWriteException; import com.mongodb.WriteConcern; +import com.mongodb.bulk.BulkWriteResult; import com.mongodb.client.MongoCollection; /** @@ -50,40 +61,37 @@ * @author Tobias Trelle * @author Oliver Gierke * @author Christoph Strobl + * @author Minsu Kim */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") +@ExtendWith(MongoTemplateExtension.class) public class DefaultBulkOperationsIntegrationTests { static final String COLLECTION_NAME = "bulk_ops"; - @Autowired MongoOperations operations; + @Template(initialEntitySet = BaseDoc.class) // + static MongoTestTemplate operations; - MongoCollection collection; - - @Before + @BeforeEach public void setUp() { - - this.collection = this.operations.getCollection(COLLECTION_NAME); - this.collection.deleteMany(new Document()); + operations.flush(COLLECTION_NAME); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-934 + @Test // DATAMONGO-934 public void rejectsNullMongoOperations() { - new DefaultBulkOperations(null, COLLECTION_NAME, - new BulkOperationContext(BulkMode.ORDERED, Optional.empty(), null, null)); - + assertThatIllegalArgumentException().isThrownBy(() -> new DefaultBulkOperations(null, COLLECTION_NAME, + new BulkOperationContext(BulkMode.ORDERED, Optional.empty(), null, null, null, null))); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-934 + @Test // DATAMONGO-934 public void 
rejectsNullCollectionName() { - new DefaultBulkOperations(operations, null, - new BulkOperationContext(BulkMode.ORDERED, Optional.empty(), null, null)); + assertThatIllegalArgumentException().isThrownBy(() -> new DefaultBulkOperations(operations, null, + new BulkOperationContext(BulkMode.ORDERED, Optional.empty(), null, null, null, null))); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-934 + @Test // DATAMONGO-934 public void rejectsEmptyCollectionName() { - new DefaultBulkOperations(operations, "", new BulkOperationContext(BulkMode.ORDERED, Optional.empty(), null, null)); + assertThatIllegalArgumentException().isThrownBy(() -> new DefaultBulkOperations(operations, "", + new BulkOperationContext(BulkMode.ORDERED, Optional.empty(), null, null, null, null))); } @Test // DATAMONGO-934 @@ -91,22 +99,25 @@ public void insertOrdered() { List documents = Arrays.asList(newDoc("1"), newDoc("2")); - assertThat(createBulkOps(BulkMode.ORDERED).insert(documents).execute().getInsertedCount(), is(2)); + assertThat(createBulkOps(BulkMode.ORDERED).insert(documents).execute().getInsertedCount()).isEqualTo(2); } - @Test // DATAMONGO-934 + @Test // DATAMONGO-934, DATAMONGO-2285 public void insertOrderedFails() { List documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2")); - try { - createBulkOps(BulkMode.ORDERED).insert(documents).execute(); - fail(); - } catch (MongoBulkWriteException e) { - assertThat(e.getWriteResult().getInsertedCount(), is(1)); // fails after first error - assertThat(e.getWriteErrors(), notNullValue()); - assertThat(e.getWriteErrors().size(), is(1)); - } + assertThatThrownBy(() -> createBulkOps(BulkMode.ORDERED).insert(documents).execute()) // + .isInstanceOf(BulkOperationException.class) // + .hasCauseInstanceOf(MongoBulkWriteException.class) // + .extracting(Throwable::getCause) // + .satisfies(it -> { + + MongoBulkWriteException ex = (MongoBulkWriteException) it; + assertThat(ex.getWriteResult().getInsertedCount()).isOne(); + 
assertThat(ex.getWriteErrors()).isNotNull(); + assertThat(ex.getWriteErrors().size()).isOne(); + }); } @Test // DATAMONGO-934 @@ -114,53 +125,58 @@ public void insertUnOrdered() { List documents = Arrays.asList(newDoc("1"), newDoc("2")); - assertThat(createBulkOps(BulkMode.UNORDERED).insert(documents).execute().getInsertedCount(), is(2)); + assertThat(createBulkOps(BulkMode.UNORDERED).insert(documents).execute().getInsertedCount()).isEqualTo(2); } - @Test // DATAMONGO-934 + @Test // DATAMONGO-934, DATAMONGO-2285 public void insertUnOrderedContinuesOnError() { List documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2")); - try { - createBulkOps(BulkMode.UNORDERED).insert(documents).execute(); - fail(); - } catch (MongoBulkWriteException e) { - assertThat(e.getWriteResult().getInsertedCount(), is(2)); // two docs were inserted - assertThat(e.getWriteErrors(), notNullValue()); - assertThat(e.getWriteErrors().size(), is(1)); - } + assertThatThrownBy(() -> createBulkOps(BulkMode.UNORDERED).insert(documents).execute()) // + .isInstanceOf(BulkOperationException.class) // + .hasCauseInstanceOf(MongoBulkWriteException.class) // + .extracting(Throwable::getCause) // + .satisfies(it -> { + + MongoBulkWriteException ex = (MongoBulkWriteException) it; + assertThat(ex.getWriteResult().getInsertedCount()).isEqualTo(2); + assertThat(ex.getWriteErrors()).isNotNull(); + assertThat(ex.getWriteErrors().size()).isOne(); + }); } - @Test // DATAMONGO-934 - public void upsertDoesUpdate() { + @ParameterizedTest // DATAMONGO-934, GH-3872 + @MethodSource("upsertArguments") + void upsertDoesUpdate(UpdateDefinition update) { insertSomeDocuments(); com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED).// - upsert(where("value", "value1"), set("value", "value2")).// + upsert(where("value", "value1"), update).// execute(); - assertThat(result, notNullValue()); - assertThat(result.getMatchedCount(), is(2)); - assertThat(result.getModifiedCount(), is(2)); - 
assertThat(result.getInsertedCount(), is(0)); - assertThat(result.getUpserts(), is(notNullValue())); - assertThat(result.getUpserts().size(), is(0)); + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isEqualTo(2); + assertThat(result.getModifiedCount()).isEqualTo(2); + assertThat(result.getInsertedCount()).isZero(); + assertThat(result.getUpserts()).isNotNull(); + assertThat(result.getUpserts().size()).isZero(); } - @Test // DATAMONGO-934 - public void upsertDoesInsert() { + @ParameterizedTest // DATAMONGO-934, GH-3872 + @MethodSource("upsertArguments") + void upsertDoesInsert(UpdateDefinition update) { com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED).// - upsert(where("_id", "1"), set("value", "v1")).// + upsert(where("_id", "1"), update).// execute(); - assertThat(result, notNullValue()); - assertThat(result.getMatchedCount(), is(0)); - assertThat(result.getModifiedCount(), is(0)); - assertThat(result.getUpserts(), is(notNullValue())); - assertThat(result.getUpserts().size(), is(1)); + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isZero(); + assertThat(result.getModifiedCount()).isZero(); + assertThat(result.getUpserts()).isNotNull(); + assertThat(result.getUpserts().size()).isOne(); } @Test // DATAMONGO-934 @@ -168,11 +184,37 @@ public void updateOneOrdered() { testUpdate(BulkMode.ORDERED, false, 2); } + @Test // GH-3872 + public void updateOneWithAggregation() { + + insertSomeDocuments(); + + BulkOperations bulkOps = createBulkOps(BulkMode.ORDERED); + bulkOps.updateOne(where("value", "value1"), AggregationUpdate.update().set("value").toValue("value3")); + BulkWriteResult result = bulkOps.execute(); + + assertThat(result.getModifiedCount()).isEqualTo(1); + assertThat(operations.execute(COLLECTION_NAME, collection -> collection.countDocuments(new org.bson.Document("value", "value3")))).isOne(); + } + @Test // DATAMONGO-934 public void updateMultiOrdered() { testUpdate(BulkMode.ORDERED, 
true, 4); } + @Test // GH-3872 + public void updateMultiWithAggregation() { + + insertSomeDocuments(); + + BulkOperations bulkOps = createBulkOps(BulkMode.ORDERED); + bulkOps.updateMulti(where("value", "value1"), AggregationUpdate.update().set("value").toValue("value3")); + BulkWriteResult result = bulkOps.execute(); + + assertThat(result.getModifiedCount()).isEqualTo(2); + assertThat(operations.execute(COLLECTION_NAME, collection -> collection.countDocuments(new org.bson.Document("value", "value3")))).isEqualTo(2); + } + @Test // DATAMONGO-934 public void updateOneUnOrdered() { testUpdate(BulkMode.UNORDERED, false, 2); @@ -193,6 +235,45 @@ public void removeUnordered() { testRemove(BulkMode.UNORDERED); } + @Test // DATAMONGO-2218 + public void replaceOneOrdered() { + testReplaceOne(BulkMode.ORDERED); + } + + @Test // DATAMONGO-2218 + public void replaceOneUnordered() { + testReplaceOne(BulkMode.UNORDERED); + } + + @Test // DATAMONGO-2218 + public void replaceOneDoesReplace() { + + insertSomeDocuments(); + + com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED).// + replaceOne(where("_id", "1"), rawDoc("1", "value2")).// + execute(); + + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isOne(); + assertThat(result.getModifiedCount()).isOne(); + assertThat(result.getInsertedCount()).isZero(); + } + + @Test // DATAMONGO-2218 + public void replaceOneWithUpsert() { + + com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED).// + replaceOne(where("_id", "1"), rawDoc("1", "value2"), FindAndReplaceOptions.options().upsert()).// + execute(); + + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isZero(); + assertThat(result.getInsertedCount()).isZero(); + assertThat(result.getModifiedCount()).isZero(); + assertThat(result.getUpserts().size()).isOne(); + } + /** * If working on the same set of documents, only an ordered bulk operation will yield predictable results. 
*/ @@ -204,10 +285,10 @@ public void mixedBulkOrdered() { remove(where("value", "v2")).// execute(); - assertThat(result, notNullValue()); - assertThat(result.getInsertedCount(), is(1)); - assertThat(result.getModifiedCount(), is(1)); - assertThat(result.getDeletedCount(), is(1)); + assertThat(result).isNotNull(); + assertThat(result.getInsertedCount()).isOne(); + assertThat(result.getModifiedCount()).isOne(); + assertThat(result.getDeletedCount()).isOne(); } /** @@ -218,16 +299,16 @@ public void mixedBulkOrdered() { public void mixedBulkOrderedWithList() { List inserts = Arrays.asList(newDoc("1", "v1"), newDoc("2", "v2"), newDoc("3", "v2")); - List> updates = Arrays.asList(Pair.of(where("value", "v2"), set("value", "v3"))); + List> updates = Arrays.asList(Pair.of(where("value", "v2"), set("value", "v3"))); List removes = Arrays.asList(where("_id", "1")); com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED, BaseDoc.class).insert(inserts) .updateMulti(updates).remove(removes).execute(); - assertThat(result, notNullValue()); - assertThat(result.getInsertedCount(), is(3)); - assertThat(result.getModifiedCount(), is(2)); - assertThat(result.getDeletedCount(), is(1)); + assertThat(result).isNotNull(); + assertThat(result.getInsertedCount()).isEqualTo(3); + assertThat(result.getModifiedCount()).isEqualTo(2); + assertThat(result.getDeletedCount()).isOne(); } @Test // DATAMONGO-1534 @@ -242,8 +323,41 @@ public void insertShouldConsiderInheritance() { BaseDoc doc = operations.findOne(where("_id", specialDoc.id), BaseDoc.class, COLLECTION_NAME); - assertThat(doc, notNullValue()); - assertThat(doc, instanceOf(SpecialDoc.class)); + assertThat(doc).isNotNull(); + assertThat(doc).isInstanceOf(SpecialDoc.class); + } + + @Test // GH-4797 + @EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") + public void updateShouldConsiderSorting() { + + insertSomeDocuments(); + + BulkWriteResult result = createBulkOps(BulkMode.ORDERED, BaseDocWithRenamedField.class) + 
.updateOne(new Query().with(Sort.by(DESC, "renamedField")), new Update().set("bsky", "altnps")).execute(); + + assertThat(result.getModifiedCount()).isOne(); + + Document raw = operations.execute(COLLECTION_NAME, col -> col.find(new Document("_id", "4")).first()); + assertThat(raw).containsEntry("bsky", "altnps"); + } + + @Test // GH-4797 + @EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") + public void replaceShouldConsiderSorting() { + + insertSomeDocuments(); + + BaseDocWithRenamedField target = new BaseDocWithRenamedField(); + target.value = "replacement"; + + BulkWriteResult result = createBulkOps(BulkMode.ORDERED, BaseDocWithRenamedField.class) + .replaceOne(new Query().with(Sort.by(DESC, "renamedField")), target).execute(); + + assertThat(result.getModifiedCount()).isOne(); + + Document raw = operations.execute(COLLECTION_NAME, col -> col.find(new Document("_id", "4")).first()); + assertThat(raw).containsEntry("value", target.value); } private void testUpdate(BulkMode mode, boolean multi, int expectedUpdates) { @@ -252,14 +366,14 @@ private void testUpdate(BulkMode mode, boolean multi, int expectedUpdates) { insertSomeDocuments(); - List> updates = new ArrayList>(); + List> updates = new ArrayList<>(); updates.add(Pair.of(where("value", "value1"), set("value", "value3"))); updates.add(Pair.of(where("value", "value2"), set("value", "value4"))); int modifiedCount = multi ? 
bulkOps.updateMulti(updates).execute().getModifiedCount() : bulkOps.updateOne(updates).execute().getModifiedCount(); - assertThat(modifiedCount, is(expectedUpdates)); + assertThat(modifiedCount).isEqualTo(expectedUpdates); } private void testRemove(BulkMode mode) { @@ -268,7 +382,20 @@ private void testRemove(BulkMode mode) { List removes = Arrays.asList(where("_id", "1"), where("value", "value2")); - assertThat(createBulkOps(mode).remove(removes).execute().getDeletedCount(), is(3)); + assertThat(createBulkOps(mode).remove(removes).execute().getDeletedCount()).isEqualTo(3); + } + + private void testReplaceOne(BulkMode mode) { + + BulkOperations bulkOps = createBulkOps(mode); + + insertSomeDocuments(); + + Query query = where("_id", "1"); + Document document = rawDoc("1", "value2"); + int modifiedCount = bulkOps.replaceOne(query, document).execute().getModifiedCount(); + + assertThat(modifiedCount).isOne(); } private BulkOperations createBulkOps(BulkMode mode) { @@ -278,10 +405,11 @@ private BulkOperations createBulkOps(BulkMode mode) { private BulkOperations createBulkOps(BulkMode mode, Class entityType) { Optional> entity = entityType != null - ? Optional.of(operations.getConverter().getMappingContext().getPersistentEntity(entityType)) : Optional.empty(); + ? 
Optional.of(operations.getConverter().getMappingContext().getPersistentEntity(entityType)) + : Optional.empty(); BulkOperationContext bulkOperationContext = new BulkOperationContext(mode, entity, - new QueryMapper(operations.getConverter()), new UpdateMapper(operations.getConverter())); + new QueryMapper(operations.getConverter()), new UpdateMapper(operations.getConverter()), null, null); DefaultBulkOperations bulkOps = new DefaultBulkOperations(operations, COLLECTION_NAME, bulkOperationContext); bulkOps.setDefaultWriteConcern(WriteConcern.ACKNOWLEDGED); @@ -293,10 +421,14 @@ private void insertSomeDocuments() { final MongoCollection coll = operations.getCollection(COLLECTION_NAME); - coll.insertOne(rawDoc("1", "value1")); - coll.insertOne(rawDoc("2", "value1")); - coll.insertOne(rawDoc("3", "value2")); - coll.insertOne(rawDoc("4", "value2")); + coll.insertOne(rawDoc("1", "value1").append("rn_f", "001")); + coll.insertOne(rawDoc("2", "value1").append("rn_f", "002")); + coll.insertOne(rawDoc("3", "value2").append("rn_f", "003")); + coll.insertOne(rawDoc("4", "value2").append("rn_f", "004")); + } + + private static Stream upsertArguments() { + return Stream.of(Arguments.of(set("value", "value2")), Arguments.of(AggregationUpdate.update().set("value").toValue("value2"))); } private static BaseDoc newDoc(String id) { @@ -326,4 +458,10 @@ private static Update set(String field, String value) { private static Document rawDoc(String id, String value) { return new Document("_id", id).append("value", value); } + + static class BaseDocWithRenamedField extends BaseDoc { + + @Field("rn_f") + String renamedField; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsUnitTests.java index 9f36332ed7..6bdcb132f9 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultBulkOperationsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,24 +16,34 @@ package org.springframework.data.mongodb.core; import static org.assertj.core.api.Assertions.*; -import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; -import static org.mockito.Mockito.any; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; +import java.util.Collections; import java.util.List; import java.util.Optional; +import org.bson.BsonDocument; +import org.bson.BsonString; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Answers; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.DataIntegrityViolationException; +import 
org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.mongodb.BulkOperationException; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.BulkOperations.BulkMode; import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext; import org.springframework.data.mongodb.core.convert.DbRefResolver; @@ -43,12 +53,30 @@ import org.springframework.data.mongodb.core.convert.UpdateMapper; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Update; +import com.mongodb.MongoBulkWriteException; +import com.mongodb.MongoWriteException; +import com.mongodb.ServerAddress; +import com.mongodb.WriteConcern; +import com.mongodb.WriteError; +import com.mongodb.bulk.BulkWriteError; +import com.mongodb.bulk.WriteConcernError; import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.BulkWriteOptions; import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.InsertOneModel; +import 
com.mongodb.client.model.ReplaceOneModel; import com.mongodb.client.model.UpdateManyModel; import com.mongodb.client.model.UpdateOneModel; import com.mongodb.client.model.WriteModel; @@ -58,37 +86,46 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Minsu Kim + * @author Jens Schauder + * @author Roman Puchkovskiy + * @author Jacob Botuck */ -@RunWith(MockitoJUnitRunner.class) -public class DefaultBulkOperationsUnitTests { +@ExtendWith(MockitoExtension.class) +class DefaultBulkOperationsUnitTests { - @Mock MongoTemplate template; - @Mock MongoCollection collection; + private MongoTemplate template; + @Mock MongoDatabase database; + @Mock(answer = Answers.RETURNS_DEEP_STUBS) MongoCollection collection; + @Mock MongoDatabaseFactory factory; @Mock DbRefResolver dbRefResolver; @Captor ArgumentCaptor>> captor; - MongoConverter converter; - MongoMappingContext mappingContext; + private MongoConverter converter; + private MongoMappingContext mappingContext; - DefaultBulkOperations ops; + private DefaultBulkOperations ops; - @Before - public void setUp() { + @BeforeEach + void setUp() { + + when(factory.getMongoDatabase()).thenReturn(database); + when(factory.getExceptionTranslator()).thenReturn(new NullExceptionTranslator()); + when(database.getCollection(anyString(), eq(Document.class))).thenReturn(collection); mappingContext = new MongoMappingContext(); mappingContext.afterPropertiesSet(); converter = new MappingMongoConverter(dbRefResolver, mappingContext); - - when(template.getCollection(anyString())).thenReturn(collection); + template = new MongoTemplate(factory, converter); ops = new DefaultBulkOperations(template, "collection-1", new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(SomeDomainType.class)), new QueryMapper(converter), - new UpdateMapper(converter))); + new UpdateMapper(converter), null, null)); } @Test // DATAMONGO-1518 - public void updateOneShouldUseCollationWhenPresent() { + void 
updateOneShouldUseCollationWhenPresent() { ops.updateOne(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen")) .execute(); @@ -101,7 +138,7 @@ public void updateOneShouldUseCollationWhenPresent() { } @Test // DATAMONGO-1518 - public void updateManyShouldUseCollationWhenPresent() { + void updateManyShouldUseCollationWhenPresent() { ops.updateMulti(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen")) .execute(); @@ -114,7 +151,7 @@ public void updateManyShouldUseCollationWhenPresent() { } @Test // DATAMONGO-1518 - public void removeShouldUseCollationWhenPresent() { + void removeShouldUseCollationWhenPresent() { ops.remove(new BasicQuery("{}").collation(Collation.of("de"))).execute(); @@ -125,8 +162,20 @@ public void removeShouldUseCollationWhenPresent() { .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build()); } + @Test // DATAMONGO-2218 + void replaceOneShouldUseCollationWhenPresent() { + + ops.replaceOne(new BasicQuery("{}").collation(Collation.of("de")), new SomeDomainType()).execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + assertThat(captor.getValue().get(0)).isInstanceOf(ReplaceOneModel.class); + assertThat(((ReplaceOneModel) captor.getValue().get(0)).getReplaceOptions().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build()); + } + @Test // DATAMONGO-1678 - public void bulkUpdateShouldMapQueryAndUpdateCorrectly() { + void bulkUpdateShouldMapQueryAndUpdateCorrectly() { ops.updateOne(query(where("firstName").is("danerys")), Update.update("firstName", "queen danerys")).execute(); @@ -138,7 +187,7 @@ public void bulkUpdateShouldMapQueryAndUpdateCorrectly() { } @Test // DATAMONGO-1678 - public void bulkRemoveShouldMapQueryCorrectly() { + void bulkRemoveShouldMapQueryCorrectly() { ops.remove(query(where("firstName").is("danerys"))).execute(); @@ -148,6 +197,222 @@ public void 
bulkRemoveShouldMapQueryCorrectly() { assertThat(updateModel.getFilter()).isEqualTo(new Document("first_name", "danerys")); } + @Test // DATAMONGO-2218 + void bulkReplaceOneShouldMapQueryCorrectly() { + + SomeDomainType replacement = new SomeDomainType(); + replacement.firstName = "Minsu"; + replacement.lastName = "Kim"; + + ops.replaceOne(query(where("firstName").is("danerys")), replacement).execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + ReplaceOneModel updateModel = (ReplaceOneModel) captor.getValue().get(0); + assertThat(updateModel.getFilter()).isEqualTo(new Document("first_name", "danerys")); + assertThat(updateModel.getReplacement().getString("first_name")).isEqualTo("Minsu"); + assertThat(updateModel.getReplacement().getString("lastName")).isEqualTo("Kim"); + } + + @Test // DATAMONGO-2261, DATAMONGO-2479 + void bulkInsertInvokesEntityCallbacks() { + + BeforeConvertPersonCallback beforeConvertCallback = spy(new BeforeConvertPersonCallback()); + BeforeSavePersonCallback beforeSaveCallback = spy(new BeforeSavePersonCallback()); + AfterSavePersonCallback afterSaveCallback = spy(new AfterSavePersonCallback()); + + ops = new DefaultBulkOperations(template, "collection-1", + new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(Person.class)), + new QueryMapper(converter), new UpdateMapper(converter), null, + EntityCallbacks.create(beforeConvertCallback, beforeSaveCallback, afterSaveCallback))); + + Person entity = new Person("init"); + ops.insert(entity); + + ArgumentCaptor personArgumentCaptor = ArgumentCaptor.forClass(Person.class); + verify(beforeConvertCallback).onBeforeConvert(personArgumentCaptor.capture(), eq("collection-1")); + verifyNoInteractions(beforeSaveCallback); + + ops.execute(); + + verify(beforeSaveCallback).onBeforeSave(personArgumentCaptor.capture(), any(), eq("collection-1")); + verify(afterSaveCallback).onAfterSave(personArgumentCaptor.capture(), any(), eq("collection-1")); + 
assertThat(personArgumentCaptor.getAllValues()).extracting("firstName").containsExactly("init", "before-convert", + "before-convert"); + verify(collection).bulkWrite(captor.capture(), any()); + + InsertOneModel updateModel = (InsertOneModel) captor.getValue().get(0); + assertThat(updateModel.getDocument()).containsEntry("firstName", "after-save"); + } + + @Test // DATAMONGO-2290 + void bulkReplaceOneEmitsEventsCorrectly() { + + ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class); + + ops = new DefaultBulkOperations(template, "collection-1", + new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(Person.class)), + new QueryMapper(converter), new UpdateMapper(converter), eventPublisher, null)); + + ops.replaceOne(query(where("firstName").is("danerys")), new SomeDomainType()); + + verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class)); + verify(eventPublisher, never()).publishEvent(any(BeforeSaveEvent.class)); + verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class)); + + ops.execute(); + + verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class)); + verify(eventPublisher).publishEvent(any(AfterSaveEvent.class)); + } + + @Test // DATAMONGO-2290 + void bulkInsertEmitsEventsCorrectly() { + + ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class); + + ops = new DefaultBulkOperations(template, "collection-1", + new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(Person.class)), + new QueryMapper(converter), new UpdateMapper(converter), eventPublisher, null)); + + ops.insert(new SomeDomainType()); + + verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class)); + verify(eventPublisher, never()).publishEvent(any(BeforeSaveEvent.class)); + verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class)); + + ops.execute(); + + verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class)); + 
verify(eventPublisher).publishEvent(any(AfterSaveEvent.class)); + } + + @Test // DATAMONGO-2290 + void noAfterSaveEventOnFailure() { + + ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class); + when(collection.bulkWrite(anyList(), any(BulkWriteOptions.class))).thenThrow(new MongoWriteException( + new WriteError(89, "NetworkTimeout", new BsonDocument("hi", new BsonString("there"))), null)); + + ops = new DefaultBulkOperations(template, "collection-1", + new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(Person.class)), + new QueryMapper(converter), new UpdateMapper(converter), eventPublisher, null)); + + ops.insert(new SomeDomainType()); + + verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class)); + + try { + ops.execute(); + fail("Missing MongoWriteException"); + } catch (MongoWriteException expected) { + + } + + verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class)); + } + + @Test // DATAMONGO-2330 + void writeConcernNotAppliedWhenNotSet() { + + ops.updateOne(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen")) + .execute(); + + verify(collection, never()).withWriteConcern(any()); + } + + @Test // DATAMONGO-2330 + void writeConcernAppliedCorrectlyWhenSet() { + + ops.setDefaultWriteConcern(WriteConcern.MAJORITY); + + ops.updateOne(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen")) + .execute(); + + verify(collection).withWriteConcern(eq(WriteConcern.MAJORITY)); + } + + @Test // DATAMONGO-2450 + void appliesArrayFilterWhenPresent() { + + ops.updateOne(new BasicQuery("{}"), new Update().filterArray(Criteria.where("element").gte(100))).execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + UpdateOneModel updateModel = (UpdateOneModel) captor.getValue().get(0); + assertThat(updateModel.getOptions().getArrayFilters().get(0)) + .isEqualTo(new org.bson.Document("element", new 
Document("$gte", 100))); + } + + @Test // DATAMONGO-2502 + void shouldRetainNestedArrayPathWithPlaceholdersForNoMatchingPaths() { + + ops.updateOne(new BasicQuery("{}"), new Update().set("items.$.documents.0.fileId", "new-id")).execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + UpdateOneModel updateModel = (UpdateOneModel) captor.getValue().get(0); + assertThat(updateModel.getUpdate()) + .isEqualTo(new Document("$set", new Document("items.$.documents.0.fileId", "new-id"))); + } + + @Test // DATAMONGO-2502 + void shouldRetainNestedArrayPathWithPlaceholdersForMappedEntity() { + + DefaultBulkOperations ops = new DefaultBulkOperations(template, "collection-1", + new BulkOperationContext(BulkMode.ORDERED, Optional.of(mappingContext.getPersistentEntity(OrderTest.class)), + new QueryMapper(converter), new UpdateMapper(converter), null, null)); + + ops.updateOne(new BasicQuery("{}"), Update.update("items.$.documents.0.fileId", "file-id")).execute(); + + verify(collection).bulkWrite(captor.capture(), any()); + + UpdateOneModel updateModel = (UpdateOneModel) captor.getValue().get(0); + assertThat(updateModel.getUpdate()) + .isEqualTo(new Document("$set", new Document("items.$.documents.0.the_file_id", "file-id"))); + } + + @Test // DATAMONGO-2285 + public void translateMongoBulkOperationExceptionWithWriteConcernError() { + + when(collection.bulkWrite(anyList(), any(BulkWriteOptions.class))).thenThrow(new MongoBulkWriteException(null, + Collections.emptyList(), + new WriteConcernError(42, "codename", "writeconcern error happened", new BsonDocument()), new ServerAddress(), Collections.emptySet())); + + assertThatExceptionOfType(DataIntegrityViolationException.class) + .isThrownBy(() -> ops.insert(new SomeDomainType()).execute()); + + } + + @Test // DATAMONGO-2285 + public void translateMongoBulkOperationExceptionWithoutWriteConcernError() { + + when(collection.bulkWrite(anyList(), any(BulkWriteOptions.class))).thenThrow(new MongoBulkWriteException(null, 
+ Collections.singletonList(new BulkWriteError(42, "a write error happened", new BsonDocument(), 49)), null, + new ServerAddress(), Collections.emptySet())); + + assertThatExceptionOfType(BulkOperationException.class) + .isThrownBy(() -> ops.insert(new SomeDomainType()).execute()); + } + + static class OrderTest { + + String id; + List items; + } + + static class OrderTestItem { + + private String cartId; + private List documents; + } + + static class OrderTestDocument { + + @Field("the_file_id") + private String fileId; + } + class SomeDomainType { @Id String id; @@ -159,4 +424,40 @@ class SomeDomainType { enum Gender { M, F } + + static class BeforeConvertPersonCallback implements BeforeConvertCallback { + + @Override + public Person onBeforeConvert(Person entity, String collection) { + return new Person("before-convert"); + } + } + + static class BeforeSavePersonCallback implements BeforeSaveCallback { + + @Override + public Person onBeforeSave(Person entity, Document document, String collection) { + + document.put("firstName", "before-save"); + return new Person("before-save"); + } + } + + static class AfterSavePersonCallback implements AfterSaveCallback { + + @Override + public Person onAfterSave(Person entity, Document document, String collection) { + + document.put("firstName", "after-save"); + return new Person("after-save"); + } + } + + static class NullExceptionTranslator implements PersistenceExceptionTranslator { + + @Override + public DataAccessException translateExceptionIfPossible(RuntimeException ex) { + return null; + } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java index 29a19f8ade..78a6e6b496 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,30 @@ */ package org.springframework.data.mongodb.core; -import static org.assertj.core.api.Assertions.*; -import static org.hamcrest.core.Is.*; -import static org.junit.Assume.*; -import static org.springframework.data.mongodb.core.index.PartialIndexFilter.*; -import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.index.PartialIndexFilter.of; +import static org.springframework.data.mongodb.core.query.Criteria.where; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; +import org.bson.BsonDocument; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.mongodb.core.query.Collation; -import org.springframework.data.mongodb.core.query.Collation.CaseFirst; -import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.index.Index; import org.springframework.data.mongodb.core.index.IndexDefinition; import 
org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.mapping.Field; -import org.springframework.data.util.Version; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Collation.CaseFirst; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; import org.springframework.util.ObjectUtils; import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.IndexOptions; /** * Integration tests for {@link DefaultIndexOperations}. @@ -48,121 +47,110 @@ * @author Oliver Gierke * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") +@ExtendWith(MongoTemplateExtension.class) public class DefaultIndexOperationsIntegrationTests { - private static final Version THREE_DOT_TWO = new Version(3, 2); - private static final Version THREE_DOT_FOUR = new Version(3, 4); - private static Version mongoVersion; + static final String COLLECTION_NAME = "default-index-operations-tests"; static final org.bson.Document GEO_SPHERE_2D = new org.bson.Document("loaction", "2dsphere"); - @Autowired MongoTemplate template; - DefaultIndexOperations indexOps; - MongoCollection collection; - - @Before - public void setUp() { - - queryMongoVersionIfNecessary(); - String collectionName = this.template.getCollectionName(DefaultIndexOperationsIntegrationTestsSample.class); - - this.collection = this.template.getDb().getCollection(collectionName, Document.class); - this.collection.dropIndexes(); - this.indexOps = new DefaultIndexOperations(template.getMongoDbFactory(), 
collectionName, - new QueryMapper(template.getConverter())); - } + @Template // + static MongoTestTemplate template; - private void queryMongoVersionIfNecessary() { + MongoCollection collection = template.getCollection(COLLECTION_NAME); + IndexOperations indexOps = template.indexOps(COLLECTION_NAME); - if (mongoVersion == null) { - Document result = template.executeCommand("{ buildInfo: 1 }"); - mongoVersion = Version.parse(result.get("version").toString()); - } + @BeforeEach + public void setUp() { + template.dropIndexes(COLLECTION_NAME); } @Test // DATAMONGO-1008 public void getIndexInfoShouldBeAbleToRead2dsphereIndex() { - collection.createIndex(GEO_SPHERE_2D); + template.getCollection(COLLECTION_NAME).createIndex(GEO_SPHERE_2D); IndexInfo info = findAndReturnIndexInfo(GEO_SPHERE_2D); assertThat(info.getIndexFields().get(0).isGeo()).isEqualTo(true); } - @Test // DATAMONGO-1467 + @Test // DATAMONGO-1467, DATAMONGO-2198 public void shouldApplyPartialFilterCorrectly() { - assumeThat(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_TWO), is(true)); - IndexDefinition id = new Index().named("partial-with-criteria").on("k3y", Direction.ASC) .partial(of(where("q-t-y").gte(10))); - indexOps.ensureIndex(id); + indexOps.createIndex(id); IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-criteria"); - assertThat(info.getPartialFilterExpression()).isEqualTo("{ \"q-t-y\" : { \"$gte\" : 10 } }"); + assertThat(Document.parse(info.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"q-t-y\" : { \"$gte\" : 10 } }")); } - @Test // DATAMONGO-1467 + @Test // DATAMONGO-1467, DATAMONGO-2198 public void shouldApplyPartialFilterWithMappedPropertyCorrectly() { - assumeThat(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_TWO), is(true)); - IndexDefinition id = new Index().named("partial-with-mapped-criteria").on("k3y", Direction.ASC) .partial(of(where("quantity").gte(10))); - indexOps.ensureIndex(id); + 
template.indexOps(DefaultIndexOperationsIntegrationTestsSample.class).createIndex(id); IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-mapped-criteria"); - assertThat(info.getPartialFilterExpression()).isEqualTo("{ \"qty\" : { \"$gte\" : 10 } }"); + assertThat(Document.parse(info.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"qty\" : { \"$gte\" : 10 } }")); } - @Test // DATAMONGO-1467 + @Test // DATAMONGO-1467, DATAMONGO-2198 public void shouldApplyPartialDBOFilterCorrectly() { - assumeThat(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_TWO), is(true)); - IndexDefinition id = new Index().named("partial-with-dbo").on("k3y", Direction.ASC) .partial(of(new org.bson.Document("qty", new org.bson.Document("$gte", 10)))); - indexOps.ensureIndex(id); + indexOps.createIndex(id); IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-dbo"); - assertThat(info.getPartialFilterExpression()).isEqualTo("{ \"qty\" : { \"$gte\" : 10 } }"); + assertThat(Document.parse(info.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"qty\" : { \"$gte\" : 10 } }")); } - @Test // DATAMONGO-1467 + @Test // DATAMONGO-1467, DATAMONGO-2198 public void shouldFavorExplicitMappingHintViaClass() { - assumeThat(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_TWO), is(true)); - IndexDefinition id = new Index().named("partial-with-inheritance").on("k3y", Direction.ASC) .partial(of(where("age").gte(10))); - indexOps = new DefaultIndexOperations(template.getMongoDbFactory(), - this.template.getCollectionName(DefaultIndexOperationsIntegrationTestsSample.class), - new QueryMapper(template.getConverter()), MappingToSameCollection.class); + indexOps = new DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class); - indexOps.ensureIndex(id); + indexOps.createIndex(id); IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-inheritance"); - 
assertThat(info.getPartialFilterExpression()).isEqualTo("{ \"a_g_e\" : { \"$gte\" : 10 } }"); + assertThat(Document.parse(info.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"a_g_e\" : { \"$gte\" : 10 } }")); + } + + @Test // DATAMONGO-2388 + public void shouldReadIndexWithPartialFilterContainingDbRefCorrectly() { + + BsonDocument partialFilter = BsonDocument.parse( + "{ \"the-ref\" : { \"$ref\" : \"other-collection\", \"$id\" : { \"$oid\" : \"59ce08baf264b906810fe8c5\"} } }"); + IndexOptions indexOptions = new IndexOptions(); + indexOptions.name("partial-with-dbref"); + indexOptions.partialFilterExpression(partialFilter); + + collection.createIndex(BsonDocument.parse("{ \"key-1\" : 1, \"key-2\": 1}"), indexOptions); + + IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-dbref"); + assertThat(BsonDocument.parse(info.getPartialFilterExpression())).isEqualTo(partialFilter); } @Test // DATAMONGO-1518 public void shouldCreateIndexWithCollationCorrectly() { - assumeThat(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_FOUR), is(true)); - IndexDefinition id = new Index().named("with-collation").on("xyz", Direction.ASC) .collation(Collation.of("de_AT").caseFirst(CaseFirst.off())); - new DefaultIndexOperations(template.getMongoDbFactory(), - this.template.getCollectionName(DefaultIndexOperationsIntegrationTestsSample.class), - new QueryMapper(template.getConverter()), MappingToSameCollection.class); + new DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class); - indexOps.ensureIndex(id); + indexOps.createIndex(id); Document expected = new Document("locale", "de_AT") // .append("caseLevel", false) // @@ -185,6 +173,40 @@ public void shouldCreateIndexWithCollationCorrectly() { assertThat(result).isEqualTo(expected); } + @Test // GH-4348 + void indexShouldNotBeHiddenByDefault() { + + IndexDefinition index = new Index().named("my-index").on("a", Direction.ASC); + + indexOps = new 
DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class); + indexOps.createIndex(index); + + IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "my-index"); + assertThat(info.isHidden()).isFalse(); + } + + @Test // GH-4348 + void shouldCreateHiddenIndex() { + + IndexDefinition index = new Index().named("my-hidden-index").on("a", Direction.ASC).hidden(); + + indexOps = new DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class); + indexOps.createIndex(index); + + IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "my-hidden-index"); + assertThat(info.isHidden()).isTrue(); + } + + @Test // GH-4348 + void alterIndexShouldAllowHiding() { + + collection.createIndex(new Document("a", 1), new IndexOptions().name("my-index")); + + indexOps.alterIndex("my-index", org.springframework.data.mongodb.core.index.IndexOptions.hidden()); + IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "my-index"); + assertThat(info.isHidden()).isTrue(); + } + private IndexInfo findAndReturnIndexInfo(org.bson.Document keys) { return findAndReturnIndexInfo(indexOps.getIndexInfo(), keys); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsUnitTests.java new file mode 100644 index 0000000000..14550e4c17 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsUnitTests.java @@ -0,0 +1,162 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.index.HashedIndex; +import org.springframework.data.mongodb.core.index.Index; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Collation; + +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.IndexOptions; + +/** + * Unit tests for {@link DefaultIndexOperations}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +public class DefaultIndexOperationsUnitTests { + + private MongoTemplate template; + + @Mock MongoDatabaseFactory factory; + @Mock MongoDatabase db; + @Mock MongoCollection collection; + + private MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator(); + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; + + @BeforeEach + void setUp() { + + when(factory.getMongoDatabase()).thenReturn(db); + when(factory.getExceptionTranslator()).thenReturn(exceptionTranslator); + when(db.getCollection(any(), any(Class.class))).thenReturn(collection); + when(collection.createIndex(any(), any(IndexOptions.class))).thenReturn("OK"); + + this.mappingContext = new MongoMappingContext(); + this.converter = spy(new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext)); + this.template = new MongoTemplate(factory, converter); + } + + @Test // DATAMONGO-1183 + void indexOperationsMapFieldNameCorrectly() { + + indexOpsFor(Jedi.class).ensureIndex(new Index("name", Direction.DESC)); + + verify(collection).createIndex(eq(new Document("firstname", -1)), any()); + } + + @Test // DATAMONGO-1854 + void ensureIndexDoesNotSetCollectionIfNoDefaultDefined() { + + indexOpsFor(Jedi.class).ensureIndex(new Index("firstname", Direction.DESC)); + + ArgumentCaptor options = ArgumentCaptor.forClass(IndexOptions.class); + verify(collection).createIndex(any(), options.capture()); + + assertThat(options.getValue().getCollation()).isNull(); + } + + @Test // DATAMONGO-1854 + void ensureIndexUsesDefaultCollationIfNoneDefinedInOptions() { + + indexOpsFor(Sith.class).ensureIndex(new Index("firstname", Direction.DESC)); + + ArgumentCaptor options = ArgumentCaptor.forClass(IndexOptions.class); + verify(collection).createIndex(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + 
.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void ensureIndexDoesNotUseDefaultCollationIfExplicitlySpecifiedInTheIndex() { + + indexOpsFor(Sith.class).ensureIndex(new Index("firstname", Direction.DESC).collation(Collation.of("en_US"))); + + ArgumentCaptor options = ArgumentCaptor.forClass(IndexOptions.class); + verify(collection).createIndex(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("en_US").build()); + } + + @Test // DATAMONGO-1183 + void shouldCreateHashedIndexCorrectly() { + + indexOpsFor(Jedi.class).ensureIndex(HashedIndex.hashed("name")); + + verify(collection).createIndex(eq(new Document("firstname", "hashed")), any()); + } + + @Test // GH-4698 + void shouldConsiderGivenCollectionName() { + + DefaultIndexOperations operations = new DefaultIndexOperations(template, "foo", Jedi.class); + + operations.ensureIndex(HashedIndex.hashed("name")); + verify(db).getCollection(eq("foo"), any(Class.class)); + } + + private DefaultIndexOperations indexOpsFor(Class type) { + return new DefaultIndexOperations(template, template.getCollectionName(type), type); + } + + static class Jedi { + + @Field("firstname") String name; + + public Jedi() {} + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public String toString() { + return "DefaultIndexOperationsUnitTests.Jedi(name=" + this.getName() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(collation = "de_AT") + static class Sith { + @Field("firstname") String name; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperationsTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperationsTests.java new file mode 100644 index 0000000000..79bf563159 --- 
/dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperationsTests.java @@ -0,0 +1,395 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.domain.Sort.Direction.DESC; + +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import reactor.core.publisher.Flux; +import reactor.test.StepVerifier; + +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import java.util.stream.Stream; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.EnumSource; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.dao.DuplicateKeyException; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.DefaultReactiveBulkOperations.ReactiveBulkOperationContext; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import 
org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.ReactiveMongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +import com.mongodb.MongoBulkWriteException; +import com.mongodb.WriteConcern; +import com.mongodb.bulk.BulkWriteResult; + +/** + * Tests for {@link DefaultReactiveBulkOperations}. + * + * @author Christoph Strobl + */ +@ExtendWith(MongoTemplateExtension.class) +class DefaultReactiveBulkOperationsTests { + + static final String COLLECTION_NAME = "reactive-bulk-ops"; + + @Template(initialEntitySet = BaseDoc.class) static ReactiveMongoTestTemplate template; + + @BeforeEach + public void setUp() { + template.flush(COLLECTION_NAME).as(StepVerifier::create).verifyComplete(); + } + + @Test // GH-2821 + void insertOrdered() { + + List documents = Arrays.asList(newDoc("1"), newDoc("2")); + + createBulkOps(BulkMode.ORDERED).insert(documents) // + .execute().as(StepVerifier::create) // + .consumeNextWith(result -> { + assertThat(result.getInsertedCount()).isEqualTo(2); + }).verifyComplete(); + } + + @Test // GH-2821 + void insertOrderedFails() { + + List documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2")); + + createBulkOps(BulkMode.ORDERED).insert(documents) // + .execute().as(StepVerifier::create) // + .verifyErrorSatisfies(error -> { + assertThat(error).isInstanceOf(DuplicateKeyException.class); + }); + } + + @Test // GH-2821 + public void insertUnOrdered() { + + List documents = Arrays.asList(newDoc("1"), newDoc("2")); + + createBulkOps(BulkMode.UNORDERED).insert(documents) // + .execute().as(StepVerifier::create) // + 
.consumeNextWith(result -> { + assertThat(result.getInsertedCount()).isEqualTo(2); + }).verifyComplete(); + } + + @Test // GH-2821 + public void insertUnOrderedContinuesOnError() { + + List documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2")); + + createBulkOps(BulkMode.UNORDERED).insert(documents) // + .execute().as(StepVerifier::create) // + .verifyErrorSatisfies(error -> { + + assertThat(error).isInstanceOf(DuplicateKeyException.class); + assertThat(error.getCause()).isInstanceOf(MongoBulkWriteException.class); + + MongoBulkWriteException cause = (MongoBulkWriteException) error.getCause(); + assertThat(cause.getWriteResult().getInsertedCount()).isEqualTo(2); + assertThat(cause.getWriteErrors()).isNotNull(); + assertThat(cause.getWriteErrors().size()).isOne(); + }); + } + + @Test // GH-2821 + void upsertDoesUpdate() { + + insertSomeDocuments(); + + createBulkOps(BulkMode.ORDERED).// + upsert(where("value", "value1"), set("value", "value2")).// + execute().as(StepVerifier::create) // + .consumeNextWith(result -> { + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isEqualTo(2); + assertThat(result.getModifiedCount()).isEqualTo(2); + assertThat(result.getInsertedCount()).isZero(); + assertThat(result.getUpserts()).isNotNull(); + assertThat(result.getUpserts().size()).isZero(); + }) // + .verifyComplete(); + } + + @Test // GH-2821 + public void upsertDoesInsert() { + + createBulkOps(BulkMode.ORDERED).// + upsert(where("_id", "1"), set("value", "v1")).// + execute().as(StepVerifier::create) // + .consumeNextWith(result -> { + + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isZero(); + assertThat(result.getModifiedCount()).isZero(); + assertThat(result.getUpserts()).isNotNull(); + assertThat(result.getUpserts().size()).isOne(); + }) // + .verifyComplete(); + } + + @ParameterizedTest // GH-2821 + @MethodSource + public void testUpdates(BulkMode mode, boolean multi, int expectedUpdateCount) { + + 
insertSomeDocuments(); + ReactiveBulkOperations bulkOps = createBulkOps(mode); + + if (multi) { + bulkOps.updateMulti(where("value", "value1"), set("value", "value3")); + bulkOps.updateMulti(where("value", "value2"), set("value", "value4")); + } else { + bulkOps.updateOne(where("value", "value1"), set("value", "value3")); + bulkOps.updateOne(where("value", "value2"), set("value", "value4")); + } + + bulkOps.execute().map(BulkWriteResult::getModifiedCount) // + .as(StepVerifier::create) // + .expectNext(expectedUpdateCount) // + .verifyComplete(); + } + + private static Stream testUpdates() { + return Stream.of(Arguments.of(BulkMode.ORDERED, false, 2), Arguments.of(BulkMode.ORDERED, true, 4), + Arguments.of(BulkMode.UNORDERED, false, 2), Arguments.of(BulkMode.UNORDERED, false, 2)); + } + + @ParameterizedTest // GH-2821 + @EnumSource(BulkMode.class) + void testRemove(BulkMode mode) { + + insertSomeDocuments(); + + List removes = Arrays.asList(where("_id", "1"), where("value", "value2")); + + createBulkOps(mode).remove(removes).execute().map(BulkWriteResult::getDeletedCount).as(StepVerifier::create) + .expectNext(3).verifyComplete(); + } + + @ParameterizedTest // GH-2821 + @EnumSource(BulkMode.class) + void testReplaceOne(BulkMode mode) { + + insertSomeDocuments(); + + Query query = where("_id", "1"); + Document document = rawDoc("1", "value2"); + createBulkOps(mode).replaceOne(query, document).execute().map(BulkWriteResult::getModifiedCount) + .as(StepVerifier::create).expectNext(1).verifyComplete(); + } + + @Test // GH-2821 + public void replaceOneDoesReplace() { + + insertSomeDocuments(); + + createBulkOps(BulkMode.ORDERED).// + replaceOne(where("_id", "1"), rawDoc("1", "value2")).// + execute().as(StepVerifier::create).consumeNextWith(result -> { + + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isOne(); + assertThat(result.getModifiedCount()).isOne(); + assertThat(result.getInsertedCount()).isZero(); + }).verifyComplete(); + } + + @Test 
// GH-2821 + public void replaceOneWithUpsert() { + + createBulkOps(BulkMode.ORDERED).// + replaceOne(where("_id", "1"), rawDoc("1", "value2"), FindAndReplaceOptions.options().upsert()).// + execute().as(StepVerifier::create).consumeNextWith(result -> { + + assertThat(result).isNotNull(); + assertThat(result.getMatchedCount()).isZero(); + assertThat(result.getInsertedCount()).isZero(); + assertThat(result.getModifiedCount()).isZero(); + assertThat(result.getUpserts().size()).isOne(); + }); + } + + @Test // GH-2821 + public void mixedBulkOrdered() { + + createBulkOps(BulkMode.ORDERED, BaseDoc.class).insert(newDoc("1", "v1")).// + updateOne(where("_id", "1"), set("value", "v2")).// + remove(where("value", "v2")).// + execute().as(StepVerifier::create).consumeNextWith(result -> { + + assertThat(result).isNotNull(); + assertThat(result.getInsertedCount()).isOne(); + assertThat(result.getModifiedCount()).isOne(); + assertThat(result.getDeletedCount()).isOne(); + }).verifyComplete(); + } + + @Test // GH-2821 + public void mixedBulkOrderedWithList() { + + List inserts = Arrays.asList(newDoc("1", "v1"), newDoc("2", "v2"), newDoc("3", "v2")); + List removes = Arrays.asList(where("_id", "1")); + + createBulkOps(BulkMode.ORDERED, BaseDoc.class).insert(inserts).updateMulti(where("value", "v2"), set("value", "v3")) + .remove(removes).execute().as(StepVerifier::create).consumeNextWith(result -> { + + assertThat(result).isNotNull(); + assertThat(result.getInsertedCount()).isEqualTo(3); + assertThat(result.getModifiedCount()).isEqualTo(2); + assertThat(result.getDeletedCount()).isOne(); + }).verifyComplete(); + } + + @Test // GH-2821 + public void insertShouldConsiderInheritance() { + + SpecialDoc specialDoc = new SpecialDoc(); + specialDoc.id = "id-special"; + specialDoc.value = "normal-value"; + specialDoc.specialValue = "special-value"; + + createBulkOps(BulkMode.ORDERED, SpecialDoc.class).insert(Arrays.asList(specialDoc)).execute().then() + 
.as(StepVerifier::create).verifyComplete(); + + template.findOne(where("_id", specialDoc.id), BaseDoc.class, COLLECTION_NAME).as(StepVerifier::create) + .consumeNextWith(doc -> { + + assertThat(doc).isNotNull(); + assertThat(doc).isInstanceOf(SpecialDoc.class); + }).verifyComplete(); + } + + @Test // GH-4797 + @EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") + public void updateShouldConsiderSorting() { + + insertSomeDocuments(); + + createBulkOps(BulkMode.ORDERED, BaseDocWithRenamedField.class) // + .updateOne(new Query().with(Sort.by(DESC, "renamedField")), new Update().set("bsky", "altnps")).execute() // + .as(StepVerifier::create) // + .consumeNextWith(result -> assertThat(result.getModifiedCount()).isOne()) // + .verifyComplete(); + + template.execute(COLLECTION_NAME, col -> col.find(new Document("_id", "4")).first()).as(StepVerifier::create) // + .consumeNextWith(raw -> assertThat(raw).containsEntry("bsky", "altnps")) // + .verifyComplete(); + } + + @Test // GH-4797 + @EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") + public void replaceShouldConsiderSorting() { + + insertSomeDocuments(); + + BaseDocWithRenamedField target = new BaseDocWithRenamedField(); + target.value = "replacement"; + + createBulkOps(BulkMode.ORDERED, BaseDocWithRenamedField.class) // + .replaceOne(new Query().with(Sort.by(DESC, "renamedField")), target).execute() // + .as(StepVerifier::create) // + .consumeNextWith(result -> assertThat(result.getModifiedCount()).isOne()) // + .verifyComplete(); + + template.execute(COLLECTION_NAME, col -> col.find(new Document("_id", "4")).first()).as(StepVerifier::create) // + .consumeNextWith(raw -> assertThat(raw).containsEntry("value", target.value)) // + .verifyComplete(); + } + + private void insertSomeDocuments() { + + template.execute(COLLECTION_NAME, collection -> { + return Flux.from(collection.insertMany( + List.of(rawDoc("1", "value1").append("rn_f", "001"), rawDoc("2", "value1").append("rn_f", "002"), rawDoc("3", 
"value2").append("rn_f", "003"), rawDoc("4", "value2").append("rn_f", "004")))); + }).then().as(StepVerifier::create).verifyComplete(); + + } + + private DefaultReactiveBulkOperations createBulkOps(BulkMode mode) { + return createBulkOps(mode, null); + } + + private DefaultReactiveBulkOperations createBulkOps(BulkMode mode, Class entityType) { + + Optional> entity = entityType != null + ? Optional.of(template.getConverter().getMappingContext().getPersistentEntity(entityType)) + : Optional.empty(); + + ReactiveBulkOperationContext bulkOperationContext = new ReactiveBulkOperationContext(mode, entity, + new QueryMapper(template.getConverter()), new UpdateMapper(template.getConverter()), null, null); + + DefaultReactiveBulkOperations bulkOps = new DefaultReactiveBulkOperations(template, COLLECTION_NAME, + bulkOperationContext); + bulkOps.setDefaultWriteConcern(WriteConcern.ACKNOWLEDGED); + + return bulkOps; + } + + private static BaseDoc newDoc(String id) { + + BaseDoc doc = new BaseDoc(); + doc.id = id; + + return doc; + } + + private static BaseDoc newDoc(String id, String value) { + + BaseDoc doc = newDoc(id); + doc.value = value; + + return doc; + } + + private static Query where(String field, String value) { + return new Query().addCriteria(Criteria.where(field).is(value)); + } + + private static Update set(String field, String value) { + return new Update().set(field, value); + } + + private static Document rawDoc(String id, String value) { + return new Document("_id", id).append("value", value); + } + + static class BaseDocWithRenamedField extends BaseDoc { + + @Field("rn_f") + String renamedField; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperationsUnitTests.java new file mode 100644 index 0000000000..3b4cb322bc --- /dev/null +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperationsUnitTests.java @@ -0,0 +1,347 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.List; +import java.util.Optional; + +import org.bson.BsonDocument; +import org.bson.BsonString; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Answers; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import 
org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.DefaultBulkOperationsUnitTests.NullExceptionTranslator; +import org.springframework.data.mongodb.core.DefaultReactiveBulkOperations.ReactiveBulkOperationContext; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Update; + +import com.mongodb.MongoWriteException; +import com.mongodb.WriteError; +import com.mongodb.bulk.BulkWriteResult; +import com.mongodb.client.model.BulkWriteOptions; +import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.UpdateOneModel; +import com.mongodb.client.model.WriteModel; +import com.mongodb.reactivestreams.client.MongoCollection; +import 
com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +public class DefaultReactiveBulkOperationsUnitTests { + + ReactiveMongoTemplate template; + @Mock ReactiveMongoDatabaseFactory factory; + + @Mock MongoDatabase database; + @Mock(answer = Answers.RETURNS_DEEP_STUBS) MongoCollection collection; + @Captor ArgumentCaptor>> captor; + + private MongoConverter converter; + private MongoMappingContext mappingContext; + + private DefaultReactiveBulkOperations ops; + + @BeforeEach + void setUp() { + + when(factory.getMongoDatabase()).thenReturn(Mono.just(database)); + when(factory.getExceptionTranslator()).thenReturn(new NullExceptionTranslator()); + when(database.getCollection(anyString(), eq(Document.class))).thenReturn(collection); + when(collection.bulkWrite(anyList(), any())).thenReturn(Mono.just(mock(BulkWriteResult.class))); + + mappingContext = new MongoMappingContext(); + mappingContext.afterPropertiesSet(); + + converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + template = new ReactiveMongoTemplate(factory, converter); + + ops = new DefaultReactiveBulkOperations(template, "collection-1", + new ReactiveBulkOperationContext(BulkMode.ORDERED, + Optional.of(mappingContext.getPersistentEntity(SomeDomainType.class)), new QueryMapper(converter), + new UpdateMapper(converter), null, null)); + } + + @Test // GH-2821 + void updateOneShouldUseCollationWhenPresent() { + + ops.updateOne(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen")) + .execute().subscribe(); + + verify(collection).bulkWrite(captor.capture(), any()); + assertThat(captor.getValue().get(0)).isInstanceOf(UpdateOneModel.class); + assertThat(((UpdateOneModel) captor.getValue().get(0)).getOptions().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build()); + } + + @Test // 
GH-2821 + void replaceOneShouldUseCollationWhenPresent() { + + ops.replaceOne(new BasicQuery("{}").collation(Collation.of("de")), new SomeDomainType()).execute().subscribe(); + + verify(collection).bulkWrite(captor.capture(), any()); + + assertThat(captor.getValue().get(0)).isInstanceOf(ReplaceOneModel.class); + assertThat(((ReplaceOneModel) captor.getValue().get(0)).getReplaceOptions().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build()); + } + + @Test // GH-2821 + void removeShouldUseCollationWhenPresent() { + + ops.remove(new BasicQuery("{}").collation(Collation.of("de"))).execute().subscribe(); + + verify(collection).bulkWrite(captor.capture(), any()); + + assertThat(captor.getValue().get(0)).isInstanceOf(DeleteManyModel.class); + assertThat(((DeleteManyModel) captor.getValue().get(0)).getOptions().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build()); + } + + @Test // GH-2821 + void bulkUpdateShouldMapQueryAndUpdateCorrectly() { + + ops.updateOne(query(where("firstName").is("danerys")), Update.update("firstName", "queen danerys")).execute() + .subscribe(); + + verify(collection).bulkWrite(captor.capture(), any()); + + UpdateOneModel updateModel = (UpdateOneModel) captor.getValue().get(0); + assertThat(updateModel.getFilter()).isEqualTo(new Document("first_name", "danerys")); + assertThat(updateModel.getUpdate()).isEqualTo(new Document("$set", new Document("first_name", "queen danerys"))); + } + + @Test // GH-2821 + void bulkRemoveShouldMapQueryCorrectly() { + + ops.remove(query(where("firstName").is("danerys"))).execute().subscribe(); + + verify(collection).bulkWrite(captor.capture(), any()); + + DeleteManyModel updateModel = (DeleteManyModel) captor.getValue().get(0); + assertThat(updateModel.getFilter()).isEqualTo(new Document("first_name", "danerys")); + } + + @Test // GH-2821 + void bulkReplaceOneShouldMapQueryCorrectly() { + + SomeDomainType replacement = new 
SomeDomainType(); + replacement.firstName = "Minsu"; + replacement.lastName = "Kim"; + + ops.replaceOne(query(where("firstName").is("danerys")), replacement).execute().subscribe(); + + verify(collection).bulkWrite(captor.capture(), any()); + + ReplaceOneModel updateModel = (ReplaceOneModel) captor.getValue().get(0); + assertThat(updateModel.getFilter()).isEqualTo(new Document("first_name", "danerys")); + assertThat(updateModel.getReplacement().getString("first_name")).isEqualTo("Minsu"); + assertThat(updateModel.getReplacement().getString("lastName")).isEqualTo("Kim"); + } + + @Test // GH-2821 + void bulkInsertInvokesEntityCallbacks() { + + BeforeConvertPersonCallback beforeConvertCallback = spy(new BeforeConvertPersonCallback()); + BeforeSavePersonCallback beforeSaveCallback = spy(new BeforeSavePersonCallback()); + AfterSavePersonCallback afterSaveCallback = spy(new AfterSavePersonCallback()); + + ops = new DefaultReactiveBulkOperations(template, "collection-1", + new ReactiveBulkOperationContext(BulkMode.ORDERED, + Optional.of(mappingContext.getPersistentEntity(Person.class)), new QueryMapper(converter), + new UpdateMapper(converter), null, + ReactiveEntityCallbacks.create(beforeConvertCallback, beforeSaveCallback, afterSaveCallback))); + + Person entity = new Person("init"); + ops.insert(entity); + + ArgumentCaptor personArgumentCaptor = ArgumentCaptor.forClass(Person.class); + verifyNoInteractions(beforeConvertCallback); + verifyNoInteractions(beforeSaveCallback); + + ops.execute().then().as(StepVerifier::create).verifyComplete(); + + verify(beforeConvertCallback).onBeforeConvert(personArgumentCaptor.capture(), eq("collection-1")); + verify(beforeSaveCallback).onBeforeSave(personArgumentCaptor.capture(), any(), eq("collection-1")); + verify(afterSaveCallback).onAfterSave(personArgumentCaptor.capture(), any(), eq("collection-1")); + assertThat(personArgumentCaptor.getAllValues()).extracting("firstName").containsExactly("init", "before-convert", + "before-save"); 
+ verify(collection).bulkWrite(captor.capture(), any()); + + InsertOneModel updateModel = (InsertOneModel) captor.getValue().get(0); + assertThat(updateModel.getDocument()).containsEntry("firstName", "after-save"); + } + + @Test // GH-2821 + void bulkReplaceOneEmitsEventsCorrectly() { + + ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class); + + ops = new DefaultReactiveBulkOperations(template, "collection-1", + new ReactiveBulkOperationContext(BulkMode.ORDERED, + Optional.of(mappingContext.getPersistentEntity(Person.class)), new QueryMapper(converter), + new UpdateMapper(converter), eventPublisher, null)); + + ops.replaceOne(query(where("firstName").is("danerys")), new SomeDomainType()); + + verify(eventPublisher, never()).publishEvent(any(BeforeConvertEvent.class)); + verify(eventPublisher, never()).publishEvent(any(BeforeSaveEvent.class)); + verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class)); + + ops.execute().then().as(StepVerifier::create).verifyComplete(); + + verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class)); + verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class)); + verify(eventPublisher).publishEvent(any(AfterSaveEvent.class)); + } + + @Test // GH-2821 + void bulkInsertEmitsEventsCorrectly() { + + ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class); + + ops = new DefaultReactiveBulkOperations(template, "collection-1", + new ReactiveBulkOperationContext(BulkMode.ORDERED, + Optional.of(mappingContext.getPersistentEntity(Person.class)), new QueryMapper(converter), + new UpdateMapper(converter), eventPublisher, null)); + + ops.insert(new SomeDomainType()); + + verify(eventPublisher, never()).publishEvent(any(BeforeConvertEvent.class)); + verify(eventPublisher, never()).publishEvent(any(BeforeSaveEvent.class)); + verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class)); + + ops.execute().then().as(StepVerifier::create).verifyComplete(); + + 
verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class)); + verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class)); + verify(eventPublisher).publishEvent(any(AfterSaveEvent.class)); + } + + @Test // GH-2821 + void noAfterSaveEventOnFailure() { + + ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class); + + when(collection.bulkWrite(anyList(), any(BulkWriteOptions.class))).thenThrow(new MongoWriteException( + new WriteError(89, "NetworkTimeout", new BsonDocument("hi", new BsonString("there"))), null)); + + ops = new DefaultReactiveBulkOperations(template, "collection-1", + new ReactiveBulkOperationContext(BulkMode.ORDERED, + Optional.of(mappingContext.getPersistentEntity(Person.class)), new QueryMapper(converter), + new UpdateMapper(converter), eventPublisher, null)); + + ops.insert(new SomeDomainType()); + + ops.execute().as(StepVerifier::create).expectError(); + + verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class)); + } + + @Test // GH-2821 + void appliesArrayFilterWhenPresent() { + + ops.updateOne(new BasicQuery("{}"), new Update().filterArray(Criteria.where("element").gte(100))).execute() + .subscribe(); + + verify(collection).bulkWrite(captor.capture(), any()); + + UpdateOneModel updateModel = (UpdateOneModel) captor.getValue().get(0); + assertThat(updateModel.getOptions().getArrayFilters().get(0)) + .isEqualTo(new org.bson.Document("element", new Document("$gte", 100))); + } + + static class BeforeConvertPersonCallback implements ReactiveBeforeConvertCallback { + + @Override + public Mono onBeforeConvert(Person entity, String collection) { + return Mono.just(new Person("before-convert")); + } + } + + static class BeforeSavePersonCallback implements ReactiveBeforeSaveCallback { + + @Override + public Mono onBeforeSave(Person entity, Document document, String collection) { + + document.put("firstName", "before-save"); + return Mono.just(new Person("before-save")); + } + } + + static class 
AfterSavePersonCallback implements ReactiveAfterSaveCallback { + + @Override + public Mono onAfterSave(Person entity, Document document, String collection) { + + document.put("firstName", "after-save"); + return Mono.just(new Person("after-save")); + } + } + + class SomeDomainType { + + @Id String id; + DefaultBulkOperationsUnitTests.Gender gender; + @Field("first_name") String firstName; + @Field String lastName; + } + + enum Gender { + M, F + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperationsTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperationsTests.java index 1782c46428..5ecce43102 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperationsTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperationsTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +16,6 @@ package org.springframework.data.mongodb.core; import static org.assertj.core.api.Assertions.*; -import static org.junit.Assume.*; import static org.springframework.data.mongodb.core.index.PartialIndexFilter.*; import static org.springframework.data.mongodb.core.query.Criteria.*; @@ -25,13 +24,10 @@ import java.util.function.Predicate; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Configuration; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.index.Index; import org.springframework.data.mongodb.core.index.IndexDefinition; @@ -39,75 +35,43 @@ import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Collation.CaseFirst; -import org.springframework.data.util.Version; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.ReactiveMongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; -import com.mongodb.reactivestreams.client.MongoClient; 
-import com.mongodb.reactivestreams.client.MongoClients; +import com.mongodb.client.model.IndexOptions; import com.mongodb.reactivestreams.client.MongoCollection; /** * @author Christoph Strobl * @author Mark Paluch + * @author Mathieu Ouellet */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration +@ExtendWith(MongoTemplateExtension.class) public class DefaultReactiveIndexOperationsTests { - @Configuration - static class Config extends AbstractReactiveMongoConfiguration { + @Template(initialEntitySet = DefaultIndexOperationsIntegrationTestsSample.class) // + static ReactiveMongoTestTemplate template; - @Override - public MongoClient reactiveMongoClient() { - return MongoClients.create(); - } + String collectionName = template.getCollectionName(DefaultIndexOperationsIntegrationTestsSample.class); - @Override - protected String getDatabaseName() { - return "index-ops-tests"; - } - } - - private static final Version THREE_DOT_TWO = new Version(3, 2); - private static final Version THREE_DOT_FOUR = new Version(3, 4); - private static Version mongoVersion; - - @Autowired ReactiveMongoTemplate template; + DefaultReactiveIndexOperations indexOps = new DefaultReactiveIndexOperations(template, collectionName, + new QueryMapper(template.getConverter())); - MongoCollection collection; - DefaultReactiveIndexOperations indexOps; - - @Before + @BeforeEach public void setUp() { - - queryMongoVersionIfNecessary(); - String collectionName = this.template.getCollectionName(DefaultIndexOperationsIntegrationTestsSample.class); - - this.collection = this.template.getMongoDatabase().getCollection(collectionName, Document.class); - this.indexOps = new DefaultReactiveIndexOperations(template, collectionName, - new QueryMapper(template.getConverter())); - - StepVerifier.create(this.collection.dropIndexes()).expectNextCount(1).verifyComplete(); - } - - private void queryMongoVersionIfNecessary() { - - if (mongoVersion == null) { - Document result = template.executeCommand("{ 
buildInfo: 1 }").block(); - mongoVersion = Version.parse(result.get("version").toString()); - } + template.getCollection(collectionName).flatMapMany(MongoCollection::dropIndexes) // + .as(StepVerifier::create) // + .verifyComplete(); } @Test // DATAMONGO-1518 public void shouldCreateIndexWithCollationCorrectly() { - assumeTrue(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_FOUR)); - IndexDefinition id = new Index().named("with-collation").on("xyz", Direction.ASC) .collation(Collation.of("de_AT").caseFirst(CaseFirst.off())); - StepVerifier.create(indexOps.ensureIndex(id)).expectNextCount(1).verifyComplete(); + indexOps.ensureIndex(id).as(StepVerifier::create).expectNextCount(1).verifyComplete(); Document expected = new Document("locale", "de_AT") // .append("caseLevel", false) // @@ -119,7 +83,7 @@ public void shouldCreateIndexWithCollationCorrectly() { .append("normalization", false) // .append("backwards", false); - StepVerifier.create(indexOps.getIndexInfo().filter(this.indexByName("with-collation"))) // + indexOps.getIndexInfo().filter(this.indexByName("with-collation")).as(StepVerifier::create) // .consumeNextWith(indexInfo -> { assertThat(indexInfo.getCollation()).isPresent(); @@ -133,62 +97,57 @@ public void shouldCreateIndexWithCollationCorrectly() { .verifyComplete(); } - @Test // DATAMONGO-1682 + @Test // DATAMONGO-1682, DATAMONGO-2198 public void shouldApplyPartialFilterCorrectly() { - assumeTrue(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_TWO)); - IndexDefinition id = new Index().named("partial-with-criteria").on("k3y", Direction.ASC) .partial(of(where("q-t-y").gte(10))); - StepVerifier.create(indexOps.ensureIndex(id)).expectNextCount(1).verifyComplete(); + indexOps.ensureIndex(id).as(StepVerifier::create).expectNextCount(1).verifyComplete(); - StepVerifier.create(indexOps.getIndexInfo().filter(this.indexByName("partial-with-criteria"))) // + indexOps.getIndexInfo().filter(this.indexByName("partial-with-criteria")).as(StepVerifier::create) // 
.consumeNextWith(indexInfo -> { - assertThat(indexInfo.getPartialFilterExpression()).isEqualTo("{ \"q-t-y\" : { \"$gte\" : 10 } }"); + assertThat(Document.parse(indexInfo.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"q-t-y\" : { \"$gte\" : 10 } }")); }) // .verifyComplete(); } - @Test // DATAMONGO-1682 + @Test // DATAMONGO-1682, DATAMONGO-2198 public void shouldApplyPartialFilterWithMappedPropertyCorrectly() { - assumeTrue(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_TWO)); - IndexDefinition id = new Index().named("partial-with-mapped-criteria").on("k3y", Direction.ASC) .partial(of(where("quantity").gte(10))); - StepVerifier.create(indexOps.ensureIndex(id)).expectNextCount(1).verifyComplete(); + indexOps.ensureIndex(id).as(StepVerifier::create).expectNextCount(1).verifyComplete(); - StepVerifier.create(indexOps.getIndexInfo().filter(this.indexByName("partial-with-mapped-criteria"))) // + indexOps.getIndexInfo().filter(this.indexByName("partial-with-mapped-criteria")).as(StepVerifier::create) // .consumeNextWith(indexInfo -> { - assertThat(indexInfo.getPartialFilterExpression()).isEqualTo("{ \"qty\" : { \"$gte\" : 10 } }"); + assertThat(Document.parse(indexInfo.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"qty\" : { \"$gte\" : 10 } }")); }).verifyComplete(); } - @Test // DATAMONGO-1682 + @Test // DATAMONGO-1682, DATAMONGO-2198 public void shouldApplyPartialDBOFilterCorrectly() { - assumeTrue(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_TWO)); - IndexDefinition id = new Index().named("partial-with-dbo").on("k3y", Direction.ASC) .partial(of(new org.bson.Document("qty", new org.bson.Document("$gte", 10)))); - StepVerifier.create(indexOps.ensureIndex(id)).expectNextCount(1).verifyComplete(); + indexOps.ensureIndex(id).as(StepVerifier::create).expectNextCount(1).verifyComplete(); - StepVerifier.create(indexOps.getIndexInfo().filter(this.indexByName("partial-with-dbo"))) // + 
indexOps.getIndexInfo().filter(this.indexByName("partial-with-dbo")).as(StepVerifier::create) // .consumeNextWith(indexInfo -> { - assertThat(indexInfo.getPartialFilterExpression()).isEqualTo("{ \"qty\" : { \"$gte\" : 10 } }"); + assertThat(Document.parse(indexInfo.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"qty\" : { \"$gte\" : 10 } }")); }) // .verifyComplete(); } - @Test // DATAMONGO-1682 + @Test // DATAMONGO-1682, DATAMONGO-2198 public void shouldFavorExplicitMappingHintViaClass() { - assumeTrue(mongoVersion.isGreaterThanOrEqualTo(THREE_DOT_TWO)); - IndexDefinition id = new Index().named("partial-with-inheritance").on("k3y", Direction.ASC) .partial(of(where("age").gte(10))); @@ -196,11 +155,56 @@ public void shouldFavorExplicitMappingHintViaClass() { this.template.getCollectionName(DefaultIndexOperationsIntegrationTestsSample.class), new QueryMapper(template.getConverter()), MappingToSameCollection.class); - StepVerifier.create(indexOps.ensureIndex(id)).expectNextCount(1).verifyComplete(); + indexOps.ensureIndex(id).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + indexOps.getIndexInfo().filter(this.indexByName("partial-with-inheritance")).as(StepVerifier::create) // + .consumeNextWith(indexInfo -> { + assertThat(Document.parse(indexInfo.getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"a_g_e\" : { \"$gte\" : 10 } }")); + }) // + .verifyComplete(); + } + + @Test // GH-4348 + void indexShouldNotBeHiddenByDefault() { + + IndexDefinition index = new Index().named("my-index").on("a", Direction.ASC); + + indexOps.ensureIndex(index).then().as(StepVerifier::create).verifyComplete(); + + indexOps.getIndexInfo().filter(this.indexByName("my-index")).as(StepVerifier::create) // + .consumeNextWith(indexInfo -> { + assertThat(indexInfo.isHidden()).isFalse(); + }) // + .verifyComplete(); + } + + @Test // GH-4348 + void shouldCreateHiddenIndex() { + + IndexDefinition index = new Index().named("my-hidden-index").on("a", 
Direction.ASC).hidden(); + + indexOps.ensureIndex(index).then().as(StepVerifier::create).verifyComplete(); + + indexOps.getIndexInfo().filter(this.indexByName("my-hidden-index")).as(StepVerifier::create) // + .consumeNextWith(indexInfo -> { + assertThat(indexInfo.isHidden()).isTrue(); + }) // + .verifyComplete(); + } + + @Test // GH-4348 + void alterIndexShouldAllowHiding() { + + template.execute(collectionName, collection -> { + return collection.createIndex(new Document("a", 1), new IndexOptions().name("my-index")); + }).then().as(StepVerifier::create).verifyComplete(); - StepVerifier.create(indexOps.getIndexInfo().filter(this.indexByName("partial-with-inheritance"))) // + indexOps.alterIndex("my-index", org.springframework.data.mongodb.core.index.IndexOptions.hidden()) + .as(StepVerifier::create).verifyComplete(); + indexOps.getIndexInfo().filter(this.indexByName("my-index")).as(StepVerifier::create) // .consumeNextWith(indexInfo -> { - assertThat(indexInfo.getPartialFilterExpression()).isEqualTo("{ \"a_g_e\" : { \"$gte\" : 10 } }"); + assertThat(indexInfo.isHidden()).isTrue(); }) // .verifyComplete(); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperationsUnitTests.java new file mode 100644 index 0000000000..e863a7df8b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperationsUnitTests.java @@ -0,0 +1,141 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Mono; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.reactivestreams.Publisher; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.index.Index; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Collation; + +import com.mongodb.client.model.IndexOptions; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * @author Christoph Strobl + * @author Mathieu Ouellet + */ +@ExtendWith(MockitoExtension.class) +public class DefaultReactiveIndexOperationsUnitTests { + + private ReactiveMongoTemplate template; + + @Mock ReactiveMongoDatabaseFactory factory; + @Mock MongoDatabase db; + @Mock MongoCollection 
collection; + @Mock Publisher publisher; + + private MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator(); + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; + + @BeforeEach + void setUp() { + + when(factory.getMongoDatabase()).thenReturn(Mono.just(db)); + when(factory.getExceptionTranslator()).thenReturn(exceptionTranslator); + when(db.getCollection(any(), any(Class.class))).thenReturn(collection); + when(collection.createIndex(any(), any(IndexOptions.class))).thenReturn(publisher); + + this.mappingContext = new MongoMappingContext(); + this.converter = spy(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)); + this.template = new ReactiveMongoTemplate(factory, converter); + } + + @Test // DATAMONGO-1854 + void ensureIndexDoesNotSetCollectionIfNoDefaultDefined() { + + indexOpsFor(Jedi.class).ensureIndex(new Index("firstname", Direction.DESC)).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(IndexOptions.class); + verify(collection).createIndex(any(), options.capture()); + + assertThat(options.getValue().getCollation()).isNull(); + } + + @Test // DATAMONGO-1854 + void ensureIndexUsesDefaultCollationIfNoneDefinedInOptions() { + + indexOpsFor(Sith.class).ensureIndex(new Index("firstname", Direction.DESC)).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(IndexOptions.class); + verify(collection).createIndex(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void ensureIndexDoesNotUseDefaultCollationIfExplicitlySpecifiedInTheIndex() { + + indexOpsFor(Sith.class).ensureIndex(new Index("firstname", Direction.DESC).collation(Collation.of("en_US"))) + .subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(IndexOptions.class); + verify(collection).createIndex(any(), options.capture()); + + 
assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("en_US").build()); + } + + private DefaultReactiveIndexOperations indexOpsFor(Class type) { + return new DefaultReactiveIndexOperations(template, template.getCollectionName(type), + new QueryMapper(template.getConverter()), type); + } + + static class Jedi { + + @Field("firstname") String name; + + public Jedi() {} + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public String toString() { + return "DefaultReactiveIndexOperationsUnitTests.Jedi(name=" + this.getName() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(collation = "de_AT") + static class Sith { + @Field("firstname") String name; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsTests.java index 905e94df78..6331e1dbc7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,15 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.junit.Assume.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -32,10 +31,13 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.script.ExecutableMongoScript; import org.springframework.data.mongodb.core.script.NamedMongoScript; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientExtension; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * Integration tests for {@link DefaultScriptOperations}. 
@@ -44,10 +46,13 @@ * @author Oliver Gierke * @since 1.7 */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@EnableIfMongoServerVersion(isLessThan = "4.1.0") @ContextConfiguration public class DefaultScriptOperationsTests { + static @Client MongoClient mongoClient; + @Configuration static class Config { @@ -55,14 +60,13 @@ static class Config { @Bean public MongoClient mongoClient() { - return new MongoClient(); + return mongoClient; } @Bean public MongoTemplate template() throws Exception { return new MongoTemplate(mongoClient(), DB_NAME); } - } static final String JAVASCRIPT_COLLECTION_NAME = "system.js"; @@ -74,7 +78,7 @@ public MongoTemplate template() throws Exception { @Autowired MongoTemplate template; DefaultScriptOperations scriptOps; - @Before + @BeforeEach public void setUp() { template.getCollection(JAVASCRIPT_COLLECTION_NAME).deleteMany(new Document()); @@ -83,18 +87,18 @@ public void setUp() { @Test // DATAMONGO-479 public void executeShouldDirectlyRunExecutableMongoScript() { - assertThat(scriptOps.execute(EXECUTABLE_SCRIPT, 10), is((Object) 10D)); + assertThat(scriptOps.execute(EXECUTABLE_SCRIPT, 10)).isEqualTo((Object) 10D); } @Test // DATAMONGO-479 public void saveShouldStoreCallableScriptCorrectly() { Query query = query(where("_id").is(SCRIPT_NAME)); - assumeThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME), is(false)); + assertThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME)).isFalse(); scriptOps.register(CALLABLE_SCRIPT); - assumeThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME), is(true)); + assertThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME)).isTrue(); } @Test // DATAMONGO-479 @@ -103,7 +107,7 @@ public void saveShouldStoreExecutableScriptCorrectly() { NamedMongoScript script = scriptOps.register(EXECUTABLE_SCRIPT); Query query = query(where("_id").is(script.getName())); - assumeThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME), is(true)); + 
assertThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME)).isTrue(); } @Test // DATAMONGO-479 @@ -112,11 +116,11 @@ public void executeShouldRunCallableScriptThatHasBeenSavedBefore() { scriptOps.register(CALLABLE_SCRIPT); Query query = query(where("_id").is(SCRIPT_NAME)); - assumeThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME), is(true)); + assertThat(template.exists(query, JAVASCRIPT_COLLECTION_NAME)).isTrue(); Object result = scriptOps.call(CALLABLE_SCRIPT.getName(), 10); - assertThat(result, is((Object) 10D)); + assertThat(result).isEqualTo(10D); } @Test // DATAMONGO-479 @@ -124,12 +128,12 @@ public void existsShouldReturnTrueIfScriptAvailableOnServer() { scriptOps.register(CALLABLE_SCRIPT); - assertThat(scriptOps.exists(SCRIPT_NAME), is(true)); + assertThat(scriptOps.exists(SCRIPT_NAME)).isTrue(); } @Test // DATAMONGO-479 public void existsShouldReturnFalseIfScriptNotAvailableOnServer() { - assertThat(scriptOps.exists(SCRIPT_NAME), is(false)); + assertThat(scriptOps.exists(SCRIPT_NAME)).isFalse(); } @Test // DATAMONGO-479 @@ -139,12 +143,12 @@ public void callShouldExecuteExistingScript() { Object result = scriptOps.call(SCRIPT_NAME, 10); - assertThat(result, is((Object) 10D)); + assertThat(result).isEqualTo((Object) 10D); } - @Test(expected = UncategorizedDataAccessException.class) // DATAMONGO-479 + @Test // DATAMONGO-479 public void callShouldThrowExceptionWhenCallingScriptThatDoesNotExist() { - scriptOps.call(SCRIPT_NAME, 10); + assertThatExceptionOfType(UncategorizedDataAccessException.class).isThrownBy(() -> scriptOps.call(SCRIPT_NAME, 10)); } @Test // DATAMONGO-479 @@ -152,16 +156,16 @@ public void scriptNamesShouldContainNameOfRegisteredScript() { scriptOps.register(CALLABLE_SCRIPT); - assertThat(scriptOps.getScriptNames(), hasItems("echo")); + assertThat(scriptOps.getScriptNames()).contains("echo"); } @Test // DATAMONGO-479 public void scriptNamesShouldReturnEmptySetWhenNoScriptRegistered() { - assertThat(scriptOps.getScriptNames(), 
is(empty())); + assertThat(scriptOps.getScriptNames()).isEmpty(); } @Test // DATAMONGO-1465 public void executeShouldNotQuoteStrings() { - assertThat(scriptOps.execute(EXECUTABLE_SCRIPT, "spring-data"), is((Object) "spring-data")); + assertThat(scriptOps.execute(EXECUTABLE_SCRIPT, "spring-data")).isEqualTo((Object) "spring-data"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsUnitTests.java index cfa80af676..7418f17c39 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultScriptOperationsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,16 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.core.IsNull.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.data.mongodb.core.script.ExecutableMongoScript; import org.springframework.data.mongodb.core.script.NamedMongoScript; @@ -35,29 +35,29 @@ * @author Oliver Gierke * @since 1.7 */ -@RunWith(MockitoJUnitRunner.class) -public class DefaultScriptOperationsUnitTests { +@ExtendWith(MockitoExtension.class) +class DefaultScriptOperationsUnitTests { - DefaultScriptOperations scriptOps; + private DefaultScriptOperations scriptOps; @Mock MongoOperations mongoOperations; - @Before - public void setUp() { + @BeforeEach + void setUp() { this.scriptOps = new DefaultScriptOperations(mongoOperations); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-479 - public void rejectsNullExecutableMongoScript() { - scriptOps.register((ExecutableMongoScript) null); + @Test // DATAMONGO-479 + void rejectsNullExecutableMongoScript() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.register((ExecutableMongoScript) null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-479 - public void rejectsNullNamedMongoScript() { - 
scriptOps.register((NamedMongoScript) null); + @Test // DATAMONGO-479 + void rejectsNullNamedMongoScript() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.register((NamedMongoScript) null)); } @Test // DATAMONGO-479 - public void saveShouldUseCorrectCollectionName() { + void saveShouldUseCorrectCollectionName() { scriptOps.register(new NamedMongoScript("foo", "function...")); @@ -65,38 +65,38 @@ public void saveShouldUseCorrectCollectionName() { } @Test // DATAMONGO-479 - public void saveShouldGenerateScriptNameForExecutableMongoScripts() { + void saveShouldGenerateScriptNameForExecutableMongoScripts() { scriptOps.register(new ExecutableMongoScript("function...")); ArgumentCaptor captor = ArgumentCaptor.forClass(NamedMongoScript.class); verify(mongoOperations, times(1)).save(captor.capture(), eq("system.js")); - Assert.assertThat(captor.getValue().getName(), notNullValue()); + assertThat(captor.getValue().getName()).isNotNull(); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-479 - public void executeShouldThrowExceptionWhenScriptIsNull() { - scriptOps.execute(null); + @Test // DATAMONGO-479 + void executeShouldThrowExceptionWhenScriptIsNull() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.execute(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-479 - public void existsShouldThrowExceptionWhenScriptNameIsNull() { - scriptOps.exists(null); + @Test // DATAMONGO-479 + void existsShouldThrowExceptionWhenScriptNameIsNull() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.exists(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-479 - public void existsShouldThrowExceptionWhenScriptNameIsEmpty() { - scriptOps.exists(""); + @Test // DATAMONGO-479 + void existsShouldThrowExceptionWhenScriptNameIsEmpty() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.exists("")); } - @Test(expected = IllegalArgumentException.class) // 
DATAMONGO-479 - public void callShouldThrowExceptionWhenScriptNameIsNull() { - scriptOps.call(null); + @Test // DATAMONGO-479 + void callShouldThrowExceptionWhenScriptNameIsNull() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.call(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-479 - public void callShouldThrowExceptionWhenScriptNameIsEmpty() { - scriptOps.call(""); + @Test // DATAMONGO-479 + void callShouldThrowExceptionWhenScriptNameIsEmpty() { + assertThatIllegalArgumentException().isThrownBy(() -> scriptOps.call("")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DocumentTestUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DocumentTestUtils.java index 686b1e6e83..ed468f8ed2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DocumentTestUtils.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DocumentTestUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -29,6 +29,7 @@ * * @author Oliver Gierke * @author Mark Paluch + * @author Jongwoo Han */ public abstract class DocumentTestUtils { @@ -38,7 +39,7 @@ private DocumentTestUtils() {} * Expects the field with the given key to be not {@literal null} and a {@link Document} in turn and returns it. 
* * @param source the {@link Document} to lookup the nested one - * @param key the key of the field to lokup the nested {@link Document} + * @param key the key of the field to lookup the nested {@link Document} * @return */ public static Document getAsDocument(Document source, String key) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java new file mode 100644 index 0000000000..a2197463e6 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java @@ -0,0 +1,187 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.time.Instant; +import java.util.Map; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.core.convert.ConversionService; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.test.util.MongoTestMappingContext; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; + +/** + * Unit tests for {@link EntityOperations}. + * + * @author Mark Paluch + * @author Christoph Strobl + */ +class EntityOperationsUnitTests { + + ConversionService conversionService = new DefaultConversionService(); + + EntityOperations operations = new EntityOperations( + new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, MongoTestMappingContext.newTestContext())); + + @Test // GH-3731 + void shouldReportInvalidTimeField() { + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> operations.forType(InvalidTimeField.class).getCollectionOptions()) + .withMessageContaining("Time series field 'foo' does not exist"); + } + + @Test // GH-3731 + void shouldReportInvalidMetaField() { + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> operations.forType(InvalidMetaField.class).getCollectionOptions()) + .withMessageContaining("Meta field 'foo' does not exist"); + } + + @Test // DATAMONGO-2293 + void populateIdShouldReturnTargetBeanWhenIdIsNull() { + assertThat(initAdaptibleEntity(new DomainTypeWithIdProperty()).populateIdIfNecessary(null)).isNotNull(); + } + + @Test // GH-4308 + void 
shouldExtractKeysFromEntity() { + + WithNestedDocument object = new WithNestedDocument("foo"); + + Map keys = operations.forEntity(object).extractKeys(new Document("id", 1), + WithNestedDocument.class); + + assertThat(keys).containsEntry("id", "foo"); + } + + @Test // GH-4308 + void shouldExtractKeysFromDocument() { + + Document object = new Document("id", "foo"); + + Map keys = operations.forEntity(object).extractKeys(new Document("id", 1), Document.class); + + assertThat(keys).containsEntry("id", "foo"); + } + + @Test // GH-4308 + void shouldExtractKeysFromNestedEntity() { + + WithNestedDocument object = new WithNestedDocument("foo", new WithNestedDocument("bar"), null); + + Map keys = operations.forEntity(object).extractKeys(new Document("nested.id", 1), + WithNestedDocument.class); + + assertThat(keys).containsEntry("nested.id", "bar"); + } + + @Test // GH-4308 + void shouldExtractKeysFromNestedEntityDocument() { + + WithNestedDocument object = new WithNestedDocument("foo", new WithNestedDocument("bar"), + new Document("john", "doe")); + + Map keys = operations.forEntity(object).extractKeys(new Document("document.john", 1), + WithNestedDocument.class); + + assertThat(keys).containsEntry("document.john", "doe"); + } + + @Test // GH-4308 + void shouldExtractKeysFromNestedDocument() { + + Document object = new Document("document", new Document("john", "doe")); + + Map keys = operations.forEntity(object).extractKeys(new Document("document.john", 1), + Document.class); + + assertThat(keys).containsEntry("document.john", "doe"); + } + + @Test // GH-4308 + void shouldExtractIdPropertyNameFromRawDocument() { + + Document object = new Document("_id", "id-1").append("value", "val"); + + Map keys = operations.forEntity(object).extractKeys(new Document("value", 1), DomainTypeWithIdProperty.class); + + assertThat(keys).containsEntry("id", "id-1"); + } + + @Test // GH-4308 + void shouldExtractValuesFromProxy() { + + ProjectionInterface source = new 
SpelAwareProxyProjectionFactory().createProjection(ProjectionInterface.class, new Document("_id", "id-1").append("value", "val")); + + Map keys = operations.forEntity(source).extractKeys(new Document("value", 1), DomainTypeWithIdProperty.class); + + assertThat(keys).isEqualTo(new Document("id", "id-1").append("value", "val")); + } + + EntityOperations.AdaptibleEntity initAdaptibleEntity(T source) { + return operations.forEntity(source, conversionService); + } + + private static class DomainTypeWithIdProperty { + + @Id String id; + String value; + } + + @TimeSeries(timeField = "foo") + static class InvalidTimeField { + + } + + @TimeSeries(timeField = "time", metaField = "foo") + static class InvalidMetaField { + Instant time; + } + + class WithNestedDocument { + + String id; + + WithNestedDocument nested; + + Document document; + + public WithNestedDocument() {} + + public WithNestedDocument(String id) { + this.id = id; + } + + public WithNestedDocument(String id, WithNestedDocument nested, Document document) { + + this.id = id; + this.nested = nested; + this.document = document; + } + } + + interface ProjectionInterface { + String getValue(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupportUnitTests.java index 47cea28238..05f0695839 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupportUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupportUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,17 +16,17 @@ package org.springframework.data.mongodb.core; import static org.assertj.core.api.Assertions.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.data.mongodb.core.aggregation.Aggregation; /** @@ -34,39 +34,40 @@ * * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class ExecutableAggregationOperationSupportUnitTests { @Mock MongoTemplate template; - ExecutableAggregationOperationSupport opSupport; + private ExecutableAggregationOperationSupport opSupport; - @Before - public void setUp() { + @BeforeEach + void setUp() { opSupport = new ExecutableAggregationOperationSupport(template); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1563 - public void throwsExceptionOnNullDomainType() { - opSupport.aggregateAndReturn(null); + @Test // DATAMONGO-1563 + void throwsExceptionOnNullDomainType() { + assertThatIllegalArgumentException().isThrownBy(() -> opSupport.aggregateAndReturn(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1563 - public void 
throwsExceptionOnNullCollectionWhenUsed() { - opSupport.aggregateAndReturn(Person.class).inCollection(null); + @Test // DATAMONGO-1563 + void throwsExceptionOnNullCollectionWhenUsed() { + assertThatIllegalArgumentException() + .isThrownBy(() -> opSupport.aggregateAndReturn(Person.class).inCollection(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1563 - public void throwsExceptionOnEmptyCollectionWhenUsed() { - opSupport.aggregateAndReturn(Person.class).inCollection(""); + @Test // DATAMONGO-1563 + void throwsExceptionOnEmptyCollectionWhenUsed() { + assertThatIllegalArgumentException().isThrownBy(() -> opSupport.aggregateAndReturn(Person.class).inCollection("")); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1563 - public void throwsExceptionOnNullAggregation() { - opSupport.aggregateAndReturn(Person.class).by(null); + @Test // DATAMONGO-1563 + void throwsExceptionOnNullAggregation() { + assertThatIllegalArgumentException().isThrownBy(() -> opSupport.aggregateAndReturn(Person.class).by(null)); } @Test // DATAMONGO-1563 - public void aggregateWithUntypedAggregationAndExplicitCollection() { + void aggregateWithUntypedAggregationAndExplicitCollection() { opSupport.aggregateAndReturn(Person.class).inCollection("star-wars").by(newAggregation(project("foo"))).all(); @@ -76,37 +77,37 @@ public void aggregateWithUntypedAggregationAndExplicitCollection() { } @Test // DATAMONGO-1563 - public void aggregateWithUntypedAggregation() { + void aggregateWithUntypedAggregation() { - when(template.determineCollectionName(any(Class.class))).thenReturn("person"); + when(template.getCollectionName(any(Class.class))).thenReturn("person"); opSupport.aggregateAndReturn(Person.class).by(newAggregation(project("foo"))).all(); ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); - verify(template).determineCollectionName(captor.capture()); + verify(template).getCollectionName(captor.capture()); 
verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture()); assertThat(captor.getAllValues()).containsExactly(Person.class, Person.class); } @Test // DATAMONGO-1563 - public void aggregateWithTypeAggregation() { + void aggregateWithTypeAggregation() { - when(template.determineCollectionName(any(Class.class))).thenReturn("person"); + when(template.getCollectionName(any(Class.class))).thenReturn("person"); opSupport.aggregateAndReturn(Jedi.class).by(newAggregation(Person.class, project("foo"))).all(); ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); - verify(template).determineCollectionName(captor.capture()); + verify(template).getCollectionName(captor.capture()); verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture()); assertThat(captor.getAllValues()).containsExactly(Person.class, Jedi.class); } @Test // DATAMONGO-1563 - public void aggregateStreamWithUntypedAggregationAndExplicitCollection() { + void aggregateStreamWithUntypedAggregationAndExplicitCollection() { opSupport.aggregateAndReturn(Person.class).inCollection("star-wars").by(newAggregation(project("foo"))).stream(); @@ -116,30 +117,30 @@ public void aggregateStreamWithUntypedAggregationAndExplicitCollection() { } @Test // DATAMONGO-1563 - public void aggregateStreamWithUntypedAggregation() { + void aggregateStreamWithUntypedAggregation() { - when(template.determineCollectionName(any(Class.class))).thenReturn("person"); + when(template.getCollectionName(any(Class.class))).thenReturn("person"); opSupport.aggregateAndReturn(Person.class).by(newAggregation(project("foo"))).stream(); ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); - verify(template).determineCollectionName(captor.capture()); + verify(template).getCollectionName(captor.capture()); verify(template).aggregateStream(any(Aggregation.class), eq("person"), captor.capture()); assertThat(captor.getAllValues()).containsExactly(Person.class, Person.class); } @Test // 
DATAMONGO-1563 - public void aggregateStreamWithTypeAggregation() { + void aggregateStreamWithTypeAggregation() { - when(template.determineCollectionName(any(Class.class))).thenReturn("person"); + when(template.getCollectionName(any(Class.class))).thenReturn("person"); opSupport.aggregateAndReturn(Jedi.class).by(newAggregation(Person.class, project("foo"))).stream(); ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); - verify(template).determineCollectionName(captor.capture()); + verify(template).getCollectionName(captor.capture()); verify(template).aggregateStream(any(Aggregation.class), eq("person"), captor.capture()); assertThat(captor.getAllValues()).containsExactly(Person.class, Jedi.class); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupportTests.java index 93b5c7a0c8..eac248e69a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupportTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupportTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,19 +18,19 @@ import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; - -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; +import static org.springframework.data.mongodb.test.util.DirtiesStateExtension.*; import java.util.Date; +import java.util.Objects; import java.util.stream.Stream; import org.bson.BsonString; import org.bson.BsonValue; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Value; import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.dao.InvalidDataAccessApiUsageException; @@ -40,11 +40,15 @@ import org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import org.springframework.data.mongodb.core.index.GeospatialIndex; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.NearQuery; - -import com.mongodb.MongoClient; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import 
org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; /** * Integration tests for {@link ExecutableFindOperationSupport}. @@ -52,25 +56,29 @@ * @author Christoph Strobl * @author Mark Paluch */ -public class ExecutableFindOperationSupportTests { +@ExtendWith({ MongoTemplateExtension.class, DirtiesStateExtension.class }) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class ExecutableFindOperationSupportTests implements StateFunctions { private static final String STAR_WARS = "star-wars"; private static final String STAR_WARS_PLANETS = "star-wars-universe"; - MongoTemplate template; - Person han; - Person luke; + @Template(database = "executable-find-operation-support-tests", initialEntitySet = { Person.class, Planet.class }) // + private static MongoTestTemplate template; - Planet alderan; - Planet dantooine; + private Person han; + private Person luke; - @Before - public void setUp() { + private Planet alderan; + private Planet dantooine; - template = new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "ExecutableFindOperationSupportTests")); - template.dropCollection(STAR_WARS); - template.dropCollection(STAR_WARS_PLANETS); + @Override + public void clear() { + template.flush(); + } + @Override + public void setupState() { template.indexOps(Planet.class).ensureIndex( new GeospatialIndex("coordinates").typed(GeoSpatialIndexType.GEO_2DSPHERE).named("planet-coordinate-idx")); @@ -78,111 +86,138 @@ public void setUp() { initPlanets(); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1563 - public void domainTypeIsRequired() { - template.query(null); + @Test // DATAMONGO-1563 + void domainTypeIsRequired() { + assertThatIllegalArgumentException().isThrownBy(() -> template.query(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1563 - public void returnTypeIsRequiredOnSet() { - template.query(Person.class).as(null); + @Test // DATAMONGO-1563 + void 
returnTypeIsRequiredOnSet() { + assertThatIllegalArgumentException().isThrownBy(() -> template.query(Person.class).as(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1563 - public void collectionIsRequiredOnSet() { - template.query(Person.class).inCollection(null); + @Test // DATAMONGO-1563 + void collectionIsRequiredOnSet() { + assertThatIllegalArgumentException().isThrownBy(() -> template.query(Person.class).inCollection(null)); } @Test // DATAMONGO-1563 - public void findAll() { + void findAll() { assertThat(template.query(Person.class).all()).containsExactlyInAnyOrder(han, luke); } @Test // DATAMONGO-1563 - public void findAllWithCollection() { + void findAllWithCollection() { assertThat(template.query(Human.class).inCollection(STAR_WARS).all()).hasSize(2); } @Test // DATAMONGO-1563 - public void findAllWithProjection() { + void findAllWithProjection() { assertThat(template.query(Person.class).as(Jedi.class).all()).hasOnlyElementsOfType(Jedi.class).hasSize(2); } + @Test // DATAMONGO-2041 + @DirtiesState + void findAllWithProjectionOnEmbeddedType() { + + luke.father = new Person(); + luke.father.firstname = "anakin"; + + template.save(luke); + + assertThat(template.query(Person.class).as(PersonDtoProjection.class).matching(query(where("id").is(luke.id))) + .firstValue()).hasFieldOrPropertyWithValue("father", luke.father); + } + @Test // DATAMONGO-1733 - public void findByReturningAllValuesAsClosedInterfaceProjection() { + void findByReturningAllValuesAsClosedInterfaceProjection() { assertThat(template.query(Person.class).as(PersonProjection.class).all()) .hasOnlyElementsOfTypes(PersonProjection.class); } @Test // DATAMONGO-1563 - public void findAllBy() { + void findAllBy() { assertThat(template.query(Person.class).matching(query(where("firstname").is("luke"))).all()) .containsExactlyInAnyOrder(luke); } @Test // DATAMONGO-1563 - public void findAllByWithCollectionUsingMappingInformation() { + void 
findAllByWithCollectionUsingMappingInformation() { assertThat(template.query(Jedi.class).inCollection(STAR_WARS).matching(query(where("name").is("luke"))).all()) .hasSize(1).hasOnlyElementsOfType(Jedi.class); } @Test // DATAMONGO-1563 - public void findAllByWithCollection() { + void findAllByWithCollection() { assertThat(template.query(Human.class).inCollection(STAR_WARS).matching(query(where("firstname").is("luke"))).all()) .hasSize(1); } + @Test // DATAMONGO-2323 + void findAllAsDocument() { + assertThat( + template.query(Document.class).inCollection(STAR_WARS).matching(query(where("firstname").is("luke"))).all()) + .hasSize(1); + } + @Test // DATAMONGO-1563 - public void findAllByWithProjection() { + void findAllByWithProjection() { assertThat(template.query(Person.class).as(Jedi.class).matching(query(where("firstname").is("luke"))).all()) .hasOnlyElementsOfType(Jedi.class).hasSize(1); } @Test // DATAMONGO-1563 - public void findBy() { + void findBy() { assertThat(template.query(Person.class).matching(query(where("firstname").is("luke"))).one()).contains(luke); } + @Test // DATAMONGO-2416 + void findByCriteria() { + assertThat(template.query(Person.class).matching(where("firstname").is("luke")).one()).contains(luke); + } + @Test // DATAMONGO-1563 - public void findByNoMatch() { + void findByNoMatch() { assertThat(template.query(Person.class).matching(query(where("firstname").is("spock"))).one()).isEmpty(); } - @Test(expected = IncorrectResultSizeDataAccessException.class) // DATAMONGO-1563 - public void findByTooManyResults() { - template.query(Person.class).matching(query(where("firstname").in("han", "luke"))).one(); + @Test // DATAMONGO-1563 + void findByTooManyResults() { + assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class) + .isThrownBy(() -> template.query(Person.class).matching(query(where("firstname").in("han", "luke"))).one()); } @Test // DATAMONGO-1726 - public void findByReturningOneValue() { + void findByReturningOneValue() { 
assertThat(template.query(Person.class).matching(query(where("firstname").is("luke"))).oneValue()).isEqualTo(luke); } - @Test(expected = IncorrectResultSizeDataAccessException.class) // DATAMONGO-1726 - public void findByReturningOneValueButTooManyResults() { - template.query(Person.class).matching(query(where("firstname").in("han", "luke"))).oneValue(); + @Test // DATAMONGO-1726 + void findByReturningOneValueButTooManyResults() { + assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class).isThrownBy( + () -> template.query(Person.class).matching(query(where("firstname").in("han", "luke"))).oneValue()); } @Test // DATAMONGO-1726 - public void findByReturningFirstValue() { + void findByReturningFirstValue() { assertThat(template.query(Person.class).matching(query(where("firstname").is("luke"))).firstValue()) .isEqualTo(luke); } @Test // DATAMONGO-1726 - public void findByReturningFirstValueForManyResults() { + void findByReturningFirstValueForManyResults() { assertThat(template.query(Person.class).matching(query(where("firstname").in("han", "luke"))).firstValue()) .isIn(han, luke); } @Test // DATAMONGO-1733 - public void findByReturningFirstValueAsClosedInterfaceProjection() { + void findByReturningFirstValueAsClosedInterfaceProjection() { PersonProjection result = template.query(Person.class).as(PersonProjection.class) .matching(query(where("firstname").is("han"))).firstValue(); @@ -192,7 +227,7 @@ public void findByReturningFirstValueAsClosedInterfaceProjection() { } @Test // DATAMONGO-1733 - public void findByReturningFirstValueAsOpenInterfaceProjection() { + void findByReturningFirstValueAsOpenInterfaceProjection() { PersonSpELProjection result = template.query(Person.class).as(PersonSpELProjection.class) .matching(query(where("firstname").is("han"))).firstValue(); @@ -202,7 +237,7 @@ public void findByReturningFirstValueAsOpenInterfaceProjection() { } @Test // DATAMONGO-1563 - public void streamAll() { + void streamAll() { try (Stream stream = 
template.query(Person.class).stream()) { assertThat(stream).containsExactlyInAnyOrder(han, luke); @@ -210,7 +245,7 @@ public void streamAll() { } @Test // DATAMONGO-1563 - public void streamAllWithCollection() { + void streamAllWithCollection() { try (Stream stream = template.query(Human.class).inCollection(STAR_WARS).stream()) { assertThat(stream).hasSize(2); @@ -218,7 +253,7 @@ public void streamAllWithCollection() { } @Test // DATAMONGO-1563 - public void streamAllWithProjection() { + void streamAllWithProjection() { try (Stream stream = template.query(Person.class).as(Jedi.class).stream()) { assertThat(stream).hasOnlyElementsOfType(Jedi.class).hasSize(2); @@ -226,7 +261,7 @@ public void streamAllWithProjection() { } @Test // DATAMONGO-1733 - public void streamAllReturningResultsAsClosedInterfaceProjection() { + void streamAllReturningResultsAsClosedInterfaceProjection() { TerminatingFind operation = template.query(Person.class).as(PersonProjection.class); @@ -239,7 +274,7 @@ public void streamAllReturningResultsAsClosedInterfaceProjection() { } @Test // DATAMONGO-1733 - public void streamAllReturningResultsAsOpenInterfaceProjection() { + void streamAllReturningResultsAsOpenInterfaceProjection() { TerminatingFind operation = template.query(Person.class).as(PersonSpELProjection.class); @@ -252,7 +287,7 @@ public void streamAllReturningResultsAsOpenInterfaceProjection() { } @Test // DATAMONGO-1563 - public void streamAllBy() { + void streamAllBy() { try (Stream stream = template.query(Person.class).matching(query(where("firstname").is("luke"))).stream()) { assertThat(stream).containsExactlyInAnyOrder(luke); @@ -260,7 +295,7 @@ public void streamAllBy() { } @Test // DATAMONGO-1563 - public void findAllNearBy() { + void findAllNearBy() { GeoResults results = template.query(Planet.class).near(NearQuery.near(-73.9667, 40.78).spherical(true)) .all(); @@ -269,7 +304,7 @@ public void findAllNearBy() { } @Test // DATAMONGO-1563 - public void 
findAllNearByWithCollectionAndProjection() { + void findAllNearByWithCollectionAndProjection() { GeoResults results = template.query(Object.class).inCollection(STAR_WARS_PLANETS).as(Human.class) .near(NearQuery.near(-73.9667, 40.78).spherical(true)).all(); @@ -281,7 +316,7 @@ public void findAllNearByWithCollectionAndProjection() { } @Test // DATAMONGO-1733 - public void findAllNearByReturningGeoResultContentAsClosedInterfaceProjection() { + void findAllNearByReturningGeoResultContentAsClosedInterfaceProjection() { GeoResults results = template.query(Planet.class).as(PlanetProjection.class) .near(NearQuery.near(-73.9667, 40.78).spherical(true)).all(); @@ -294,7 +329,7 @@ public void findAllNearByReturningGeoResultContentAsClosedInterfaceProjection() } @Test // DATAMONGO-1733 - public void findAllNearByReturningGeoResultContentAsOpenInterfaceProjection() { + void findAllNearByReturningGeoResultContentAsOpenInterfaceProjection() { GeoResults results = template.query(Planet.class).as(PlanetSpELProjection.class) .near(NearQuery.near(-73.9667, 40.78).spherical(true)).all(); @@ -307,29 +342,30 @@ public void findAllNearByReturningGeoResultContentAsOpenInterfaceProjection() { } @Test // DATAMONGO-1728 - public void firstShouldReturnFirstEntryInCollection() { + void firstShouldReturnFirstEntryInCollection() { assertThat(template.query(Person.class).first()).isNotEmpty(); } @Test // DATAMONGO-1734 - public void countShouldReturnNrOfElementsInCollectionWhenNoQueryPresent() { + void countShouldReturnNrOfElementsInCollectionWhenNoQueryPresent() { assertThat(template.query(Person.class).count()).isEqualTo(2); } @Test // DATAMONGO-1734 - public void countShouldReturnNrOfElementsMatchingQuery() { + void countShouldReturnNrOfElementsMatchingQuery() { assertThat(template.query(Person.class).matching(query(where("firstname").is(luke.getFirstname()))).count()) .isEqualTo(1); } @Test // DATAMONGO-1734 - public void existsShouldReturnTrueIfAtLeastOneElementExistsInCollection() { + void 
existsShouldReturnTrueIfAtLeastOneElementExistsInCollection() { assertThat(template.query(Person.class).exists()).isTrue(); } @Test // DATAMONGO-1734 - public void existsShouldReturnFalseIfNoElementExistsInCollection() { + @DirtiesState + void existsShouldReturnFalseIfNoElementExistsInCollection() { template.remove(new BasicQuery("{}"), STAR_WARS); @@ -337,29 +373,29 @@ public void existsShouldReturnFalseIfNoElementExistsInCollection() { } @Test // DATAMONGO-1734 - public void existsShouldReturnTrueIfAtLeastOneElementMatchesQuery() { + void existsShouldReturnTrueIfAtLeastOneElementMatchesQuery() { assertThat(template.query(Person.class).matching(query(where("firstname").is(luke.getFirstname()))).exists()) .isTrue(); } @Test // DATAMONGO-1734 - public void existsShouldReturnFalseWhenNoElementMatchesQuery() { + void existsShouldReturnFalseWhenNoElementMatchesQuery() { assertThat(template.query(Person.class).matching(query(where("firstname").is("spock"))).exists()).isFalse(); } @Test // DATAMONGO-1734 - public void returnsTargetObjectDirectlyIfProjectionInterfaceIsImplemented() { + void returnsTargetObjectDirectlyIfProjectionInterfaceIsImplemented() { assertThat(template.query(Person.class).as(Contact.class).all()).allMatch(it -> it instanceof Person); } @Test // DATAMONGO-1761 - public void distinctReturnsEmptyListIfNoMatchFound() { + void distinctReturnsEmptyListIfNoMatchFound() { assertThat(template.query(Person.class).distinct("actually-not-property-in-use").as(String.class).all()).isEmpty(); } @Test // DATAMONGO-1761 - public void distinctReturnsSimpleFieldValuesCorrectlyForCollectionHavingReturnTypeSpecifiedThatCanBeConvertedDirectlyByACodec() { + void distinctReturnsSimpleFieldValuesCorrectlyForCollectionHavingReturnTypeSpecifiedThatCanBeConvertedDirectlyByACodec() { Person anakin = new Person(); anakin.firstname = "anakin"; @@ -372,7 +408,7 @@ public void distinctReturnsSimpleFieldValuesCorrectlyForCollectionHavingReturnTy } @Test // DATAMONGO-1761 - public 
void distinctReturnsSimpleFieldValuesCorrectly() { + void distinctReturnsSimpleFieldValuesCorrectly() { Person anakin = new Person(); anakin.firstname = "anakin"; @@ -395,7 +431,7 @@ public void distinctReturnsSimpleFieldValuesCorrectly() { } @Test // DATAMONGO-1761 - public void distinctReturnsComplexValuesCorrectly() { + void distinctReturnsComplexValuesCorrectly() { Sith sith = new Sith(); sith.rank = "lord"; @@ -410,7 +446,7 @@ public void distinctReturnsComplexValuesCorrectly() { } @Test // DATAMONGO-1761 - public void distinctReturnsComplexValuesCorrectlyHavingReturnTypeSpecified() { + void distinctReturnsComplexValuesCorrectlyHavingReturnTypeSpecified() { Sith sith = new Sith(); sith.rank = "lord"; @@ -426,7 +462,7 @@ public void distinctReturnsComplexValuesCorrectlyHavingReturnTypeSpecified() { } @Test // DATAMONGO-1761 - public void distinctReturnsComplexValuesCorrectlyHavingReturnTypeDocumentSpecified() { + void distinctReturnsComplexValuesCorrectlyHavingReturnTypeDocumentSpecified() { Sith sith = new Sith(); sith.rank = "lord"; @@ -442,21 +478,22 @@ public void distinctReturnsComplexValuesCorrectlyHavingReturnTypeDocumentSpecifi } @Test // DATAMONGO-1761 - public void distinctMapsFieldNameCorrectly() { + void distinctMapsFieldNameCorrectly() { assertThat(template.query(Jedi.class).inCollection(STAR_WARS).distinct("name").as(String.class).all()) .containsExactlyInAnyOrder("han", "luke"); } @Test // DATAMONGO-1761 - public void distinctReturnsRawValuesIfReturnTypeIsBsonValue() { + void distinctReturnsRawValuesIfReturnTypeIsBsonValue() { assertThat(template.query(Person.class).distinct("lastname").as(BsonValue.class).all()) .containsExactlyInAnyOrder(new BsonString("solo"), new BsonString("skywalker")); } @Test // DATAMONGO-1761 - public void distinctReturnsValuesMappedToTheirJavaTypeEvenWhenNotExplicitlyDefinedByTheDomainType() { + @DirtiesState + void distinctReturnsValuesMappedToTheirJavaTypeEvenWhenNotExplicitlyDefinedByTheDomainType() { 
template.save(new Document("darth", "vader"), STAR_WARS); @@ -464,7 +501,8 @@ public void distinctReturnsValuesMappedToTheirJavaTypeEvenWhenNotExplicitlyDefin } @Test // DATAMONGO-1761 - public void distinctReturnsMappedDomainTypeForProjections() { + @DirtiesState + void distinctReturnsMappedDomainTypeForProjections() { luke.father = new Person(); luke.father.firstname = "anakin"; @@ -476,7 +514,8 @@ public void distinctReturnsMappedDomainTypeForProjections() { } @Test // DATAMONGO-1761 - public void distinctAlllowsQueryUsingObjectSourceType() { + @DirtiesState + void distinctAlllowsQueryUsingObjectSourceType() { luke.father = new Person(); luke.father.firstname = "anakin"; @@ -488,7 +527,8 @@ public void distinctAlllowsQueryUsingObjectSourceType() { } @Test // DATAMONGO-1761 - public void distinctReturnsMappedDomainTypeExtractedFromPropertyWhenNoExplicitTypePresent() { + @DirtiesState + void distinctReturnsMappedDomainTypeExtractedFromPropertyWhenNoExplicitTypePresent() { luke.father = new Person(); luke.father.firstname = "anakin"; @@ -501,14 +541,92 @@ public void distinctReturnsMappedDomainTypeExtractedFromPropertyWhenNoExplicitTy assertThat(template.query(Person.class).distinct("father").all()).containsExactlyInAnyOrder(expected); } - @Test(expected = InvalidDataAccessApiUsageException.class) // DATAMONGO-1761 - public void distinctThrowsExceptionWhenExplicitMappingTypeCannotBeApplied() { - template.query(Person.class).distinct("firstname").as(Long.class).all(); + @Test // DATAMONGO-1761 + void distinctThrowsExceptionWhenExplicitMappingTypeCannotBeApplied() { + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> template.query(Person.class).distinct("firstname").as(Long.class).all()); + } + + @Test // DATAMONGO-2507 + void distinctAppliesFilterQuery() { + + assertThat(template.query(Person.class).inCollection(STAR_WARS).distinct("firstname") // + .matching(where("lastname").is(luke.lastname)) // + .as(String.class) // + 
.all() // + ).containsExactlyInAnyOrder("luke"); + } + + @Test // GH-2860 + void projectionOnDbRef() { + + WithRefs source = new WithRefs(); + source.id = "id-1"; + source.noRef = "value"; + source.planetDbRef = alderan; + + template.save(source); + + WithDbRefProjection target = template.query(WithRefs.class).as(WithDbRefProjection.class) + .matching(where("id").is(source.id)).oneValue(); + + assertThat(target.getPlanetDbRef()).isEqualTo(alderan); + } + + @Test // GH-2860 + @Disabled("GH-3913") + @DirtiesState + void propertyProjectionOnDbRef() { + + WithRefs source = new WithRefs(); + source.id = "id-1"; + source.noRef = "value"; + source.planetDbRef = alderan; + + template.save(source); + + WithDbRefPropertyProjection target = template.query(WithRefs.class).as(WithDbRefPropertyProjection.class) + .matching(where("id").is(source.id)).oneValue(); + + assertThat(target.getPlanetDbRef().getName()).isEqualTo(alderan.getName()); + } + + @Test // GH-2860 + @DirtiesState + void projectionOnDocRef() { + + WithRefs source = new WithRefs(); + source.id = "id-1"; + source.noRef = "value"; + source.planetDocRef = alderan; + + template.save(source); + + WithDocumentRefProjection target = template.query(WithRefs.class).as(WithDocumentRefProjection.class) + .matching(where("id").is(source.id)).oneValue(); + + assertThat(target.getPlanetDocRef()).isEqualTo(alderan); + } + + @Test // GH-2860 + @DirtiesState + void propertyProjectionOnDocRef() { + + WithRefs source = new WithRefs(); + source.id = "id-1"; + source.noRef = "value"; + source.planetDocRef = alderan; + + template.save(source); + + WithDocRefPropertyProjection target = template.query(WithRefs.class).as(WithDocRefPropertyProjection.class) + .matching(where("id").is(source.id)).oneValue(); + + assertThat(target.getPlanetDocRef().getName()).isEqualTo(alderan.getName()); } interface Contact {} - @Data @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) static class Person implements Contact { @@ 
-517,6 +635,72 @@ static class Person implements Contact { String lastname; Object ability; Person father; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public Object getAbility() { + return this.ability; + } + + public Person getFather() { + return this.father; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAbility(Object ability) { + this.ability = ability; + } + + public void setFather(Person father) { + this.father = father; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(lastname, person.lastname) && Objects.equals(ability, person.ability) + && Objects.equals(father, person.father); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname, ability, father); + } + + public String toString() { + return "ExecutableFindOperationSupportTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ", lastname=" + this.getLastname() + ", ability=" + this.getAbility() + ", father=" + this.getFather() + + ")"; + } } interface PersonProjection { @@ -529,32 +713,166 @@ public interface PersonSpELProjection { String getName(); } - @Data + // TODO: Without getters/setters, not identified as projection/properties + static class PersonDtoProjection { + + @Field("firstname") String name; + Person father; + + public String getName() { + return this.name; + } + + public Person getFather() { + return this.father; + } + + public void setName(String name) { + 
this.name = name; + } + + public void setFather(Person father) { + this.father = father; + } + } + static class Human { + @Id String id; + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public String toString() { + return "ExecutableFindOperationSupportTests.Human(id=" + this.getId() + ")"; + } } - @Data - @AllArgsConstructor - @NoArgsConstructor static class Jedi { @Field("firstname") String name; + + public Jedi(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Jedi jedi = (Jedi) o; + return Objects.equals(name, jedi.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + public String toString() { + return "ExecutableFindOperationSupportTests.Jedi(name=" + this.getName() + ")"; + } } - @Data static class Sith { String rank; + + public String getRank() { + return this.rank; + } + + public void setRank(String rank) { + this.rank = rank; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Sith sith = (Sith) o; + return Objects.equals(rank, sith.rank); + } + + @Override + public int hashCode() { + return Objects.hash(rank); + } + + public String toString() { + return "ExecutableFindOperationSupportTests.Sith(rank=" + this.getRank() + ")"; + } } - @Data - @AllArgsConstructor @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS_PLANETS) static class Planet { @Id String name; Point coordinates; + + public Planet(String name, Point coordinates) { + this.name = name; + this.coordinates = coordinates; + } + + public String getName() { + return this.name; + } + + public Point 
getCoordinates() { + return this.coordinates; + } + + public void setName(String name) { + this.name = name; + } + + public void setCoordinates(Point coordinates) { + this.coordinates = coordinates; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Planet planet = (Planet) o; + return Objects.equals(name, planet.name) && Objects.equals(coordinates, planet.coordinates); + } + + @Override + public int hashCode() { + return Objects.hash(name, coordinates); + } + + public String toString() { + return "ExecutableFindOperationSupportTests.Planet(name=" + this.getName() + ", coordinates=" + + this.getCoordinates() + ")"; + } } interface PlanetProjection { @@ -567,6 +885,70 @@ interface PlanetSpELProjection { String getId(); } + static class WithRefs { + + @Id String id; + + String noRef; + + @DBRef Planet planetDbRef; + + @DocumentReference Planet planetDocRef; + + public String getId() { + return this.id; + } + + public String getNoRef() { + return this.noRef; + } + + public Planet getPlanetDbRef() { + return this.planetDbRef; + } + + public Planet getPlanetDocRef() { + return this.planetDocRef; + } + + public void setId(String id) { + this.id = id; + } + + public void setNoRef(String noRef) { + this.noRef = noRef; + } + + public void setPlanetDbRef(Planet planetDbRef) { + this.planetDbRef = planetDbRef; + } + + public void setPlanetDocRef(Planet planetDocRef) { + this.planetDocRef = planetDocRef; + } + + public String toString() { + return "ExecutableFindOperationSupportTests.WithRefs(id=" + this.getId() + ", noRef=" + this.getNoRef() + + ", planetDbRef=" + this.getPlanetDbRef() + ", planetDocRef=" + this.getPlanetDocRef() + ")"; + } + } + + interface WithDbRefProjection { + Planet getPlanetDbRef(); + } + + interface WithDocumentRefProjection { + Planet getPlanetDocRef(); + } + + interface WithDbRefPropertyProjection { + PlanetProjection getPlanetDbRef(); + } 
+ + interface WithDocRefPropertyProjection { + PlanetProjection getPlanetDocRef(); + } + private void initPersons() { han = new Person(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupportUnitTests.java index d4f981528f..d5e5d603c0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupportUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupportUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,21 +16,17 @@ package org.springframework.data.mongodb.core; import static org.assertj.core.api.Assertions.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; -import static org.mockito.Mockito.anyList; - -import lombok.Data; import java.util.Arrays; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; import 
org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.BulkOperations.BulkMode; @@ -40,7 +36,7 @@ * @author Christoph Strobl * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class ExecutableInsertOperationSupportUnitTests { private static final String STAR_WARS = "star-wars"; @@ -48,16 +44,12 @@ public class ExecutableInsertOperationSupportUnitTests { @Mock MongoTemplate template; @Mock BulkOperations bulkOperations; - ExecutableInsertOperationSupport ops; - - Person luke, han; + private ExecutableInsertOperationSupport ops; - @Before - public void setUp() { + private Person luke, han; - when(template.bulkOps(any(), any(), any())).thenReturn(bulkOperations); - when(template.determineCollectionName(any(Class.class))).thenReturn(STAR_WARS); - when(bulkOperations.insert(anyList())).thenReturn(bulkOperations); + @BeforeEach + void setUp() { ops = new ExecutableInsertOperationSupport(template); @@ -70,78 +62,110 @@ public void setUp() { han.id = "id-2"; } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1563 - public void nullCollectionShouldThrowException() { - ops.insert(Person.class).inCollection(null); - + @Test // DATAMONGO-1563 + void nullCollectionShouldThrowException() { + assertThatIllegalArgumentException().isThrownBy(() -> ops.insert(Person.class).inCollection(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1563 - public void nullBulkModeShouldThrowException() { - ops.insert(Person.class).withBulkMode(null); + @Test // DATAMONGO-1563 + void nullBulkModeShouldThrowException() { + assertThatIllegalArgumentException().isThrownBy(() -> ops.insert(Person.class).withBulkMode(null)); } @Test // DATAMONGO-1563 - public void insertShouldUseDerivedCollectionName() { + void insertShouldUseDerivedCollectionName() { + + when(template.getCollectionName(any(Class.class))).thenReturn(STAR_WARS); ops.insert(Person.class).one(luke); ArgumentCaptor 
captor = ArgumentCaptor.forClass(Class.class); - verify(template).determineCollectionName(captor.capture()); + verify(template).getCollectionName(captor.capture()); verify(template).insert(eq(luke), eq(STAR_WARS)); assertThat(captor.getAllValues()).containsExactly(Person.class); } @Test // DATAMONGO-1563 - public void insertShouldUseExplicitCollectionName() { + void insertShouldUseExplicitCollectionName() { ops.insert(Person.class).inCollection(STAR_WARS).one(luke); - verify(template, never()).determineCollectionName(any(Class.class)); + verify(template, never()).getCollectionName(any(Class.class)); verify(template).insert(eq(luke), eq(STAR_WARS)); } @Test // DATAMONGO-1563 - public void insertCollectionShouldDelegateCorrectly() { + void insertCollectionShouldDelegateCorrectly() { + + when(template.getCollectionName(any(Class.class))).thenReturn(STAR_WARS); ops.insert(Person.class).all(Arrays.asList(luke, han)); - verify(template).determineCollectionName(any(Class.class)); + verify(template).getCollectionName(any(Class.class)); verify(template).insert(anyList(), eq(STAR_WARS)); } @Test // DATAMONGO-1563 - public void bulkInsertCollectionShouldDelegateCorrectly() { + void bulkInsertCollectionShouldDelegateCorrectly() { + + when(template.getCollectionName(any(Class.class))).thenReturn(STAR_WARS); + when(template.bulkOps(any(), any(), any())).thenReturn(bulkOperations); + when(bulkOperations.insert(anyList())).thenReturn(bulkOperations); ops.insert(Person.class).bulk(Arrays.asList(luke, han)); ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); - verify(template).determineCollectionName(any(Class.class)); + verify(template).getCollectionName(any(Class.class)); verify(template).bulkOps(eq(BulkMode.ORDERED), captor.capture(), eq(STAR_WARS)); verify(bulkOperations).insert(anyList()); verify(bulkOperations).execute(); } @Test // DATAMONGO-1563 - public void bulkInsertWithBulkModeShouldDelegateCorrectly() { + void bulkInsertWithBulkModeShouldDelegateCorrectly() { 
+ + when(template.getCollectionName(any(Class.class))).thenReturn(STAR_WARS); + when(template.bulkOps(any(), any(), any())).thenReturn(bulkOperations); + when(bulkOperations.insert(anyList())).thenReturn(bulkOperations); ops.insert(Person.class).withBulkMode(BulkMode.UNORDERED).bulk(Arrays.asList(luke, han)); ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); - verify(template).determineCollectionName(any(Class.class)); + verify(template).getCollectionName(any(Class.class)); verify(template).bulkOps(eq(BulkMode.UNORDERED), captor.capture(), eq(STAR_WARS)); verify(bulkOperations).insert(anyList()); verify(bulkOperations).execute(); } - @Data @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) static class Person { + @Id String id; String firstname; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public String toString() { + return "ExecutableInsertOperationSupportUnitTests.Person(id=" + this.getId() + ", firstname=" + + this.getFirstname() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupportUnitTests.java new file mode 100644 index 0000000000..167852f723 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupportUnitTests.java @@ -0,0 +1,212 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Unit tests for {@link ExecutableMapReduceOperationSupport}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @currentRead Beyond the Shadows - Brent Weeks + */ +@ExtendWith(MockitoExtension.class) +class ExecutableMapReduceOperationSupportUnitTests { + + private static final String STAR_WARS = "star-wars"; + private static final String MAP_FUNCTION = "function() { emit(this.id, this.firstname) }"; + private static final String REDUCE_FUNCTION = "function(id, name) { return sum(id, name); }"; + + @Mock MongoTemplate template; + + private ExecutableMapReduceOperationSupport mapReduceOpsSupport; + + @BeforeEach + void setUp() { + mapReduceOpsSupport = new ExecutableMapReduceOperationSupport(template); + } + + @Test // DATAMONGO-1929 + void throwsExceptionOnNullTemplate() { + assertThatIllegalArgumentException().isThrownBy(() -> new ExecutableMapReduceOperationSupport(null)); + } + + @Test // DATAMONGO-1929 + void throwsExceptionOnNullDomainType() { + assertThatIllegalArgumentException().isThrownBy(() -> mapReduceOpsSupport.mapReduce(null)); + } + + @Test // DATAMONGO-1929 + void usesExtractedCollectionName() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq(STAR_WARS), eq(MAP_FUNCTION), eq(REDUCE_FUNCTION), + isNull(), eq(Person.class)); + } + + @Test // DATAMONGO-1929 + void usesExplicitCollectionName() { + + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION) + .inCollection("the-night-angel").all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq("the-night-angel"), eq(MAP_FUNCTION), + eq(REDUCE_FUNCTION), isNull(), eq(Person.class)); + } + + @Test // DATAMONGO-1929 + void usesMapReduceOptionsWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + MapReduceOptions options = MapReduceOptions.options(); + 
mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).with(options).all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq(STAR_WARS), eq(MAP_FUNCTION), eq(REDUCE_FUNCTION), + eq(options), eq(Person.class)); + } + + @Test // DATAMONGO-1929 + void usesQueryWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + Query query = new BasicQuery("{ 'lastname' : 'skywalker' }"); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).matching(query).all(); + + verify(template).mapReduce(eq(query), eq(Person.class), eq(STAR_WARS), eq(MAP_FUNCTION), eq(REDUCE_FUNCTION), + isNull(), eq(Person.class)); + } + + @Test // DATAMONGO-2416 + void usesCriteriaWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + Query query = Query.query(where("lastname").is("skywalker")); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION) + .matching(where("lastname").is("skywalker")).all(); + + verify(template).mapReduce(eq(query), eq(Person.class), eq(STAR_WARS), eq(MAP_FUNCTION), eq(REDUCE_FUNCTION), + isNull(), eq(Person.class)); + } + + @Test // DATAMONGO-1929 + void usesProjectionWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).as(Jedi.class).all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq(STAR_WARS), eq(MAP_FUNCTION), eq(REDUCE_FUNCTION), + isNull(), eq(Jedi.class)); + } + + interface Contact {} + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) + static class Person implements Contact { + + @Id String id; + String firstname; + String lastname; + Object ability; + Person father; + + public Person() {} + + public String getId() { + return this.id; + } + + public String getFirstname() { + return 
this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public Object getAbility() { + return this.ability; + } + + public Person getFather() { + return this.father; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAbility(Object ability) { + this.ability = ability; + } + + public void setFather(Person father) { + this.father = father; + } + + public String toString() { + return "ExecutableMapReduceOperationSupportUnitTests.Person(id=" + this.getId() + ", firstname=" + + this.getFirstname() + ", lastname=" + this.getLastname() + ", ability=" + this.getAbility() + ", father=" + + this.getFather() + ")"; + } + } + + static class Jedi { + + @Field("firstname") // + String name; + + public Jedi(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public String toString() { + return "ExecutableMapReduceOperationSupportUnitTests.Jedi(name=" + this.getName() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupportTests.java index 26f42b32ea..621e2a0764 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupportTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupportTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,16 +19,18 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import lombok.Data; - import java.util.List; +import java.util.Objects; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; -import com.mongodb.MongoClient; import com.mongodb.client.result.DeleteResult; /** @@ -37,19 +39,21 @@ * @author Christoph Strobl * @author Mark Paluch */ -public class ExecutableRemoveOperationSupportTests { +@ExtendWith(MongoTemplateExtension.class) +class ExecutableRemoveOperationSupportTests { private static final String STAR_WARS = "star-wars"; - MongoTemplate template; - Person han; - Person luke; + @Template(initialEntitySet = Person.class) // + private static MongoTestTemplate template; + + private Person han; + private Person luke; - @Before - public void setUp() { + @BeforeEach + void setUp() { - template = new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "ExecutableRemoveOperationSupportTests")); - template.dropCollection(STAR_WARS); + template.flush(); han = new Person(); han.firstname = "han"; @@ -64,7 +68,7 @@ public void 
setUp() { } @Test // DATAMONGO-1563 - public void removeAll() { + void removeAll() { DeleteResult result = template.remove(Person.class).all(); @@ -72,15 +76,23 @@ public void removeAll() { } @Test // DATAMONGO-1563 - public void removeAllMatching() { + void removeAllMatching() { DeleteResult result = template.remove(Person.class).matching(query(where("firstname").is("han"))).all(); assertThat(result.getDeletedCount()).isEqualTo(1L); } + @Test // DATAMONGO-2416 + void removeAllMatchingCriteria() { + + DeleteResult result = template.remove(Person.class).matching(where("firstname").is("han")).all(); + + assertThat(result.getDeletedCount()).isEqualTo(1L); + } + @Test // DATAMONGO-1563 - public void removeAllMatchingWithAlternateDomainTypeAndCollection() { + void removeAllMatchingWithAlternateDomainTypeAndCollection() { DeleteResult result = template.remove(Jedi.class).inCollection(STAR_WARS).matching(query(where("name").is("luke"))) .all(); @@ -89,23 +101,74 @@ public void removeAllMatchingWithAlternateDomainTypeAndCollection() { } @Test // DATAMONGO-1563 - public void removeAndReturnAllMatching() { + void removeAndReturnAllMatching() { List result = template.remove(Person.class).matching(query(where("firstname").is("han"))).findAndRemove(); assertThat(result).containsExactly(han); } - @Data @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) static class Person { + @Id String id; String firstname; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + 
public int hashCode() { + return Objects.hash(id, firstname); + } + + public String toString() { + return "ExecutableRemoveOperationSupportTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ")"; + } } - @Data static class Jedi { - @Field("firstname") String name; + @Field("firstname") // + String name; + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public String toString() { + return "ExecutableRemoveOperationSupportTests.Jedi(name=" + this.getName() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupportTests.java index b6abc1816f..e7f50dab53 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupportTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupportTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,19 +19,21 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import lombok.Data; - +import java.util.Objects; import java.util.Optional; import org.bson.BsonString; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; -import com.mongodb.MongoClient; import com.mongodb.client.result.UpdateResult; /** @@ -40,19 +42,21 @@ * @author Christoph Strobl * @author Mark Paluch */ -public class ExecutableUpdateOperationSupportTests { +@ExtendWith(MongoTemplateExtension.class) +class ExecutableUpdateOperationSupportTests { private static final String STAR_WARS = "star-wars"; - MongoTemplate template; - Person han; - Person luke; + @Template(initialEntitySet = { Human.class, Jedi.class, Person.class }) // + private static MongoTestTemplate template; + + private Person han; + private Person luke; - @Before - public void setUp() { + @BeforeEach + void setUp() { - template = new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "ExecutableUpdateOperationSupportTests")); - template.dropCollection(STAR_WARS); + template.remove(Person.class).all(); han = new 
Person(); han.firstname = "han"; @@ -66,28 +70,29 @@ public void setUp() { template.save(luke); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1563 - public void domainTypeIsRequired() { - template.update(null); + @Test // DATAMONGO-1563 + void domainTypeIsRequired() { + assertThatIllegalArgumentException().isThrownBy(() -> template.update(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1563 - public void updateIsRequired() { - template.update(Person.class).apply(null); + @Test // DATAMONGO-1563 + void updateIsRequired() { + assertThatIllegalArgumentException().isThrownBy(() -> template.update(Person.class).apply(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1563 - public void collectionIsRequiredOnSet() { - template.update(Person.class).inCollection(null); + @Test // DATAMONGO-1563 + void collectionIsRequiredOnSet() { + assertThatIllegalArgumentException().isThrownBy(() -> template.update(Person.class).inCollection(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1563 - public void findAndModifyOptionsAreRequiredOnSet() { - template.update(Person.class).apply(new Update()).withOptions(null); + @Test // DATAMONGO-1563 + void findAndModifyOptionsAreRequiredOnSet() { + assertThatIllegalArgumentException() + .isThrownBy(() -> template.update(Person.class).apply(new Update()).withOptions(null)); } @Test // DATAMONGO-1563 - public void updateFirst() { + void updateFirst() { UpdateResult result = template.update(Person.class).apply(new Update().set("firstname", "Han")).first(); @@ -96,7 +101,7 @@ public void updateFirst() { } @Test // DATAMONGO-1563 - public void updateAll() { + void updateAll() { UpdateResult result = template.update(Person.class).apply(new Update().set("firstname", "Han")).all(); @@ -105,7 +110,7 @@ public void updateAll() { } @Test // DATAMONGO-1563 - public void updateAllMatching() { + void updateAllMatching() { UpdateResult result = 
template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han")) .all(); @@ -114,8 +119,19 @@ public void updateAllMatching() { assertThat(result.getUpsertedId()).isNull(); } + @Test // DATAMONGO-2416 + void updateAllMatchingCriteria() { + + UpdateResult result = template.update(Person.class).matching(where("id").is(han.getId())) + .apply(new Update().set("firstname", "Han")) + .all(); + + assertThat(result.getModifiedCount()).isEqualTo(1L); + assertThat(result.getUpsertedId()).isNull(); + } + @Test // DATAMONGO-1563 - public void updateWithDifferentDomainClassAndCollection() { + void updateWithDifferentDomainClassAndCollection() { UpdateResult result = template.update(Jedi.class).inCollection(STAR_WARS) .matching(query(where("_id").is(han.getId()))).apply(new Update().set("name", "Han")).all(); @@ -127,7 +143,7 @@ public void updateWithDifferentDomainClassAndCollection() { } @Test // DATAMONGO-1719 - public void findAndModifyValue() { + void findAndModifyValue() { Person result = template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han")) .findAndModifyValue(); @@ -138,7 +154,7 @@ public void findAndModifyValue() { } @Test // DATAMONGO-1563 - public void findAndModify() { + void findAndModify() { Optional result = template.update(Person.class).matching(queryHan()) .apply(new Update().set("firstname", "Han")).findAndModify(); @@ -149,7 +165,7 @@ public void findAndModify() { } @Test // DATAMONGO-1563 - public void findAndModifyWithDifferentDomainTypeAndCollection() { + void findAndModifyWithDifferentDomainTypeAndCollection() { Optional result = template.update(Jedi.class).inCollection(STAR_WARS) .matching(query(where("_id").is(han.getId()))).apply(new Update().set("name", "Han")).findAndModify(); @@ -160,7 +176,7 @@ public void findAndModifyWithDifferentDomainTypeAndCollection() { } @Test // DATAMONGO-1563 - public void findAndModifyWithOptions() { + void findAndModifyWithOptions() { Optional result = 
template.update(Person.class).matching(queryHan()) .apply(new Update().set("firstname", "Han")).withOptions(FindAndModifyOptions.options().returnNew(true)) @@ -170,7 +186,7 @@ public void findAndModifyWithOptions() { } @Test // DATAMONGO-1563 - public void upsert() { + void upsert() { UpdateResult result = template.update(Person.class).matching(query(where("id").is("id-3"))) .apply(new Update().set("firstname", "Chewbacca")).upsert(); @@ -179,25 +195,174 @@ public void upsert() { assertThat(result.getUpsertedId()).isEqualTo(new BsonString("id-3")); } + @Test // DATAMONGO-1827 + void findAndReplaceValue() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + Person result = template.update(Person.class).matching(queryHan()).replaceWith(luke).findAndReplaceValue(); + + assertThat(result).isEqualTo(han); + assertThat(template.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", + "Luke"); + } + + @Test // DATAMONGO-1827 + void findAndReplace() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + Optional result = template.update(Person.class).matching(queryHan()).replaceWith(luke).findAndReplace(); + + assertThat(result).contains(han); + assertThat(template.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", + "Luke"); + } + + @Test // DATAMONGO-1827 + void findAndReplaceWithCollection() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + Optional result = template.update(Person.class).inCollection(STAR_WARS).matching(queryHan()) + .replaceWith(luke).findAndReplace(); + + assertThat(result).contains(han); + assertThat(template.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", + "Luke"); + } + + @Test // DATAMONGO-1827 + void findAndReplaceWithOptions() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + Person result = template.update(Person.class).matching(queryHan()).replaceWith(luke) + 
.withOptions(FindAndReplaceOptions.options().returnNew()).findAndReplaceValue(); + + assertThat(result).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", "Luke"); + } + + @Test // GH-4463 + void replace() { + + Person luke = new Person(); + luke.id = han.id; + luke.firstname = "Luke"; + + UpdateResult result = template.update(Person.class).matching(queryHan()).replaceWith(luke).replaceFirst(); + assertThat(result.getModifiedCount()).isEqualTo(1L); + } + + @Test // GH-4463 + void replaceWithOptions() { + + Person luke = new Person(); + luke.id = "upserted-luke"; + luke.firstname = "Luke"; + + UpdateResult result = template.update(Person.class).matching(query(where("firstname") + .is("c3p0"))).replaceWith(luke).withOptions(ReplaceOptions.replaceOptions().upsert()).replaceFirst(); + assertThat(result.getUpsertedId()).isEqualTo(new BsonString("upserted-luke")); + } + + @Test // DATAMONGO-1827 + void findAndReplaceWithProjection() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + Jedi result = template.update(Person.class).matching(queryHan()).replaceWith(luke).as(Jedi.class) + .findAndReplaceValue(); + + assertThat(result.getName()).isEqualTo(han.firstname); + } + private Query queryHan() { return query(where("id").is(han.getId())); } - @Data @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) static class Person { + @Id String id; String firstname; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + public int hashCode() { + return 
Objects.hash(id, firstname); + } + + public String toString() { + return "ExecutableUpdateOperationSupportTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ")"; + } } - @Data static class Human { + @Id String id; + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public String toString() { + return "ExecutableUpdateOperationSupportTests.Human(id=" + this.getId() + ")"; + } } - @Data static class Jedi { - @Field("firstname") String name; + @Field("firstname") // + String name; + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public String toString() { + return "ExecutableUpdateOperationSupportTests.Jedi(name=" + this.getName() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Friend.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Friend.java index 6f02afd81a..59938113fd 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Friend.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Friend.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/GeoCommandStatisticsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/GeoCommandStatisticsUnitTests.java index 48d1adfe1b..c9f979d2d0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/GeoCommandStatisticsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/GeoCommandStatisticsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,33 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link GeoCommandStatistics}. 
* * @author Oliver Gierke + * @author Mark Paluch * @soundtrack Fruitcake - Jeff Coffin (The Inside of the Outside) */ public class GeoCommandStatisticsUnitTests { - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1361 + @Test // DATAMONGO-1361 public void rejectsNullCommandResult() { - GeoCommandStatistics.from(null); + assertThatIllegalArgumentException().isThrownBy(() -> GeoCommandStatistics.from(null)); } @Test // DATAMONGO-1361 public void fallsBackToNanIfNoAverageDistanceIsAvailable() { GeoCommandStatistics statistics = GeoCommandStatistics.from(new Document("stats", null)); - assertThat(statistics.getAverageDistance(), is(Double.NaN)); + assertThat(statistics.getAverageDistance()).isNaN(); statistics = GeoCommandStatistics.from(new Document("stats", new Document())); - assertThat(statistics.getAverageDistance(), is(Double.NaN)); + assertThat(statistics.getAverageDistance()).isNaN(); } @Test // DATAMONGO-1361 @@ -50,6 +50,6 @@ public void returnsAverageDistanceIfPresent() { GeoCommandStatistics statistics = GeoCommandStatistics .from(new Document("stats", new Document("avgDistance", 1.5))); - assertThat(statistics.getAverageDistance(), is(1.5)); + assertThat(statistics.getAverageDistance()).isEqualTo(1.5); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JmxServer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JmxServer.java index 8e4a17fe5a..004bda1544 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JmxServer.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JmxServer.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,7 +22,9 @@ * * @author Mark Pollack * @author Oliver Gierke + * @deprecated since 4.5. */ +@Deprecated(since = "4.5", forRemoval = true) public class JmxServer { public static void main(String[] args) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JsonSchemaQueryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JsonSchemaQueryTests.java index b7eef7e29f..3afcef93d0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JsonSchemaQueryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JsonSchemaQueryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,37 +20,44 @@ import static org.springframework.data.mongodb.core.query.Query.*; import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.*; -import lombok.Data; import reactor.test.StepVerifier; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; +import java.util.Objects; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.Version; - -import com.mongodb.MongoClient; -import com.mongodb.reactivestreams.client.MongoClients; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; /** * @author Christoph Strobl + * @author Mark Paluch */ +@ExtendWith(MongoTemplateExtension.class) public class JsonSchemaQueryTests { public static final String DATABASE_NAME = "json-schema-query-tests"; - public static @ClassRule MongoVersionRule REQUIRES_AT_LEAST_3_6_0 = MongoVersionRule.atLeast(Version.parse("3.6.0")); + static @Client com.mongodb.reactivestreams.client.MongoClient reactiveClient; + + @Template(database = DATABASE_NAME, 
initialEntitySet = Person.class) // + static MongoTestTemplate template; - MongoTemplate template; Person jellyBelly, roseSpringHeart, kazmardBoombub; - @Before + @BeforeEach public void setUp() { - template = new MongoTemplate(new MongoClient(), DATABASE_NAME); + template.flush(); jellyBelly = new Person(); jellyBelly.id = "1"; @@ -79,6 +86,28 @@ public void setUp() { template.save(jellyBelly); template.save(roseSpringHeart); template.save(kazmardBoombub); + + } + + @Test // DATAMONGO-1835 + public void createsWorkingSchema() { + + try { + template.dropCollection("person_schema"); + } catch (Exception e) {} + + MongoJsonSchema schema = MongoJsonSchemaCreator.create(template.getConverter()).createSchemaFor(Person.class); + + template.createCollection("person_schema", CollectionOptions.empty().schema(schema)); + } + + @Test // DATAMONGO-1835 + public void queriesBooleanType() { + + MongoJsonSchema schema = MongoJsonSchema.builder().properties(JsonSchemaProperty.bool("alive")).build(); + + assertThat(template.find(query(matchingDocumentStructure(schema)), Person.class)).hasSize(3); + assertThat(template.find(query(Criteria.where("alive").type(Type.BOOLEAN)), Person.class)).hasSize(3); } @Test // DATAMONGO-1835 @@ -95,8 +124,9 @@ public void findsDocumentsWithRequiredFieldsReactively() { MongoJsonSchema schema = MongoJsonSchema.builder().required("address").build(); - StepVerifier.create(new ReactiveMongoTemplate(MongoClients.create(), DATABASE_NAME) - .find(query(matchingDocumentStructure(schema)), Person.class)).expectNextCount(2).verifyComplete(); + new ReactiveMongoTemplate(reactiveClient, DATABASE_NAME) + .find(query(matchingDocumentStructure(schema)), Person.class).as(StepVerifier::create).expectNextCount(2) + .verifyComplete(); } @Test // DATAMONGO-1835 @@ -178,23 +208,145 @@ public void useTypeOperatorWithMultipleTypesOnFieldLevel() { .containsExactlyInAnyOrder(jellyBelly, kazmardBoombub); } - @Data + @Test // DATAMONGO-1835 + public void 
findsWithSchemaReturningRawDocument() { + + MongoJsonSchema schema = MongoJsonSchema.builder().required("address").build(); + + assertThat(template.find(query(matchingDocumentStructure(schema)), Document.class, + template.getCollectionName(Person.class))).hasSize(2); + } + static class Person { @Id String id; - @Field("full_name") String name; + @Field("full_name") // + String name; Gender gender; Address address; Object value; + + boolean alive; + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public Gender getGender() { + return this.gender; + } + + public Address getAddress() { + return this.address; + } + + public Object getValue() { + return this.value; + } + + public boolean isAlive() { + return this.alive; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setGender(Gender gender) { + this.gender = gender; + } + + public void setAddress(Address address) { + this.address = address; + } + + public void setValue(Object value) { + this.value = value; + } + + public void setAlive(boolean alive) { + this.alive = alive; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return alive == person.alive && Objects.equals(id, person.id) && Objects.equals(name, person.name) + && gender == person.gender && Objects.equals(address, person.address) && Objects.equals(value, person.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, gender, address, value, alive); + } + + public String toString() { + return "JsonSchemaQueryTests.Person(id=" + this.getId() + ", name=" + this.getName() + ", gender=" + + this.getGender() + ", address=" + this.getAddress() + ", value=" + this.getValue() + ", alive=" + + this.isAlive() + ")"; + } } - @Data static class Address { 
String city; - @Field("str") String street; + @Field("str") // + String street; + + public String getCity() { + return this.city; + } + + public String getStreet() { + return this.street; + } + + public void setCity(String city) { + this.city = city; + } + + public void setStreet(String street) { + this.street = street; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Address address = (Address) o; + return Objects.equals(city, address.city) && Objects.equals(street, address.street); + } + + @Override + public int hashCode() { + return Objects.hash(city, street); + } + + public String toString() { + return "JsonSchemaQueryTests.Address(city=" + this.getCity() + ", street=" + this.getStreet() + ")"; + } } static enum Gender { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java new file mode 100644 index 0000000000..adaecad5da --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java @@ -0,0 +1,765 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; +import static org.springframework.data.mongodb.test.util.Assertions.assertThatExceptionOfType; + +import java.util.Collections; +import java.util.Date; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.bson.BsonDocument; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.context.support.GenericApplicationContext; +import org.springframework.data.annotation.Transient; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.FieldType; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Queryable; +import org.springframework.data.mongodb.core.mapping.RangeEncrypted; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction.Resolution; +import org.springframework.data.spel.spi.EvaluationContextExtension; +import org.springframework.data.spel.spi.Function; + +/** + * Unit tests for {@link MappingMongoJsonSchemaCreator}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +class MappingMongoJsonSchemaCreatorUnitTests { + + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; + private MappingMongoJsonSchemaCreator schemaCreator; + + @BeforeEach + void setUp() { + + mappingContext = new MongoMappingContext(); + converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + schemaCreator = new MappingMongoJsonSchemaCreator(converter); + } + + @Test // DATAMONGO-1849 + void simpleTypes() { + + MongoJsonSchema schema = schemaCreator.createSchemaFor(VariousFieldTypes.class); + + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo(Document.parse(VARIOUS_FIELD_TYPES)); + } + + @Test // DATAMONGO-1849 + void withRemappedIdType() { + + MongoJsonSchema schema = schemaCreator.createSchemaFor(WithExplicitMongoIdTypeMapping.class); + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo(WITH_EXPLICIT_MONGO_ID_TYPE_MAPPING); + } + + @Test // DATAMONGO-1849 + void cyclic() { + + MongoJsonSchema schema = schemaCreator.createSchemaFor(Cyclic.class); + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo(CYCLIC); + } + + @Test // DATAMONGO-1849 + void converterRegistered() { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + MongoCustomConversions mcc = new MongoCustomConversions( + Collections.singletonList(SimpleToDocumentConverter.INSTANCE)); + converter.setCustomConversions(mcc); + converter.afterPropertiesSet(); + + schemaCreator = new MappingMongoJsonSchemaCreator(converter); + + MongoJsonSchema schema = schemaCreator.createSchemaFor(WithNestedDomainType.class); + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo( + "{ 'type' : 'object', 'properties' : { '_id' : { 'type' : 'object' }, 'nested' : { 'type' : 'object' } } }"); + } + + @Test // GH-3800 + void 
csfle/*encryptedFieldsOnly*/() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields + .createSchemaFor(Patient.class); + + Document targetSchema = schema.schemaDocument(); + assertThat(targetSchema.toBsonDocument()).isEqualTo(BsonDocument.parse(PATIENT)); + } + + @Test // GH-3800 + void csfleCyclic/*encryptedFieldsOnly*/() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields + .createSchemaFor(Cyclic.class); + + Document targetSchema = schema.schemaDocument(); + assertThat(targetSchema).isNotNull(); + } + + @Test // GH-3800 + void csfleWithKeyFromProperties() { + + GenericApplicationContext applicationContext = new GenericApplicationContext(); + applicationContext.registerBean("encryptionExtension", EncryptionExtension.class, () -> new EncryptionExtension()); + applicationContext.refresh(); + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setApplicationContext(applicationContext); + mappingContext.afterPropertiesSet(); + + MongoJsonSchema schema = MongoJsonSchemaCreator.create(mappingContext) // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // + .createSchemaFor(EncryptionMetadataFromProperty.class); + + assertThat(schema.schemaDocument().toBsonDocument()).isEqualTo(BsonDocument.parse(ENC_FROM_PROPERTY_SCHEMA)); + } + + @Test // GH-3800 + void csfleWithKeyFromMethod() { + + GenericApplicationContext applicationContext = new GenericApplicationContext(); + applicationContext.registerBean("encryptionExtension", EncryptionExtension.class, () -> new EncryptionExtension()); + applicationContext.refresh(); + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setApplicationContext(applicationContext); + mappingContext.afterPropertiesSet(); + + MongoJsonSchema schema = MongoJsonSchemaCreator.create(mappingContext) // + 
.filter(MongoJsonSchemaCreator.encryptedOnly()) // + .createSchemaFor(EncryptionMetadataFromMethod.class); + + assertThat(schema.schemaDocument().toBsonDocument()).isEqualTo(BsonDocument.parse(ENC_FROM_METHOD_SCHEMA)); + } + + // --> Combining Schemas and Properties + + @Test // GH-3870 + void shouldAllowToSpecifyPolymorphicTypesForProperty() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .property("objectValue").withTypes(A.class, B.class).createSchemaFor(SomeTestObject.class); + + Document targetSchema = schema.schemaDocument(); + assertThat(targetSchema) // + .containsEntry("properties.objectValue.properties.aNonEncrypted", new Document("type", "string")) // + .containsEntry("properties.objectValue.properties.aEncrypted", ENCRYPTED_BSON_STRING) // + .containsEntry("properties.objectValue.properties.bEncrypted", ENCRYPTED_BSON_STRING); + } + + @Test // GH-3870 + void shouldAllowToSpecifyNestedPolymorphicTypesForProperty() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .property("value.objectValue").withTypes(A.class, B.class) // + .createSchemaFor(WrapperAroundA.class); + + assertThat(schema.schemaDocument()) // + .containsEntry("properties.value.properties.objectValue.properties.aNonEncrypted", + new Document("type", "string")) // + .containsEntry("properties.value.properties.objectValue.properties.aEncrypted", ENCRYPTED_BSON_STRING) // + .containsEntry("properties.value.properties.objectValue.properties.bEncrypted", ENCRYPTED_BSON_STRING); + + } + + @Test // GH-3870 + void shouldAllowToSpecifyGenericTypesForProperty() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .property("genericValue").withTypes(A.class, B.class).createSchemaFor(SomeTestObject.class); + + assertThat(schema.schemaDocument()) // + .containsEntry("properties.genericValue.properties.aNonEncrypted", new Document("type", "string")) // + .containsEntry("properties.genericValue.properties.aEncrypted", ENCRYPTED_BSON_STRING) // + 
.containsEntry("properties.genericValue.properties.bEncrypted", ENCRYPTED_BSON_STRING); + } + + @Test // GH-3870 + void encryptionFilterShouldCaptureSpecifiedPolymorphicTypesForProperty() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .property("objectValue").withTypes(A.class, B.class) // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // + .createSchemaFor(SomeTestObject.class); + + assertThat(schema.schemaDocument()) // + .doesNotContainKey("properties.objectValue.properties.aNonEncrypted") // + .containsEntry("properties.objectValue.properties.aEncrypted", ENCRYPTED_BSON_STRING) // + .containsEntry("properties.objectValue.properties.bEncrypted", ENCRYPTED_BSON_STRING); + } + + @Test // GH-3870 + void allowsToCreateCombinedSchemaWhenPropertiesDoNotOverlap() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create().mergedSchemaFor(A.class, B.class, C.class); + + assertThat(schema.schemaDocument()) // + .containsEntry("properties.aNonEncrypted", new Document("type", "string")) // + .containsEntry("properties.aEncrypted", ENCRYPTED_BSON_STRING) // + .containsEntry("properties.bEncrypted", ENCRYPTED_BSON_STRING) // + .containsEntry("properties.cEncrypted", ENCRYPTED_BSON_STRING); + } + + @Test // GH-3870 + void combinedSchemaFailsOnPropertyClash() { + + MongoJsonSchema schemaA = MongoJsonSchemaCreator.create() // + .createSchemaFor(A.class); + MongoJsonSchema schemaAButDifferent = MongoJsonSchemaCreator.create() // + .createSchemaFor(PropertyClashWithA.class); + + MongoJsonSchema targetSchema = schemaA.mergeWith(schemaAButDifferent); + + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(targetSchema::schemaDocument); + } + + @Test // GH-3870 + void combinedSchemaAllowsToCompensateErrors() { + + MongoJsonSchema schemaA = MongoJsonSchemaCreator.create() // + .createSchemaFor(A.class); + MongoJsonSchema schemaAButDifferent = MongoJsonSchemaCreator.create() // + .createSchemaFor(PropertyClashWithA.class); + + MongoJsonSchema 
schema = schemaA.mergeWith(Collections.singleton(schemaAButDifferent), + (path, a, b) -> Resolution.ofValue(path, "object")); + + assertThat(schema.schemaDocument()) // + .containsEntry("properties.aNonEncrypted", new Document("type", "object")); + } + + @Test // GH-3870 + void bsonTypeVsJustTypeValueResolutionIsDoneByDefault() { + + MongoJsonSchema schemaUsingType = MongoJsonSchema.builder() + .property(JsonSchemaProperty.named("value").ofType(Type.jsonTypeOf("string"))).build(); + MongoJsonSchema schemaUsingBsonType = MongoJsonSchema.builder() + .property(JsonSchemaProperty.named("value").ofType(Type.bsonTypeOf("string"))).build(); + + MongoJsonSchema targetSchema = MongoJsonSchema.merge(schemaUsingType, schemaUsingBsonType); + + assertThat(targetSchema.schemaDocument()) // + .containsEntry("properties.value", new Document("type", "string")); + } + + @Test // GH-4454 + void wrapEncryptedEntityTypeLikeProperty() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields + .createSchemaFor(WithEncryptedEntityLikeProperty.class); + + assertThat(schema.schemaDocument()) // + .containsEntry("properties.domainTypeValue", Document.parse("{'encrypt': {'bsonType': 'object' } }")); + } + + @Test // GH-4185 + void qeRangeEncryptedProperties() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields + .createSchemaFor(QueryableEncryptedRoot.class); + + String expectedForInt = """ + { 'encrypt' : { + 'algorithm' : 'Range', + 'bsonType' : 'int', + 'queries' : [ + { 'queryType' : 'range', 'contention' : { '$numberLong' : '0' }, 'max' : 200, 'min' : 0, 'sparsity' : 1, 'trimFactor' : 1 } + ] + }}"""; + + String expectedForRootLong = """ + { 'encrypt' : { + 'algorithm' : 'Range', + 'bsonType' : 'long', + 'queries' : [ + { 'queryType' : 'range', contention : { '$numberLong' : '0' }, 'sparsity' : 0 } + ] + 
}}"""; + + String expectedForNestedLong = """ + { 'encrypt' : { + 'algorithm' : 'Range', + 'bsonType' : 'long', + 'queries' : [ + { 'queryType' : 'range', contention : { '$numberLong' : '1' }, 'max' : { '$numberLong' : '1' }, 'min' : { '$numberLong' : '-1' }, 'sparsity' : 1, 'trimFactor' : 1 } + ] + }}"""; + + assertThat(schema.schemaDocument()) // + .doesNotContainKey("properties.unencrypted") // + .containsEntry("properties.encryptedInt", Document.parse(expectedForInt)) + .containsEntry("properties.encryptedLong", Document.parse(expectedForRootLong)) + .containsEntry("properties.nested.properties.encrypted_long", Document.parse(expectedForNestedLong)); + + } + + // --> TYPES AND JSON + + // --> ENUM + + private static final String JUST_SOME_ENUM = "{ 'type' : 'string', 'enum' : ['ONE', 'TWO'] }"; + + enum JustSomeEnum { + ONE, TWO + } + + // --> VARIOUS FIELD TYPES + + static final String VARIOUS_FIELD_TYPES = "" + // + "{" + // + " 'type' : 'object'," + // + " 'required' : ['primitiveInt']," + // + " 'properties' : {" + // + " 'id' : { 'type' : 'string' }," + // + " 're-named-property' : { 'type' : 'string' }," + // + " 'retypedProperty' : { 'bsonType' : 'javascript' }," + // + " 'primitiveInt' : { 'bsonType' : 'int' }," + // + " 'booleanProperty' : { 'type' : 'boolean' }," + // + " 'longProperty' : { 'bsonType' : 'long' }," + // + " 'intProperty' : { 'bsonType' : 'int' }," + // + " 'dateProperty' : { 'bsonType' : 'date' }," + // + " 'arrayProperty' : { 'type' : 'array' }," + // + " 'binaryDataProperty' : { 'bsonType' : 'binData' }," + // + " 'collectionProperty' : { 'type' : 'array' }," + // + " 'simpleTypeCollectionProperty' : { 'type' : 'array', 'items' : { 'type' : 'string' } }," + // + " 'complexTypeCollectionProperty' : { 'type' : 'array', 'items' : { 'type' : 'object', 'properties' : { 'field' : { 'type' : 'string'} } } }" + + // + " 'enumTypeCollectionProperty' : { 'type' : 'array', 'items' : " + JUST_SOME_ENUM + " }" + // + " 'mapProperty' : { 'type' : 
'object' }," + // + " 'objectProperty' : { 'type' : 'object' }," + // + " 'enumProperty' : " + JUST_SOME_ENUM + " }" + // + "}"; + + static class VariousFieldTypes { + + @Field("id") String id; + @Field("re-named-property") String renamedProperty; + @Field(targetType = FieldType.SCRIPT) String retypedProperty; + @Transient String transientProperty; + int primitiveInt; + Boolean booleanProperty; + Long longProperty; + Integer intProperty; + Date dateProperty; + Object[] arrayProperty; + byte[] binaryDataProperty; + List collectionProperty; + List simpleTypeCollectionProperty; + List complexTypeCollectionProperty; + List enumTypeCollectionProperty; + Map mapProperty; + Object objectProperty; + JustSomeEnum enumProperty; + } + + static class SomeDomainType { + String field; + } + + // --> NESTED DOMAIN TYPE + + static final String WITH_NESTED_DOMAIN_TYPE = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " '_id' : { 'type' : 'object' }," + // + " 'nested' : " + VARIOUS_FIELD_TYPES + // + " }" + // + "}"; + + static class WithNestedDomainType { + + String id; + VariousFieldTypes nested; + } + + // --> EXPLICIT MONGO_ID MAPPING + + final String WITH_EXPLICIT_MONGO_ID_TYPE_MAPPING = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " '_id' : { 'bsonType' : 'objectId' }," + // + " 'nested' : " + VARIOUS_FIELD_TYPES + // + " }" + // + "}"; + + static class WithExplicitMongoIdTypeMapping { + + @MongoId(targetType = FieldType.OBJECT_ID) String id; + VariousFieldTypes nested; + } + + // --> OH NO - A CYCLIC PROPERTY RELATIONSHIP 😱 + + static final String CYCLIC_FIN = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " 'root' : { 'type' : 'string' }" + // + " 'cyclic' : { 'type' : 'object' }" + // + " }" + // + "}"; + + static final String CYCLIC_2 = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " 'nested2' : { 'type' : 'string' }," + // + " 'cyclic' : " + 
CYCLIC_FIN + // + " }" + // + "}"; + + class Cyclic2 { + + String nested2; + Cyclic cyclic; + } + + static final String CYCLIC_1 = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " 'nested1' : { 'type' : 'string' }," + // + " 'cyclic2' : " + CYCLIC_2 + // + " }" + // + "}"; + + class Cyclic1 { + + String nested1; + Cyclic2 cyclic2; + } + + static final String CYCLIC = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " 'root' : { 'type' : 'string' }," + // + " 'cyclic1' : " + CYCLIC_1 + // + " }" + // + "}"; + + class Cyclic { + + String root; + Cyclic1 cyclic1; + } + + @WritingConverter + enum SimpleToDocumentConverter + implements org.springframework.core.convert.converter.Converter { + INSTANCE; + + @Override + public org.bson.Document convert(VariousFieldTypes source) { + return null; + } + } + + static final String PATIENT = "{" + // + " 'type': 'object'," + // + " 'encryptMetadata': {" + // + " 'keyId': [" + // + " {" + // + " '$binary': {" + // + " 'base64': 'xKVup8B1Q+CkHaVRx+qa+g=='," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " }," + // + " 'properties': {" + // + " 'ssn': {" + // + " 'encrypt': {" + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }," + // + " 'bloodType': {" + // + " 'encrypt': {" + // + " 'bsonType': 'string'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Random'" + // + " }" + // + " }," + // + " 'medicalRecords': {" + // + " 'encrypt': {" + // + " 'bsonType': 'array'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Random'" + // + " }" + // + " }," + // + " 'insurance': {" + // + " 'type': 'object'," + // + " 'properties': {" + // + " 'policyNumber': {" + // + " 'encrypt': {" + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }" + // + " }" + // + " }" + // + " }" + // + "}"; + + @Encrypted(keyId = 
"xKVup8B1Q+CkHaVRx+qa+g==") + static class Patient { + String name; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer ssn; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") // + String bloodType; + + String keyAltNameField; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") // + List> medicalRecords; + + Insurance insurance; + } + + static class Insurance { + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer policyNumber; + + String provider; + } + + static final String ENC_FROM_PROPERTY_ENTITY_KEY = "C5a5aMB7Ttq4wSJTFeRn8g=="; + static final String ENC_FROM_PROPERTY_PROPOERTY_KEY = "Mw6mdTVPQfm4quqSCLVB3g=="; + static final String ENC_FROM_PROPERTY_SCHEMA = "{" + // + " 'encryptMetadata': {" + // + " 'keyId': [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_PROPERTY_ENTITY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " }," + // + " 'type': 'object'," + // + " 'properties': {" + // + " 'policyNumber': {" + // + " 'encrypt': {" + // + " 'keyId': [" + // + " [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_PROPERTY_PROPOERTY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " ]," + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }" + // + " }" + // + "}"; + + @Encrypted(keyId = "#{entityKey}") + static class EncryptionMetadataFromProperty { + + @Encrypted(keyId = "#{propertyKey}", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer policyNumber; + + String provider; + } + + static final String ENC_FROM_METHOD_ENTITY_KEY = "4fPYFM9qSgyRAjgQ2u+IMQ=="; + static final String ENC_FROM_METHOD_PROPOERTY_KEY = "+idiseKwTVCJfSKC3iUeYQ=="; + static final String ENC_FROM_METHOD_SCHEMA = "{" + // + " 'encryptMetadata': {" + // + " 'keyId': [" + // + " {" + // + " '$binary': {" + // 
+ " 'base64': '" + ENC_FROM_METHOD_ENTITY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " }," + // + " 'type': 'object'," + // + " 'properties': {" + // + " 'policyNumber': {" + // + " 'encrypt': {" + // + " 'keyId': [" + // + " [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_METHOD_PROPOERTY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " ]," + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }" + // + " }" + // + "}"; + + @Encrypted(keyId = "#{mongocrypt.keyId(#target)}") + static class EncryptionMetadataFromMethod { + + @Encrypted(keyId = "#{mongocrypt.keyId(#target)}", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer policyNumber; + + String provider; + } + + public static class EncryptionExtension implements EvaluationContextExtension { + + @Override + public String getExtensionId() { + return "mongocrypt"; + } + + @Override + public Map getProperties() { + + Map properties = new LinkedHashMap<>(); + properties.put("entityKey", ENC_FROM_PROPERTY_ENTITY_KEY); + properties.put("propertyKey", ENC_FROM_PROPERTY_PROPOERTY_KEY); + return properties; + } + + @Override + public Map getFunctions() { + try { + return Collections.singletonMap("keyId", + new Function(EncryptionExtension.class.getMethod("keyId", String.class), this)); + } catch (NoSuchMethodException e) { + e.printStackTrace(); + } + return Collections.emptyMap(); + } + + public String keyId(String target) { + + if (target.equals("EncryptionMetadataFromMethod")) { + return ENC_FROM_METHOD_ENTITY_KEY; + } + + if (target.equals("EncryptionMetadataFromMethod.policyNumber")) { + return ENC_FROM_METHOD_PROPOERTY_KEY; + } + + return "xKVup8B1Q+CkHaVRx+qa+g=="; + } + } + + private static final Document ENCRYPTED_BSON_STRING = Document + .parse("{'encrypt': { 'bsonType': 'string','algorithm': 
'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'} }"); + + static class SomeTestObject { + T genericValue; + Object objectValue; + } + + static class RootWithGenerics { + S sValue; + T tValue; + } + + static class SubWithFixedGeneric extends RootWithGenerics { + + } + + static class Concrete extends SubWithFixedGeneric { + + } + + static class WrapperAroundA { + + SomeTestObject value; + } + + static class A { + + String aNonEncrypted; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") String aEncrypted; + } + + static class B { + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") String bEncrypted; + } + + static class C extends A { + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") String cEncrypted; + } + + static class PropertyClashWithA { + Integer aNonEncrypted; + } + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") + static class WithEncryptedEntityLikeProperty { + @Encrypted SomeDomainType domainTypeValue; + } + + static class QueryableEncryptedRoot { + + String unencrypted; + + @RangeEncrypted(contentionFactor = 0L, rangeOptions = "{ 'min': 0, 'max': 200, 'trimFactor': 1, 'sparsity': 1}") // + Integer encryptedInt; + + @Encrypted(algorithm = "Range") + @Queryable(contentionFactor = 0L, queryType = "range", queryAttributes = "{ 'sparsity': 0 }") // + Long encryptedLong; + + NestedRangeEncrypted nested; + + } + + static class NestedRangeEncrypted { + + @Field("encrypted_long") + @RangeEncrypted(contentionFactor = 1L, + rangeOptions = "{ 'min': { '$numberLong' : '-1' }, 'max': { '$numberLong' : '1' }, 'trimFactor': 1, 'sparsity': 1}") // + Long encryptedLong; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoAdminIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoAdminIntegrationTests.java index c248535b78..f8a5c1128a 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoAdminIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoAdminIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,24 +22,21 @@ import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import com.mongodb.DB; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * This test class assumes that you are already running the MongoDB server. 
* * @author Mark Pollack */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("classpath:infrastructure.xml") public class MongoAdminIntegrationTests { private static final Log logger = LogFactory.getLog(MongoAdminIntegrationTests.class); - @SuppressWarnings("unused") private DB testAdminDb; - @Autowired MongoClient mongoClient; MongoAdmin mongoAdmin; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientFactoryBeanUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientFactoryBeanUnitTests.java new file mode 100644 index 0000000000..868190db5d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientFactoryBeanUnitTests.java @@ -0,0 +1,92 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.Test; + +import com.mongodb.ConnectionString; +import com.mongodb.MongoClientSettings; +import com.mongodb.ServerAddress; + +/** + * Unit tests for {@link MongoClientFactoryBean}. 
+ * + * @author Christoph Strobl + */ +class MongoClientFactoryBeanUnitTests { + + static final String CONNECTION_STRING_STRING = "mongodb://db1.example.net:27017,db2.example.net:2500/?replicaSet=test&connectTimeoutMS=300000"; + static final ConnectionString CONNECTION_STRING = new ConnectionString(CONNECTION_STRING_STRING); + + @Test // DATAMONGO-2427 + void connectionStringParametersNotOverriddenByDefaults() { + + MongoClientFactoryBean factoryBean = new MongoClientFactoryBean(); + factoryBean.setConnectionString(CONNECTION_STRING); + factoryBean.setMongoClientSettings(MongoClientSettings.builder().build()); + + MongoClientSettings settings = factoryBean.computeClientSetting(); + + assertThat(settings.getClusterSettings().getRequiredReplicaSetName()).isEqualTo("test"); + assertThat(settings.getSocketSettings().getConnectTimeout(TimeUnit.MILLISECONDS)).isEqualTo(300000); + assertThat(settings.getClusterSettings().getHosts()).hasSize(2); + } + + @Test // DATAMONGO-2427 + void hostPortParametersNotOverriddenByDefaults() { + + MongoClientFactoryBean factoryBean = new MongoClientFactoryBean(); + factoryBean.setPort(2500); + factoryBean.setHost("db2.example.net"); + factoryBean.setReplicaSet("rs0"); + factoryBean.setMongoClientSettings(MongoClientSettings.builder().build()); + + MongoClientSettings settings = factoryBean.computeClientSetting(); + + assertThat(settings.getClusterSettings().getRequiredReplicaSetName()).isEqualTo("rs0"); + assertThat(settings.getClusterSettings().getHosts()).containsExactly(new ServerAddress("db2.example.net", 2500)); + } + + @Test // DATAMONGO-2427 + void explicitSettingsOverrideConnectionStringOnes() { + + MongoClientFactoryBean factoryBean = new MongoClientFactoryBean(); + factoryBean.setConnectionString(CONNECTION_STRING); + factoryBean.setMongoClientSettings( + MongoClientSettings.builder().applyToClusterSettings(it -> it.requiredReplicaSetName("rs0")) + .applyToSocketSettings(it -> it.connectTimeout(100, 
TimeUnit.MILLISECONDS)).build()); + + MongoClientSettings settings = factoryBean.computeClientSetting(); + + assertThat(settings.getClusterSettings().getRequiredReplicaSetName()).isEqualTo("rs0"); + assertThat(settings.getSocketSettings().getConnectTimeout(TimeUnit.MILLISECONDS)).isEqualTo(100); + assertThat(settings.getClusterSettings().getHosts()).hasSize(2); + } + + @Test // DATAMONGO-2427 + void hostAndPortPlusConnectionStringError() { + + MongoClientFactoryBean factoryBean = new MongoClientFactoryBean(); + factoryBean.setConnectionString(CONNECTION_STRING); + factoryBean.setHost("localhost"); + factoryBean.setPort(27017); + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(factoryBean::createInstance); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBeanIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBeanIntegrationTests.java similarity index 73% rename from spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBeanIntegrationTests.java rename to spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBeanIntegrationTests.java index 39afb11b19..7c6e33ec5e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBeanIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBeanIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,10 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; -import org.junit.Test; import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.beans.factory.support.RootBeanDefinition; import org.springframework.data.mongodb.config.ReadPreferencePropertyEditor; @@ -27,16 +27,16 @@ import com.mongodb.ReadPreference; /** - * Integration tests for {@link MongoClientOptionsFactoryBean}. + * Integration tests for {@link MongoClientSettingsFactoryBean}. * * @author Christoph Strobl */ -public class MongoClientOptionsFactoryBeanIntegrationTests { +public class MongoClientSettingsFactoryBeanIntegrationTests { @Test // DATAMONGO-1158 public void convertsReadPreferenceConcernCorrectly() { - RootBeanDefinition definition = new RootBeanDefinition(MongoClientOptionsFactoryBean.class); + RootBeanDefinition definition = new RootBeanDefinition(MongoClientSettingsFactoryBean.class); definition.getPropertyValues().addPropertyValue("readPreference", "NEAREST"); DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); @@ -44,7 +44,7 @@ public void convertsReadPreferenceConcernCorrectly() { factory.registerBeanDefinition("factory", definition); - MongoClientOptionsFactoryBean bean = factory.getBean("&factory", MongoClientOptionsFactoryBean.class); - assertThat(ReflectionTestUtils.getField(bean, "readPreference"), is((Object) ReadPreference.nearest())); + MongoClientSettingsFactoryBean bean = factory.getBean("&factory", MongoClientSettingsFactoryBean.class); + 
assertThat(ReflectionTestUtils.getField(bean, "readPreference")).isEqualTo((Object) ReadPreference.nearest()); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBeanUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBeanUnitTests.java new file mode 100644 index 0000000000..2ddaca7f24 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBeanUnitTests.java @@ -0,0 +1,84 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.UuidRepresentation; +import org.junit.jupiter.api.Test; + +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.beans.factory.support.RootBeanDefinition; +import org.springframework.data.mongodb.config.ReadConcernPropertyEditor; +import org.springframework.data.mongodb.config.ReadPreferencePropertyEditor; +import org.springframework.data.mongodb.config.UUidRepresentationPropertyEditor; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; + +/** + * Unit tests for {@link MongoClientSettingsFactoryBean}. 
+ * + * @author Christoph Strobl + */ +public class MongoClientSettingsFactoryBeanUnitTests { + + @Test // DATAMONGO-2384 + public void convertsReadPreferenceConcernCorrectly() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoClientSettingsFactoryBean.class); + definition.getPropertyValues().addPropertyValue("readPreference", "NEAREST"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerCustomEditor(ReadPreference.class, ReadPreferencePropertyEditor.class); + + factory.registerBeanDefinition("factory", definition); + + MongoClientSettingsFactoryBean bean = factory.getBean("&factory", MongoClientSettingsFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "readPreference")).isEqualTo(ReadPreference.nearest()); + } + + @Test // DATAMONGO-2384 + public void convertsReadConcernConcernCorrectly() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoClientSettingsFactoryBean.class); + definition.getPropertyValues().addPropertyValue("readConcern", "MAJORITY"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerCustomEditor(ReadPreference.class, ReadConcernPropertyEditor.class); + + factory.registerBeanDefinition("factory", definition); + + MongoClientSettingsFactoryBean bean = factory.getBean("&factory", MongoClientSettingsFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "readConcern")).isEqualTo(ReadConcern.MAJORITY); + } + + @Test // DATAMONGO-2427 + public void convertsUuidRepresentationCorrectly() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoClientSettingsFactoryBean.class); + definition.getPropertyValues().addPropertyValue("uUidRepresentation", "STANDARD"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerCustomEditor(ReadPreference.class, UUidRepresentationPropertyEditor.class); + + factory.registerBeanDefinition("factory", definition); + + 
MongoClientSettingsFactoryBean bean = factory.getBean("&factory", MongoClientSettingsFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "uUidRepresentation")).isEqualTo(UuidRepresentation.STANDARD); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBeanTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBeanTests.java new file mode 100644 index 0000000000..a45b099640 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBeanTests.java @@ -0,0 +1,50 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.beans.factory.support.RootBeanDefinition; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.AutoEncryptionSettings; + +/** + * Integration tests for {@link MongoEncryptionSettingsFactoryBean}. 
+ * + * @author Christoph Strobl + */ +public class MongoEncryptionSettingsFactoryBeanTests { + + @Test // DATAMONGO-2306 + public void createsAutoEncryptionSettings() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoEncryptionSettingsFactoryBean.class); + definition.getPropertyValues().addPropertyValue("bypassAutoEncryption", true); + definition.getPropertyValues().addPropertyValue("keyVaultNamespace", "ns"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerBeanDefinition("factory", definition); + + MongoEncryptionSettingsFactoryBean bean = factory.getBean("&factory", MongoEncryptionSettingsFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "bypassAutoEncryption")).isEqualTo(true); + + AutoEncryptionSettings target = factory.getBean(AutoEncryptionSettings.class); + assertThat(target.getKeyVaultNamespace()).isEqualTo("ns"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoExceptionTranslatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoExceptionTranslatorUnitTests.java index b7fe8d5838..9730e61e51 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoExceptionTranslatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoExceptionTranslatorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,30 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; +import static org.assertj.core.api.Assertions.*; -import java.io.IOException; -import java.net.UnknownHostException; - -import com.mongodb.WriteConcern; import org.bson.BsonDocument; -import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + import org.springframework.core.NestedRuntimeException; import org.springframework.dao.DataAccessException; import org.springframework.dao.DataAccessResourceFailureException; import org.springframework.dao.DuplicateKeyException; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.dao.InvalidDataAccessResourceUsageException; +import org.springframework.data.mongodb.ClientSessionException; +import org.springframework.data.mongodb.MongoTransactionException; import org.springframework.data.mongodb.UncategorizedMongoDbException; +import org.springframework.lang.Nullable; import com.mongodb.MongoCursorNotFoundException; import com.mongodb.MongoException; import com.mongodb.MongoInternalException; import com.mongodb.MongoSocketException; +import com.mongodb.MongoSocketReadTimeoutException; +import com.mongodb.MongoSocketWriteException; import com.mongodb.ServerAddress; /** @@ -50,18 +47,20 @@ * @author Michal Vich * @author Oliver Gierke * @author Christoph Strobl + * @author Brice Vandeputte */ -public 
class MongoExceptionTranslatorUnitTests { +class MongoExceptionTranslatorUnitTests { - MongoExceptionTranslator translator; + private static final String EXCEPTION_MESSAGE = "IOException"; + private MongoExceptionTranslator translator; - @Before - public void setUp() { + @BeforeEach + void setUp() { translator = new MongoExceptionTranslator(); } @Test - public void translateDuplicateKey() { + void translateDuplicateKey() { expectExceptionWithCauseMessage( translator.translateExceptionIfPossible( @@ -69,39 +68,53 @@ public void translateDuplicateKey() { DuplicateKeyException.class, null); } - @Test - public void translateSocketException() { + @Test // GH-3568 + void translateSocketException() { + + expectExceptionWithCauseMessage( + translator.translateExceptionIfPossible(new MongoSocketException(EXCEPTION_MESSAGE, new ServerAddress())), + DataAccessResourceFailureException.class, EXCEPTION_MESSAGE); + } + + @Test // GH-3568 + void translateSocketExceptionSubclasses() { expectExceptionWithCauseMessage( - translator.translateExceptionIfPossible(new MongoSocketException("IOException", new ServerAddress())), - DataAccessResourceFailureException.class, "IOException"); + translator.translateExceptionIfPossible(new MongoSocketWriteException("intermediate message", + new ServerAddress(), new Exception(EXCEPTION_MESSAGE))), + DataAccessResourceFailureException.class, EXCEPTION_MESSAGE); + + expectExceptionWithCauseMessage( + translator.translateExceptionIfPossible(new MongoSocketReadTimeoutException("intermediate message", + new ServerAddress(), new Exception(EXCEPTION_MESSAGE))), + DataAccessResourceFailureException.class, EXCEPTION_MESSAGE); } @Test - public void translateCursorNotFound() throws UnknownHostException { + void translateCursorNotFound() { expectExceptionWithCauseMessage( - translator.translateExceptionIfPossible(new MongoCursorNotFoundException(1L, new ServerAddress())), + translator.translateExceptionIfPossible(new MongoCursorNotFoundException(1L, new 
BsonDocument(), Mockito.mock(ServerAddress.class))), DataAccessResourceFailureException.class); } @Test - public void translateToDuplicateKeyException() { + void translateToDuplicateKeyException() { checkTranslatedMongoException(DuplicateKeyException.class, 11000); checkTranslatedMongoException(DuplicateKeyException.class, 11001); } @Test - public void translateToDataAccessResourceFailureException() { + void translateToDataAccessResourceFailureException() { checkTranslatedMongoException(DataAccessResourceFailureException.class, 12000); checkTranslatedMongoException(DataAccessResourceFailureException.class, 13440); } @Test - public void translateToInvalidDataAccessApiUsageException() { + void translateToInvalidDataAccessApiUsageException() { checkTranslatedMongoException(InvalidDataAccessApiUsageException.class, 10003); checkTranslatedMongoException(InvalidDataAccessApiUsageException.class, 12001); @@ -111,7 +124,7 @@ public void translateToInvalidDataAccessApiUsageException() { } @Test - public void translateToUncategorizedMongoDbException() { + void translateToUncategorizedMongoDbException() { MongoException exception = new MongoException(0, ""); DataAccessException translatedException = translator.translateExceptionIfPossible(exception); @@ -120,7 +133,7 @@ public void translateToUncategorizedMongoDbException() { } @Test - public void translateMongoInternalException() { + void translateMongoInternalException() { MongoInternalException exception = new MongoInternalException("Internal exception"); DataAccessException translatedException = translator.translateExceptionIfPossible(exception); @@ -129,37 +142,81 @@ public void translateMongoInternalException() { } @Test - public void translateUnsupportedException() { + void translateUnsupportedException() { RuntimeException exception = new RuntimeException(); - assertThat(translator.translateExceptionIfPossible(exception), is(nullValue())); + assertThat(translator.translateExceptionIfPossible(exception)).isNull(); + } 
+ + @Test // DATAMONGO-2045 + void translateSessionExceptions() { + + checkTranslatedMongoException(ClientSessionException.class, 206); + checkTranslatedMongoException(ClientSessionException.class, 213); + checkTranslatedMongoException(ClientSessionException.class, 228); + checkTranslatedMongoException(ClientSessionException.class, 264); + } + + @Test // DATAMONGO-2045 + void translateTransactionExceptions() { + + checkTranslatedMongoException(MongoTransactionException.class, 217); + checkTranslatedMongoException(MongoTransactionException.class, 225); + checkTranslatedMongoException(MongoTransactionException.class, 244); + checkTranslatedMongoException(MongoTransactionException.class, 251); + checkTranslatedMongoException(MongoTransactionException.class, 256); + checkTranslatedMongoException(MongoTransactionException.class, 257); + checkTranslatedMongoException(MongoTransactionException.class, 263); + checkTranslatedMongoException(MongoTransactionException.class, 267); + } + + @Test // DATAMONGO-2073 + public void translateTransientTransactionExceptions() { + + MongoException source = new MongoException(267, "PreparedTransactionInProgress"); + source.addLabel(MongoException.TRANSIENT_TRANSACTION_ERROR_LABEL); + + expectExceptionWithCauseMessage(translator.translateExceptionIfPossible(source), + UncategorizedMongoDbException.class, + "PreparedTransactionInProgress"); + assertThat(translator.isTransientFailure(source)).isTrue(); + assertThat(translator.isTransientFailure(translator.translateExceptionIfPossible(source))).isTrue(); + } + + @Test // DATAMONGO-2073 + public void translateMongoExceptionWithTransientLabel() { + + MongoException exception = new MongoException(0, ""); + exception.addLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL); + DataAccessException translatedException = translator.translateExceptionIfPossible(exception); + + expectExceptionWithCauseMessage(translatedException, UncategorizedMongoDbException.class); } private void 
checkTranslatedMongoException(Class clazz, int code) { - try { - translator.translateExceptionIfPossible(new MongoException(code, "")); - fail("Expected exception of type " + clazz.getName() + "!"); - } catch (NestedRuntimeException e) { - Throwable cause = e.getRootCause(); - assertThat(cause, is(instanceOf(MongoException.class))); - assertThat(((MongoException) cause).getCode(), is(code)); - } + DataAccessException translated = translator.translateExceptionIfPossible(new MongoException(code, "")); + + assertThat(translated).as("Expected exception of type " + clazz.getName()).isNotNull(); + + Throwable cause = translated.getRootCause(); + assertThat(cause).isInstanceOf(MongoException.class); + assertThat(((MongoException) cause).getCode()).isEqualTo(code); } - private static void expectExceptionWithCauseMessage(NestedRuntimeException e, + private static void expectExceptionWithCauseMessage(@Nullable NestedRuntimeException e, Class type) { expectExceptionWithCauseMessage(e, type, null); } - private static void expectExceptionWithCauseMessage(NestedRuntimeException e, - Class type, String message) { + private static void expectExceptionWithCauseMessage(@Nullable NestedRuntimeException e, + Class type, @Nullable String message) { - assertThat(e, is(instanceOf(type))); + assertThat(e).isInstanceOf(type); if (message != null) { - assertThat(e.getRootCause(), is(notNullValue())); - assertThat(e.getRootCause().getMessage(), containsString(message)); + assertThat(e.getRootCause()).isNotNull(); + assertThat(e.getRootCause().getMessage()).contains(message); } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOperationsUnitTests.java index 60b79b3a38..8b88552860 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOperationsUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOperationsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,21 @@ */ package org.springframework.data.mongodb.core; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Arrays; import java.util.List; import org.bson.Document; import org.bson.conversions.Bson; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.dao.DataAccessException; +import org.springframework.data.convert.CustomConversions; import org.springframework.data.geo.Point; import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.context.MappingContext; @@ -37,6 +39,8 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.util.TypeInformation; import com.mongodb.DBRef; @@ -49,7 +53,7 @@ * @author 
Thomas Darimont * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public abstract class MongoOperationsUnitTests { @Mock CollectionCallback collectionCallback; @@ -59,7 +63,7 @@ public abstract class MongoOperationsUnitTests { Person person; List persons; - @Before + @BeforeEach public final void operationsSetUp() { person = new Person("Oliver"); @@ -84,7 +88,7 @@ public Object convertToMongoType(Object obj, TypeInformation typeInformation) return null; } - public DBRef toDBRef(Object object, MongoPersistentProperty referingProperty) { + public DBRef toDBRef(Object object, MongoPersistentProperty referringProperty) { return null; } @@ -92,26 +96,41 @@ public DBRef toDBRef(Object object, MongoPersistentProperty referingProperty) { public MongoTypeMapper getTypeMapper() { return null; } + + @Override + public ProjectionFactory getProjectionFactory() { + return null; + } + + @Override + public CustomConversions getCustomConversions() { + return null; + } + + @Override + public R project(EntityProjection descriptor, Bson bson) { + return null; + } }; } - @Test(expected = IllegalArgumentException.class) + @Test @SuppressWarnings({ "unchecked", "rawtypes" }) public void rejectsNullForCollectionCallback() { - - getOperations().execute("test", (CollectionCallback) null); + assertThatIllegalArgumentException().isThrownBy(() -> getOperations().execute("test", (CollectionCallback) null)); } - @Test(expected = IllegalArgumentException.class) + @Test @SuppressWarnings({ "unchecked", "rawtypes" }) public void rejectsNullForCollectionCallback2() { - getOperations().execute("collection", (CollectionCallback) null); + assertThatIllegalArgumentException() + .isThrownBy(() -> getOperations().execute("collection", (CollectionCallback) null)); } - @Test(expected = IllegalArgumentException.class) + @Test @SuppressWarnings({ "unchecked", "rawtypes" }) public void rejectsNullForDbCallback() { - getOperations().execute((DbCallback) 
null); + assertThatIllegalArgumentException().isThrownBy(() -> getOperations().execute((DbCallback) null)); } @Test @@ -350,12 +369,7 @@ public void assertDataAccessException() { public void assertException(Class exception) { - try { - doWith(getOperationsForExceptionHandling()); - fail("Expected " + exception + " but completed without any!"); - } catch (Exception e) { - assertTrue("Expected " + exception + " but got " + e, exception.isInstance(e)); - } + assertThatThrownBy(() -> doWith(getOperationsForExceptionHandling())).isInstanceOf(exception); } public abstract void doWith(MongoOperations operations); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOptionsFactoryBeanUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOptionsFactoryBeanUnitTests.java deleted file mode 100644 index 7c866b01fe..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoOptionsFactoryBeanUnitTests.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core; - -import static org.junit.Assert.*; -import static org.junit.Assume.*; -import static org.springframework.data.mongodb.util.MongoClientVersion.*; - -import javax.net.ssl.SSLSocketFactory; - -import org.junit.BeforeClass; -import org.junit.Test; - -import com.mongodb.MongoClientOptions; - -/** - * Unit tests for {@link MongoOptionsFactoryBean}. - * - * @author Oliver Gierke - * @author Mike Saavedra - * @author Christoph Strobl - */ -@SuppressWarnings("deprecation") -public class MongoOptionsFactoryBeanUnitTests { - - @BeforeClass - public static void validateMongoDriver() { - assumeFalse(isMongo3Driver()); - } - - @Test // DATAMONGO-764 - public void testSslConnection() throws Exception { - - MongoClientOptionsFactoryBean bean = new MongoClientOptionsFactoryBean(); - bean.setSsl(true); - bean.afterPropertiesSet(); - - MongoClientOptions options = bean.getObject(); - assertNotNull(options.getSocketFactory()); - assertTrue(options.getSocketFactory() instanceof SSLSocketFactory); - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java new file mode 100644 index 0000000000..cfa28e9314 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java @@ -0,0 +1,73 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; + +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.beans.factory.support.RootBeanDefinition; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.ServerApi; +import com.mongodb.ServerApiVersion; + +/** + * Integration tests for {@link MongoServerApiFactoryBean}. + * + * @author Christoph Strobl + */ +class MongoServerApiFactoryBeanTests { + + @Test // GH-3820 + void createsServerApiForVersionString() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoServerApiFactoryBean.class); + definition.getPropertyValues().addPropertyValue("version", "V1"); + definition.getPropertyValues().addPropertyValue("deprecationErrors", "true"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerBeanDefinition("factory", definition); + + MongoServerApiFactoryBean bean = factory.getBean("&factory", MongoServerApiFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "deprecationErrors")).isEqualTo(true); + + ServerApi target = factory.getBean(ServerApi.class); + assertThat(target.getVersion()).isEqualTo(ServerApiVersion.V1); + assertThat(target.getDeprecationErrors()).contains(true); + assertThat(target.getStrict()).isNotPresent(); + } + + @Test // GH-3820 + void createsServerApiForVersionNumber() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoServerApiFactoryBean.class); + definition.getPropertyValues().addPropertyValue("version", "1"); + definition.getPropertyValues().addPropertyValue("strict", "true"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerBeanDefinition("factory", definition); + + MongoServerApiFactoryBean bean = 
factory.getBean("&factory", MongoServerApiFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "strict")).isEqualTo(true); + + ServerApi target = factory.getBean(ServerApi.class); + assertThat(target.getVersion()).isEqualTo(ServerApiVersion.V1); + assertThat(target.getDeprecationErrors()).isNotPresent(); + assertThat(target.getStrict()).contains(true); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateCollationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateCollationTests.java index 333f44686d..deaffab4b2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateCollationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateCollationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,53 +17,64 @@ import static org.assertj.core.api.Assertions.*; +import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Set; import org.bson.Document; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Collation.Alternate; import org.springframework.data.mongodb.core.query.Collation.ComparisonLevel; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.Version; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * @author Christoph Strobl * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) public class MongoTemplateCollationTests { - public static @ClassRule MongoVersionRule REQUIRES_AT_LEAST_3_4_0 = 
MongoVersionRule.atLeast(Version.parse("3.4.0")); public static final String COLLECTION_NAME = "collation-1"; + static @Client MongoClient mongoClient; @Configuration - static class Config extends AbstractMongoConfiguration { + static class Config extends AbstractMongoClientConfiguration { @Override public MongoClient mongoClient() { - return new MongoClient(); + return mongoClient; } @Override protected String getDatabaseName() { return "collation-tests"; } + + @Override + protected boolean autoIndexCreation() { + return false; + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } } @Autowired MongoTemplate template; - @Before + @BeforeEach public void setUp() { template.dropCollection(COLLECTION_NAME); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDbRefTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDbRefTests.java index 382c3ae304..498bfec17a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDbRefTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDbRefTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,17 +19,27 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import lombok.Data; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.convert.LazyLoadingProxy; +import org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils; import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.mapping.Document; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; -import com.mongodb.MongoClient; +import com.mongodb.client.model.Filters; /** * {@link org.springframework.data.mongodb.core.mapping.DBRef} related integration tests for @@ -37,18 +47,24 @@ * * @author Christoph Strobl */ +@ExtendWith(MongoTemplateExtension.class) public class MongoTemplateDbRefTests { - MongoTemplate template; + @Template(database = "mongo-template-dbref-tests", + initialEntitySet = { RefCycleLoadingIntoDifferentTypeRoot.class, + 
RefCycleLoadingIntoDifferentTypeIntermediate.class, RefCycleLoadingIntoDifferentTypeRootView.class, + WithDBRefOnRawStringId.class, WithLazyDBRefOnRawStringId.class, WithRefToAnotherDb.class, + WithLazyRefToAnotherDb.class, WithListRefToAnotherDb.class, WithLazyListRefToAnotherDb.class }) // + static MongoTestTemplate template; - @Before - public void setUp() { + @Template(database = "mongo-template-dbref-tests-other-db", initialEntitySet = JustSomeType.class) // + static MongoTestTemplate otherDbTemplate; - template = new MongoTemplate(new MongoClient(), "mongo-template-dbref-tests"); + @BeforeEach + public void setUp() { - template.dropCollection(RefCycleLoadingIntoDifferentTypeRoot.class); - template.dropCollection(RefCycleLoadingIntoDifferentTypeIntermediate.class); - template.dropCollection(RefCycleLoadingIntoDifferentTypeRootView.class); + template.flush(); + otherDbTemplate.flush(); } @Test // DATAMONGO-1703 @@ -82,29 +98,559 @@ public void shouldLoadRefIntoDifferentTypeCorrectly() { assertThat(loaded.getRefToIntermediate().getRefToRootView().getContent()).isEqualTo("jon snow"); } - @Data + @Test // DATAMONGO-1798 + public void stringDBRefLoading() { + + RawStringId ref = new RawStringId(); + ref.id = new ObjectId().toHexString(); + ref.value = "new value"; + + template.save(ref); + + WithDBRefOnRawStringId source = new WithDBRefOnRawStringId(); + source.id = "foo"; + source.value = ref; + + template.save(source); + + org.bson.Document result = template + .execute(db -> (org.bson.Document) db.getCollection(template.getCollectionName(WithDBRefOnRawStringId.class)) + .find(Filters.eq("_id", source.id)).limit(1).into(new ArrayList()).iterator().next()); + + assertThat(result).isNotNull(); + assertThat(result.get("value")) + .isEqualTo(new com.mongodb.DBRef(template.getCollectionName(RawStringId.class), ref.getId())); + + WithDBRefOnRawStringId target = template.findOne(query(where("id").is(source.id)), WithDBRefOnRawStringId.class); + 
assertThat(target.value).isEqualTo(ref); + } + + @Test // DATAMONGO-1798 + public void stringDBRefLazyLoading() { + + RawStringId ref = new RawStringId(); + ref.id = new ObjectId().toHexString(); + ref.value = "new value"; + + template.save(ref); + + WithLazyDBRefOnRawStringId source = new WithLazyDBRefOnRawStringId(); + source.id = "foo"; + source.value = ref; + + template.save(source); + + org.bson.Document result = template.execute( + db -> (org.bson.Document) db.getCollection(template.getCollectionName(WithLazyDBRefOnRawStringId.class)) + .find(Filters.eq("_id", source.id)).limit(1).into(new ArrayList()).iterator().next()); + + assertThat(result).isNotNull(); + assertThat(result.get("value")) + .isEqualTo(new com.mongodb.DBRef(template.getCollectionName(RawStringId.class), ref.getId())); + + WithLazyDBRefOnRawStringId target = template.findOne(query(where("id").is(source.id)), + WithLazyDBRefOnRawStringId.class); + + assertThat(target.value).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.getValue()).isEqualTo(ref); + } + + @Test // DATAMONGO-2223 + public void shouldResolveSingleDBRefToAnotherDb() { + + JustSomeType one = new JustSomeType(); + one.value = "one"; + + otherDbTemplate.insert(one); + + WithRefToAnotherDb source = new WithRefToAnotherDb(); + source.value = one; + + template.save(source); + + WithRefToAnotherDb target = template.findOne(query(where("id").is(source.id)), WithRefToAnotherDb.class); + assertThat(target.getValue()).isEqualTo(one); + } + + @Test // DATAMONGO-2223 + public void shouldResolveSingleLazyDBRefToAnotherDb() { + + JustSomeType one = new JustSomeType(); + one.value = "one"; + + otherDbTemplate.insert(one); + + WithLazyRefToAnotherDb source = new WithLazyRefToAnotherDb(); + source.value = one; + + template.save(source); + + WithLazyRefToAnotherDb target = template.findOne(query(where("id").is(source.id)), WithLazyRefToAnotherDb.class); + LazyLoadingTestUtils.assertProxyIsResolved(target.value, false); + 
assertThat(target.getValue()).isEqualTo(one); + } + + @Test // DATAMONGO-2223 + public void shouldResolveListDBRefToAnotherDb() { + + JustSomeType one = new JustSomeType(); + one.value = "one"; + + JustSomeType two = new JustSomeType(); + two.value = "two"; + + otherDbTemplate.insertAll(Arrays.asList(one, two)); + + WithListRefToAnotherDb source = new WithListRefToAnotherDb(); + source.value = Arrays.asList(one, two); + + template.save(source); + + WithListRefToAnotherDb target = template.findOne(query(where("id").is(source.id)), WithListRefToAnotherDb.class); + assertThat(target.getValue()).containsExactlyInAnyOrder(one, two); + } + + @Test // DATAMONGO-2223 + public void shouldResolveLazyListDBRefToAnotherDb() { + + JustSomeType one = new JustSomeType(); + one.value = "one"; + + JustSomeType two = new JustSomeType(); + two.value = "two"; + + otherDbTemplate.insertAll(Arrays.asList(one, two)); + + WithLazyListRefToAnotherDb source = new WithLazyListRefToAnotherDb(); + source.value = Arrays.asList(one, two); + + template.save(source); + + WithLazyListRefToAnotherDb target = template.findOne(query(where("id").is(source.id)), + WithLazyListRefToAnotherDb.class); + LazyLoadingTestUtils.assertProxyIsResolved(target.value, false); + assertThat(target.getValue()).containsExactlyInAnyOrder(one, two); + } + + @Test // GH-2191 + void shouldAllowToSliceCollectionOfDbRefs() { + + JustSomeType one = new JustSomeType(); + one.value = "one"; + + JustSomeType two = new JustSomeType(); + two.value = "two"; + + template.insertAll(Arrays.asList(one, two)); + + WithCollectionDbRef source = new WithCollectionDbRef(); + source.refs = Arrays.asList(one, two); + + template.save(source); + + Query theQuery = query(where("id").is(source.id)); + theQuery.fields().slice("refs", 1, 1); + + WithCollectionDbRef target = template.findOne(theQuery, WithCollectionDbRef.class); + assertThat(target.getRefs()).containsExactly(two); + } + + @Test // GH-2191 + void 
shouldAllowToSliceCollectionOfLazyDbRefs() { + + JustSomeType one = new JustSomeType(); + one.value = "one"; + + JustSomeType two = new JustSomeType(); + two.value = "two"; + + template.insertAll(Arrays.asList(one, two)); + + WithCollectionDbRef source = new WithCollectionDbRef(); + source.lazyrefs = Arrays.asList(one, two); + + template.save(source); + + Query theQuery = query(where("id").is(source.id)); + theQuery.fields().slice("lazyrefs", 1, 1); + + WithCollectionDbRef target = template.findOne(theQuery, WithCollectionDbRef.class); + LazyLoadingTestUtils.assertProxyIsResolved(target.lazyrefs, false); + assertThat(target.getLazyrefs()).containsExactly(two); + } + @Document("cycle-with-different-type-root") static class RefCycleLoadingIntoDifferentTypeRoot { @Id String id; String content; @DBRef RefCycleLoadingIntoDifferentTypeIntermediate refToIntermediate; + + public String getId() { + return this.id; + } + + public String getContent() { + return this.content; + } + + public RefCycleLoadingIntoDifferentTypeIntermediate getRefToIntermediate() { + return this.refToIntermediate; + } + + public void setId(String id) { + this.id = id; + } + + public void setContent(String content) { + this.content = content; + } + + public void setRefToIntermediate(RefCycleLoadingIntoDifferentTypeIntermediate refToIntermediate) { + this.refToIntermediate = refToIntermediate; + } + + public String toString() { + return "MongoTemplateDbRefTests.RefCycleLoadingIntoDifferentTypeRoot(id=" + this.getId() + ", content=" + + this.getContent() + ", refToIntermediate=" + this.getRefToIntermediate() + ")"; + } } - @Data @Document("cycle-with-different-type-intermediate") static class RefCycleLoadingIntoDifferentTypeIntermediate { @Id String id; @DBRef RefCycleLoadingIntoDifferentTypeRootView refToRootView; + + public String getId() { + return this.id; + } + + public RefCycleLoadingIntoDifferentTypeRootView getRefToRootView() { + return this.refToRootView; + } + + public void setId(String id) { 
+ this.id = id; + } + + public void setRefToRootView(RefCycleLoadingIntoDifferentTypeRootView refToRootView) { + this.refToRootView = refToRootView; + } + + public String toString() { + return "MongoTemplateDbRefTests.RefCycleLoadingIntoDifferentTypeIntermediate(id=" + this.getId() + + ", refToRootView=" + this.getRefToRootView() + ")"; + } } - @Data @Document("cycle-with-different-type-root") static class RefCycleLoadingIntoDifferentTypeRootView { @Id String id; String content; + + public String getId() { + return this.id; + } + + public String getContent() { + return this.content; + } + + public void setId(String id) { + this.id = id; + } + + public void setContent(String content) { + this.content = content; + } + + public String toString() { + return "MongoTemplateDbRefTests.RefCycleLoadingIntoDifferentTypeRootView(id=" + this.getId() + ", content=" + + this.getContent() + ")"; + } } + static class RawStringId { + + @MongoId String id; + String value; + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RawStringId that = (RawStringId) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "MongoTemplateDbRefTests.RawStringId(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + static class WithCollectionDbRef { + + @Id String id; + + @DBRef List refs; + + @DBRef(lazy = true) List lazyrefs; + + public String getId() { + return this.id; + } + + public List getRefs() { + return this.refs; + } + + public List getLazyrefs() { + return this.lazyrefs; + } + + public void 
setId(String id) { + this.id = id; + } + + public void setRefs(List refs) { + this.refs = refs; + } + + public void setLazyrefs(List lazyrefs) { + this.lazyrefs = lazyrefs; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithCollectionDbRef(id=" + this.getId() + ", refs=" + this.getRefs() + + ", lazyrefs=" + this.getLazyrefs() + ")"; + } + } + + static class WithDBRefOnRawStringId { + + @Id String id; + @DBRef RawStringId value; + + public String getId() { + return this.id; + } + + public RawStringId getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(RawStringId value) { + this.value = value; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithDBRefOnRawStringId(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + static class WithLazyDBRefOnRawStringId { + + @Id String id; + @DBRef(lazy = true) RawStringId value; + + public String getId() { + return this.id; + } + + public RawStringId getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(RawStringId value) { + this.value = value; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithLazyDBRefOnRawStringId(id=" + this.getId() + ", value=" + this.getValue() + + ")"; + } + } + + static class WithRefToAnotherDb { + + @Id String id; + @DBRef(db = "mongo-template-dbref-tests-other-db") JustSomeType value; + + public String getId() { + return this.id; + } + + public JustSomeType getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(JustSomeType value) { + this.value = value; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithRefToAnotherDb(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + static class WithLazyRefToAnotherDb { + + @Id String id; + @DBRef(lazy = true, db = "mongo-template-dbref-tests-other-db") 
JustSomeType value; + + public String getId() { + return this.id; + } + + public JustSomeType getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(JustSomeType value) { + this.value = value; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithLazyRefToAnotherDb(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + static class WithListRefToAnotherDb { + + @Id String id; + @DBRef(db = "mongo-template-dbref-tests-other-db") List value; + + public String getId() { + return this.id; + } + + public List getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(List value) { + this.value = value; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithListRefToAnotherDb(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + static class WithLazyListRefToAnotherDb { + + @Id String id; + @DBRef(lazy = true, db = "mongo-template-dbref-tests-other-db") List value; + + public String getId() { + return this.id; + } + + public List getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(List value) { + this.value = value; + } + + public String toString() { + return "MongoTemplateDbRefTests.WithLazyListRefToAnotherDb(id=" + this.getId() + ", value=" + this.getValue() + + ")"; + } + } + + static class JustSomeType { + + @Id String id; + String value; + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + JustSomeType that = (JustSomeType) o; + return Objects.equals(id, that.id) && Objects.equals(value, 
that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "MongoTemplateDbRefTests.JustSomeType(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java new file mode 100644 index 0000000000..51b3b005a5 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -0,0 +1,2315 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.ReadOnlyProperty; +import org.springframework.data.annotation.Reference; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils; +import org.springframework.data.mongodb.core.mapping.DocumentPointer; +import org.springframework.data.mongodb.core.mapping.DocumentReference; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.FieldType; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.lang.Nullable; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.model.Filters; + +/** + * {@link DocumentReference} related integration tests for {@link MongoTemplate}. 
+ * + * @author Christoph Strobl + * @author Julia Lee + */ +@ExtendWith(MongoClientExtension.class) +public class MongoTemplateDocumentReferenceTests { + + public static final String DB_NAME = "document-reference-tests"; + + static @Client MongoClient client; + + MongoTestTemplate template = new MongoTestTemplate(cfg -> { + + cfg.configureDatabaseFactory(it -> { + + it.client(client); + it.defaultDb(DB_NAME); + }); + + cfg.configureConversion(it -> { + it.customConverters(new ReferencableConverter(), new SimpleObjectRefWithReadingConverterToDocumentConverter(), + new DocumentToSimpleObjectRefWithReadingConverter()); + }); + + cfg.configureMappingContext(it -> { + it.autocreateIndex(false); + }); + }); + + @BeforeEach + public void setUp() { + template.flushDatabase(); + } + + @Test // GH-3602 + void writeSimpleTypeReference() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.simpleValueRef = new SimpleObjectRef("ref-1", "me-the-referenced-object"); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("simpleValueRef")).isEqualTo("ref-1"); + } + + @Test // GH-3782 + void writeTypeReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.customIdTargetRef = new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), + "me-the-referenced-object"); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customIdTargetRef")).isEqualTo(expectedIdValue); + } + + @Test // GH-3602 + void 
writeMapTypeReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.mapValueRef = new LinkedHashMap<>(); + source.mapValueRef.put("frodo", new SimpleObjectRef("ref-1", "me-the-1-referenced-object")); + source.mapValueRef.put("bilbo", new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("mapValueRef", Map.class)).containsEntry("frodo", "ref-1").containsEntry("bilbo", "ref-2"); + } + + @Test // GH-3782 + void writeMapOfTypeReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.customIdTargetRefMap = Collections.singletonMap("frodo", + new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), "me-the-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customIdTargetRefMap", Map.class)).containsEntry("frodo", expectedIdValue); + } + + @Test // GH-3602 + void writeCollectionOfSimpleTypeReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.simpleValueRef = Arrays.asList(new SimpleObjectRef("ref-1", "me-the-1-referenced-object"), + new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", 
"root-1")).first(); + }); + + assertThat(target.get("simpleValueRef", List.class)).containsExactly("ref-1", "ref-2"); + } + + @Test // GH-3782 + void writeListOfTypeReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.customIdTargetRefList = Collections.singletonList( + new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), "me-the-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customIdTargetRefList", List.class)).containsExactly(expectedIdValue); + } + + @Test // GH-3602 + void writeObjectTypeReference() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.objectValueRef = new ObjectRefOfDocument("ref-1", "me-the-referenced-object"); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("objectValueRef")).isEqualTo(source.getObjectValueRef().toReference()); + } + + @Test // GH-3602 + void writeCollectionOfObjectTypeReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.objectValueRef = Arrays.asList(new ObjectRefOfDocument("ref-1", "me-the-1-referenced-object"), + new ObjectRefOfDocument("ref-2", "me-the-2-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + 
assertThat(target.get("objectValueRef", List.class)).containsExactly( + source.getObjectValueRef().get(0).toReference(), source.getObjectValueRef().get(1).toReference()); + } + + @Test // GH-3602 + void readSimpleTypeObjectReference() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", "ref-1"); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getSimpleValueRef()).isEqualTo(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readCollectionOfSimpleTypeObjectReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", + Collections.singletonList("ref-1")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getSimpleValueRef()).containsExactly(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readLazySimpleTypeObjectReference() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + 
String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleLazyValueRef", "ref-1"); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + + LazyLoadingTestUtils.assertProxy(result.simpleLazyValueRef, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + assertThat(result.getSimpleLazyValueRef()).isEqualTo(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readSimpleTypeObjectReferenceFromFieldWithCustomName() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simple-value-ref-annotated-field-name", + "ref-1"); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getSimpleValueRefWithAnnotatedFieldName()) + .isEqualTo(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readCollectionTypeObjectReferenceFromFieldWithCustomName() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = 
new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simple-value-ref-annotated-field-name", + Collections.singletonList("ref-1")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getSimpleValueRefWithAnnotatedFieldName()) + .containsExactly(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readObjectReferenceFromDocumentType() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOfDocument.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("objectValueRef", + new Document("id", "ref-1").append("property", "without-any-meaning")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getObjectValueRef()).isEqualTo(new ObjectRefOfDocument("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readCollectionObjectReferenceFromDocumentType() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOfDocument.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("objectValueRef", + 
Collections.singletonList(new Document("id", "ref-1").append("property", "without-any-meaning"))); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getObjectValueRef()) + .containsExactly(new ObjectRefOfDocument("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readObjectReferenceFromDocumentDeclaringCollectionName() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = "object-ref-of-document-with-embedded-collection-name"; + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append( + "objectValueRefWithEmbeddedCollectionName", + new Document("id", "ref-1").append("collection", "object-ref-of-document-with-embedded-collection-name") + .append("property", "without-any-meaning")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getObjectValueRefWithEmbeddedCollectionName()) + .isEqualTo(new ObjectRefOfDocumentWithEmbeddedCollectionName("ref-1", "me-the-referenced-object")); + } + + @Test // GH-3602 + void readCollectionObjectReferenceFromDocumentDeclaringCollectionName() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = "object-ref-of-document-with-embedded-collection-name"; + Document refSource1 = new Document("_id", "ref-1").append("value", "me-the-1-referenced-object"); + Document refSource2 = new Document("_id", 
"ref-2").append("value", "me-the-2-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append( + "objectValueRefWithEmbeddedCollectionName", + Arrays.asList( + new Document("id", "ref-2").append("collection", "object-ref-of-document-with-embedded-collection-name"), + new Document("id", "ref-1").append("collection", "object-ref-of-document-with-embedded-collection-name") + .append("property", "without-any-meaning"))); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource1); + db.getCollection(refCollectionName).insertOne(refSource2); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getObjectValueRefWithEmbeddedCollectionName()).containsExactly( + new ObjectRefOfDocumentWithEmbeddedCollectionName("ref-2", "me-the-2-referenced-object"), + new ObjectRefOfDocumentWithEmbeddedCollectionName("ref-1", "me-the-1-referenced-object")); + } + + @Test // GH-3602 + void useOrderFromAnnotatedSort() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource1 = new Document("_id", "ref-1").append("value", "me-the-1-referenced-object"); + Document refSource2 = new Document("_id", "ref-2").append("value", "me-the-2-referenced-object"); + Document refSource3 = new Document("_id", "ref-3").append("value", "me-the-3-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleSortedValueRef", + Arrays.asList("ref-1", "ref-3", "ref-2")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource1); + db.getCollection(refCollectionName).insertOne(refSource2); + db.getCollection(refCollectionName).insertOne(refSource3); + 
db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getSimpleSortedValueRef()).containsExactly( + new SimpleObjectRef("ref-3", "me-the-3-referenced-object"), + new SimpleObjectRef("ref-2", "me-the-2-referenced-object"), + new SimpleObjectRef("ref-1", "me-the-1-referenced-object")); + } + + @Test // GH-3602 + void readObjectReferenceFromDocumentNotRelatingToTheIdProperty() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOnNonIdField.class); + Document refSource = new Document("_id", "ref-1").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("objectValueRefOnNonIdFields", + new Document("refKey1", "ref-key-1").append("refKey2", "ref-key-2").append("property", "without-any-meaning")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getObjectValueRefOnNonIdFields()) + .isEqualTo(new ObjectRefOnNonIdField("ref-1", "me-the-referenced-object", "ref-key-1", "ref-key-2")); + } + + @Test // GH-3602 + void readLazyObjectReferenceFromDocumentNotRelatingToTheIdProperty() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOnNonIdField.class); + Document refSource = new Document("_id", "ref-1").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", 
"v1").append("lazyObjectValueRefOnNonIdFields", + new Document("refKey1", "ref-key-1").append("refKey2", "ref-key-2").append("property", "without-any-meaning")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + + LazyLoadingTestUtils.assertProxy(result.lazyObjectValueRefOnNonIdFields, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + assertThat(result.getLazyObjectValueRefOnNonIdFields()) + .isEqualTo(new ObjectRefOnNonIdField("ref-1", "me-the-referenced-object", "ref-key-1", "ref-key-2")); + } + + @Test // GH-3602 + void readCollectionObjectReferenceFromDocumentNotRelatingToTheIdProperty() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOnNonIdField.class); + Document refSource = new Document("_id", "ref-1").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("objectValueRefOnNonIdFields", + Collections.singletonList(new Document("refKey1", "ref-key-1").append("refKey2", "ref-key-2").append("property", + "without-any-meaning"))); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getObjectValueRefOnNonIdFields()) + .containsExactly(new ObjectRefOnNonIdField("ref-1", "me-the-referenced-object", "ref-key-1", "ref-key-2")); + } + + @Test // GH-3602 + void readMapOfReferences() { + + String rootCollectionName = 
template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + + Document refSource1 = new Document("_id", "ref-1").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-1-referenced-object"); + + Document refSource2 = new Document("_id", "ref-2").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-2-referenced-object"); + + Map refmap = new LinkedHashMap<>(); + refmap.put("frodo", "ref-1"); + refmap.put("bilbo", "ref-2"); + + Document source = new Document("_id", "id-1").append("value", "v1").append("mapValueRef", refmap); + + template.execute(db -> { + + db.getCollection(rootCollectionName).insertOne(source); + db.getCollection(refCollectionName).insertOne(refSource1); + db.getCollection(refCollectionName).insertOne(refSource2); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + + assertThat(result.getMapValueRef()) + .containsEntry("frodo", new SimpleObjectRef("ref-1", "me-the-1-referenced-object")) + .containsEntry("bilbo", new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + } + + @Test // GH-3602 + void loadLazyCyclicReference() { + + WithRefA a = new WithRefA(); + a.id = "a"; + + WithRefB b = new WithRefB(); + b.id = "b"; + + a.toB = b; + b.lazyToA = a; + + template.save(a); + template.save(b); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("id").is(a.id)).firstValue(); + assertThat(loadedA).isNotNull(); + assertThat(loadedA.getToB()).isNotNull(); + LazyLoadingTestUtils.assertProxy(loadedA.getToB().lazyToA, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + } + + @Test // GH-3602 + void loadEagerCyclicReference() { + + WithRefA a = new WithRefA(); + a.id = "a"; + + WithRefB b = new WithRefB(); + b.id = "b"; + + a.toB = b; + b.eagerToA = a; + + 
template.save(a); + template.save(b); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("id").is(a.id)).firstValue(); + + assertThat(loadedA).isNotNull(); + assertThat(loadedA.getToB()).isNotNull(); + assertThat(loadedA.getToB().eagerToA).isSameAs(loadedA); + } + + @Test // GH-3602 + void loadAndStoreUnresolvedLazyDoesNotResolveTheProxy() { + + String collectionB = template.getCollectionName(WithRefB.class); + + WithRefA a = new WithRefA(); + a.id = "a"; + + WithRefB b = new WithRefB(); + b.id = "b"; + + a.toB = b; + b.lazyToA = a; + + template.save(a); + template.save(b); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("id").is(a.id)).firstValue(); + template.save(loadedA.getToB()); + + LazyLoadingTestUtils.assertProxy(loadedA.getToB().lazyToA, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + + Document target = template.execute(db -> { + return db.getCollection(collectionB).find(Filters.eq("_id", "b")).first(); + }); + assertThat(target.get("lazyToA", Object.class)).isEqualTo("a"); + } + + @Test // GH-3602 + void loadCollectionReferenceWithMissingRefs() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + + // ref-1 is missing. 
+ Document refSource = new Document("_id", "ref-2").append("value", "me-the-2-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", + Arrays.asList("ref-1", "ref-2")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getSimpleValueRef()).containsExactly(new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + } + + @Test // GH-3805 + void loadEmptyCollectionReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + // an empty reference array. + Document source = new Document("_id", "id-1").append("value", "v1").append("simplePreinitializedValueRef", + Collections.emptyList()); + + template.execute(db -> { + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.simplePreinitializedValueRef).isEmpty(); + } + + @Test // GH-3805 + void loadEmptyMapReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + // an empty reference array. 
+ Document source = new Document("_id", "id-1").append("value", "v1").append("simplePreinitializedMapRef", + new Document()); + + template.execute(db -> { + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.simplePreinitializedMapRef).isEmpty(); + } + + @Test // GH-3805 + void loadNoExistingCollectionReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + // no reference array at all + Document source = new Document("_id", "id-1").append("value", "v1"); + + template.execute(db -> { + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.simplePreinitializedValueRef).isEmpty(); + } + + @Test // GH-3806 + void resolveReferenceWhenUsedAsCtorArgument() { + + Publisher publisher = new Publisher(); + publisher.id = "p-111"; + publisher.name = "ppp"; + + template.save(publisher); + + WithRequiredArgsCtor source = new WithRequiredArgsCtor("id-1", publisher); + + template.save(source); + + WithRequiredArgsCtor target = template.findOne(query(where("id").is(source.id)), WithRequiredArgsCtor.class); + assertThat(target.publisher).isNotNull(); + } + + @Test // GH-3806 + void resolveLazyReferenceWhenUsedAsCtorArgument() { + + Publisher publisher = new Publisher(); + publisher.id = "p-111"; + publisher.name = "ppp"; + + template.save(publisher); + + WithLazyRequiredArgsCtor source = new WithLazyRequiredArgsCtor("id-1", publisher); + + template.save(source); + + WithLazyRequiredArgsCtor target = template.findOne(query(where("id").is(source.id)), WithLazyRequiredArgsCtor.class); + + // proxy not yet resolved + LazyLoadingTestUtils.assertProxy(target.publisher, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + 
assertThat(proxy.currentValue()).isNull(); + }); + + // resolve the proxy by invoking a method on it + assertThat(target.getPublisher().getName()).isEqualTo("ppp"); + LazyLoadingTestUtils.assertProxy(target.publisher, (proxy) -> { + assertThat(proxy.isResolved()).isTrue(); + }); + } + + @Test // GH-3602 + void queryForReference() { + + WithRefB b = new WithRefB(); + b.id = "b"; + template.save(b); + + WithRefA a = new WithRefA(); + a.id = "a"; + a.toB = b; + template.save(a); + + WithRefA a2 = new WithRefA(); + a2.id = "a2"; + template.save(a2); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("toB").is(b)).firstValue(); + assertThat(loadedA.getId()).isEqualTo(a.getId()); + } + + @Test // GH-3602 + void queryForReferenceInCollection() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + Document shouldBeFound = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", + Arrays.asList("ref-1", "ref-2")); + Document shouldNotBeFound = new Document("_id", "id-2").append("value", "v2").append("simpleValueRef", + Arrays.asList("ref-1")); + + template.execute(db -> { + + db.getCollection(rootCollectionName).insertOne(shouldBeFound); + db.getCollection(rootCollectionName).insertOne(shouldNotBeFound); + return null; + }); + + SimpleObjectRef objectRef = new SimpleObjectRef("ref-2", "some irrelevant value"); + + List loaded = template.query(CollectionRefRoot.class) + .matching(where("simpleValueRef").in(objectRef)).all(); + assertThat(loaded).map(CollectionRefRoot::getId).containsExactly("id-1"); + } + + @Test // GH-3602 + void queryForReferenceOnIdField() { + + WithRefB b = new WithRefB(); + b.id = "b"; + template.save(b); + + WithRefA a = new WithRefA(); + a.id = "a"; + a.toB = b; + template.save(a); + + WithRefA a2 = new WithRefA(); + a2.id = "a2"; + template.save(a2); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("toB.id").is(b.id)).firstValue(); + 
assertThat(loadedA.getId()).isEqualTo(a.getId()); + } + + @Test // GH-3602 + void updateReferenceWithEntityHavingPointerConversion() { + + WithRefB b = new WithRefB(); + b.id = "b"; + template.save(b); + + WithRefA a = new WithRefA(); + a.id = "a"; + template.save(a); + + template.update(WithRefA.class).apply(new Update().set("toB", b)).first(); + + String collectionA = template.getCollectionName(WithRefA.class); + + Document target = template.execute(db -> { + return db.getCollection(collectionA).find(Filters.eq("_id", "a")).first(); + }); + + assertThat(target).containsEntry("toB", "b"); + } + + @Test // GH-3602 + void updateReferenceWithEntityWithoutPointerConversion() { + + String collectionName = template.getCollectionName(SingleRefRoot.class); + SingleRefRoot refRoot = new SingleRefRoot(); + refRoot.id = "root-1"; + + SimpleObjectRef ref = new SimpleObjectRef("ref-1", "me the referenced object"); + + template.save(refRoot); + + template.update(SingleRefRoot.class).apply(new Update().set("simpleValueRef", ref)).first(); + + Document target = template.execute(db -> { + return db.getCollection(collectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("simpleValueRef", "ref-1"); + } + + @Test // GH-3602 + void updateReferenceWithValue() { + + WithRefA a = new WithRefA(); + a.id = "a"; + template.save(a); + + template.update(WithRefA.class).apply(new Update().set("toB", "b")).first(); + + String collectionA = template.getCollectionName(WithRefA.class); + + Document target = template.execute(db -> { + return db.getCollection(collectionA).find(Filters.eq("_id", "a")).first(); + }); + + assertThat(target).containsEntry("toB", "b"); + } + + @Test // GH-4041 + void updateReferenceWithPushToCollection() { + + WithListOfRefs a = new WithListOfRefs(); + a.id = "a"; + template.save(a); + + WithListOfRefs b = new WithListOfRefs(); + b.id = "b"; + template.save(b); + + 
template.update(WithListOfRefs.class).matching(where("id").is(a.id)) + .apply(new Update().push("refs").each(new Object[] { b })).first(); + + String collection = template.getCollectionName(WithListOfRefs.class); + + Document target = template.execute(db -> { + return db.getCollection(collection).find(Filters.eq("_id", "a")).first(); + }); + + assertThat(target).containsEntry("refs", Collections.singletonList("b")); + } + + @Test // GH-3782 + void updateReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot root = new SingleRefRoot(); + root.id = "root-1"; + template.save(root); + + template.update(SingleRefRoot.class).apply(new Update().set("customIdTargetRef", + new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), "b"))).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("customIdTargetRef", expectedIdValue); + } + + @Test // GH-3602 + void updateReferenceCollectionWithEntity() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot root = new CollectionRefRoot(); + root.id = "root-1"; + root.simpleValueRef = Collections.singletonList(new SimpleObjectRef("ref-1", "beastie")); + + template.save(root); + + template.update(CollectionRefRoot.class) + .apply(new Update().push("simpleValueRef").value(new SimpleObjectRef("ref-2", "boys"))).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("simpleValueRef", Arrays.asList("ref-1", "ref-2")); + } + + @Test // GH-3602 + void updateReferenceCollectionWithValue() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + 
CollectionRefRoot root = new CollectionRefRoot(); + root.id = "root-1"; + root.simpleValueRef = Collections.singletonList(new SimpleObjectRef("ref-1", "beastie")); + + template.save(root); + + template.update(CollectionRefRoot.class).apply(new Update().push("simpleValueRef").value("ref-2")).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("simpleValueRef", Arrays.asList("ref-1", "ref-2")); + } + + @Test // GH-3602 + @Disabled("Property path resolution does not work inside maps, the key is considered :/") + void updateReferenceMapWithEntity() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot root = new CollectionRefRoot(); + root.id = "root-1"; + root.mapValueRef = Collections.singletonMap("beastie", new SimpleObjectRef("ref-1", "boys")); + + template.save(root); + + template.update(CollectionRefRoot.class) + .apply(new Update().set("mapValueRef.rise", new SimpleObjectRef("ref-2", "against"))).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("mapValueRef", new Document("beastie", "ref-1").append("rise", "ref-2")); + } + + @Test // GH-3602 + void updateReferenceMapWithValue() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot root = new CollectionRefRoot(); + root.id = "root-1"; + root.mapValueRef = Collections.singletonMap("beastie", new SimpleObjectRef("ref-1", "boys")); + + template.save(root); + + template.update(CollectionRefRoot.class).apply(new Update().set("mapValueRef.rise", "ref-2")).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + 
assertThat(target).containsEntry("mapValueRef", new Document("beastie", "ref-1").append("rise", "ref-2")); + } + + @Test // GH-3602 + void useReadingWriterConverterPairForLoading() { + + SingleRefRoot root = new SingleRefRoot(); + root.id = "root-1"; + root.withReadingConverter = new SimpleObjectRefWithReadingConverter("ref-1", "value-1"); + + template.save(root.withReadingConverter); + + template.save(root); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(SingleRefRoot.class)).find(Filters.eq("_id", root.id)).first(); + }); + + assertThat(target).containsEntry("withReadingConverter", + new Document("ref-key-from-custom-write-converter", root.withReadingConverter.id)); + + SingleRefRoot loaded = template.findOne(query(where("id").is(root.id)), SingleRefRoot.class); + assertThat(loaded.withReadingConverter).isInstanceOf(SimpleObjectRefWithReadingConverter.class); + } + + @Test // GH-3602 + void deriveMappingFromLookup() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + template.save(publisher); + + Book book = new Book(); + book.id = "book-1"; + book.publisher = publisher; + + template.save(book); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(Book.class)).find(Filters.eq("_id", book.id)).first(); + }); + + assertThat(target).containsEntry("publisher", new Document("acc", publisher.acronym).append("n", publisher.name)); + + Book result = template.findOne(query(where("id").is(book.id)), Book.class); + assertThat(result.publisher).isNotNull(); + } + + @Test // GH-3602 + void updateDerivedMappingFromLookup() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + template.save(publisher); + + Book book = new Book(); + book.id = "book-1"; + + template.save(book); + + 
template.update(Book.class).matching(where("id").is(book.id)).apply(new Update().set("publisher", publisher)) + .first(); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(Book.class)).find(Filters.eq("_id", book.id)).first(); + }); + + assertThat(target).containsEntry("publisher", new Document("acc", publisher.acronym).append("n", publisher.name)); + + Book result = template.findOne(query(where("id").is(book.id)), Book.class); + assertThat(result.publisher).isNotNull(); + } + + @Test // GH-3602 + void queryDerivedMappingFromLookup() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + template.save(publisher); + + Book book = new Book(); + book.id = "book-1"; + book.publisher = publisher; + + template.save(book); + book.publisher = publisher; + + Book result = template.findOne(query(where("publisher").is(publisher)), Book.class); + assertThat(result.publisher).isNotNull(); + } + + @Test // GH-3602 + void allowsDirectUsageOfAtReference() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + template.save(publisher); + + UsingAtReference root = new UsingAtReference(); + root.id = "book-1"; + root.publisher = publisher; + + template.save(root); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(UsingAtReference.class)).find(Filters.eq("_id", root.id)).first(); + }); + + assertThat(target).containsEntry("publisher", "p-1"); + + UsingAtReference result = template.findOne(query(where("id").is(root.id)), UsingAtReference.class); + assertThat(result.publisher).isNotNull(); + } + + @Test // GH-3602 + void updateWhenUsingAtReferenceDirectly() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + 
template.save(publisher); + + UsingAtReference root = new UsingAtReference(); + root.id = "book-1"; + + template.save(root); + template.update(UsingAtReference.class).matching(where("id").is(root.id)).apply(new Update().set("publisher", publisher)).first(); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(UsingAtReference.class)).find(Filters.eq("_id", root.id)).first(); + }); + + assertThat(target).containsEntry("publisher", "p-1"); + } + + @Test // GH-3798 + void allowsOneToMayStyleLookupsUsingSelfVariable() { + + OneToManyStyleBook book1 = new OneToManyStyleBook(); + book1.id = "id-1"; + book1.publisherId = "p-100"; + + OneToManyStyleBook book2 = new OneToManyStyleBook(); + book2.id = "id-2"; + book2.publisherId = "p-200"; + + OneToManyStyleBook book3 = new OneToManyStyleBook(); + book3.id = "id-3"; + book3.publisherId = "p-100"; + + template.save(book1); + template.save(book2); + template.save(book3); + + OneToManyStylePublisher publisher = new OneToManyStylePublisher(); + publisher.id = "p-100"; + + template.save(publisher); + + OneToManyStylePublisher target = template.findOne(query(where("id").is(publisher.id)), OneToManyStylePublisher.class); + assertThat(target.books).containsExactlyInAnyOrder(book1, book3); + } + + @Test // GH-3847 + void writeReferenceWithIdStringThatIsAnObjectId() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + ObjectId id = new ObjectId(); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.simpleValueRef = new SimpleObjectRef(id.toHexString(), "me-the-referenced-object"); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("simpleValueRef")).isEqualTo(id); + } + + @Test // GH-3847 + void readWithIdStringThatIsAnObjectId() { + + ObjectId id = new ObjectId(); + + String 
rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", id).append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", id); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getSimpleValueRef()).isEqualTo(new SimpleObjectRef(id.toHexString(), "me-the-referenced-object")); + } + + @Test // GH-3847 + void readWriteTypeReferenceHavingFixedStringIdTargetType() { + + ObjectId id = new ObjectId(); + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + ObjectRefHavingStringIdTargetType customStringIdTargetRef = new ObjectRefHavingStringIdTargetType(id.toHexString(), + "me-the-referenced-object"); + template.save(customStringIdTargetRef); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.customStringIdTargetRef = customStringIdTargetRef; + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customStringIdTargetRef")).isEqualTo(id.toHexString()); + + SingleRefRoot result = template.findOne(query(where("id").is("root-1")), SingleRefRoot.class); + assertThat(result.getCustomStringIdTargetRef()) + .isEqualTo(new ObjectRefHavingStringIdTargetType(id.toHexString(), "me-the-referenced-object")); + } + + @Test // GH-4484 + void resolveReferenceForOneToManyLookupWithSelfVariableWhenUsedInCtorArgument() { + + OneToManyStylePublisherWithRequiredArgsCtor publisher = new OneToManyStylePublisherWithRequiredArgsCtor("p-100", null); + 
template.save(publisher); + + OneToManyStyleBook book1 = new OneToManyStyleBook(); + book1.id = "id-1"; + book1.publisherId = publisher.id; + + OneToManyStyleBook book2 = new OneToManyStyleBook(); + book2.id = "id-2"; + book2.publisherId = "p-200"; + + OneToManyStyleBook book3 = new OneToManyStyleBook(); + book3.id = "id-3"; + book3.publisherId = publisher.id; + + template.save(book1); + template.save(book2); + template.save(book3); + + OneToManyStylePublisherWithRequiredArgsCtor target = template.findOne(query(where("id").is(publisher.id)), OneToManyStylePublisherWithRequiredArgsCtor.class); + assertThat(target.books).containsExactlyInAnyOrder(book1, book3); + } + + static class SingleRefRoot { + + String id; + String value; + + @DocumentReference SimpleObjectRefWithReadingConverter withReadingConverter; + + @DocumentReference(lookup = "{ '_id' : ?#{#target} }") // + SimpleObjectRef simpleValueRef; + + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }", lazy = true) // + SimpleObjectRef simpleLazyValueRef; + + @Field("simple-value-ref-annotated-field-name") // + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // + SimpleObjectRef simpleValueRefWithAnnotatedFieldName; + + @DocumentReference(lookup = "{ '_id' : '?#{id}' }") // + ObjectRefOfDocument objectValueRef; + + @DocumentReference(lookup = "{ '_id' : '?#{id}' }", collection = "#collection") // + ObjectRefOfDocumentWithEmbeddedCollectionName objectValueRefWithEmbeddedCollectionName; + + @DocumentReference(lookup = "{ 'refKey1' : '?#{refKey1}', 'refKey2' : '?#{refKey2}' }") // + ObjectRefOnNonIdField objectValueRefOnNonIdFields; + + @DocumentReference(lookup = "{ 'refKey1' : '?#{refKey1}', 'refKey2' : '?#{refKey2}' }", lazy = true) // + ObjectRefOnNonIdField lazyObjectValueRefOnNonIdFields; + + @DocumentReference ObjectRefHavingCustomizedIdTargetType customIdTargetRef; + + @DocumentReference ObjectRefHavingStringIdTargetType customStringIdTargetRef; + + public String getId() { + return this.id; + } 
+ + public String getValue() { + return this.value; + } + + public SimpleObjectRefWithReadingConverter getWithReadingConverter() { + return this.withReadingConverter; + } + + public SimpleObjectRef getSimpleValueRef() { + return this.simpleValueRef; + } + + public SimpleObjectRef getSimpleLazyValueRef() { + return this.simpleLazyValueRef; + } + + public SimpleObjectRef getSimpleValueRefWithAnnotatedFieldName() { + return this.simpleValueRefWithAnnotatedFieldName; + } + + public ObjectRefOfDocument getObjectValueRef() { + return this.objectValueRef; + } + + public ObjectRefOfDocumentWithEmbeddedCollectionName getObjectValueRefWithEmbeddedCollectionName() { + return this.objectValueRefWithEmbeddedCollectionName; + } + + public ObjectRefOnNonIdField getObjectValueRefOnNonIdFields() { + return this.objectValueRefOnNonIdFields; + } + + public ObjectRefOnNonIdField getLazyObjectValueRefOnNonIdFields() { + return this.lazyObjectValueRefOnNonIdFields; + } + + public ObjectRefHavingCustomizedIdTargetType getCustomIdTargetRef() { + return this.customIdTargetRef; + } + + public ObjectRefHavingStringIdTargetType getCustomStringIdTargetRef() { + return this.customStringIdTargetRef; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + public void setWithReadingConverter(SimpleObjectRefWithReadingConverter withReadingConverter) { + this.withReadingConverter = withReadingConverter; + } + + public void setSimpleValueRef(SimpleObjectRef simpleValueRef) { + this.simpleValueRef = simpleValueRef; + } + + public void setSimpleLazyValueRef(SimpleObjectRef simpleLazyValueRef) { + this.simpleLazyValueRef = simpleLazyValueRef; + } + + public void setSimpleValueRefWithAnnotatedFieldName(SimpleObjectRef simpleValueRefWithAnnotatedFieldName) { + this.simpleValueRefWithAnnotatedFieldName = simpleValueRefWithAnnotatedFieldName; + } + + public void setObjectValueRef(ObjectRefOfDocument objectValueRef) { + 
this.objectValueRef = objectValueRef; + } + + public void setObjectValueRefWithEmbeddedCollectionName( + ObjectRefOfDocumentWithEmbeddedCollectionName objectValueRefWithEmbeddedCollectionName) { + this.objectValueRefWithEmbeddedCollectionName = objectValueRefWithEmbeddedCollectionName; + } + + public void setObjectValueRefOnNonIdFields(ObjectRefOnNonIdField objectValueRefOnNonIdFields) { + this.objectValueRefOnNonIdFields = objectValueRefOnNonIdFields; + } + + public void setLazyObjectValueRefOnNonIdFields(ObjectRefOnNonIdField lazyObjectValueRefOnNonIdFields) { + this.lazyObjectValueRefOnNonIdFields = lazyObjectValueRefOnNonIdFields; + } + + public void setCustomIdTargetRef(ObjectRefHavingCustomizedIdTargetType customIdTargetRef) { + this.customIdTargetRef = customIdTargetRef; + } + + public void setCustomStringIdTargetRef(ObjectRefHavingStringIdTargetType customStringIdTargetRef) { + this.customStringIdTargetRef = customStringIdTargetRef; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.SingleRefRoot(id=" + this.getId() + ", value=" + this.getValue() + + ", withReadingConverter=" + this.getWithReadingConverter() + ", simpleValueRef=" + this.getSimpleValueRef() + + ", simpleLazyValueRef=" + this.getSimpleLazyValueRef() + ", simpleValueRefWithAnnotatedFieldName=" + + this.getSimpleValueRefWithAnnotatedFieldName() + ", objectValueRef=" + this.getObjectValueRef() + + ", objectValueRefWithEmbeddedCollectionName=" + this.getObjectValueRefWithEmbeddedCollectionName() + + ", objectValueRefOnNonIdFields=" + this.getObjectValueRefOnNonIdFields() + + ", lazyObjectValueRefOnNonIdFields=" + this.getLazyObjectValueRefOnNonIdFields() + ", customIdTargetRef=" + + this.getCustomIdTargetRef() + ", customStringIdTargetRef=" + this.getCustomStringIdTargetRef() + ")"; + } + } + + static class CollectionRefRoot { + + String id; + String value; + + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // + List simpleValueRef; + + @DocumentReference + 
List simplePreinitializedValueRef = new ArrayList<>(); + + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }", sort = "{ '_id' : -1 } ") // + List simpleSortedValueRef; + + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // + Map mapValueRef; + + @DocumentReference // + Map simplePreinitializedMapRef = new LinkedHashMap<>(); + + @Field("simple-value-ref-annotated-field-name") // + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // + List simpleValueRefWithAnnotatedFieldName; + + @DocumentReference(lookup = "{ '_id' : '?#{id}' }") // + List objectValueRef; + + @DocumentReference(lookup = "{ '_id' : '?#{id}' }", collection = "?#{collection}") // + List objectValueRefWithEmbeddedCollectionName; + + @DocumentReference(lookup = "{ 'refKey1' : '?#{refKey1}', 'refKey2' : '?#{refKey2}' }") // + List objectValueRefOnNonIdFields; + + @DocumentReference List customIdTargetRefList; + + @DocumentReference Map customIdTargetRefMap; + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public List getSimpleValueRef() { + return this.simpleValueRef; + } + + public List getSimplePreinitializedValueRef() { + return this.simplePreinitializedValueRef; + } + + public List getSimpleSortedValueRef() { + return this.simpleSortedValueRef; + } + + public Map getMapValueRef() { + return this.mapValueRef; + } + + public Map getSimplePreinitializedMapRef() { + return this.simplePreinitializedMapRef; + } + + public List getSimpleValueRefWithAnnotatedFieldName() { + return this.simpleValueRefWithAnnotatedFieldName; + } + + public List getObjectValueRef() { + return this.objectValueRef; + } + + public List getObjectValueRefWithEmbeddedCollectionName() { + return this.objectValueRefWithEmbeddedCollectionName; + } + + public List getObjectValueRefOnNonIdFields() { + return this.objectValueRefOnNonIdFields; + } + + public List getCustomIdTargetRefList() { + return this.customIdTargetRefList; + } + + public Map 
getCustomIdTargetRefMap() { + return this.customIdTargetRefMap; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + public void setSimpleValueRef(List simpleValueRef) { + this.simpleValueRef = simpleValueRef; + } + + public void setSimplePreinitializedValueRef(List simplePreinitializedValueRef) { + this.simplePreinitializedValueRef = simplePreinitializedValueRef; + } + + public void setSimpleSortedValueRef(List simpleSortedValueRef) { + this.simpleSortedValueRef = simpleSortedValueRef; + } + + public void setMapValueRef(Map mapValueRef) { + this.mapValueRef = mapValueRef; + } + + public void setSimplePreinitializedMapRef(Map simplePreinitializedMapRef) { + this.simplePreinitializedMapRef = simplePreinitializedMapRef; + } + + public void setSimpleValueRefWithAnnotatedFieldName(List simpleValueRefWithAnnotatedFieldName) { + this.simpleValueRefWithAnnotatedFieldName = simpleValueRefWithAnnotatedFieldName; + } + + public void setObjectValueRef(List objectValueRef) { + this.objectValueRef = objectValueRef; + } + + public void setObjectValueRefWithEmbeddedCollectionName( + List objectValueRefWithEmbeddedCollectionName) { + this.objectValueRefWithEmbeddedCollectionName = objectValueRefWithEmbeddedCollectionName; + } + + public void setObjectValueRefOnNonIdFields(List objectValueRefOnNonIdFields) { + this.objectValueRefOnNonIdFields = objectValueRefOnNonIdFields; + } + + public void setCustomIdTargetRefList(List customIdTargetRefList) { + this.customIdTargetRefList = customIdTargetRefList; + } + + public void setCustomIdTargetRefMap(Map customIdTargetRefMap) { + this.customIdTargetRefMap = customIdTargetRefMap; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.CollectionRefRoot(id=" + this.getId() + ", value=" + this.getValue() + + ", simpleValueRef=" + this.getSimpleValueRef() + ", simplePreinitializedValueRef=" + + this.getSimplePreinitializedValueRef() + ", 
simpleSortedValueRef=" + this.getSimpleSortedValueRef() + + ", mapValueRef=" + this.getMapValueRef() + ", simplePreinitializedMapRef=" + + this.getSimplePreinitializedMapRef() + ", simpleValueRefWithAnnotatedFieldName=" + + this.getSimpleValueRefWithAnnotatedFieldName() + ", objectValueRef=" + this.getObjectValueRef() + + ", objectValueRefWithEmbeddedCollectionName=" + this.getObjectValueRefWithEmbeddedCollectionName() + + ", objectValueRefOnNonIdFields=" + this.getObjectValueRefOnNonIdFields() + ", customIdTargetRefList=" + + this.getCustomIdTargetRefList() + ", customIdTargetRefMap=" + this.getCustomIdTargetRefMap() + ")"; + } + } + + @FunctionalInterface + interface ReferenceAble { + Object toReference(); + } + + @org.springframework.data.mongodb.core.mapping.Document("simple-object-ref") + static class SimpleObjectRef { + + @Id String id; + String value; + + public SimpleObjectRef(String id, String value) { + this.id = id; + this.value = value; + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SimpleObjectRef that = (SimpleObjectRef) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.SimpleObjectRef(id=" + this.getId() + ", value=" + this.getValue() + + ")"; + } + } + + static class SimpleObjectRefWithReadingConverter extends SimpleObjectRef { + + public SimpleObjectRefWithReadingConverter(String id, String value) { + super(id, value); + } + + } + + static class ObjectRefOfDocument implements ReferenceAble { + + @Id String id; + String value; + 
+ public ObjectRefOfDocument(String id, String value) { + this.id = id; + this.value = value; + } + + @Override + public Object toReference() { + return new Document("id", id).append("property", "without-any-meaning"); + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObjectRefOfDocument that = (ObjectRefOfDocument) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.ObjectRefOfDocument(id=" + this.getId() + ", value=" + this.getValue() + + ")"; + } + } + + static class ObjectRefOfDocumentWithEmbeddedCollectionName implements ReferenceAble { + + @Id String id; + String value; + + public ObjectRefOfDocumentWithEmbeddedCollectionName(String id, String value) { + this.id = id; + this.value = value; + } + + @Override + public Object toReference() { + return new Document("id", id).append("collection", "object-ref-of-document-with-embedded-collection-name"); + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObjectRefOfDocumentWithEmbeddedCollectionName that = (ObjectRefOfDocumentWithEmbeddedCollectionName) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value); + } + + @Override + public int 
hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.ObjectRefOfDocumentWithEmbeddedCollectionName(id=" + this.getId() + + ", value=" + this.getValue() + ")"; + } + } + + static class ObjectRefOnNonIdField implements ReferenceAble { + + @Id String id; + String value; + String refKey1; + String refKey2; + + public ObjectRefOnNonIdField(String id, String value, String refKey1, String refKey2) { + this.id = id; + this.value = value; + this.refKey1 = refKey1; + this.refKey2 = refKey2; + } + + @Override + public Object toReference() { + return new Document("refKey1", refKey1).append("refKey2", refKey2); + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public String getRefKey1() { + return this.refKey1; + } + + public String getRefKey2() { + return this.refKey2; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + public void setRefKey1(String refKey1) { + this.refKey1 = refKey1; + } + + public void setRefKey2(String refKey2) { + this.refKey2 = refKey2; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObjectRefOnNonIdField that = (ObjectRefOnNonIdField) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value) && Objects.equals(refKey1, that.refKey1) + && Objects.equals(refKey2, that.refKey2); + } + + @Override + public int hashCode() { + return Objects.hash(id, value, refKey1, refKey2); + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.ObjectRefOnNonIdField(id=" + this.getId() + ", value=" + + this.getValue() + ", refKey1=" + this.getRefKey1() + ", refKey2=" + this.getRefKey2() + ")"; + } + } + + static class ObjectRefHavingCustomizedIdTargetType { + + @MongoId(targetType = FieldType.OBJECT_ID) 
String id; + String name; + + public ObjectRefHavingCustomizedIdTargetType(String id, String name) { + this.id = id; + this.name = name; + } + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObjectRefHavingCustomizedIdTargetType that = (ObjectRefHavingCustomizedIdTargetType) o; + return Objects.equals(id, that.id) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.ObjectRefHavingCustomizedIdTargetType(id=" + this.getId() + ", name=" + + this.getName() + ")"; + } + } + + static class ObjectRefHavingStringIdTargetType { + + @MongoId(targetType = FieldType.STRING) String id; + String name; + + public ObjectRefHavingStringIdTargetType(String id, String name) { + this.id = id; + this.name = name; + } + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObjectRefHavingStringIdTargetType that = (ObjectRefHavingStringIdTargetType) o; + return Objects.equals(id, that.id) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.ObjectRefHavingStringIdTargetType(id=" + this.getId() + ", name=" + + this.getName() + ")"; + } + } + + static class 
ReferencableConverter implements Converter> { + + @Nullable + @Override + public DocumentPointer convert(ReferenceAble source) { + return source::toReference; + } + } + + @WritingConverter + static class DocumentToSimpleObjectRefWithReadingConverter + implements Converter, SimpleObjectRefWithReadingConverter> { + + @Nullable + @Override + public SimpleObjectRefWithReadingConverter convert(DocumentPointer source) { + + Document document = client.getDatabase(DB_NAME).getCollection("simple-object-ref") + .find(Filters.eq("_id", source.getPointer().get("ref-key-from-custom-write-converter"))).first(); + return new SimpleObjectRefWithReadingConverter(document.getString("_id"), document.getString("value")); + } + } + + @WritingConverter + static class SimpleObjectRefWithReadingConverterToDocumentConverter + implements Converter> { + + @Nullable + @Override + public DocumentPointer convert(SimpleObjectRefWithReadingConverter source) { + return () -> new Document("ref-key-from-custom-write-converter", source.getId()); + } + } + + static class WithRefA/* to B */ implements ReferenceAble { + + @Id String id; + @DocumentReference // + WithRefB toB; + + @Override + public Object toReference() { + return id; + } + + public String getId() { + return this.id; + } + + public WithRefB getToB() { + return this.toB; + } + + public void setId(String id) { + this.id = id; + } + + public void setToB(WithRefB toB) { + this.toB = toB; + } + } + + static class WithRefB/* to A */ implements ReferenceAble { + + @Id String id; + @DocumentReference(lazy = true) // + WithRefA lazyToA; + + @DocumentReference // + WithRefA eagerToA; + + @Override + public Object toReference() { + return id; + } + + public String getId() { + return this.id; + } + + public WithRefA getLazyToA() { + return this.lazyToA; + } + + public WithRefA getEagerToA() { + return this.eagerToA; + } + + public void setId(String id) { + this.id = id; + } + + public void setLazyToA(WithRefA lazyToA) { + this.lazyToA = lazyToA; + } 
+ + public void setEagerToA(WithRefA eagerToA) { + this.eagerToA = eagerToA; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.WithRefB(id=" + this.getId() + ", lazyToA=" + this.getLazyToA() + + ", eagerToA=" + this.getEagerToA() + ")"; + } + } + + static class ReferencedObject {} + + class ToDocumentPointerConverter implements Converter> { + + @Nullable + @Override + public DocumentPointer convert(ReferencedObject source) { + return () -> new Document("", source); + } + } + + static class Book { + + String id; + + @DocumentReference(lookup = "{ 'acronym' : ?#{acc}, 'name' : ?#{n} }") // + Publisher publisher; + + public String getId() { + return this.id; + } + + public Publisher getPublisher() { + return this.publisher; + } + + public void setId(String id) { + this.id = id; + } + + public void setPublisher(Publisher publisher) { + this.publisher = publisher; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.Book(id=" + this.getId() + ", publisher=" + this.getPublisher() + ")"; + } + } + + static class Publisher { + + String id; + String acronym; + String name; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getAcronym() { + return acronym; + } + + public void setAcronym(String acronym) { + this.acronym = acronym; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + } + + static class UsingAtReference { + + String id; + + @Reference // + Publisher publisher; + + public String getId() { + return this.id; + } + + public Publisher getPublisher() { + return this.publisher; + } + + public void setId(String id) { + this.id = id; + } + + public void setPublisher(Publisher publisher) { + this.publisher = publisher; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.UsingAtReference(id=" + this.getId() + ", publisher=" + + this.getPublisher() + ")"; + 
} + } + + static class OneToManyStyleBook { + + @Id + String id; + + private String publisherId; + + public String getId() { + return this.id; + } + + public String getPublisherId() { + return this.publisherId; + } + + public void setId(String id) { + this.id = id; + } + + public void setPublisherId(String publisherId) { + this.publisherId = publisherId; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + OneToManyStyleBook that = (OneToManyStyleBook) o; + return Objects.equals(id, that.id) && Objects.equals(publisherId, that.publisherId); + } + + @Override + public int hashCode() { + return Objects.hash(id, publisherId); + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.OneToManyStyleBook(id=" + this.getId() + ", publisherId=" + + this.getPublisherId() + ")"; + } + } + + static class OneToManyStylePublisher { + + @Id + String id; + + @ReadOnlyProperty + @DocumentReference(lookup="{'publisherId':?#{#self._id} }") + List books; + + public String getId() { + return this.id; + } + + public List getBooks() { + return this.books; + } + + public void setId(String id) { + this.id = id; + } + + public void setBooks(List books) { + this.books = books; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.OneToManyStylePublisher(id=" + this.getId() + ", books=" + + this.getBooks() + ")"; + } + } + + static class WithRequiredArgsCtor { + + final String id; + + @DocumentReference final Publisher publisher; + + public WithRequiredArgsCtor(String id, Publisher publisher) { + + this.id = id; + this.publisher = publisher; + } + } + + static class WithLazyRequiredArgsCtor { + + final String id; + + @DocumentReference(lazy = true) final Publisher publisher; + + public WithLazyRequiredArgsCtor(String id, Publisher publisher) { + + this.id = id; + this.publisher = publisher; + } + + public String getId() { + return id; 
+ } + + public Publisher getPublisher() { + return publisher; + } + } + + public static class WithListOfRefs { + + @Id private String id; + + @DocumentReference private List refs; + + public String getId() { + return this.id; + } + + public List getRefs() { + return this.refs; + } + + public void setId(String id) { + this.id = id; + } + + public void setRefs(List refs) { + this.refs = refs; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.WithListOfRefs(id=" + this.getId() + ", refs=" + this.getRefs() + ")"; + } + } + + static class OneToManyStylePublisherWithRequiredArgsCtor { + + @Id + String id; + + @ReadOnlyProperty + @DocumentReference(lookup="{'publisherId':?#{#self._id} }") + List books; + + public OneToManyStylePublisherWithRequiredArgsCtor(String id, List books) { + this.id = id; + this.books = books; + } + + public String getId() { + return this.id; + } + + public List getBooks() { + return this.books; + } + + public void setId(String id) { + this.id = id; + } + + public void setBooks(List books) { + this.books = books; + } + + public String toString() { + return "MongoTemplateDocumentReferenceTests.OneToManyStylePublisherWithRequiredArgsCtor(id=" + this.getId() + ", book=" + + this.getBooks() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateFieldProjectionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateFieldProjectionTests.java new file mode 100644 index 0000000000..1cbb5ab519 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateFieldProjectionTests.java @@ -0,0 +1,268 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Objects; +import java.util.function.Consumer; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.MongoExpression; +import org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression; +import org.springframework.data.mongodb.core.aggregation.StringOperators; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.Unwrapped; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +/** + * Integration tests for {@link org.springframework.data.mongodb.core.query.Field}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Giacomo Baso + */ +@ExtendWith(MongoTemplateExtension.class) +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.4") +class MongoTemplateFieldProjectionTests { + + private static @Template MongoTestTemplate template; + + private Person luke; + + @BeforeEach + void beforeEach() { + + luke = new Person(); + luke.id = "luke"; + luke.firstname = "luke"; + luke.lastname = "skywalker"; + + template.save(luke); + } + + @AfterEach + void afterEach() { + template.flush(Person.class, Wrapper.class); + } + + @Test // GH-3583 + void usesMongoExpressionAsIs() { + + Person result = findLuke(fields -> { + fields.include("firstname").project(MongoExpression.create("'$toUpper' : '$last_name'")) + .as("last_name"); + }); + + assertThat(result).isEqualTo(luke.upperCaseLastnameClone()); + } + + @Test // GH-3583 + void usesMongoExpressionWithPlaceholdersAsIs() { + + Person result = findLuke(fields -> { + fields.include("firstname").project(MongoExpression.create("'$toUpper' : '$?0'", "last_name")) + .as("last_name"); + }); + + assertThat(result).isEqualTo(luke.upperCaseLastnameClone()); + } + + @Test // GH-4821 + void usesMongoExpressionWithLineBreaksAsIs() { + + Person result = findLuke(fields -> { + fields.include("firstname").project(MongoExpression.create(""" + { + '$toUpper' : '$last_name' + } + """)) + .as("last_name"); + }); + + assertThat(result).isEqualTo(luke.upperCaseLastnameClone()); + } + + @Test // GH-3583 + void mapsAggregationExpressionToDomainType() { + + Person result = findLuke(fields -> { + fields.include("firstname").project(StringOperators.valueOf("lastname").toUpper()).as("last_name"); + }); + + assertThat(result).isEqualTo(luke.upperCaseLastnameClone()); + } + + @Test // GH-3583 + void mapsAggregationSpELExpressionToDomainType() { + + Person result = findLuke(fields -> { + fields.include("firstname").project(AggregationSpELExpression.expressionOf("toUpper(lastname)")).as("last_name"); + }); + 
+ assertThat(result).isEqualTo(luke.upperCaseLastnameClone()); + } + + @Test // GH-3583 + void mapsNestedPathAggregationExpressionToDomainType() { + + Wrapper wrapper = new Wrapper(); + wrapper.id = "wrapper"; + wrapper.person = luke; + + template.save(wrapper); + + Query query = Query.query(Criteria.where("id").is(wrapper.id)); + query.fields().include("person.firstname", "person.id") + .project(StringOperators.valueOf("person.lastname").toUpper()).as("person.last_name"); + + Wrapper result = template.findOne(query, Wrapper.class); + assertThat(result.person).isEqualTo(luke.upperCaseLastnameClone()); + } + + @Test // GH-3583 + void mapsProjectionOnUnwrapped() { + + luke.address = new Address(); + luke.address.planet = "tatoine"; + + template.save(luke); + + Person result = findLuke(fields -> { + fields.project(StringOperators.valueOf("address.planet").toUpper()).as("planet"); + }); + + assertThat(result.address.planet).isEqualTo("TATOINE"); + } + + private Person findLuke(Consumer projection) { + + Query query = Query.query(Criteria.where("id").is(luke.id)); + projection.accept(query.fields()); + return template.findOne(query, Person.class); + } + + static class Wrapper { + + @Id String id; + Person person; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Wrapper wrapper = (Wrapper) o; + return Objects.equals(id, wrapper.id) && Objects.equals(person, wrapper.person); + } + + @Override + public int hashCode() { + return Objects.hash(id, person); + } + + public String toString() { + return "MongoTemplateFieldProjectionTests.Wrapper(id=" + this.id + ", person=" + this.person + ")"; + } + } + + static class Person { + + @Id String id; + String firstname; + + @Field("last_name") // + String lastname; + + @Unwrapped.Nullable Address address; + + Person toUpperCaseLastnameClone(Person source) { + + Person target = new Person(); + target.id = source.id; + 
target.firstname = source.firstname; + target.lastname = source.lastname.toUpperCase(); + target.address = source.address; + + return target; + } + + Person upperCaseLastnameClone() { + return toUpperCaseLastnameClone(this); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(lastname, person.lastname) && Objects.equals(address, person.address); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname, address); + } + + public String toString() { + return "MongoTemplateFieldProjectionTests.Person(id=" + this.id + ", firstname=" + this.firstname + ", lastname=" + + this.lastname + ", address=" + this.address + ")"; + } + } + + static class Address { + + String planet; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Address address = (Address) o; + return Objects.equals(planet, address.planet); + } + + @Override + public int hashCode() { + return Objects.hash(planet); + } + + public String toString() { + return "MongoTemplateFieldProjectionTests.Address(planet=" + this.planet + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateMappingTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateMappingTests.java index fcb6948e19..75fbbd516a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateMappingTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateMappingTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. 
+ * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,21 +16,29 @@ package org.springframework.data.mongodb.core; import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.Arrays; +import java.util.Objects; import org.bson.Document; import org.junit.Before; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.dao.DataAccessException; +import org.springframework.data.annotation.Id; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; import com.mongodb.MongoException; import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.geojson.Geometry; +import com.mongodb.client.model.geojson.MultiPolygon; +import com.mongodb.client.model.geojson.PolygonCoordinates; +import com.mongodb.client.model.geojson.Position; /** * Integration test for {@link MongoTemplate}. 
@@ -39,7 +47,7 @@ * @author Thomas Risberg * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("classpath:template-mapping.xml") public class MongoTemplateMappingTests { @@ -47,8 +55,6 @@ public class MongoTemplateMappingTests { @Autowired @Qualifier("mongoTemplate2") MongoTemplate template2; - @Rule public ExpectedException thrown = ExpectedException.none(); - @Before public void setUp() { template1.dropCollection(template1.getCollectionName(Person.class)); @@ -68,6 +74,122 @@ public void insertsEntityCorrectly2() { checkPersonPersisted(template2); } + @Test // DATAMONGO-2357 + public void writesAndReadsEntityWithNativeMongoGeoJsonTypesCorrectly() { + + WithMongoGeoJson source = new WithMongoGeoJson(); + source.id = "id-2"; + source.multiPolygon = new MultiPolygon(Arrays.asList(new PolygonCoordinates(Arrays.asList(new Position(0, 0), + new Position(0, 1), new Position(1, 1), new Position(1, 0), new Position(0, 0))))); + + template1.save(source); + + assertThat(template1.findOne(query(where("id").is(source.id)), WithMongoGeoJson.class)).isEqualTo(source); + } + + @Test // DATAMONGO-2357 + public void writesAndReadsEntityWithOpenNativeMongoGeoJsonTypesCorrectly() { + + WithOpenMongoGeoJson source = new WithOpenMongoGeoJson(); + source.id = "id-2"; + source.geometry = new MultiPolygon(Arrays.asList(new PolygonCoordinates(Arrays.asList(new Position(0, 0), + new Position(0, 1), new Position(1, 1), new Position(1, 0), new Position(0, 0))))); + + template1.save(source); + + assertThat(template1.findOne(query(where("id").is(source.id)), WithOpenMongoGeoJson.class)).isEqualTo(source); + } + + static class WithMongoGeoJson { + + @Id String id; + MultiPolygon multiPolygon; + + public String getId() { + return this.id; + } + + public MultiPolygon getMultiPolygon() { + return this.multiPolygon; + } + + public void setId(String id) { + this.id = id; + } + + public void setMultiPolygon(MultiPolygon multiPolygon) { 
+ this.multiPolygon = multiPolygon; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithMongoGeoJson that = (WithMongoGeoJson) o; + return Objects.equals(id, that.id) && Objects.equals(multiPolygon, that.multiPolygon); + } + + @Override + public int hashCode() { + return Objects.hash(id, multiPolygon); + } + + public String toString() { + return "MongoTemplateMappingTests.WithMongoGeoJson(id=" + this.getId() + ", multiPolygon=" + + this.getMultiPolygon() + ")"; + } + } + + static class WithOpenMongoGeoJson { + + @Id String id; + Geometry geometry; + + public WithOpenMongoGeoJson() {} + + public String getId() { + return this.id; + } + + public Geometry getGeometry() { + return this.geometry; + } + + public void setId(String id) { + this.id = id; + } + + public void setGeometry(Geometry geometry) { + this.geometry = geometry; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithOpenMongoGeoJson that = (WithOpenMongoGeoJson) o; + return Objects.equals(id, that.id) && Objects.equals(geometry, that.geometry); + } + + @Override + public int hashCode() { + return Objects.hash(id, geometry); + } + + public String toString() { + return "MongoTemplateMappingTests.WithOpenMongoGeoJson(id=" + this.getId() + ", geometry=" + this.getGeometry() + + ")"; + } + } + private void addAndRetrievePerson(MongoTemplate template) { Person person = new Person("Oliver"); person.setAge(25); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateReplaceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateReplaceTests.java new file mode 100644 index 0000000000..6b8e158e55 --- /dev/null +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateReplaceTests.java @@ -0,0 +1,297 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.ReplaceOptions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.List; +import java.util.Objects; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.bson.BsonInt64; +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import 
org.springframework.data.mongodb.test.util.MongoClientExtension; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.Filters; +import com.mongodb.client.result.UpdateResult; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +public class MongoTemplateReplaceTests { + + static final String DB_NAME = "mongo-template-replace-tests"; + static final String RESTAURANT_COLLECTION = "restaurant"; + + static @Client MongoClient client; + private MongoTemplate template; + + @BeforeEach + void beforeEach() { + + template = new MongoTemplate(client, DB_NAME); + template.setEntityLifecycleEventsEnabled(false); + + initTestData(); + } + + @AfterEach() + void afterEach() { + clearTestData(); + } + + @Test // GH-4462 + void replacesExistingDocument() { + + UpdateResult result = template.replace(query(where("name").is("Central Perk Cafe")), + new Restaurant("Central Pork Cafe", "Manhattan")); + + assertThat(result.getMatchedCount()).isEqualTo(1); + assertThat(result.getModifiedCount()).isEqualTo(1); + + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()); + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + } + + @Test // GH-4462 + void replacesFirstOnMoreThanOneMatch() { + + UpdateResult result = template + .replace(query(where("violations").exists(true)), new Restaurant("Central Pork Cafe", "Manhattan")); + + assertThat(result.getMatchedCount()).isEqualTo(1); + assertThat(result.getModifiedCount()).isEqualTo(1); + + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 2)).first()); + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + } + + @Test // GH-4462 + void replacesExistingDocumentWithRawDoc() { + + UpdateResult result = template.replace(query(where("r-name").is("Central Perk Cafe")), + Document.parse("{ 'r-name' : 'Central Pork Cafe', 'Borough' : 'Manhattan' }"), + 
template.getCollectionName(Restaurant.class)); + + assertThat(result.getMatchedCount()).isEqualTo(1); + assertThat(result.getModifiedCount()).isEqualTo(1); + + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()); + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + } + + @Test // GH-4462 + void replacesExistingDocumentWithRawDocMappingQueryAgainstDomainType() { + + UpdateResult result = template.replace(query(where("name").is("Central Perk Cafe")), Restaurant.class, + Document.parse("{ 'r-name' : 'Central Pork Cafe', 'Borough' : 'Manhattan' }"), ReplaceOptions.none(), template.getCollectionName(Restaurant.class)); + + assertThat(result.getMatchedCount()).isEqualTo(1); + assertThat(result.getModifiedCount()).isEqualTo(1); + + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()); + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + } + + @Test // GH-4462 + void replacesExistingDocumentWithMatchingId() { + + UpdateResult result = template.replace(query(where("name").is("Central Perk Cafe")), + new Restaurant(1L, "Central Pork Cafe", "Manhattan", 0)); + + assertThat(result.getMatchedCount()).isEqualTo(1); + assertThat(result.getModifiedCount()).isEqualTo(1); + + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()); + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + } + + @Test // GH-4462 + void replacesExistingDocumentWithNewIdThrowsDataIntegrityViolationException() { + + assertThatExceptionOfType(DataIntegrityViolationException.class) + .isThrownBy(() -> template.replace(query(where("name").is("Central Perk Cafe")), + new Restaurant(4L, "Central Pork Cafe", "Manhattan", 0))); + } + + @Test // GH-4462 + void doesNothingIfNoMatchFoundAndUpsertSetToFalse/* by default */() { + + UpdateResult result = template.replace(query(where("name").is("Pizza Rat's Pizzaria")), + new Restaurant(null, "Pizza Rat's 
Pizzaria", "Manhattan", 8)); + + assertThat(result.getMatchedCount()).isEqualTo(0); + assertThat(result.getModifiedCount()).isEqualTo(0); + + Document document = retrieve(collection -> collection.find(Filters.eq("r-name", "Pizza Rat's Pizzaria")).first()); + assertThat(document).isNull(); + } + + @Test // GH-4462 + void insertsIfNoMatchFoundAndUpsertSetToTrue() { + + UpdateResult result = template.replace(query(where("name").is("Pizza Rat's Pizzaria")), + new Restaurant(4L, "Pizza Rat's Pizzaria", "Manhattan", 8), replaceOptions().upsert()); + + assertThat(result.getMatchedCount()).isEqualTo(0); + assertThat(result.getModifiedCount()).isEqualTo(0); + assertThat(result.getUpsertedId()).isEqualTo(new BsonInt64(4L)); + + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 4)).first()); + assertThat(document).containsEntry("r-name", "Pizza Rat's Pizzaria"); + } + + @Test // GH-4797 + @EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") + void replaceConsidersSort() { + + UpdateResult result = template.replace(new Query().with(Sort.by(Direction.DESC, "name")), new Restaurant("resist", "Manhattan")); + + assertThat(result.getModifiedCount()).isOne(); + Document document = retrieve(collection -> collection.find(Filters.eq("_id", 2)).first()); + assertThat(document).containsEntry("r-name", "resist"); + } + + void initTestData() { + + List testData = Stream.of( // + "{ '_id' : 1, 'r-name' : 'Central Perk Cafe', 'Borough' : 'Manhattan' }", + "{ '_id' : 2, 'r-name' : 'Rock A Feller Bar and Grill', 'Borough' : 'Queens', 'violations' : 2 }", + "{ '_id' : 3, 'r-name' : 'Empire State Pub', 'Borough' : 'Brooklyn', 'violations' : 0 }") // + .map(Document::parse).collect(Collectors.toList()); + + doInCollection(collection -> collection.insertMany(testData)); + } + + void clearTestData() { + doInCollection(collection -> collection.deleteMany(new Document())); + } + + void doInCollection(Consumer> consumer) { + retrieve(collection -> { + 
consumer.accept(collection); + return "done"; + }); + } + + T retrieve(Function, T> fkt) { + return fkt.apply(client.getDatabase(DB_NAME).getCollection(RESTAURANT_COLLECTION)); + } + + @org.springframework.data.mongodb.core.mapping.Document(RESTAURANT_COLLECTION) + static class Restaurant { + + Long id; + + @Field("r-name") String name; + String borough; + Integer violations; + + Restaurant() {} + + Restaurant(String name, String borough) { + + this.name = name; + this.borough = borough; + } + + Restaurant(Long id, String name, String borough, Integer violations) { + + this.id = id; + this.name = name; + this.borough = borough; + this.violations = violations; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getRName() { + return name; + } + + public void setRName(String rName) { + this.name = rName; + } + + public String getBorough() { + return borough; + } + + public void setBorough(String borough) { + this.borough = borough; + } + + public int getViolations() { + return violations; + } + + public void setViolations(int violations) { + this.violations = violations; + } + + @Override + public String toString() { + return "Restaurant{" + "id=" + id + ", name='" + name + '\'' + ", borough='" + borough + '\'' + ", violations=" + + violations + '}'; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Restaurant that = (Restaurant) o; + return violations == that.violations && Objects.equals(id, that.id) && Objects.equals(name, that.name) + && Objects.equals(borough, that.borough); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, borough, violations); + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateScrollTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateScrollTests.java new file mode 100644 index 0000000000..766929c732 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateScrollTests.java @@ -0,0 +1,582 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import java.lang.reflect.Proxy; +import java.util.Arrays; +import java.util.Comparator; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Stream; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.support.GenericApplicationContext; +import org.springframework.data.annotation.PersistenceCreator; +import org.springframework.data.auditing.IsNewAwareAuditingHandler; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; 
+import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; +import org.springframework.data.mapping.context.PersistentEntities; +import org.springframework.data.mongodb.core.MongoTemplateTests.PersonWithIdPropertyOfTypeUUIDListener; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +import com.mongodb.client.MongoClient; + +/** + * Integration tests for {@link org.springframework.data.domain.Window} queries. + * + * @author Mark Paluch + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +class MongoTemplateScrollTests { + + static @Client MongoClient client; + + public static final String DB_NAME = "mongo-template-scroll-tests"; + + ConfigurableApplicationContext context = new GenericApplicationContext(); + + MongoTestTemplate template = new MongoTestTemplate(cfg -> { + + cfg.configureDatabaseFactory(it -> { + + it.client(client); + it.defaultDb(DB_NAME); + }); + + cfg.configureMappingContext(it -> { + it.autocreateIndex(false); + }); + + cfg.configureApplicationContext(it -> { + it.applicationContext(context); + it.addEventListener(new PersonWithIdPropertyOfTypeUUIDListener()); + }); + + cfg.configureAuditing(it -> { + it.auditingHandler(ctx -> { + return new IsNewAwareAuditingHandler(PersistentEntities.of(ctx)); + }); + }); + }); + + private static int compareProxies(PersonInterfaceProjection actual, PersonInterfaceProjection expected) { + if (actual.getAge() != expected.getAge()) { + return -1; + } + if (!ObjectUtils.nullSafeEquals(actual.getFirstName(), expected.getFirstName())) { + return -1; + } + + return 0; + } + + @BeforeEach + void setUp() { 
+ template.remove(Person.class).all(); + template.remove(WithNestedDocument.class).all(); + template.remove(WithRenamedField.class).all(); + } + + @Test // GH-4308 + void shouldUseKeysetScrollingWithNestedSort() { + + WithNestedDocument john20 = new WithNestedDocument(null, "John", 120, new WithNestedDocument("John", 20), + new Document("name", "bar")); + WithNestedDocument john40 = new WithNestedDocument(null, "John", 140, new WithNestedDocument("John", 40), + new Document("name", "baz")); + WithNestedDocument john41 = new WithNestedDocument(null, "John", 141, new WithNestedDocument("John", 41), + new Document("name", "foo")); + + template.insertAll(Arrays.asList(john20, john40, john41)); + + Query q = new Query(where("name").regex("J.*")).with(Sort.by("nested.name", "nested.age", "document.name")) + .limit(2); + q.with(ScrollPosition.keyset()); + + Window window = template.scroll(q, WithNestedDocument.class); + + assertThat(window.hasNext()).isTrue(); + assertThat(window.isLast()).isFalse(); + assertThat(window).hasSize(2); + assertThat(window).containsOnly(john20, john40); + + window = template.scroll(q.with(window.positionAt(window.size() - 1)), WithNestedDocument.class); + + assertThat(window.hasNext()).isFalse(); + assertThat(window.isLast()).isTrue(); + assertThat(window).hasSize(1); + assertThat(window).containsOnly(john41); + } + + @Test // GH-4308 + void shouldErrorOnNullValueForQuery() { + + WithNestedDocument john20 = new WithNestedDocument(null, "John", 120, new WithNestedDocument("John", 20), + new Document("name", "bar")); + WithNestedDocument john40 = new WithNestedDocument(null, "John", 140, new WithNestedDocument("John", 41), + new Document()); + WithNestedDocument john41 = new WithNestedDocument(null, "John", 140, new WithNestedDocument("John", 41), + new Document()); + WithNestedDocument john42 = new WithNestedDocument(null, "John", 140, new WithNestedDocument("John", 41), + new Document()); + WithNestedDocument john43 = new 
WithNestedDocument(null, "John", 140, new WithNestedDocument("John", 41), + new Document()); + WithNestedDocument john44 = new WithNestedDocument(null, "John", 141, new WithNestedDocument("John", 41), + new Document("name", "foo")); + + template.insertAll(Arrays.asList(john20, john40, john41, john42, john43, john44)); + } + + @Test // GH-4308 + void shouldAllowReverseSort() { + + Person jane_20 = new Person("Jane", 20); + Person jane_40 = new Person("Jane", 40); + Person jane_42 = new Person("Jane", 42); + Person john20 = new Person("John", 20); + Person john40_1 = new Person("John", 40); + Person john40_2 = new Person("John", 40); + + template.insertAll(Arrays.asList(john20, john40_1, john40_2, jane_20, jane_40, jane_42)); + Query q = new Query(where("firstName").regex("J.*")).with(Sort.by("firstName", "age")); + q.with(ScrollPosition.keyset()).limit(6); + + Window window = template.scroll(q, Person.class); + + assertThat(window.hasNext()).isFalse(); + assertThat(window.isLast()).isTrue(); + assertThat(window).hasSize(6); + + KeysetScrollPosition scrollPosition = (KeysetScrollPosition) window.positionAt(window.size() - 2); + window = template.scroll(q.with(scrollPosition.backward()).limit(2), Person.class); + + assertThat(window).hasSize(2); + assertThat(window).containsOnly(jane_42, john20); + assertThat(window.hasNext()).isTrue(); + assertThat(window.isLast()).isFalse(); + + window = template.scroll(q.with(window.positionAt(0)).limit(2), Person.class); + + assertThat(window).hasSize(2); + assertThat(window).containsOnly(jane_20, jane_40); + assertThat(window.hasNext()).isFalse(); + assertThat(window.isLast()).isTrue(); + } + + @Test // GH-4413 + void shouldAllowInitialBackwardSort() { + + Person jane_20 = new Person("Jane", 20); + Person jane_40 = new Person("Jane", 40); + Person jane_42 = new Person("Jane", 42); + Person john20 = new Person("John", 20); + Person john40_1 = new Person("John", 40); + Person john40_2 = new Person("John", 40); + + 
template.insertAll(Arrays.asList(john20, john40_1, john40_2, jane_20, jane_40, jane_42)); + Query q = new Query(where("firstName").regex("J.*")).with(Sort.by("firstName", "age")); + q.with(ScrollPosition.keyset().backward()).limit(3); + + Window window = template.scroll(q, Person.class); + assertThat(window).containsExactly(john20, john40_1, john40_2); + + window = template.scroll(q.with(window.positionAt(0)).limit(3), Person.class); + assertThat(window).containsExactly(jane_20, jane_40, jane_42); + } + + @ParameterizedTest // GH-4308 + @MethodSource("positions") + public void shouldApplyCursoringCorrectly(ScrollPosition scrollPosition, Class resultType, + Function assertionConverter, @Nullable Comparator comparator) { + + Person john20 = new Person("John", 20); + Person john40_1 = new Person("John", 40); + Person john40_2 = new Person("John", 40); + Person jane_20 = new Person("Jane", 20); + Person jane_40 = new Person("Jane", 40); + Person jane_42 = new Person("Jane", 42); + + template.insertAll(Arrays.asList(john20, john40_1, john40_2, jane_20, jane_40, jane_42)); + Query q = new Query(where("firstName").regex("J.*")).with(Sort.by("firstName", "age")).limit(2); + + Window window = template.query(Person.class).inCollection("person").as(resultType).matching(q) + .scroll(scrollPosition); + + assertThat(window.hasNext()).isTrue(); + assertThat(window.isLast()).isFalse(); + assertThat(window).hasSize(2); + assertWindow(window, comparator).containsOnly(assertionConverter.apply(jane_20), assertionConverter.apply(jane_40)); + + window = template.query(Person.class).inCollection("person").as(resultType).matching(q.limit(3)) + .scroll(window.positionAt(window.size() - 1)); + + assertThat(window.hasNext()).isTrue(); + assertThat(window.isLast()).isFalse(); + assertThat(window).hasSize(3); + assertWindow(window, comparator).contains(assertionConverter.apply(jane_42), assertionConverter.apply(john20)); + assertWindow(window, 
comparator).containsAnyOf(assertionConverter.apply(john40_1), + assertionConverter.apply(john40_2)); + + window = template.query(Person.class).inCollection("person").as(resultType).matching(q.limit(1)) + .scroll(window.positionAt(window.size() - 1)); + + assertThat(window.hasNext()).isFalse(); + assertThat(window.isLast()).isTrue(); + assertThat(window).hasSize(1); + assertWindow(window, comparator).containsAnyOf(assertionConverter.apply(john40_1), + assertionConverter.apply(john40_2)); + } + + @ParameterizedTest // GH-4308 + @MethodSource("renamedFieldProjectTargets") + void scrollThroughResultsWithRenamedField(Class resultType, Function assertionConverter) { + + WithRenamedField one = new WithRenamedField("id-1", "v1", null); + WithRenamedField two = new WithRenamedField("id-2", "v2", null); + WithRenamedField three = new WithRenamedField("id-3", "v3", null); + + template.insertAll(Arrays.asList(one, two, three)); + + Query q = new Query(where("value").regex("v.*")).with(Sort.by(Sort.Direction.DESC, "value")).limit(2); + q.with(ScrollPosition.keyset()); + + Window window = template.query(WithRenamedField.class).as(resultType).matching(q) + .scroll(ScrollPosition.keyset()); + + assertThat(window.hasNext()).isTrue(); + assertThat(window.isLast()).isFalse(); + assertThat(window).hasSize(2); + assertThat(window).containsOnly(assertionConverter.apply(three), assertionConverter.apply(two)); + + window = template.query(WithRenamedField.class).as(resultType).matching(q) + .scroll(window.positionAt(window.size() - 1)); + + assertThat(window.hasNext()).isFalse(); + assertThat(window.isLast()).isTrue(); + assertThat(window).hasSize(1); + assertThat(window).containsOnly(assertionConverter.apply(one)); + } + + static Stream positions() { + + return Stream.of(args(ScrollPosition.keyset(), Person.class, Function.identity()), // + args(ScrollPosition.keyset(), Document.class, MongoTemplateScrollTests::toDocument), // + args(ScrollPosition.offset(), Person.class, 
Function.identity()), // + args(ScrollPosition.offset(), PersonDtoProjection.class, MongoTemplateScrollTests::toPersonDtoProjection), // + args(ScrollPosition.offset(), PersonInterfaceProjection.class, + MongoTemplateScrollTests::toPersonInterfaceProjection, MongoTemplateScrollTests::compareProxies)); + } + + static Stream renamedFieldProjectTargets() { + return Stream.of(Arguments.of(WithRenamedField.class, Function.identity()), + Arguments.of(Document.class, new Function() { + @Override + public Document apply(WithRenamedField withRenamedField) { + return new Document("_id", withRenamedField.getId()).append("_val", withRenamedField.getValue()) + .append("_class", WithRenamedField.class.getName()); + } + })); + } + + static org.assertj.core.api.IterableAssert assertWindow(Window window, @Nullable Comparator comparator) { + return comparator != null ? assertThat(window).usingElementComparator(comparator) : assertThat(window); + } + + private static Arguments args(ScrollPosition scrollPosition, Class resultType, + Function assertionConverter) { + return args(scrollPosition, resultType, assertionConverter, null); + } + + private static Arguments args(ScrollPosition scrollPosition, Class resultType, + Function assertionConverter, @Nullable Comparator comparator) { + return Arguments.of(scrollPosition, resultType, assertionConverter, comparator); + } + + static Document toDocument(Person person) { + + return new Document("_class", person.getClass().getName()).append("_id", person.getId()).append("active", true) + .append("firstName", person.getFirstName()).append("age", person.getAge()); + } + + static PersonDtoProjection toPersonDtoProjection(Person person) { + + PersonDtoProjection dto = new PersonDtoProjection(); + dto.firstName = person.getFirstName(); + dto.age = person.getAge(); + return dto; + } + + static PersonInterfaceProjection toPersonInterfaceProjection(Person person) { + + return new PersonInterfaceProjectionImpl(person); + } + + static class 
PersonDtoProjection { + + String firstName; + int age; + + public String getFirstName() { + return this.firstName; + } + + public int getAge() { + return this.age; + } + + public void setFirstName(String firstName) { + this.firstName = firstName; + } + + public void setAge(int age) { + this.age = age; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + PersonDtoProjection that = (PersonDtoProjection) o; + return age == that.age && Objects.equals(firstName, that.firstName); + } + + @Override + public int hashCode() { + return Objects.hash(firstName, age); + } + + public String toString() { + return "MongoTemplateScrollTests.PersonDtoProjection(firstName=" + this.getFirstName() + ", age=" + this.getAge() + + ")"; + } + } + + interface PersonInterfaceProjection { + String getFirstName(); + + int getAge(); + } + + static class PersonInterfaceProjectionImpl implements PersonInterfaceProjection { + + final Person delegate; + + public PersonInterfaceProjectionImpl(Person delegate) { + this.delegate = delegate; + } + + @Override + public String getFirstName() { + return delegate.getFirstName(); + } + + @Override + public int getAge() { + return delegate.getAge(); + } + + @Override + public boolean equals(Object o) { + if (o instanceof Proxy) { + return true; + } + return false; + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(delegate); + } + } + + static class WithRenamedField { + + String id; + + @Field("_val") String value; + + WithRenamedField nested; + + public WithRenamedField(String id, String value, WithRenamedField nested) { + this.id = id; + this.value = value; + this.nested = nested; + } + + public WithRenamedField() {} + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public WithRenamedField getNested() { + return this.nested; + } + + public void setId(String 
id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + public void setNested(WithRenamedField nested) { + this.nested = nested; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithRenamedField that = (WithRenamedField) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value) && Objects.equals(nested, that.nested); + } + + @Override + public int hashCode() { + return Objects.hash(id, value, nested); + } + + public String toString() { + return "MongoTemplateScrollTests.WithRenamedField(id=" + this.getId() + ", value=" + this.getValue() + ", nested=" + + this.getNested() + ")"; + } + } + + class WithNestedDocument { + + String id; + + String name; + + int age; + + WithNestedDocument nested; + + Document document; + + public WithNestedDocument(String name, int age) { + this.name = name; + this.age = age; + } + + @PersistenceCreator + public WithNestedDocument(String id, String name, int age, WithNestedDocument nested, Document document) { + this.id = id; + this.name = name; + this.age = age; + this.nested = nested; + this.document = document; + } + + public WithNestedDocument() {} + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public int getAge() { + return this.age; + } + + public WithNestedDocument getNested() { + return this.nested; + } + + public Document getDocument() { + return this.document; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setAge(int age) { + this.age = age; + } + + public void setNested(WithNestedDocument nested) { + this.nested = nested; + } + + public void setDocument(Document document) { + this.document = document; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null 
|| getClass() != o.getClass()) { + return false; + } + WithNestedDocument that = (WithNestedDocument) o; + return age == that.age && Objects.equals(id, that.id) && Objects.equals(name, that.name) + && Objects.equals(nested, that.nested) && Objects.equals(document, that.document); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, age, nested, document); + } + + public String toString() { + return "MongoTemplateScrollTests.WithNestedDocument(id=" + this.getId() + ", name=" + this.getName() + ", age=" + + this.getAge() + ", nested=" + this.getNested() + ", document=" + this.getDocument() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java index 9aa7990b5f..6aaec4011e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,92 +15,101 @@ */ package org.springframework.data.mongodb.core; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.fail; -import static org.hamcrest.Matchers.*; -import static org.hamcrest.Matchers.not; -import static org.junit.Assert.*; -import static org.junit.Assert.assertThat; -import static org.junit.Assume.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; import static org.springframework.data.mongodb.core.query.Update.*; -import lombok.Data; -import lombok.EqualsAndHashCode; -import lombok.NoArgsConstructor; - import java.lang.reflect.InvocationTargetException; import java.math.BigDecimal; import java.math.BigInteger; +import java.time.Duration; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.temporal.ChronoUnit; import java.util.*; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import java.util.stream.IntStream; +import java.util.stream.Stream; +import org.bson.Document; import org.bson.types.ObjectId; -import org.hamcrest.collection.IsMapContaining; -import org.joda.time.DateTime; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import 
org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.support.GenericApplicationContext; import org.springframework.core.convert.converter.Converter; import org.springframework.dao.DataAccessException; import org.springframework.dao.DataIntegrityViolationException; import org.springframework.dao.DuplicateKeyException; +import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.LastModifiedDate; import org.springframework.data.annotation.PersistenceConstructor; import org.springframework.data.annotation.Version; -import org.springframework.data.convert.CustomConversions; +import org.springframework.data.auditing.IsNewAwareAuditingHandler; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.context.PersistentEntities; import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.ComparisonOperators; +import org.springframework.data.mongodb.core.aggregation.ObjectOperators; +import org.springframework.data.mongodb.core.aggregation.ReplaceWithOperation; +import org.springframework.data.mongodb.core.aggregation.StringOperators; import org.springframework.data.mongodb.core.convert.LazyLoadingProxy; 
import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.index.Index; import org.springframework.data.mongodb.core.index.IndexField; import org.springframework.data.mongodb.core.index.IndexInfo; import org.springframework.data.mongodb.core.mapping.Field; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.FieldName.Type; +import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.data.mongodb.test.util.MongoVersion; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.mongodb.util.MongoClientVersion; -import 
org.springframework.data.util.CloseableIterator; +import org.springframework.lang.Nullable; import org.springframework.test.annotation.DirtiesContext; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.util.ClassUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; +import com.mongodb.BasicDBObject; +import com.mongodb.DBObject; import com.mongodb.DBRef; -import com.mongodb.Mongo; import com.mongodb.MongoException; import com.mongodb.ReadPreference; import com.mongodb.WriteConcern; import com.mongodb.client.FindIterable; import com.mongodb.client.ListIndexesIterable; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoCursor; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.IndexOptions; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.UpdateResult; @@ -117,103 +126,76 @@ * @author Mark Paluch * @author Laszlo Csontos * @author duozhilin + * @author Jakub Zurawa + * @author Florian Lüdiger */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") +@ExtendWith(MongoClientExtension.class) public class MongoTemplateTests { - @Autowired MongoTemplate template; - @Autowired MongoDbFactory factory; + public static final String DB_NAME = "mongo-template-tests"; - ConfigurableApplicationContext context; - MongoTemplate mappingTemplate; + static @Client MongoClient client; - @Rule public ExpectedException thrown = ExpectedException.none(); - @Rule public MongoVersionRule mongoVersion = MongoVersionRule.any(); + ConfigurableApplicationContext context = new GenericApplicationContext(); - @Autowired - public void setApplicationContext(ConfigurableApplicationContext context) { + MongoTestTemplate template = new MongoTestTemplate(cfg -> { - this.context = context; + 
cfg.configureDatabaseFactory(it -> { - context.addApplicationListener(new PersonWithIdPropertyOfTypeUUIDListener()); - } + it.client(client); + it.defaultDb(DB_NAME); + }); + + cfg.configureMappingContext(it -> { + it.autocreateIndex(false); + it.initialEntitySet(AuditablePerson.class); + }); - @Autowired - public void setMongo(Mongo mongo) throws Exception { + cfg.configureApplicationContext(it -> { + it.applicationContext(context); + it.addEventListener(new PersonWithIdPropertyOfTypeUUIDListener()); + }); - CustomConversions conversions = new MongoCustomConversions( - Arrays.asList(DateToDateTimeConverter.INSTANCE, DateTimeToDateConverter.INSTANCE)); + cfg.configureAuditing(it -> { + it.auditingHandler(ctx -> { + return new IsNewAwareAuditingHandler(PersistentEntities.of(ctx)); + }); + }); + }); - MongoMappingContext mappingContext = new MongoMappingContext(); - mappingContext.setInitialEntitySet(new HashSet>( - Arrays.asList(PersonWith_idPropertyOfTypeObjectId.class, PersonWith_idPropertyOfTypeString.class, - PersonWithIdPropertyOfTypeObjectId.class, PersonWithIdPropertyOfTypeString.class, - PersonWithIdPropertyOfTypeInteger.class, PersonWithIdPropertyOfTypeBigInteger.class, - PersonWithIdPropertyOfPrimitiveInt.class, PersonWithIdPropertyOfTypeLong.class, - PersonWithIdPropertyOfPrimitiveLong.class, PersonWithIdPropertyOfTypeUUID.class))); - mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); - mappingContext.initialize(); + MongoTestTemplate mappingTemplate = new MongoTestTemplate(cfg -> { - DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory); - MappingMongoConverter mappingConverter = new MappingMongoConverter(dbRefResolver, mappingContext); - mappingConverter.setCustomConversions(conversions); - mappingConverter.afterPropertiesSet(); + cfg.configureDatabaseFactory(it -> { - this.mappingTemplate = new MongoTemplate(factory, mappingConverter); - } + it.client(client); + it.defaultDb(DB_NAME); + }); - @Before - public void setUp() 
{ + cfg.configureConversion(it -> { + it.customConverters(DateToDateTimeConverter.INSTANCE, DateTimeToDateConverter.INSTANCE); + }); - cleanDb(); + cfg.configureMappingContext(it -> { + it.autocreateIndex(false); + }); - this.mappingTemplate.setApplicationContext(context); - } + cfg.configureApplicationContext(it -> { + it.applicationContext(new GenericApplicationContext()); + it.addEventListener(new PersonWithIdPropertyOfTypeUUIDListener()); + }); + }); + + MongoDatabaseFactory factory = template.getMongoDatabaseFactory(); - @After + @AfterEach public void cleanUp() { - cleanDb(); - } - protected void cleanDb() { + template.flush(); + template.flush("collection", "personX", "findandreplace"); + + mappingTemplate.flush(); + template.dropCollection(Person.class); - template.dropCollection(PersonWithAList.class); - template.dropCollection(PersonWith_idPropertyOfTypeObjectId.class); - template.dropCollection(PersonWith_idPropertyOfTypeString.class); - template.dropCollection(PersonWithIdPropertyOfTypeObjectId.class); - template.dropCollection(PersonWithIdPropertyOfTypeString.class); - template.dropCollection(PersonWithIdPropertyOfTypeInteger.class); - template.dropCollection(PersonWithIdPropertyOfTypeBigInteger.class); - template.dropCollection(PersonWithIdPropertyOfPrimitiveInt.class); - template.dropCollection(PersonWithIdPropertyOfTypeLong.class); - template.dropCollection(PersonWithIdPropertyOfPrimitiveLong.class); - template.dropCollection(PersonWithIdPropertyOfTypeUUID.class); - template.dropCollection(PersonWithVersionPropertyOfTypeInteger.class); - template.dropCollection(TestClass.class); - template.dropCollection(Sample.class); - template.dropCollection(MyPerson.class); - template.dropCollection(TypeWithFieldAnnotation.class); - template.dropCollection(TypeWithDate.class); - template.dropCollection("collection"); - template.dropCollection("personX"); - template.dropCollection(Document.class); - template.dropCollection(ObjectWith3AliasedFields.class); - 
template.dropCollection(ObjectWith3AliasedFieldsAndNestedAddress.class); - template.dropCollection(BaseDoc.class); - template.dropCollection(ObjectWithEnumValue.class); - template.dropCollection(DocumentWithCollection.class); - template.dropCollection(DocumentWithCollectionOfSimpleType.class); - template.dropCollection(DocumentWithMultipleCollections.class); - template.dropCollection(DocumentWithNestedCollection.class); - template.dropCollection(DocumentWithEmbeddedDocumentWithCollection.class); - template.dropCollection(DocumentWithNestedList.class); - template.dropCollection(DocumentWithDBRefCollection.class); - template.dropCollection(SomeContent.class); - template.dropCollection(SomeTemplate.class); - template.dropCollection(Address.class); - template.dropCollection(DocumentWithCollectionOfSamples.class); - template.dropCollection(WithGeoJson.class); } @Test @@ -224,8 +206,8 @@ public void insertsSimpleEntityCorrectly() throws Exception { template.insert(person); List result = template.find(new Query(Criteria.where("_id").is(person.getId())), Person.class); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(person)); + assertThat(result.size()).isEqualTo(1); + assertThat(result).contains(person); } @Test @@ -256,9 +238,9 @@ public void throwsExceptionForDuplicateIds() { try { template.insert(person); - fail("Expected DataIntegrityViolationException!"); + fail("Expected DataIntegrityViolationException"); } catch (DataIntegrityViolationException e) { - assertThat(e.getMessage(), containsString("E11000 duplicate key error")); + assertThat(e.getMessage()).contains("E11000 duplicate key error"); } } @@ -274,14 +256,12 @@ public void throwsExceptionForUpdateWithInvalidPushOperator() { template.insert(person); - thrown.expect(DataIntegrityViolationException.class); - thrown.expectMessage("array"); - thrown.expectMessage("age"); - // thrown.expectMessage("failed"); - Query query = new Query(Criteria.where("firstName").is("Amol")); Update upd = new 
Update().push("age", 29); - template.updateFirst(query, upd, Person.class); + + assertThatExceptionOfType(DataIntegrityViolationException.class) + .isThrownBy(() -> template.updateFirst(query, upd, Person.class)).withMessageContaining("array") + .withMessageContaining("age"); } @Test // DATAMONGO-480 @@ -301,18 +281,15 @@ public void throwsExceptionForIndexViolationIfConfigured() { try { template.save(person); - fail("Expected DataIntegrityViolationException!"); + fail("Expected DataIntegrityViolationException"); } catch (DataIntegrityViolationException e) { - assertThat(e.getMessage(), containsString("E11000 duplicate key error")); + assertThat(e.getMessage()).contains("E11000 duplicate key error"); } } @Test // DATAMONGO-480 public void rejectsDuplicateIdInInsertAll() { - thrown.expect(DataIntegrityViolationException.class); - thrown.expectMessage("E11000 duplicate key error"); - MongoTemplate template = new MongoTemplate(factory); template.setWriteResultChecking(WriteResultChecking.EXCEPTION); @@ -320,11 +297,12 @@ public void rejectsDuplicateIdInInsertAll() { Person person = new Person(id, "Amol"); person.setAge(28); - List records = new ArrayList(); + List records = new ArrayList<>(); records.add(person); records.add(person); - template.insertAll(records); + assertThatExceptionOfType(DataIntegrityViolationException.class).isThrownBy(() -> template.insertAll(records)) + .withMessageContaining("E11000 duplicate key error"); } @Test // DATAMONGO-1687 @@ -334,7 +312,7 @@ public void createCappedCollection() { org.bson.Document collectionOptions = getCollectionInfo(template.getCollectionName(Person.class)).get("options", org.bson.Document.class); - assertThat(collectionOptions.get("capped"), is(true)); + assertThat(collectionOptions.get("capped")).isEqualTo(true); } private org.bson.Document getCollectionInfo(String collectionName) { @@ -348,7 +326,6 @@ private org.bson.Document getCollectionInfo(String collectionName) { } @Test - @SuppressWarnings("deprecation") 
public void testEnsureIndex() throws Exception { Person p1 = new Person("Oliver"); @@ -361,10 +338,10 @@ public void testEnsureIndex() throws Exception { template.indexOps(Person.class).ensureIndex(new Index().on("age", Direction.DESC).unique()); MongoCollection coll = template.getCollection(template.getCollectionName(Person.class)); - List indexInfo = new ArrayList(); + List indexInfo = new ArrayList<>(); coll.listIndexes().into(indexInfo); - assertThat(indexInfo.size(), is(2)); + assertThat(indexInfo.size()).isEqualTo(2); Object indexKey = null; boolean unique = false; for (org.bson.Document ix : indexInfo) { @@ -372,35 +349,34 @@ public void testEnsureIndex() throws Exception { if ("age_-1".equals(ix.get("name"))) { indexKey = ix.get("key"); unique = (Boolean) ix.get("unique"); - assertThat(ix.get("dropDups"), is(nullValue())); + assertThat(ix.get("dropDups")).isNull(); } } - assertThat(((org.bson.Document) indexKey), IsMapContaining. hasEntry("age", -1)); - assertThat(unique, is(true)); + assertThat(((org.bson.Document) indexKey)).containsEntry("age", -1); + assertThat(unique).isTrue(); List indexInfoList = template.indexOps(Person.class).getIndexInfo(); - assertThat(indexInfoList.size(), is(2)); + assertThat(indexInfoList.size()).isEqualTo(2); IndexInfo ii = indexInfoList.get(1); - assertThat(ii.isUnique(), is(true)); - assertThat(ii.isSparse(), is(false)); + assertThat(ii.isUnique()).isTrue(); + assertThat(ii.isSparse()).isFalse(); List indexFields = ii.getIndexFields(); IndexField field = indexFields.get(0); - assertThat(field, is(IndexField.create("age", Direction.DESC))); + assertThat(field).isEqualTo(IndexField.create("age", Direction.DESC)); } - @Test // DATAMONGO-746 + @Test // DATAMONGO-746, DATAMONGO-2264 public void testReadIndexInfoForIndicesCreatedViaMongoShellCommands() throws Exception { - String command = "db." 
+ template.getCollectionName(Person.class) - + ".createIndex({'age':-1}, {'unique':true, 'sparse':true}), 1"; - template.indexOps(Person.class).dropAllIndexes(); + template.dropCollection(Person.class); - assertThat(template.indexOps(Person.class).getIndexInfo().isEmpty(), is(true)); + assertThat(template.indexOps(Person.class).getIndexInfo().isEmpty()).isTrue(); - factory.getDb().runCommand(new org.bson.Document("eval", command)); + factory.getMongoDatabase().getCollection(template.getCollectionName(Person.class)) + .createIndex(new org.bson.Document("age", -1), new IndexOptions().name("age_-1").unique(true).sparse(true)); ListIndexesIterable indexInfo = template.getCollection(template.getCollectionName(Person.class)) .listIndexes(); @@ -419,17 +395,17 @@ public void testReadIndexInfoForIndicesCreatedViaMongoShellCommands() throws Exc } } - assertThat(indexKey, IsMapContaining. hasEntry("age", -1D)); - assertThat(unique, is(true)); + assertThat(indexKey).containsEntry("age", -1); + assertThat(unique).isTrue(); IndexInfo info = template.indexOps(Person.class).getIndexInfo().get(1); - assertThat(info.isUnique(), is(true)); - assertThat(info.isSparse(), is(true)); + assertThat(info.isUnique()).isTrue(); + assertThat(info.isSparse()).isTrue(); List indexFields = info.getIndexFields(); IndexField field = indexFields.get(0); - assertThat(field, is(IndexField.create("age", Direction.DESC))); + assertThat(field).isEqualTo(IndexField.create("age", Direction.DESC)); } @Test @@ -447,11 +423,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p1); // also try save mongoTemplate.save(p1); - assertThat(p1.getId(), notNullValue()); + assertThat(p1.getId()).isNotNull(); PersonWithIdPropertyOfTypeString p1q = mongoTemplate.findOne(new Query(where("id").is(p1.getId())), PersonWithIdPropertyOfTypeString.class); - assertThat(p1q, notNullValue()); - assertThat(p1q.getId(), is(p1.getId())); + assertThat(p1q).isNotNull(); + 
assertThat(p1q.getId()).isEqualTo(p1.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeString.class, 1); // String id - provided @@ -463,11 +439,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p2); // also try save mongoTemplate.save(p2); - assertThat(p2.getId(), notNullValue()); + assertThat(p2.getId()).isNotNull(); PersonWithIdPropertyOfTypeString p2q = mongoTemplate.findOne(new Query(where("id").is(p2.getId())), PersonWithIdPropertyOfTypeString.class); - assertThat(p2q, notNullValue()); - assertThat(p2q.getId(), is(p2.getId())); + assertThat(p2q).isNotNull(); + assertThat(p2q.getId()).isEqualTo(p2.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeString.class, 2); // String _id - generated @@ -478,11 +454,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p3); // also try save mongoTemplate.save(p3); - assertThat(p3.get_id(), notNullValue()); + assertThat(p3.get_id()).isNotNull(); PersonWith_idPropertyOfTypeString p3q = mongoTemplate.findOne(new Query(where("_id").is(p3.get_id())), PersonWith_idPropertyOfTypeString.class); - assertThat(p3q, notNullValue()); - assertThat(p3q.get_id(), is(p3.get_id())); + assertThat(p3q).isNotNull(); + assertThat(p3q.get_id()).isEqualTo(p3.get_id()); checkCollectionContents(PersonWith_idPropertyOfTypeString.class, 1); // String _id - provided @@ -494,11 +470,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p4); // also try save mongoTemplate.save(p4); - assertThat(p4.get_id(), notNullValue()); + assertThat(p4.get_id()).isNotNull(); PersonWith_idPropertyOfTypeString p4q = mongoTemplate.findOne(new Query(where("_id").is(p4.get_id())), PersonWith_idPropertyOfTypeString.class); - assertThat(p4q, notNullValue()); - assertThat(p4q.get_id(), is(p4.get_id())); + assertThat(p4q).isNotNull(); + assertThat(p4q.get_id()).isEqualTo(p4.get_id()); 
checkCollectionContents(PersonWith_idPropertyOfTypeString.class, 2); // ObjectId id - generated @@ -509,11 +485,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p5); // also try save mongoTemplate.save(p5); - assertThat(p5.getId(), notNullValue()); + assertThat(p5.getId()).isNotNull(); PersonWithIdPropertyOfTypeObjectId p5q = mongoTemplate.findOne(new Query(where("id").is(p5.getId())), PersonWithIdPropertyOfTypeObjectId.class); - assertThat(p5q, notNullValue()); - assertThat(p5q.getId(), is(p5.getId())); + assertThat(p5q).isNotNull(); + assertThat(p5q.getId()).isEqualTo(p5.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeObjectId.class, 1); // ObjectId id - provided @@ -525,11 +501,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p6); // also try save mongoTemplate.save(p6); - assertThat(p6.getId(), notNullValue()); + assertThat(p6.getId()).isNotNull(); PersonWithIdPropertyOfTypeObjectId p6q = mongoTemplate.findOne(new Query(where("id").is(p6.getId())), PersonWithIdPropertyOfTypeObjectId.class); - assertThat(p6q, notNullValue()); - assertThat(p6q.getId(), is(p6.getId())); + assertThat(p6q).isNotNull(); + assertThat(p6q.getId()).isEqualTo(p6.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeObjectId.class, 2); // ObjectId _id - generated @@ -540,11 +516,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p7); // also try save mongoTemplate.save(p7); - assertThat(p7.get_id(), notNullValue()); + assertThat(p7.get_id()).isNotNull(); PersonWith_idPropertyOfTypeObjectId p7q = mongoTemplate.findOne(new Query(where("_id").is(p7.get_id())), PersonWith_idPropertyOfTypeObjectId.class); - assertThat(p7q, notNullValue()); - assertThat(p7q.get_id(), is(p7.get_id())); + assertThat(p7q).isNotNull(); + assertThat(p7q.get_id()).isEqualTo(p7.get_id()); 
checkCollectionContents(PersonWith_idPropertyOfTypeObjectId.class, 1); // ObjectId _id - provided @@ -556,11 +532,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p8); // also try save mongoTemplate.save(p8); - assertThat(p8.get_id(), notNullValue()); + assertThat(p8.get_id()).isNotNull(); PersonWith_idPropertyOfTypeObjectId p8q = mongoTemplate.findOne(new Query(where("_id").is(p8.get_id())), PersonWith_idPropertyOfTypeObjectId.class); - assertThat(p8q, notNullValue()); - assertThat(p8q.get_id(), is(p8.get_id())); + assertThat(p8q).isNotNull(); + assertThat(p8q.get_id()).isEqualTo(p8.get_id()); checkCollectionContents(PersonWith_idPropertyOfTypeObjectId.class, 2); // Integer id - provided @@ -572,11 +548,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p9); // also try save mongoTemplate.save(p9); - assertThat(p9.getId(), notNullValue()); + assertThat(p9.getId()).isNotNull(); PersonWithIdPropertyOfTypeInteger p9q = mongoTemplate.findOne(new Query(where("id").in(p9.getId())), PersonWithIdPropertyOfTypeInteger.class); - assertThat(p9q, notNullValue()); - assertThat(p9q.getId(), is(p9.getId())); + assertThat(p9q).isNotNull(); + assertThat(p9q.getId()).isEqualTo(p9.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeInteger.class, 1); // DATAMONGO-602 @@ -589,11 +565,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p9bi); // also try save mongoTemplate.save(p9bi); - assertThat(p9bi.getId(), notNullValue()); + assertThat(p9bi.getId()).isNotNull(); PersonWithIdPropertyOfTypeBigInteger p9qbi = mongoTemplate.findOne(new Query(where("id").in(p9bi.getId())), PersonWithIdPropertyOfTypeBigInteger.class); - assertThat(p9qbi, notNullValue()); - assertThat(p9qbi.getId(), is(p9bi.getId())); + assertThat(p9qbi).isNotNull(); + assertThat(p9qbi.getId()).isEqualTo(p9bi.getId()); 
checkCollectionContents(PersonWithIdPropertyOfTypeBigInteger.class, 1); // int id - provided @@ -605,11 +581,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p10); // also try save mongoTemplate.save(p10); - assertThat(p10.getId(), notNullValue()); + assertThat(p10.getId()).isNotNull(); PersonWithIdPropertyOfPrimitiveInt p10q = mongoTemplate.findOne(new Query(where("id").in(p10.getId())), PersonWithIdPropertyOfPrimitiveInt.class); - assertThat(p10q, notNullValue()); - assertThat(p10q.getId(), is(p10.getId())); + assertThat(p10q).isNotNull(); + assertThat(p10q.getId()).isEqualTo(p10.getId()); checkCollectionContents(PersonWithIdPropertyOfPrimitiveInt.class, 1); // Long id - provided @@ -621,11 +597,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p11); // also try save mongoTemplate.save(p11); - assertThat(p11.getId(), notNullValue()); + assertThat(p11.getId()).isNotNull(); PersonWithIdPropertyOfTypeLong p11q = mongoTemplate.findOne(new Query(where("id").in(p11.getId())), PersonWithIdPropertyOfTypeLong.class); - assertThat(p11q, notNullValue()); - assertThat(p11q.getId(), is(p11.getId())); + assertThat(p11q).isNotNull(); + assertThat(p11q.getId()).isEqualTo(p11.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeLong.class, 1); // long id - provided @@ -637,11 +613,11 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p12); // also try save mongoTemplate.save(p12); - assertThat(p12.getId(), notNullValue()); + assertThat(p12.getId()).isNotNull(); PersonWithIdPropertyOfPrimitiveLong p12q = mongoTemplate.findOne(new Query(where("id").in(p12.getId())), PersonWithIdPropertyOfPrimitiveLong.class); - assertThat(p12q, notNullValue()); - assertThat(p12q.getId(), is(p12.getId())); + assertThat(p12q).isNotNull(); + assertThat(p12q.getId()).isEqualTo(p12.getId()); 
checkCollectionContents(PersonWithIdPropertyOfPrimitiveLong.class, 1); // DATAMONGO-1617 @@ -653,16 +629,16 @@ private void testProperHandlingOfDifferentIdTypes(MongoTemplate mongoTemplate) t mongoTemplate.insert(p13); // also try save mongoTemplate.save(p13); - assertThat(p13.getId(), notNullValue()); + assertThat(p13.getId()).isNotNull(); PersonWithIdPropertyOfTypeUUID p13q = mongoTemplate.findOne(new Query(where("id").in(p13.getId())), PersonWithIdPropertyOfTypeUUID.class); - assertThat(p13q, notNullValue()); - assertThat(p13q.getId(), is(p13.getId())); + assertThat(p13q).isNotNull(); + assertThat(p13q.getId()).isEqualTo(p13.getId()); checkCollectionContents(PersonWithIdPropertyOfTypeUUID.class, 1); } private void checkCollectionContents(Class entityClass, int count) { - assertThat(template.findAll(entityClass).size(), is(count)); + assertThat(template.findAll(entityClass).size()).isEqualTo(count); } @Test // DATAMONGO-234 @@ -675,28 +651,28 @@ public void testFindAndUpdate() { Query query = new Query(Criteria.where("firstName").is("Harry")); Update update = new Update().inc("age", 1); Person p = template.findAndModify(query, update, Person.class); // return old - assertThat(p.getFirstName(), is("Harry")); - assertThat(p.getAge(), is(23)); + assertThat(p.getFirstName()).isEqualTo("Harry"); + assertThat(p.getAge()).isEqualTo(23); p = template.findOne(query, Person.class); - assertThat(p.getAge(), is(24)); + assertThat(p.getAge()).isEqualTo(24); p = template.findAndModify(query, update, Person.class, "person"); - assertThat(p.getAge(), is(24)); + assertThat(p.getAge()).isEqualTo(24); p = template.findOne(query, Person.class); - assertThat(p.getAge(), is(25)); + assertThat(p.getAge()).isEqualTo(25); p = template.findAndModify(query, update, new FindAndModifyOptions().returnNew(true), Person.class); - assertThat(p.getAge(), is(26)); + assertThat(p.getAge()).isEqualTo(26); p = template.findAndModify(query, update, new FindAndModifyOptions(), Person.class, "person"); 
- assertThat(p.getAge(), is(26)); + assertThat(p.getAge()).isEqualTo(26); p = template.findOne(query, Person.class); - assertThat(p.getAge(), is(27)); + assertThat(p.getAge()).isEqualTo(27); Query query2 = new Query(Criteria.where("firstName").is("Mary")); p = template.findAndModify(query2, update, new FindAndModifyOptions().returnNew(true).upsert(true), Person.class); - assertThat(p.getFirstName(), is("Mary")); - assertThat(p.getAge(), is(1)); + assertThat(p.getFirstName()).isEqualTo("Mary"); + assertThat(p.getAge()).isEqualTo(1); } @@ -708,8 +684,8 @@ public void testFindAndUpdateUpsert() { Update update = new Update().set("age", 23); Person p = template.findAndModify(query, update, new FindAndModifyOptions().upsert(true).returnNew(true), Person.class); - assertThat(p.getFirstName(), is("Harry")); - assertThat(p.getAge(), is(23)); + assertThat(p.getFirstName()).isEqualTo("Harry"); + assertThat(p.getAge()).isEqualTo(23); } @Test @@ -725,9 +701,9 @@ public void testFindAndRemove() throws Exception { Message found2 = template.findAndRemove(q, Message.class); Message notFound = template.findAndRemove(q, Message.class); - assertThat(found1, notNullValue()); - assertThat(found2, notNullValue()); - assertThat(notFound, nullValue()); + assertThat(found1).isNotNull(); + assertThat(found2).isNotNull(); + assertThat(notFound).isNull(); } @Test // DATAMONGO-1761 @@ -757,7 +733,7 @@ public void testDistinct() { assertThat(template.findDistinct(new BasicQuery("{'address.state' : 'PA'}"), "name", MyPerson.class, String.class)) .containsExactlyInAnyOrder(person1.getName(), person2.getName()); assertThat(template.findDistinct(new BasicQuery("{'address.state' : 'PA'}"), "name", - template.determineCollectionName(MyPerson.class), MyPerson.class, String.class)) + template.getCollectionName(MyPerson.class), MyPerson.class, String.class)) .containsExactlyInAnyOrder(person1.getName(), person2.getName()); } @@ -766,8 +742,7 @@ public void 
testDistinctCovertsResultToPropertyTargetTypeCorrectly() { template.insert(new Person("garvin")); - assertThat(template.findDistinct("firstName", Person.class, Object.class)) - .allSatisfy(val -> instanceOf(String.class)); + assertThat(template.findDistinct("firstName", Person.class, Object.class)).allSatisfy(String.class::isInstance); } @Test // DATAMONGO-1761 @@ -827,9 +802,9 @@ public void testUsingAnInQueryWithObjectId() throws Exception { List results2 = template.find(q2, PersonWithIdPropertyOfTypeObjectId.class); Query q3 = new Query(Criteria.where("id").in(p3.getId())); List results3 = template.find(q3, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(results1.size(), is(3)); - assertThat(results2.size(), is(2)); - assertThat(results3.size(), is(1)); + assertThat(results1.size()).isEqualTo(3); + assertThat(results2.size()).isEqualTo(2); + assertThat(results3.size()).isEqualTo(1); } @Test @@ -860,9 +835,9 @@ public void testUsingAnInQueryWithStringId() throws Exception { List results2 = template.find(q2, PersonWithIdPropertyOfTypeString.class); Query q3 = new Query(Criteria.where("id").in(p3.getId(), p4.getId())); List results3 = template.find(q3, PersonWithIdPropertyOfTypeString.class); - assertThat(results1.size(), is(3)); - assertThat(results2.size(), is(2)); - assertThat(results3.size(), is(2)); + assertThat(results1.size()).isEqualTo(3); + assertThat(results2.size()).isEqualTo(2); + assertThat(results3.size()).isEqualTo(2); } @Test @@ -897,9 +872,9 @@ public void testUsingAnInQueryWithLongId() throws Exception { List results2 = template.find(q2, PersonWithIdPropertyOfTypeLong.class); Query q3 = new Query(Criteria.where("id").in(1001L, 1004L)); List results3 = template.find(q3, PersonWithIdPropertyOfTypeLong.class); - assertThat(results1.size(), is(3)); - assertThat(results2.size(), is(2)); - assertThat(results3.size(), is(2)); + assertThat(results1.size()).isEqualTo(3); + assertThat(results2.size()).isEqualTo(2); + 
assertThat(results3.size()).isEqualTo(2); } @Test // DATAMONGO-602 @@ -935,9 +910,9 @@ public void testUsingAnInQueryWithBigIntegerId() throws Exception { Query q3 = new Query(Criteria.where("id").in(new BigInteger("2666666666666666665069473312490162649510603601"), new BigInteger("2666666666666666665069473312490162649510603604"))); List results3 = template.find(q3, PersonWithIdPropertyOfTypeBigInteger.class); - assertThat(results1.size(), is(3)); - assertThat(results2.size(), is(2)); - assertThat(results3.size(), is(2)); + assertThat(results1.size()).isEqualTo(3); + assertThat(results2.size()).isEqualTo(2); + assertThat(results3.size()).isEqualTo(2); } @Test @@ -972,9 +947,9 @@ public void testUsingAnInQueryWithPrimitiveIntId() throws Exception { List results2 = template.find(q2, PersonWithIdPropertyOfPrimitiveInt.class); Query q3 = new Query(Criteria.where("id").in(1001, 1003)); List results3 = template.find(q3, PersonWithIdPropertyOfPrimitiveInt.class); - assertThat(results1.size(), is(3)); - assertThat(results2.size(), is(2)); - assertThat(results3.size(), is(2)); + assertThat(results1.size()).isEqualTo(3); + assertThat(results2.size()).isEqualTo(2); + assertThat(results3.size()).isEqualTo(2); } @Test @@ -999,7 +974,7 @@ public void testUsingInQueryWithList() throws Exception { p4.setAge(41); template.insert(p4); - List l1 = new ArrayList(); + List l1 = new ArrayList<>(); l1.add(11); l1.add(21); l1.add(41); @@ -1007,10 +982,10 @@ public void testUsingInQueryWithList() throws Exception { List results1 = template.find(q1, PersonWithIdPropertyOfTypeObjectId.class); Query q2 = new Query(Criteria.where("age").in(l1.toArray())); List results2 = template.find(q2, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(results1.size(), is(3)); - assertThat(results2.size(), is(3)); + assertThat(results1.size()).isEqualTo(3); + assertThat(results2.size()).isEqualTo(3); try { - List l2 = new ArrayList(); + List l2 = new ArrayList<>(); l2.add(31); Query q3 = new 
Query(Criteria.where("age").in(l1, l2)); template.find(q3, PersonWithIdPropertyOfTypeObjectId.class); @@ -1044,8 +1019,8 @@ public void testUsingRegexQueryWithOptions() throws Exception { List results1 = template.find(q1, PersonWithIdPropertyOfTypeObjectId.class); Query q2 = new Query(Criteria.where("firstName").regex("S.*", "i")); List results2 = template.find(q2, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(results1.size(), is(1)); - assertThat(results2.size(), is(2)); + assertThat(results1.size()).isEqualTo(1); + assertThat(results2.size()).isEqualTo(2); } @Test @@ -1072,9 +1047,9 @@ public void testUsingAnOrQuery() throws Exception { Query orQuery = new Query(new Criteria().orOperator(where("age").in(11, 21), where("age").is(31))); List results = template.find(orQuery, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(results.size(), is(3)); + assertThat(results.size()).isEqualTo(3); for (PersonWithIdPropertyOfTypeObjectId p : results) { - assertThat(p.getAge(), isOneOf(11, 21, 31)); + assertThat(p.getAge()).isIn(11, 21, 31); } } @@ -1097,18 +1072,18 @@ public void testUsingUpdateWithMultipleSet() throws Exception { UpdateResult wr = template.updateMulti(new Query(), u, PersonWithIdPropertyOfTypeObjectId.class); if (wr.wasAcknowledged()) { - assertThat(wr.getModifiedCount(), is(2L)); + assertThat(wr.getModifiedCount()).isEqualTo(2L); } Query q1 = new Query(Criteria.where("age").in(11, 21)); List r1 = template.find(q1, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(r1.size(), is(0)); + assertThat(r1.size()).isEqualTo(0); Query q2 = new Query(Criteria.where("age").is(10)); List r2 = template.find(q2, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(r2.size(), is(2)); + assertThat(r2.size()).isEqualTo(2); for (PersonWithIdPropertyOfTypeObjectId p : r2) { - assertThat(p.getAge(), is(10)); - assertThat(p.getFirstName(), is("Bob")); + assertThat(p.getAge()).isEqualTo(10); + assertThat(p.getFirstName()).isEqualTo("Bob"); } } @@ 
-1122,11 +1097,11 @@ public void testRemovingDocument() throws Exception { Query q1 = new Query(Criteria.where("id").is(p1.getId())); PersonWithIdPropertyOfTypeObjectId found1 = template.findOne(q1, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(found1, notNullValue()); + assertThat(found1).isNotNull(); Query _q = new Query(Criteria.where("_id").is(p1.getId())); template.remove(_q, PersonWithIdPropertyOfTypeObjectId.class); PersonWithIdPropertyOfTypeObjectId notFound1 = template.findOne(q1, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(notFound1, nullValue()); + assertThat(notFound1).isNull(); PersonWithIdPropertyOfTypeObjectId p2 = new PersonWithIdPropertyOfTypeObjectId(); p2.setFirstName("Bubba_to_be_removed"); @@ -1135,10 +1110,10 @@ public void testRemovingDocument() throws Exception { Query q2 = new Query(Criteria.where("id").is(p2.getId())); PersonWithIdPropertyOfTypeObjectId found2 = template.findOne(q2, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(found2, notNullValue()); + assertThat(found2).isNotNull(); template.remove(q2, PersonWithIdPropertyOfTypeObjectId.class); PersonWithIdPropertyOfTypeObjectId notFound2 = template.findOne(q2, PersonWithIdPropertyOfTypeObjectId.class); - assertThat(notFound2, nullValue()); + assertThat(notFound2).isNull(); } @Test @@ -1150,16 +1125,16 @@ public void testAddingToList() { Query q1 = new Query(Criteria.where("id").is(p.getId())); PersonWithAList p2 = template.findOne(q1, PersonWithAList.class); - assertThat(p2, notNullValue()); - assertThat(p2.getWishList().size(), is(0)); + assertThat(p2).isNotNull(); + assertThat(p2.getWishList().size()).isEqualTo(0); - p2.addToWishList("please work!"); + p2.addToWishList("please work"); template.save(p2); PersonWithAList p3 = template.findOne(q1, PersonWithAList.class); - assertThat(p3, notNullValue()); - assertThat(p3.getWishList().size(), is(1)); + assertThat(p3).isNotNull(); + assertThat(p3.getWishList().size()).isEqualTo(1); Friend f = new Friend(); 
p.setFirstName("Erik"); @@ -1169,9 +1144,9 @@ public void testAddingToList() { template.save(p3); PersonWithAList p4 = template.findOne(q1, PersonWithAList.class); - assertThat(p4, notNullValue()); - assertThat(p4.getWishList().size(), is(1)); - assertThat(p4.getFriends().size(), is(1)); + assertThat(p4).isNotNull(); + assertThat(p4.getWishList().size()).isEqualTo(1); + assertThat(p4.getFriends().size()).isEqualTo(1); } @@ -1196,10 +1171,10 @@ public void testFindOneWithSort() { Query q2 = new Query(Criteria.where("age").gt(10)); q2.with(Sort.by(Direction.DESC, "age")); PersonWithAList p5 = template.findOne(q2, PersonWithAList.class); - assertThat(p5.getFirstName(), is("Mark")); + assertThat(p5.getFirstName()).isEqualTo("Mark"); } - @Test + @Test // DATAMONGO-2572 public void testUsingReadPreference() throws Exception { this.template.execute("readPref", new CollectionCallback() { public Object doInCollection(MongoCollection collection) @@ -1210,21 +1185,21 @@ public Object doInCollection(MongoCollection collection) return null; } }); - MongoTemplate slaveTemplate = new MongoTemplate(factory); - slaveTemplate.setReadPreference(ReadPreference.secondary()); - slaveTemplate.execute("readPref", new CollectionCallback() { + MongoTemplate secondaryTemplate = new MongoTemplate(factory); + secondaryTemplate.setReadPreference(ReadPreference.secondary()); + secondaryTemplate.execute("readPref", new CollectionCallback() { public Object doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - assertThat(collection.getReadPreference(), is(ReadPreference.secondary())); + assertThat(collection.getReadPreference()).isEqualTo(ReadPreference.secondary()); // assertThat(collection.getDB().getOptions(), is(0)); return null; } }); } - @Test(expected = IllegalArgumentException.class) // DATADOC-166, DATAMONGO-1762 + @Test // DATADOC-166, DATAMONGO-1762 public void removingNullIsANoOp() { - template.remove((Object) null); + 
assertThatIllegalArgumentException().isThrownBy(() -> template.remove((Object) null)); } @Test // DATADOC-240, DATADOC-212 @@ -1240,9 +1215,9 @@ public void updatesObjectIdsCorrectly() { PersonWithIdPropertyOfTypeObjectId result = template.findById(person.getId(), PersonWithIdPropertyOfTypeObjectId.class); - assertThat(result, is(notNullValue())); - assertThat(result.getId(), is(person.getId())); - assertThat(result.getFirstName(), is("Carter")); + assertThat(result).isNotNull(); + assertThat(result.getId()).isEqualTo(person.getId()); + assertThat(result.getFirstName()).isEqualTo("Carter"); } @Test @@ -1254,22 +1229,23 @@ public void testWriteConcernResolver() { template.setWriteConcern(WriteConcern.UNACKNOWLEDGED); template.save(person); - UpdateResult result = template.updateFirst(query(where("id").is(person.getId())), update("firstName", "Carter"), + template.updateFirst(query(where("id").is(person.getId())), update("firstName", "Carter"), PersonWithIdPropertyOfTypeObjectId.class); FsyncSafeWriteConcernResolver resolver = new FsyncSafeWriteConcernResolver(); template.setWriteConcernResolver(resolver); Query q = query(where("_id").is(person.getId())); Update u = update("firstName", "Carter"); - result = template.updateFirst(q, u, PersonWithIdPropertyOfTypeObjectId.class); + template.updateFirst(q, u, PersonWithIdPropertyOfTypeObjectId.class); MongoAction lastMongoAction = resolver.getMongoAction(); - assertThat(lastMongoAction.getCollectionName(), is("personWithIdPropertyOfTypeObjectId")); - assertThat(lastMongoAction.getDefaultWriteConcern(), equalTo(WriteConcern.UNACKNOWLEDGED)); - assertThat(lastMongoAction.getDocument(), notNullValue()); - assertThat(lastMongoAction.getEntityType().toString(), is(PersonWithIdPropertyOfTypeObjectId.class.toString())); - assertThat(lastMongoAction.getMongoActionOperation(), is(MongoActionOperation.UPDATE)); - assertThat(lastMongoAction.getQuery(), equalTo(q.getQueryObject())); + 
assertThat(lastMongoAction.getCollectionName()).isEqualTo("personWithIdPropertyOfTypeObjectId"); + assertThat(lastMongoAction.getDefaultWriteConcern()).isEqualTo(WriteConcern.UNACKNOWLEDGED); + assertThat(lastMongoAction.getDocument()).isNotNull(); + assertThat(lastMongoAction.getEntityType().toString()) + .isEqualTo(PersonWithIdPropertyOfTypeObjectId.class.toString()); + assertThat(lastMongoAction.getMongoActionOperation()).isEqualTo(MongoActionOperation.UPDATE); + assertThat(lastMongoAction.getQuery()).isEqualTo(q.getQueryObject()); } private class FsyncSafeWriteConcernResolver implements WriteConcernResolver { @@ -1278,7 +1254,7 @@ private class FsyncSafeWriteConcernResolver implements WriteConcernResolver { public WriteConcern resolve(MongoAction action) { this.mongoAction = action; - return WriteConcern.FSYNC_SAFE; + return WriteConcern.JOURNALED; } public MongoAction getMongoAction() { @@ -1301,10 +1277,11 @@ class ClassWithDBRefs { @Test // DATADOC-202 public void executeDocument() { + template.insert(new Person("Tom")); template.insert(new Person("Dick")); template.insert(new Person("Harry")); - final List names = new ArrayList(); + final List names = new ArrayList<>(); template.executeQuery(new Query(), template.getCollectionName(Person.class), new DocumentCallbackHandler() { public void processDocument(org.bson.Document document) { String name = (String) document.get("firstName"); @@ -1313,7 +1290,7 @@ public void processDocument(org.bson.Document document) { } } }); - assertEquals(3, names.size()); + assertThat(names.size()).isEqualTo(3); // template.remove(new Query(), Person.class); } @@ -1322,7 +1299,7 @@ public void executeDocumentWithCursorPreparer() { template.insert(new Person("Tom")); template.insert(new Person("Dick")); template.insert(new Person("Harry")); - final List names = new ArrayList(); + final List names = new ArrayList<>(); template.executeQuery(new Query(), template.getCollectionName(Person.class), new DocumentCallbackHandler() { 
public void processDocument(org.bson.Document document) { String name = (String) document.get("firstName"); @@ -1332,20 +1309,20 @@ public void processDocument(org.bson.Document document) { } }, new CursorPreparer() { - public FindIterable prepare(FindIterable cursor) { - cursor.limit(1); - return cursor; + public FindIterable prepare(FindIterable iterable) { + iterable.limit(1); + return iterable; } }); - assertEquals(1, names.size()); + assertThat(names.size()).isEqualTo(1); template.remove(new Query(), Person.class); } @Test // DATADOC-183 public void countsDocumentsCorrectly() { - assertThat(template.count(new Query(), Person.class), is(0L)); + assertThat(template.count(new Query(), Person.class)).isEqualTo(0L); Person dave = new Person("Dave"); Person carter = new Person("Carter"); @@ -1353,36 +1330,36 @@ public void countsDocumentsCorrectly() { template.save(dave); template.save(carter); - assertThat(template.count(new Query(), Person.class), is(2L)); - assertThat(template.count(query(where("firstName").is("Carter")), Person.class), is(1L)); + assertThat(template.count(new Query(), Person.class)).isEqualTo(2L); + assertThat(template.count(query(where("firstName").is("Carter")), Person.class)).isEqualTo(1L); } - @Test(expected = IllegalArgumentException.class) // DATADOC-183 + @Test // DATADOC-183 public void countRejectsNullEntityClass() { - template.count(null, (Class) null); + assertThatIllegalArgumentException().isThrownBy(() -> template.count(null, (Class) null)); } - @Test(expected = IllegalArgumentException.class) // DATADOC-183 + @Test // DATADOC-183 public void countRejectsEmptyCollectionName() { - template.count(null, ""); + assertThatIllegalArgumentException().isThrownBy(() -> template.count(null, "")); } - @Test(expected = IllegalArgumentException.class) // DATADOC-183 + @Test // DATADOC-183 public void countRejectsNullCollectionName() { - template.count(null, (String) null); + assertThatIllegalArgumentException().isThrownBy(() -> 
template.count(null, (String) null)); } @Test public void returnsEntityWhenQueryingForDateTime() { - DateTime dateTime = new DateTime(2011, 3, 3, 12, 0, 0, 0); + LocalDateTime dateTime = LocalDateTime.of(2011, 3, 3, 12, 0, 0, 0); TestClass testClass = new TestClass(dateTime); mappingTemplate.save(testClass); - List testClassList = mappingTemplate.find(new Query(Criteria.where("myDate").is(dateTime.toDate())), + List testClassList = mappingTemplate.find(new Query(Criteria.where("myDate").is(dateTime)), TestClass.class); - assertThat(testClassList.size(), is(1)); - assertThat(testClassList.get(0).myDate, is(testClass.myDate)); + assertThat(testClassList.size()).isEqualTo(1); + assertThat(testClassList.get(0).myDate).isEqualTo(testClass.myDate); } @Test // DATADOC-230 @@ -1393,10 +1370,10 @@ public void removesEntityFromCollection() { Person person = new Person("Dave"); template.save(person, "mycollection"); - assertThat(template.findAll(TestClass.class, "mycollection").size(), is(1)); + assertThat(template.findAll(TestClass.class, "mycollection").size()).isEqualTo(1); template.remove(person, "mycollection"); - assertThat(template.findAll(Person.class, "mycollection").isEmpty(), is(true)); + assertThat(template.findAll(Person.class, "mycollection").isEmpty()).isTrue(); } @Test // DATADOC-349 @@ -1409,10 +1386,10 @@ public void removesEntityWithAnnotatedIdIfIdNeedsMassaging() { template.save(sample); - assertThat(template.findOne(query(where("id").is(id)), Sample.class).id, is(id)); + assertThat(template.findOne(query(where("id").is(id)), Sample.class).id).isEqualTo(id); template.remove(sample); - assertThat(template.findOne(query(where("id").is(id)), Sample.class), is(nullValue())); + assertThat(template.findOne(query(where("id").is(id)), Sample.class)).isNull(); } @Test // DATAMONGO-423 @@ -1430,22 +1407,25 @@ public void executesQueryWithNegatedRegexCorrectly() { Query query = query(where("field").not().regex("Matthews")); List result = template.find(query, 
Sample.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).field, is("Beauford")); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).field).isEqualTo("Beauford"); } - @Test // DATAMONGO-447 + @Test // DATAMONGO-447, GH-4707 public void storesAndRemovesTypeWithComplexId() { MyId id = new MyId(); + id.id = Instant.now().minusSeconds(2); id.first = "foo"; id.second = "bar"; + id.id = Instant.now().minusSeconds(3); TypeWithMyId source = new TypeWithMyId(); source.id = id; template.save(source); - template.remove(query(where("id").is(id)), TypeWithMyId.class); + assertThat(template.remove(query(where("id").is(id)), TypeWithMyId.class)).extracting(DeleteResult::getDeletedCount) + .isEqualTo(1L); } @Test // DATAMONGO-506 @@ -1464,17 +1444,17 @@ public void exceutesBasicQueryCorrectly() { Query query = new BasicQuery("{'name' : 'Oleg'}"); List result = template.find(query, MyPerson.class); - assertThat(result, hasSize(1)); - assertThat(result.get(0), hasProperty("name", is("Oleg"))); + assertThat(result).hasSize(1); + assertThat(result.get(0).getName()).isEqualTo("Oleg"); query = new BasicQuery("{'address.state' : 'PA' }"); result = template.find(query, MyPerson.class); - assertThat(result, hasSize(1)); - assertThat(result.get(0), hasProperty("name", is("Oleg"))); + assertThat(result).hasSize(1); + assertThat(result.get(0).getName()).isEqualTo("Oleg"); } - @Test(expected = OptimisticLockingFailureException.class) // DATAMONGO-279 + @Test // DATAMONGO-279 public void optimisticLockingHandling() { // Init version @@ -1486,8 +1466,8 @@ public void optimisticLockingHandling() { List result = template .findAll(PersonWithVersionPropertyOfTypeInteger.class); - assertThat(result, hasSize(1)); - assertThat(result.get(0).version, is(0)); + assertThat(result).hasSize(1); + assertThat(result.get(0).version).isEqualTo(0); // Version change person = result.get(0); @@ -1495,18 +1475,20 @@ public void optimisticLockingHandling() { 
template.save(person); - assertThat(person.version, is(1)); + assertThat(person.version).isEqualTo(1); result = mappingTemplate.findAll(PersonWithVersionPropertyOfTypeInteger.class); - assertThat(result, hasSize(1)); - assertThat(result.get(0).version, is(1)); + assertThat(result).hasSize(1); + assertThat(result.get(0).version).isEqualTo(1); // Optimistic lock exception person.version = 0; person.firstName = "Patryk3"; - template.save(person); + final PersonWithVersionPropertyOfTypeInteger toBeSaved = person; + + assertThatExceptionOfType(OptimisticLockingFailureException.class).isThrownBy(() -> template.save(toBeSaved)); } @Test // DATAMONGO-562 @@ -1525,7 +1507,7 @@ public void doesNotFailOnVersionInitForUnversionedEntity() { org.bson.Document document = new org.bson.Document(); document.put("firstName", "Oliver"); - template.insert(document, template.determineCollectionName(PersonWithVersionPropertyOfTypeInteger.class)); + template.insert(document, template.getCollectionName(PersonWithVersionPropertyOfTypeInteger.class)); } @Test // DATAMONGO-1617 @@ -1536,7 +1518,7 @@ public void doesNotFailOnInsertForEntityWithNonAutogeneratableId() { person.setAge(33); template.insert(person); - assertThat(person.getId(), is(notNullValue())); + assertThat(person.getId()).isNotNull(); } @Test // DATAMONGO-539 @@ -1548,29 +1530,29 @@ public void removesObjectFromExplicitCollection() { PersonWithConvertedId person = new PersonWithConvertedId(); person.name = "Dave"; template.save(person, collectionName); - assertThat(template.findAll(PersonWithConvertedId.class, collectionName).isEmpty(), is(false)); + assertThat(template.findAll(PersonWithConvertedId.class, collectionName).isEmpty()).isFalse(); template.remove(person, collectionName); - assertThat(template.findAll(PersonWithConvertedId.class, collectionName).isEmpty(), is(true)); + assertThat(template.findAll(PersonWithConvertedId.class, collectionName).isEmpty()).isTrue(); } // DATAMONGO-549 public void savesMapCorrectly() { - 
Map map = new HashMap(); + Map map = new HashMap<>(); map.put("key", "value"); template.save(map, "maps"); } - @Test(expected = MappingException.class) // DATAMONGO-549, DATAMONGO-1730 + @Test // DATAMONGO-549, DATAMONGO-1730 public void savesMongoPrimitiveObjectCorrectly() { - template.save(new Object(), "collection"); + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> template.save(new Object(), "collection")); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-549 + @Test // DATAMONGO-549 public void rejectsNullObjectToBeSaved() { - template.save(null); + assertThatIllegalArgumentException().isThrownBy(() -> template.save(null)); } @Test // DATAMONGO-550 @@ -1579,16 +1561,17 @@ public void savesPlainDocumentCorrectly() { org.bson.Document document = new org.bson.Document("foo", "bar"); template.save(document, "collection"); - assertThat(document.containsKey("_id"), is(true)); + assertThat(document.containsKey("_id")).isTrue(); } - @Test(expected = MappingException.class) // DATAMONGO-550, DATAMONGO-1730 + @Test // DATAMONGO-550, DATAMONGO-1730 public void rejectsPlainObjectWithOutExplicitCollection() { org.bson.Document document = new org.bson.Document("foo", "bar"); template.save(document, "collection"); - template.findById(document.get("_id"), org.bson.Document.class); + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> template.findById(document.get("_id"), org.bson.Document.class)); } @Test // DATAMONGO-550 @@ -1598,8 +1581,8 @@ public void readsPlainDocumentById() { template.save(document, "collection"); org.bson.Document result = template.findById(document.get("_id"), org.bson.Document.class, "collection"); - assertThat(result.get("foo"), is(document.get("foo"))); - assertThat(result.get("_id"), is(document.get("_id"))); + assertThat(result.get("foo")).isEqualTo(document.get("foo")); + assertThat(result.get("_id")).isEqualTo(document.get("_id")); } @Test // DATAMONGO-551 @@ -1607,9 +1590,9 @@ public void 
writesPlainString() { template.save("{ 'foo' : 'bar' }", "collection"); } - @Test(expected = MappingException.class) // DATAMONGO-551 + @Test // DATAMONGO-551 public void rejectsNonJsonStringForSave() { - template.save("Foobar!", "collection"); + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> template.save("Foobar", "collection")); } @Test // DATAMONGO-588 @@ -1620,7 +1603,25 @@ public void initializesVersionOnInsert() { template.insert(person); - assertThat(person.version, is(0)); + assertThat(person.version).isEqualTo(0); + } + + @Test // DATAMONGO-2195 + public void removeVersionedEntityConsidersVersion() { + + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.firstName = "Dave"; + + template.insert(person); + assertThat(person.version).isEqualTo(0); + template.update(PersonWithVersionPropertyOfTypeInteger.class).matching(query(where("id").is(person.id))) + .apply(new Update().set("firstName", "Walter")).first(); + + DeleteResult deleteResult = template.remove(person); + + assertThat(deleteResult.wasAcknowledged()).isTrue(); + assertThat(deleteResult.getDeletedCount()).isZero(); + assertThat(template.count(new Query(), PersonWithVersionPropertyOfTypeInteger.class)).isOne(); } @Test // DATAMONGO-588 @@ -1631,12 +1632,28 @@ public void initializesVersionOnBatchInsert() { template.insertAll(Arrays.asList(person)); - assertThat(person.version, is(0)); + assertThat(person.version).isEqualTo(0); + } + + @Test // DATAMONGO-1992 + public void initializesIdAndVersionAndOfImmutableObject() { + + ImmutableVersioned versioned = new ImmutableVersioned(); + + ImmutableVersioned saved = template.insert(versioned); + + assertThat(saved).isNotSameAs(versioned); + assertThat(versioned.id).isNull(); + assertThat(versioned.version).isNull(); + + assertThat(saved.id).isNotNull(); + assertThat(saved.version).isEqualTo(0L); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-568, DATAMONGO-1762 + 
@Test // DATAMONGO-568, DATAMONGO-1762 public void queryCantBeNull() { - template.find(null, PersonWithIdPropertyOfTypeObjectId.class); + assertThatIllegalArgumentException() + .isThrownBy(() -> template.find(null, PersonWithIdPropertyOfTypeObjectId.class)); } @Test // DATAMONGO-620 @@ -1646,10 +1663,10 @@ public void versionsObjectIntoDedicatedCollection() { person.firstName = "Dave"; template.save(person, "personX"); - assertThat(person.version, is(0)); + assertThat(person.version).isEqualTo(0); template.save(person, "personX"); - assertThat(person.version, is(1)); + assertThat(person.version).isEqualTo(1); } @Test // DATAMONGO-621 @@ -1659,22 +1676,22 @@ public void correctlySetsLongVersionProperty() { person.firstName = "Dave"; template.save(person); - assertThat(person.version, is(0L)); + assertThat(person.version).isEqualTo(0L); } - @Test(expected = DuplicateKeyException.class) // DATAMONGO-622 + @Test // DATAMONGO-622 public void preventsDuplicateInsert() { - template.setWriteConcern(WriteConcern.SAFE); + template.setWriteConcern(WriteConcern.ACKNOWLEDGED); PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); person.firstName = "Dave"; template.save(person); - assertThat(person.version, is(0)); + assertThat(person.version).isEqualTo(0); person.version = null; - template.save(person); + assertThatExceptionOfType(DuplicateKeyException.class).isThrownBy(() -> template.save(person)); } @Test // DATAMONGO-629 @@ -1686,11 +1703,11 @@ public void countAndFindWithoutTypeInformation() { Query query = query(where("_id").is(person.getId())); String collectionName = template.getCollectionName(Person.class); - assertThat(template.find(query, HashMap.class, collectionName), hasSize(1)); - assertThat(template.count(query, collectionName), is(1L)); + assertThat(template.find(query, HashMap.class, collectionName)).hasSize(1); + assertThat(template.count(query, collectionName)).isEqualTo(1L); } - @Test // DATAMONGO-571 + @Test // 
DATAMONGO-571, GH-3407 public void nullsPropertiesForVersionObjectUpdates() { VersionedPerson person = new VersionedPerson(); @@ -1698,13 +1715,19 @@ public void nullsPropertiesForVersionObjectUpdates() { person.lastname = "Matthews"; template.save(person); - assertThat(person.id, is(notNullValue())); + assertThat(person.id).isNotNull(); + person.firstname = null; person.lastname = null; template.save(person); person = template.findOne(query(where("id").is(person.id)), VersionedPerson.class); - assertThat(person.lastname, is(nullValue())); + assertThat(person.firstname).isNull(); + assertThat(person.lastname).isNull(); + + org.bson.Document document = template.findOne(query(where("_id").is(person.id)), org.bson.Document.class, + "versionedPerson"); + assertThat(document).doesNotContainKey("firstname").containsEntry("lastname", null); } @Test // DATAMONGO-571 @@ -1717,7 +1740,7 @@ public void nullsValuesForUpdatesOfUnversionedEntity() { template.save(person); person = template.findOne(query(where("id").is(person.getId())), Person.class); - assertThat(person.getFirstName(), is(nullValue())); + assertThat(person.getFirstName()).isNull(); } @Test // DATAMONGO-679 @@ -1728,8 +1751,8 @@ public void savesJsonStringCorrectly() { template.save(document, "collection"); List result = template.findAll(org.bson.Document.class, "collection"); - assertThat(result.size(), is(1)); - assertThat(result.get(0).containsKey("first"), is(true)); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).containsKey("first")).isTrue(); } @Test @@ -1740,9 +1763,9 @@ public void executesExistsCorrectly() { Query query = query(where("id").is(sample.id)); - assertThat(template.exists(query, Sample.class), is(true)); - assertThat(template.exists(query(where("_id").is(sample.id)), template.getCollectionName(Sample.class)), is(true)); - assertThat(template.exists(query, Sample.class, template.getCollectionName(Sample.class)), is(true)); + assertThat(template.exists(query, 
Sample.class)).isTrue(); + assertThat(template.exists(query(where("_id").is(sample.id)), template.getCollectionName(Sample.class))).isTrue(); + assertThat(template.exists(query, Sample.class, template.getCollectionName(Sample.class))).isTrue(); } @Test // DATAMONGO-675 @@ -1758,7 +1781,7 @@ public void updateConsidersMappingAnnotations() { FindAndModifyOptions options = new FindAndModifyOptions().returnNew(true); TypeWithFieldAnnotation result = template.findAndModify(query, update, options, TypeWithFieldAnnotation.class); - assertThat(result.emailAddress, is("new")); + assertThat(result.emailAddress).isEqualTo("new"); } @Test // DATAMONGO-671 @@ -1771,8 +1794,32 @@ public void findsEntityByDateReference() { Query query = query(where("date").lt(new Date())); List result = template.find(query, TypeWithDate.class); - assertThat(result, hasSize(1)); - assertThat(result.get(0).date, is(notNullValue())); + assertThat(result).hasSize(1); + assertThat(result.get(0).date).isNotNull(); + } + + @Test // GH-4390 + void nativeDriverDateTimeCodecShouldBeApplied/*when configured*/() { + + MongoTestTemplate ops = new MongoTestTemplate(cfg -> { + cfg.configureConversion(conversion -> { + conversion.customConversions( + MongoCustomConversions.create(MongoConverterConfigurationAdapter::useNativeDriverJavaTimeCodecs)); + }); + }); + + TypeWithDate source = new TypeWithDate(); + source.id = "id-1"; + source.date = Date.from(Instant.now()); + + ops.save(source); + + var dbDate = ops.execute(TypeWithDate.class, + collection -> collection.find(new org.bson.Document("_id", source.id)).first().get("date")); + + TypeWithDate target = ops.findOne(query(where("date").is(source.date)), TypeWithDate.class); + + assertThat(target.date).isEqualTo(source.date).isEqualTo(dbDate); } @Test // DATAMONGO-540 @@ -1787,9 +1834,9 @@ public void findOneAfterUpsertForNonExistingObjectReturnsTheInsertedObject() { template.upsert(query, update, Sample.class); Sample result = template.findOne(query, 
Sample.class); - assertThat(result, is(notNullValue())); - assertThat(result.field, is(fieldValue)); - assertThat(result.id, is(idValue)); + assertThat(result).isNotNull(); + assertThat(result.field).isEqualTo(fieldValue); + assertThat(result.id).isEqualTo(idValue); } @Test // DATAMONGO-392 @@ -1807,10 +1854,10 @@ public void updatesShouldRetainTypeInformation() { Document result = template.findOne(query, Document.class); - assertThat(result, is(notNullValue())); - assertThat(result.id, is(doc.id)); - assertThat(result.model, is(notNullValue())); - assertThat(result.model.value(), is(newModelValue)); + assertThat(result).isNotNull(); + assertThat(result.id).isEqualTo(doc.id); + assertThat(result.model).isNotNull(); + assertThat(result.model.value()).isEqualTo(newModelValue); } @Test // DATAMONGO-702 @@ -1831,13 +1878,13 @@ public void queryShouldSupportRealAndAliasedPropertyNamesForFieldInclusions() { ObjectWith3AliasedFields result = template.findOne(query, ObjectWith3AliasedFields.class); - assertThat(result.id, is(obj.id)); - assertThat(result.property1, is(nullValue())); - assertThat(result.property2, is(obj.property2)); - assertThat(result.property3, is(obj.property3)); + assertThat(result.id).isEqualTo(obj.id); + assertThat(result.property1).isNull(); + assertThat(result.property2).isEqualTo(obj.property2); + assertThat(result.property3).isEqualTo(obj.property3); } - @Test // DATAMONGO-702 + @Test // DATAMONGO-702, DATAMONGO-2294 public void queryShouldSupportRealAndAliasedPropertyNamesForFieldExclusions() { ObjectWith3AliasedFields obj = new ObjectWith3AliasedFields(); @@ -1850,15 +1897,14 @@ public void queryShouldSupportRealAndAliasedPropertyNamesForFieldExclusions() { Query query = new Query(Criteria.where("id").is(obj.id)); query.fields() // - .exclude("property2") // real property name - .exclude("prop3"); // aliased property name + .exclude("property2", "prop3"); // real property name, aliased property name ObjectWith3AliasedFields result = 
template.findOne(query, ObjectWith3AliasedFields.class); - assertThat(result.id, is(obj.id)); - assertThat(result.property1, is(obj.property1)); - assertThat(result.property2, is(nullValue())); - assertThat(result.property3, is(nullValue())); + assertThat(result.id).isEqualTo(obj.id); + assertThat(result.property1).isEqualTo(obj.property1); + assertThat(result.property2).isNull(); + assertThat(result.property3).isNull(); } @Test // DATAMONGO-702 @@ -1885,22 +1931,22 @@ public void findMultipleWithQueryShouldSupportRealAndAliasedPropertyNamesForFiel List results = template.find(query, ObjectWith3AliasedFields.class); - assertThat(results, is(notNullValue())); - assertThat(results.size(), is(2)); + assertThat(results).isNotNull(); + assertThat(results.size()).isEqualTo(2); ObjectWith3AliasedFields result0 = results.get(0); - assertThat(result0, is(notNullValue())); - assertThat(result0.id, is(obj0.id)); - assertThat(result0.property1, is(obj0.property1)); - assertThat(result0.property2, is(nullValue())); - assertThat(result0.property3, is(nullValue())); + assertThat(result0).isNotNull(); + assertThat(result0.id).isEqualTo(obj0.id); + assertThat(result0.property1).isEqualTo(obj0.property1); + assertThat(result0.property2).isNull(); + assertThat(result0.property3).isNull(); ObjectWith3AliasedFields result1 = results.get(1); - assertThat(result1, is(notNullValue())); - assertThat(result1.id, is(obj1.id)); - assertThat(result1.property1, is(obj1.property1)); - assertThat(result1.property2, is(nullValue())); - assertThat(result1.property3, is(nullValue())); + assertThat(result1).isNotNull(); + assertThat(result1.id).isEqualTo(obj1.id); + assertThat(result1.property1).isEqualTo(obj1.property1); + assertThat(result1.property2).isNull(); + assertThat(result1.property3).isNull(); } @Test // DATAMONGO-702 @@ -1927,13 +1973,13 @@ public void queryShouldSupportNestedPropertyNamesForFieldInclusions() { ObjectWith3AliasedFieldsAndNestedAddress result = template.findOne(query, 
ObjectWith3AliasedFieldsAndNestedAddress.class); - assertThat(result.id, is(obj.id)); - assertThat(result.property1, is(nullValue())); - assertThat(result.property2, is(obj.property2)); - assertThat(result.property3, is(nullValue())); - assertThat(result.address, is(notNullValue())); - assertThat(result.address.city, is(nullValue())); - assertThat(result.address.state, is(stateValue)); + assertThat(result.id).isEqualTo(obj.id); + assertThat(result.property1).isNull(); + assertThat(result.property2).isEqualTo(obj.property2); + assertThat(result.property3).isNull(); + assertThat(result.address).isNotNull(); + assertThat(result.address.city).isNull(); + assertThat(result.address.state).isEqualTo(stateValue); } @Test // DATAMONGO-709 @@ -1957,9 +2003,9 @@ public void aQueryRestrictedWithOneRestrictedResultTypeShouldReturnOnlyInstances Query query = Query.query(where("value").is("foo")).restrict(SpecialDoc.class); List result = template.find(query, BaseDoc.class); - assertThat(result, is(notNullValue())); - assertThat(result.size(), is(1)); - assertThat(result.get(0), is(instanceOf(SpecialDoc.class))); + assertThat(result).isNotNull(); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0)).isInstanceOf(SpecialDoc.class); } @Test // DATAMONGO-709 @@ -1983,10 +2029,10 @@ public void aQueryRestrictedWithMultipleRestrictedResultTypesShouldReturnOnlyIns Query query = Query.query(where("value").is("foo")).restrict(BaseDoc.class, VerySpecialDoc.class); List result = template.find(query, BaseDoc.class); - assertThat(result, is(notNullValue())); - assertThat(result.size(), is(2)); - assertThat(result.get(0).getClass(), is((Object) BaseDoc.class)); - assertThat(result.get(1).getClass(), is((Object) VerySpecialDoc.class)); + assertThat(result).isNotNull(); + assertThat(result.size()).isEqualTo(2); + assertThat(result.get(0).getClass()).isEqualTo((Object) BaseDoc.class); + assertThat(result.get(1).getClass()).isEqualTo((Object) VerySpecialDoc.class); } @Test // 
DATAMONGO-709 @@ -2010,11 +2056,11 @@ public void aQueryWithNoRestrictedResultTypesShouldReturnAllInstancesWithinTheGi Query query = Query.query(where("value").is("foo")); List result = template.find(query, BaseDoc.class); - assertThat(result, is(notNullValue())); - assertThat(result.size(), is(3)); - assertThat(result.get(0).getClass(), is((Object) BaseDoc.class)); - assertThat(result.get(1).getClass(), is((Object) SpecialDoc.class)); - assertThat(result.get(2).getClass(), is((Object) VerySpecialDoc.class)); + assertThat(result).isNotNull(); + assertThat(result.size()).isEqualTo(3); + assertThat(result.get(0).getClass()).isEqualTo((Object) BaseDoc.class); + assertThat(result.get(1).getClass()).isEqualTo((Object) SpecialDoc.class); + assertThat(result.get(2).getClass()).isEqualTo((Object) VerySpecialDoc.class); } @Test // DATAMONGO-771 @@ -2027,9 +2073,21 @@ public void allowInsertWithPlainJsonString() { template.insert(json, "sample"); List result = template.findAll(Sample.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).id, is(id)); - assertThat(result.get(0).field, is(value)); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).id).isEqualTo(id); + assertThat(result.get(0).field).isEqualTo(value); + } + + @Test // DATAMONGO-2028 + public void allowInsertOfDbObjectWithMappedTypes() { + + DBObject dbObject = new BasicDBObject("_id", "foo").append("duration", Duration.ofSeconds(100)); + template.insert(dbObject, "sample"); + List result = template.findAll(org.bson.Document.class, "sample"); + + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).getString("_id")).isEqualTo("foo"); + assertThat(result.get(0).getString("duration")).isEqualTo("PT1M40S"); } @Test // DATAMONGO-816 @@ -2047,11 +2105,11 @@ public void shouldExecuteQueryShouldMapQueryBeforeQueryExecution() { @Override public void processDocument(org.bson.Document document) throws MongoException, DataAccessException { - assertThat(document, 
is(notNullValue())); + assertThat(document).isNotNull(); ObjectWithEnumValue result = template.getConverter().read(ObjectWithEnumValue.class, document); - assertThat(result.value, is(EnumValue.VALUE2)); + assertThat(result.value).isEqualTo(EnumValue.VALUE2); } }); } @@ -2063,17 +2121,17 @@ public void updateFirstShouldIncreaseVersionForVersionedEntity() { person.firstname = "Dave"; person.lastname = "Matthews"; template.save(person); - assertThat(person.id, is(notNullValue())); + assertThat(person.id).isNotNull(); Query qry = query(where("id").is(person.id)); VersionedPerson personAfterFirstSave = template.findOne(qry, VersionedPerson.class); - assertThat(personAfterFirstSave.version, is(0L)); + assertThat(personAfterFirstSave.version).isEqualTo(0L); template.updateFirst(qry, Update.update("lastname", "Bubu"), VersionedPerson.class); VersionedPerson personAfterUpdateFirst = template.findOne(qry, VersionedPerson.class); - assertThat(personAfterUpdateFirst.version, is(1L)); - assertThat(personAfterUpdateFirst.lastname, is("Bubu")); + assertThat(personAfterUpdateFirst.version).isEqualTo(1L); + assertThat(personAfterUpdateFirst.lastname).isEqualTo("Bubu"); } @Test // DATAMONGO-811 @@ -2093,9 +2151,9 @@ public void updateFirstShouldIncreaseVersionOnlyForFirstMatchingEntity() { for (VersionedPerson p : template.find(q, VersionedPerson.class)) { if ("Metthews".equals(p.lastname)) { - assertThat(p.version, equalTo(Long.valueOf(1))); + assertThat(p.version).isEqualTo(Long.valueOf(1)); } else { - assertThat(p.version, equalTo(Long.valueOf(0))); + assertThat(p.version).isEqualTo(Long.valueOf(0)); } } } @@ -2116,7 +2174,7 @@ public void updateMultiShouldIncreaseVersionOfAllUpdatedEntities() { template.updateMulti(q, Update.update("lastname", "Metthews"), VersionedPerson.class); for (VersionedPerson p : template.find(q, VersionedPerson.class)) { - assertThat(p.version, equalTo(Long.valueOf(1))); + assertThat(p.version).isEqualTo(Long.valueOf(1)); } } @@ -2132,12 +2190,12 @@ 
public void itShouldBePossibleToReuseAnExistingQuery() { Query query = new Query(); query.addCriteria(where("_id").in("42", "43")); - assertThat(template.count(query, Sample.class), is(1L)); + assertThat(template.count(query, Sample.class)).isEqualTo(1L); query.with(PageRequest.of(0, 10)); query.with(Sort.by("field")); - assertThat(template.find(query, Sample.class), is(not(empty()))); + assertThat(template.find(query, Sample.class)).isNotEmpty(); } @Test // DATAMONGO-807 @@ -2153,8 +2211,8 @@ public void findAndModifyShouldRetrainTypeInformationWithinUpdatedType() { template.findAndModify(query, update, Document.class); Document retrieved = template.findOne(query, Document.class); - assertThat(retrieved.model, instanceOf(ModelA.class)); - assertThat(retrieved.model.value(), equalTo("value2")); + assertThat(retrieved.model).isInstanceOf(ModelA.class); + assertThat(retrieved.model.value()).isEqualTo("value2"); } @Test // DATAMONGO-1210 @@ -2162,7 +2220,7 @@ public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnDocumentW DocumentWithNestedCollection doc = new DocumentWithNestedCollection(); - Map entry = new HashMap(); + Map entry = new HashMap<>(); entry.put("key1", new ModelA("value1")); doc.models.add(entry); @@ -2173,22 +2231,22 @@ public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnDocumentW Query query = query(where("id").is(doc.id)); Update update = Update.update("models", Collections.singletonList(entry)); - assertThat(template.findOne(query, DocumentWithNestedCollection.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithNestedCollection.class)).isNotNull(); template.findAndModify(query, update, DocumentWithNestedCollection.class); DocumentWithNestedCollection retrieved = template.findOne(query, DocumentWithNestedCollection.class); - assertThat(retrieved, is(notNullValue())); - assertThat(retrieved.id, is(doc.id)); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.id).isEqualTo(doc.id); - 
assertThat(retrieved.models.get(0).entrySet(), hasSize(2)); + assertThat(retrieved.models.get(0).entrySet()).hasSize(2); - assertThat(retrieved.models.get(0).get("key1"), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(0).get("key1").value(), equalTo("value1")); + assertThat(retrieved.models.get(0).get("key1")).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(0).get("key1").value()).isEqualTo("value1"); - assertThat(retrieved.models.get(0).get("key2"), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(0).get("key2").value(), equalTo("value2")); + assertThat(retrieved.models.get(0).get("key2")).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(0).get("key2").value()).isEqualTo("value2"); } @Test // DATAMONGO-1210 @@ -2196,7 +2254,7 @@ public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnDocumentW DocumentWithNestedCollection doc = new DocumentWithNestedCollection(); - Map entry = new HashMap(); + Map entry = new HashMap<>(); entry.put("key1", new ModelA("value1")); doc.models.add(entry); @@ -2207,22 +2265,22 @@ public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnDocumentW Query query = query(where("id").is(doc.id)); Update update = Update.update("models.0", entry); - assertThat(template.findOne(query, DocumentWithNestedCollection.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithNestedCollection.class)).isNotNull(); template.findAndModify(query, update, DocumentWithNestedCollection.class); DocumentWithNestedCollection retrieved = template.findOne(query, DocumentWithNestedCollection.class); - assertThat(retrieved, is(notNullValue())); - assertThat(retrieved.id, is(doc.id)); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.id).isEqualTo(doc.id); - assertThat(retrieved.models.get(0).entrySet(), hasSize(2)); + assertThat(retrieved.models.get(0).entrySet()).hasSize(2); - assertThat(retrieved.models.get(0).get("key1"), instanceOf(ModelA.class)); - 
assertThat(retrieved.models.get(0).get("key1").value(), equalTo("value1")); + assertThat(retrieved.models.get(0).get("key1")).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(0).get("key1").value()).isEqualTo("value1"); - assertThat(retrieved.models.get(0).get("key2"), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(0).get("key2").value(), equalTo("value2")); + assertThat(retrieved.models.get(0).get("key2")).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(0).get("key2").value()).isEqualTo("value2"); } @Test // DATAMONGO-1210 @@ -2230,7 +2288,7 @@ public void findAndModifyShouldAddTypeInformationOnDocumentWithNestedCollectionO DocumentWithNestedCollection doc = new DocumentWithNestedCollection(); - Map entry = new HashMap(); + Map entry = new HashMap<>(); entry.put("key1", new ModelA("value1")); doc.models.add(entry); @@ -2239,30 +2297,30 @@ public void findAndModifyShouldAddTypeInformationOnDocumentWithNestedCollectionO Query query = query(where("id").is(doc.id)); Update update = Update.update("models.1", Collections.singletonMap("key2", new ModelA("value2"))); - assertThat(template.findOne(query, DocumentWithNestedCollection.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithNestedCollection.class)).isNotNull(); template.findAndModify(query, update, DocumentWithNestedCollection.class); DocumentWithNestedCollection retrieved = template.findOne(query, DocumentWithNestedCollection.class); - assertThat(retrieved, is(notNullValue())); - assertThat(retrieved.id, is(doc.id)); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.id).isEqualTo(doc.id); - assertThat(retrieved.models.get(0).entrySet(), hasSize(1)); - assertThat(retrieved.models.get(1).entrySet(), hasSize(1)); + assertThat(retrieved.models.get(0).entrySet()).hasSize(1); + assertThat(retrieved.models.get(1).entrySet()).hasSize(1); - assertThat(retrieved.models.get(0).get("key1"), instanceOf(ModelA.class)); - 
assertThat(retrieved.models.get(0).get("key1").value(), equalTo("value1")); + assertThat(retrieved.models.get(0).get("key1")).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(0).get("key1").value()).isEqualTo("value1"); - assertThat(retrieved.models.get(1).get("key2"), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(1).get("key2").value(), equalTo("value2")); + assertThat(retrieved.models.get(1).get("key2")).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(1).get("key2").value()).isEqualTo("value2"); } @Test // DATAMONGO-1210 public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnEmbeddedDocumentWithCollectionWhenUpdatingPositionedElement() throws Exception { - List models = new ArrayList(); + List models = new ArrayList<>(); models.add(new ModelA("value1")); DocumentWithEmbeddedDocumentWithCollection doc = new DocumentWithEmbeddedDocumentWithCollection( @@ -2273,23 +2331,23 @@ public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnEmbeddedD Query query = query(where("id").is(doc.id)); Update update = Update.update("embeddedDocument.models.0", new ModelA("value2")); - assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class)).isNotNull(); template.findAndModify(query, update, DocumentWithEmbeddedDocumentWithCollection.class); DocumentWithEmbeddedDocumentWithCollection retrieved = template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class); - assertThat(retrieved, notNullValue()); - assertThat(retrieved.embeddedDocument.models, hasSize(1)); - assertThat(retrieved.embeddedDocument.models.get(0).value(), is("value2")); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.embeddedDocument.models).hasSize(1); + assertThat(retrieved.embeddedDocument.models.get(0).value()).isEqualTo("value2"); } @Test // DATAMONGO-1210 public void 
findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnEmbeddedDocumentWithCollectionWhenUpdatingSecondElement() throws Exception { - List models = new ArrayList(); + List models = new ArrayList<>(); models.add(new ModelA("value1")); DocumentWithEmbeddedDocumentWithCollection doc = new DocumentWithEmbeddedDocumentWithCollection( @@ -2300,17 +2358,17 @@ public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnEmbeddedDocu Query query = query(where("id").is(doc.id)); Update update = Update.update("embeddedDocument.models.1", new ModelA("value2")); - assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class)).isNotNull(); template.findAndModify(query, update, DocumentWithEmbeddedDocumentWithCollection.class); DocumentWithEmbeddedDocumentWithCollection retrieved = template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class); - assertThat(retrieved, notNullValue()); - assertThat(retrieved.embeddedDocument.models, hasSize(2)); - assertThat(retrieved.embeddedDocument.models.get(0).value(), is("value1")); - assertThat(retrieved.embeddedDocument.models.get(1).value(), is("value2")); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.embeddedDocument.models).hasSize(2); + assertThat(retrieved.embeddedDocument.models.get(0).value()).isEqualTo("value1"); + assertThat(retrieved.embeddedDocument.models.get(1).value()).isEqualTo("value2"); } @Test // DATAMONGO-1210 @@ -2328,16 +2386,16 @@ public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnEmbeddedDocu Update update = Update.update("embeddedDocument", new DocumentWithCollection(Arrays. 
asList(new ModelA("value2")))); - assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class)).isNotNull(); template.findAndModify(query, update, DocumentWithEmbeddedDocumentWithCollection.class); DocumentWithEmbeddedDocumentWithCollection retrieved = template.findOne(query, DocumentWithEmbeddedDocumentWithCollection.class); - assertThat(retrieved, notNullValue()); - assertThat(retrieved.embeddedDocument.models, hasSize(1)); - assertThat(retrieved.embeddedDocument.models.get(0).value(), is("value2")); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.embeddedDocument.models).hasSize(1); + assertThat(retrieved.embeddedDocument.models.get(0).value()).isEqualTo("value2"); } @Test // DATAMONGO-1210 @@ -2345,7 +2403,7 @@ public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnDocumentWith DocumentWithNestedList doc = new DocumentWithNestedList(); - List entry = new ArrayList(); + List entry = new ArrayList<>(); entry.add(new ModelA("value1")); doc.models.add(entry); @@ -2353,7 +2411,7 @@ public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnDocumentWith Query query = query(where("id").is(doc.id)); - assertThat(template.findOne(query, DocumentWithNestedList.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithNestedList.class)).isNotNull(); Update update = Update.update("models.0.1", new ModelA("value2")); @@ -2361,16 +2419,211 @@ public void findAndModifyShouldAddTypeInformationWithinUpdatedTypeOnDocumentWith DocumentWithNestedList retrieved = template.findOne(query, DocumentWithNestedList.class); - assertThat(retrieved, is(notNullValue())); - assertThat(retrieved.id, is(doc.id)); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.id).isEqualTo(doc.id); + + assertThat(retrieved.models.get(0)).hasSize(2); + + assertThat(retrieved.models.get(0).get(0)).isInstanceOf(ModelA.class); 
+ assertThat(retrieved.models.get(0).get(0).value()).isEqualTo("value1"); + + assertThat(retrieved.models.get(0).get(1)).isInstanceOf(ModelA.class); + assertThat(retrieved.models.get(0).get(1).value()).isEqualTo("value2"); + } + + @Test // DATAMONGO-1827 + public void findAndReplaceShouldReplaceDocument() { + + org.bson.Document doc = new org.bson.Document("foo", "bar"); + template.save(doc, "findandreplace"); + + org.bson.Document replacement = new org.bson.Document("foo", "baz"); + org.bson.Document previous = template.findAndReplace(query(where("foo").is("bar")), replacement, + FindAndReplaceOptions.options(), org.bson.Document.class, "findandreplace"); + + assertThat(previous).containsEntry("foo", "bar"); + assertThat(template.findOne(query(where("foo").is("baz")), org.bson.Document.class, "findandreplace")).isNotNull(); + } + + @Test // DATAMONGO-1827 + @MongoVersion(asOf = "3.6") + public void findAndReplaceShouldErrorOnIdPresent() { + + template.save(new MyPerson("Walter")); + + MyPerson replacement = new MyPerson("Heisenberg"); + replacement.id = "invalid-id"; + + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> template.findAndReplace(query(where("name").is("Walter")), replacement)); + } + + @Test // DATAMONGO-1827 + public void findAndReplaceShouldErrorOnSkip() { + + assertThatIllegalArgumentException().isThrownBy( + () -> template.findAndReplace(query(where("name").is("Walter")).skip(10), new MyPerson("Heisenberg"))); + } + + @Test // DATAMONGO-1827 + public void findAndReplaceShouldErrorOnLimit() { + + assertThatIllegalArgumentException().isThrownBy( + () -> template.findAndReplace(query(where("name").is("Walter")).limit(10), new MyPerson("Heisenberg"))); + } + + @Test // DATAMONGO-1827 + public void findAndReplaceShouldConsiderSortAndUpdateFirstIfMultipleFound() { + + MyPerson walter1 = new MyPerson("Walter 1"); + MyPerson walter2 = new MyPerson("Walter 2"); + + template.save(walter1); + template.save(walter2); + 
+ MyPerson replacement = new MyPerson("Heisenberg"); + + template.findAndReplace(query(where("name").regex("Walter.*")).with(Sort.by(Direction.DESC, "name")), replacement); + + assertThat(template.findAll(MyPerson.class)).hasSize(2).contains(walter1).doesNotContain(walter2); + } + + @Test // DATAMONGO-1827 + public void findAndReplaceShouldReplaceObject() { + + MyPerson person = new MyPerson("Walter"); + template.save(person); + + MyPerson previous = template.findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg")); + + assertThat(previous.getName()).isEqualTo("Walter"); + assertThat(template.findOne(query(where("id").is(person.id)), MyPerson.class)).hasFieldOrPropertyWithValue("name", + "Heisenberg"); + } + + @Test // DATAMONGO-1827 + public void findAndReplaceShouldConsiderFields() { + + MyPerson person = new MyPerson("Walter"); + person.address = new Address("TX", "Austin"); + template.save(person); + + Query query = query(where("name").is("Walter")); + query.fields().include("address"); + + MyPerson previous = template.findAndReplace(query, new MyPerson("Heisenberg")); + + assertThat(previous.getName()).isNull(); + assertThat(previous.getAddress()).isEqualTo(person.address); + } + + @Test // DATAMONGO-1827 + public void findAndReplaceNonExistingWithUpsertFalse() { + + MyPerson previous = template.findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg")); + + assertThat(previous).isNull(); + assertThat(template.findAll(MyPerson.class)).isEmpty(); + } + + @Test // DATAMONGO-1827 + public void findAndReplaceNonExistingWithUpsertTrue() { + + MyPerson previous = template.findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg"), + FindAndReplaceOptions.options().upsert()); + + assertThat(previous).isNull(); + assertThat(template.findAll(MyPerson.class)).hasSize(1); + } + + @Test // DATAMONGO-1827 + public void findAndReplaceShouldReplaceObjectReturingNew() { + + MyPerson person = new MyPerson("Walter"); + 
template.save(person); + + MyPerson updated = template.findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg"), + FindAndReplaceOptions.options().returnNew()); + + assertThat(updated.getName()).isEqualTo("Heisenberg"); + } + + @Test // DATAMONGO-1827 + public void findAndReplaceShouldProjectReturnedObjectCorrectly() { + + template.save(new MyPerson("Walter")); + + MyPersonProjection projection = template.findAndReplace(query(where("name").is("Walter")), + new MyPerson("Heisenberg"), FindAndReplaceOptions.empty(), MyPerson.class, MyPersonProjection.class); + + assertThat(projection.getName()).isEqualTo("Walter"); + } + + @Test // GH-4707 + public void findAndReplaceUpsertsObjectWithComplexId() { + + MyId id = new MyId(); + id.id = Instant.now().minusSeconds(2); + id.first = "foo"; + id.second = "bar"; + id.time = Instant.now().minusSeconds(3); + + TypeWithMyId replacement = new TypeWithMyId(); + replacement.value = "spring"; + + template.findAndReplace(query(where("id").is(id)), replacement, FindAndReplaceOptions.options().upsert()); + template.doInCollection(TypeWithMyId.class, collection -> { + + org.bson.Document dbValue = collection.find(new org.bson.Document("_id.first", "foo")).first(); + + assertThat(dbValue).isNotNull(); + assertThat(dbValue.getEmbedded(List.of("_id", "_id"), Object.class)).isInstanceOf(Date.class); + assertThat(dbValue.getEmbedded(List.of("_id", "t"), Object.class)).isInstanceOf(Date.class); + }); + } + + @Test // GH-4609 + public void shouldReadNestedProjection() { + + MyPerson walter = new MyPerson("Walter"); + walter.address = new Address("spring", "data"); + template.save(walter); + + PersonPWA result = template.query(MyPerson.class) + .as(PersonPWA.class) + .matching(where("id").is(walter.id)) + .firstValue(); + + assertThat(result.getAddress().getCity()).isEqualTo("data"); + } + + interface PersonPWA { + String getName(); + AdressProjection getAddress(); + } + + interface AdressProjection { + String getCity(); + } 
+ + @Test // GH-4300 + public void findAndReplaceShouldAllowNativeDomainTypesAndReturnAProjection() { + + MyPerson person = new MyPerson("Walter"); + person.address = new Address("TX", "Austin"); + template.save(person); - assertThat(retrieved.models.get(0), hasSize(2)); + MyPerson previous = template.findAndReplace(query(where("name").is("Walter")), + new org.bson.Document("name", "Heisenberg"), FindAndReplaceOptions.options(), org.bson.Document.class, + "myPerson", MyPerson.class); - assertThat(retrieved.models.get(0).get(0), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(0).get(0).value(), equalTo("value1")); + assertThat(previous).isNotNull(); + assertThat(previous.getAddress()).isEqualTo(person.address); - assertThat(retrieved.models.get(0).get(1), instanceOf(ModelA.class)); - assertThat(retrieved.models.get(0).get(1).value(), equalTo("value2")); + org.bson.Document loaded = template.execute(MyPerson.class, collection -> { + return collection.find(new org.bson.Document("name", "Heisenberg")).first(); + }); + assertThat(loaded.get("_id")).isEqualTo(new ObjectId(person.id)); } @Test // DATAMONGO-407 @@ -2391,11 +2644,11 @@ public void updatesShouldRetainTypeInformationEvenForCollections() { Query findQuery = new Query(Criteria.where("id").is(doc.id)); DocumentWithCollection result = template.findOne(findQuery, DocumentWithCollection.class); - assertThat(result, is(notNullValue())); - assertThat(result.id, is(doc.id)); - assertThat(result.models, is(notNullValue())); - assertThat(result.models, hasSize(1)); - assertThat(result.models.get(0).value(), is(newModelValue)); + assertThat(result).isNotNull(); + assertThat(result.id).isEqualTo(doc.id); + assertThat(result.models).isNotNull(); + assertThat(result.models).hasSize(1); + assertThat(result.models.get(0).value()).isEqualTo(newModelValue); } @Test // DATAMONGO-812 @@ -2405,12 +2658,12 @@ public void updateMultiShouldAddValuesCorrectlyWhenUsingPushEachWithComplexTypes DocumentWithCollection 
document = new DocumentWithCollection(Collections. emptyList()); template.save(document); Query query = query(where("id").is(document.id)); - assumeThat(template.findOne(query, DocumentWithCollection.class).models, hasSize(1)); + assertThat(template.findOne(query, DocumentWithCollection.class).models).isEmpty(); Update update = new Update().push("models").each(new ModelA("model-b"), new ModelA("model-c")); template.updateMulti(query, update, DocumentWithCollection.class); - assertThat(template.findOne(query, DocumentWithCollection.class).models, hasSize(3)); + assertThat(template.findOne(query, DocumentWithCollection.class).models).hasSize(2); } @Test // DATAMONGO-812 @@ -2422,12 +2675,12 @@ public void updateMultiShouldAddValuesCorrectlyWhenUsingPushEachWithSimpleTypes( template.save(document); Query query = query(where("id").is(document.id)); - assumeThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values, hasSize(1)); + assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values).hasSize(1); Update update = new Update().push("values").each("data", "mongodb"); template.updateMulti(query, update, DocumentWithCollectionOfSimpleType.class); - assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values, hasSize(3)); + assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values).hasSize(3); } @Test // DATAMONOGO-828 @@ -2436,31 +2689,7 @@ public void updateFirstShouldDoNothingWhenCalledForEntitiesThatDoNotExist() { Query q = query(where("id").is(Long.MIN_VALUE)); template.updateFirst(q, Update.update("lastname", "supercalifragilisticexpialidocious"), VersionedPerson.class); - assertThat(template.findOne(q, VersionedPerson.class), nullValue()); - } - - @Test // DATAMONGO-354, DATAMONGO-1824 - @MongoVersion(until = "3.6") - @SuppressWarnings("deprecation") - public void testUpdateShouldAllowMultiplePushAll() { - - DocumentWithMultipleCollections doc = new 
DocumentWithMultipleCollections(); - doc.id = "1234"; - doc.string1 = Arrays.asList("spring"); - doc.string2 = Arrays.asList("one"); - - template.save(doc); - - Update update = new Update().pushAll("string1", new Object[] { "data", "mongodb" }); - update.pushAll("string2", new String[] { "two", "three" }); - - Query findQuery = new Query(Criteria.where("id").is(doc.id)); - template.updateFirst(findQuery, update, DocumentWithMultipleCollections.class); - - DocumentWithMultipleCollections result = template.findOne(findQuery, DocumentWithMultipleCollections.class); - assertThat(result.string1, hasItems("spring", "data", "mongodb")); - assertThat(result.string2, hasItems("one", "two", "three")); - + assertThat(template.findOne(q, VersionedPerson.class)).isNull(); } @Test // DATAMONGO-404 @@ -2486,10 +2715,10 @@ public void updateWithPullShouldRemoveNestedItemFromDbRefAnnotatedCollection() { DocumentWithDBRefCollection result = template.findOne(qry, DocumentWithDBRefCollection.class); - assertThat(result, is(notNullValue())); - assertThat(result.dbRefAnnotatedList, hasSize(1)); - assertThat(result.dbRefAnnotatedList.get(0), is(notNullValue())); - assertThat(result.dbRefAnnotatedList.get(0).id, is((Object) "1")); + assertThat(result).isNotNull(); + assertThat(result.dbRefAnnotatedList).hasSize(1); + assertThat(result.dbRefAnnotatedList.get(0)).isNotNull(); + assertThat(result.dbRefAnnotatedList.get(0).id).isEqualTo((Object) "1"); } @Test // DATAMONGO-404 @@ -2515,10 +2744,10 @@ public void updateWithPullShouldRemoveNestedItemFromDbRefAnnotatedCollectionWhen DocumentWithDBRefCollection result = template.findOne(qry, DocumentWithDBRefCollection.class); - assertThat(result, is(notNullValue())); - assertThat(result.dbRefAnnotatedList, hasSize(1)); - assertThat(result.dbRefAnnotatedList.get(0), is(notNullValue())); - assertThat(result.dbRefAnnotatedList.get(0).id, is((Object) "1")); + assertThat(result).isNotNull(); + assertThat(result.dbRefAnnotatedList).hasSize(1); + 
assertThat(result.dbRefAnnotatedList.get(0)).isNotNull(); + assertThat(result.dbRefAnnotatedList.get(0).id).isEqualTo((Object) "1"); } @Test // DATAMONGO-852 @@ -2528,17 +2757,17 @@ public void updateShouldNotBumpVersionNumberIfVersionPropertyIncludedInUpdate() person.firstname = "Dave"; person.lastname = "Matthews"; template.save(person); - assertThat(person.id, is(notNullValue())); + assertThat(person.id).isNotNull(); Query qry = query(where("id").is(person.id)); VersionedPerson personAfterFirstSave = template.findOne(qry, VersionedPerson.class); - assertThat(personAfterFirstSave.version, is(0L)); + assertThat(personAfterFirstSave.version).isEqualTo(0L); template.updateFirst(qry, Update.update("lastname", "Bubu").set("version", 100L), VersionedPerson.class); VersionedPerson personAfterUpdateFirst = template.findOne(qry, VersionedPerson.class); - assertThat(personAfterUpdateFirst.version, is(100L)); - assertThat(personAfterUpdateFirst.lastname, is("Bubu")); + assertThat(personAfterUpdateFirst.version).isEqualTo(100L); + assertThat(personAfterUpdateFirst.lastname).isEqualTo("Bubu"); } @Test // DATAMONGO-468 @@ -2561,10 +2790,10 @@ public void shouldBeAbleToUpdateDbRefPropertyWithDomainObject() { DocumentWithDBRefCollection updatedDoc = template.findOne(qry, DocumentWithDBRefCollection.class); - assertThat(updatedDoc, is(notNullValue())); - assertThat(updatedDoc.dbRefProperty, is(notNullValue())); - assertThat(updatedDoc.dbRefProperty.id, is(sample2.id)); - assertThat(updatedDoc.dbRefProperty.field, is(sample2.field)); + assertThat(updatedDoc).isNotNull(); + assertThat(updatedDoc.dbRefProperty).isNotNull(); + assertThat(updatedDoc.dbRefProperty.id).isEqualTo(sample2.id); + assertThat(updatedDoc.dbRefProperty.field).isEqualTo(sample2.field); } @Test // DATAMONGO-862 @@ -2580,7 +2809,7 @@ public void testUpdateShouldWorkForPathsOnInterfaceMethods() { template.findAndModify(query, update, DocumentWithCollection.class); DocumentWithCollection result = 
template.findOne(query(where("id").is(document.id)), DocumentWithCollection.class); - assertThat(result.models.get(0).value(), is("mongodb")); + assertThat(result.models.get(0).value()).isEqualTo("mongodb"); } @Test // DATAMONGO-773 @@ -2600,11 +2829,11 @@ public void testShouldSupportQueryWithIncludedDbRefField() { List result = template.find(qry, DocumentWithDBRefCollection.class); - assertThat(result, is(notNullValue())); - assertThat(result, hasSize(1)); - assertThat(result.get(0), is(notNullValue())); - assertThat(result.get(0).dbRefProperty, is(notNullValue())); - assertThat(result.get(0).dbRefProperty.field, is(sample.field)); + assertThat(result).isNotNull(); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isNotNull(); + assertThat(result.get(0).dbRefProperty).isNotNull(); + assertThat(result.get(0).dbRefProperty.field).isEqualTo(sample.field); } @Test // DATAMONGO-566 @@ -2618,11 +2847,13 @@ public void testFindAllAndRemoveFullyReturnsAndRemovesDocuments() { Query qry = query(where("field").in("spring", "mongodb")); List result = template.findAllAndRemove(qry, Sample.class); - assertThat(result, hasSize(2)); + assertThat(result).hasSize(2); - assertThat(template.getDb().getCollection("sample").count( - new org.bson.Document("field", new org.bson.Document("$in", Arrays.asList("spring", "mongodb")))), is(0L)); - assertThat(template.getDb().getCollection("sample").count(new org.bson.Document("field", "data")), is(1L)); + assertThat(template.getDb().getCollection("sample").countDocuments( + new org.bson.Document("field", new org.bson.Document("$in", Arrays.asList("spring", "mongodb"))))) + .isEqualTo(0L); + assertThat(template.getDb().getCollection("sample").countDocuments(new org.bson.Document("field", "data"))) + .isEqualTo(1L); } @Test // DATAMONGO-1001 @@ -2649,7 +2880,7 @@ public void shouldAllowSavingOfLazyLoadedDbRefs() { loadedContent.setText("data"); template.save(loadedContent); - assertThat(template.findById(content.id, 
SomeContent.class).getText(), is("data")); + assertThat(template.findById(content.id, SomeContent.class).getText()).isEqualTo("data"); } @@ -2682,8 +2913,8 @@ public void savingAndReassigningLazyLoadingProxies() { SomeMessage savedMessage = template.findById(message.id, SomeMessage.class); - assertThat(savedMessage.dbrefContent.text, is(content.text)); - assertThat(savedMessage.normalContent.text, is(content.text)); + assertThat(savedMessage.dbrefContent.text).isEqualTo(content.text); + assertThat(savedMessage.normalContent.text).isEqualTo(content.text); } @Test // DATAMONGO-884 @@ -2707,9 +2938,9 @@ public void callingNonObjectMethodsOnLazyLoadingProxyShouldReturnNullIfUnderlyin template.remove(content); - assertThat(savedTmpl.getContent().toString(), is("someContent:C1$LazyLoadingProxy")); - assertThat(savedTmpl.getContent(), is(instanceOf(LazyLoadingProxy.class))); - assertThat(savedTmpl.getContent().getText(), is(nullValue())); + assertThat(savedTmpl.getContent().toString()).isEqualTo("someContent:C1$LazyLoadingProxy"); + assertThat(savedTmpl.getContent()).isInstanceOf(LazyLoadingProxy.class); + assertThat(savedTmpl.getContent().getText()).isNull(); } @Test // DATAMONGO-471 @@ -2720,12 +2951,12 @@ public void updateMultiShouldAddValuesCorrectlyWhenUsingAddToSetWithEach() { template.save(document); Query query = query(where("id").is(document.id)); - assumeThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values, hasSize(1)); + assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values).hasSize(1); Update update = new Update().addToSet("values").each("data", "mongodb"); template.updateMulti(query, update, DocumentWithCollectionOfSimpleType.class); - assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values, hasSize(3)); + assertThat(template.findOne(query, DocumentWithCollectionOfSimpleType.class).values).hasSize(3); } @Test // DATAMONGO-1210 @@ -2738,7 +2969,7 @@ public void 
findAndModifyAddToSetWithEachShouldNotAddDuplicatesNorTypeHintForSim Query query = query(where("id").is(doc.id)); - assertThat(template.findOne(query, DocumentWithCollectionOfSamples.class), notNullValue()); + assertThat(template.findOne(query, DocumentWithCollectionOfSamples.class)).isNotNull(); Update update = new Update().addToSet("samples").each(new Sample(null, "sample2"), new Sample(null, "sample1")); @@ -2746,10 +2977,10 @@ public void findAndModifyAddToSetWithEachShouldNotAddDuplicatesNorTypeHintForSim DocumentWithCollectionOfSamples retrieved = template.findOne(query, DocumentWithCollectionOfSamples.class); - assertThat(retrieved, notNullValue()); - assertThat(retrieved.samples, hasSize(2)); - assertThat(retrieved.samples.get(0).field, is("sample1")); - assertThat(retrieved.samples.get(1).field, is("sample2")); + assertThat(retrieved).isNotNull(); + assertThat(retrieved.samples).hasSize(2); + assertThat(retrieved.samples.get(0).field).isEqualTo("sample1"); + assertThat(retrieved.samples.get(1).field).isEqualTo("sample2"); } @Test // DATAMONGO-888 @@ -2767,7 +2998,7 @@ public void sortOnIdFieldPropertyShouldBeMappedCorrectly() { template.save(two); Query query = query(where("_id").in("1", "2")).with(Sort.by(Direction.DESC, "someIdKey")); - assertThat(template.find(query, DoucmentWithNamedIdField.class), contains(two, one)); + assertThat(template.find(query, DoucmentWithNamedIdField.class)).containsExactly(two, one); } @Test // DATAMONGO-888 @@ -2785,7 +3016,7 @@ public void sortOnAnnotatedFieldPropertyShouldBeMappedCorrectly() { template.save(two); Query query = query(where("_id").in("1", "2")).with(Sort.by(Direction.DESC, "value")); - assertThat(template.find(query, DoucmentWithNamedIdField.class), contains(two, one)); + assertThat(template.find(query, DoucmentWithNamedIdField.class)).containsExactly(two, one); } @Test // DATAMONGO-913 @@ -2806,11 +3037,11 @@ public void shouldRetrieveInitializedValueFromDbRefAssociationAfterLoad() { SomeTemplate result = 
template.findOne(query(where("content").is(tmpl.getContent())), SomeTemplate.class); - assertThat(result, is(notNullValue())); - assertThat(result.getContent(), is(notNullValue())); - assertThat(result.getContent().getId(), is(notNullValue())); - assertThat(result.getContent().getName(), is(notNullValue())); - assertThat(result.getContent().getText(), is(content.getText())); + assertThat(result).isNotNull(); + assertThat(result.getContent()).isNotNull(); + assertThat(result.getContent().getId()).isNotNull(); + assertThat(result.getContent().getName()).isNotNull(); + assertThat(result.getContent().getText()).isEqualTo(content.getText()); } @Test // DATAMONGO-913 @@ -2834,8 +3065,8 @@ public void shouldReuseExistingDBRefInQueryFromDbRefAssociationAfterLoad() { // Use lazy-loading-proxy in query result = template.findOne(query(where("content").is(result.getContent())), SomeTemplate.class); - assertNotNull(result.getContent().getName()); - assertThat(result.getContent().getName(), is(content.getName())); + assertThat(result.getContent().getName()).isNotNull(); + assertThat(result.getContent().getName()).isEqualTo(content.getName()); } @Test // DATAMONGO-970 @@ -2844,11 +3075,11 @@ public void insertsAndRemovesBasicDocumentCorrectly() { org.bson.Document object = new org.bson.Document("key", "value"); template.insert(object, "collection"); - assertThat(object.get("_id"), is(notNullValue())); - assertThat(template.findAll(Document.class, "collection"), hasSize(1)); + assertThat(object.get("_id")).isNotNull(); + assertThat(template.findAll(Document.class, "collection")).hasSize(1); template.remove(object, "collection"); - assertThat(template.findAll(Document.class, "collection"), hasSize(0)); + assertThat(template.findAll(Document.class, "collection")).hasSize(0); } @Test // DATAMONGO-1207 @@ -2861,8 +3092,8 @@ public void ignoresNullElementsForInsertAll() { List
          result = template.findAll(Address.class); - assertThat(result, hasSize(2)); - assertThat(result, hasItems(newYork, washington)); + assertThat(result).hasSize(2); + assertThat(result).contains(newYork, washington); } @Test // DATAMONGO-1176 @@ -2875,9 +3106,21 @@ public void generatesIdForInsertAll() { List result = template.findAll(Person.class); - assertThat(result, hasSize(2)); - assertThat(walter.getId(), is(notNullValue())); - assertThat(jesse.getId(), is(notNullValue())); + assertThat(result).hasSize(2); + assertThat(walter.getId()).isNotNull(); + assertThat(jesse.getId()).isNotNull(); + } + + @Test // GH-4944 + public void insertAllShouldConvertIdToTargetTypeBeforeSave() { + + RawStringId walter = new RawStringId(); + walter.value = "walter"; + + RawStringId returned = template.insertAll(List.of(walter)).iterator().next(); + org.bson.Document document = template.execute(RawStringId.class, collection -> collection.find().first()); + + assertThat(returned.id).isEqualTo(document.get("_id")); } @Test // DATAMONGO-1208 @@ -2890,10 +3133,9 @@ public void takesSortIntoAccountWhenStreaming() { Query q = new Query(); q.with(Sort.by(Direction.ASC, "age")); - CloseableIterator stream = template.stream(q, Person.class); + List streamResults = template.stream(q, Person.class).map(Person::getAge).toList(); - assertThat(stream.next().getAge(), is(youngestPerson.getAge())); - assertThat(stream.next().getAge(), is(oldestPerson.getAge())); + assertThat(streamResults).containsExactly(youngestPerson.getAge(), oldestPerson.getAge()); } @Test // DATAMONGO-1208 @@ -2906,10 +3148,10 @@ public void takesLimitIntoAccountWhenStreaming() { Query q = new Query(); q.with(PageRequest.of(0, 1, Sort.by(Direction.ASC, "age"))); - CloseableIterator stream = template.stream(q, Person.class); + Iterator stream = template.stream(q, Person.class).iterator(); - assertThat(stream.next().getAge(), is(youngestPerson.getAge())); - assertThat(stream.hasNext(), is(false)); + 
assertThat(stream.next().getAge()).isEqualTo(youngestPerson.getAge()); + assertThat(stream.hasNext()).isFalse(); } @Test // DATAMONGO-1204 @@ -2930,12 +3172,12 @@ public void resolvesCyclicDBRefCorrectly() { SomeMessage messageLoaded = template.findOne(query(where("id").is(message.id)), SomeMessage.class); SomeContent contentLoaded = template.findOne(query(where("id").is(content.id)), SomeContent.class); - assertThat(messageLoaded.dbrefContent.id, is(contentLoaded.id)); - assertThat(contentLoaded.dbrefMessage.id, is(messageLoaded.id)); + assertThat(messageLoaded.dbrefContent.id).isEqualTo(contentLoaded.id); + assertThat(contentLoaded.dbrefMessage.id).isEqualTo(messageLoaded.id); } - @Test // DATAMONGO-1287 - public void shouldReuseAlreadyResolvedLazyLoadedDBRefWhenUsedAsPersistenceConstrcutorArgument() { + @Test // DATAMONGO-1287, DATAMONGO-2004 + public void shouldReuseAlreadyResolvedLazyLoadedDBRefWhenUsedAsPersistenceConstructorArgument() { Document docInCtor = new Document(); docInCtor.id = "doc-in-ctor"; @@ -2948,8 +3190,8 @@ public void shouldReuseAlreadyResolvedLazyLoadedDBRefWhenUsedAsPersistenceConstr DocumentWithLazyDBrefUsedInPresistenceConstructor loaded = template.findOne(query(where("id").is(source.id)), DocumentWithLazyDBrefUsedInPresistenceConstructor.class); - assertThat(loaded.refToDocUsedInCtor, not(instanceOf(LazyLoadingProxy.class))); - assertThat(loaded.refToDocNotUsedInCtor, nullValue()); + assertThat(loaded.refToDocUsedInCtor).isInstanceOf(LazyLoadingProxy.class); + assertThat(loaded.refToDocNotUsedInCtor).isNull(); } @Test // DATAMONGO-1287 @@ -2967,12 +3209,12 @@ public void shouldNotReuseLazyLoadedDBRefWhenTypeUsedInPersistenceConstrcutorBut DocumentWithLazyDBrefUsedInPresistenceConstructor loaded = template.findOne(query(where("id").is(source.id)), DocumentWithLazyDBrefUsedInPresistenceConstructor.class); - assertThat(loaded.refToDocNotUsedInCtor, instanceOf(LazyLoadingProxy.class)); - assertThat(loaded.refToDocUsedInCtor, nullValue()); 
+ assertThat(loaded.refToDocNotUsedInCtor).isInstanceOf(LazyLoadingProxy.class); + assertThat(loaded.refToDocUsedInCtor).isNull(); } - @Test // DATAMONGO-1287 - public void shouldRespectParamterValueWhenAttemptingToReuseLazyLoadedDBRefUsedInPersistenceConstrcutor() { + @Test // DATAMONGO-1287, DATAMONGO-2004 + public void shouldRespectParameterValueWhenAttemptingToReuseLazyLoadedDBRefUsedInPersistenceConstructor() { Document docInCtor = new Document(); docInCtor.id = "doc-in-ctor"; @@ -2990,8 +3232,8 @@ public void shouldRespectParamterValueWhenAttemptingToReuseLazyLoadedDBRefUsedIn DocumentWithLazyDBrefUsedInPresistenceConstructor loaded = template.findOne(query(where("id").is(source.id)), DocumentWithLazyDBrefUsedInPresistenceConstructor.class); - assertThat(loaded.refToDocUsedInCtor, not(instanceOf(LazyLoadingProxy.class))); - assertThat(loaded.refToDocNotUsedInCtor, instanceOf(LazyLoadingProxy.class)); + assertThat(loaded.refToDocUsedInCtor).isInstanceOf(LazyLoadingProxy.class); + assertThat(loaded.refToDocNotUsedInCtor).isInstanceOf(LazyLoadingProxy.class); } @Test // DATAMONGO-1401 @@ -3007,7 +3249,7 @@ public void updateShouldWorkForTypesContainingGeoJsonTypes() { wgj.description = "datamongo-1401-update"; template.save(wgj); - assertThat(template.findOne(query(where("id").is(wgj.id)), WithGeoJson.class).point, is(equalTo(wgj.point))); + assertThat(template.findOne(query(where("id").is(wgj.id)), WithGeoJson.class).point).isEqualTo(wgj.point); } @Test // DATAMONGO-1404 @@ -3022,7 +3264,7 @@ public void updatesDateValueCorrectlyWhenUsingMinOperator() { template.updateFirst(query(where("id").is(twd.id)), new Update().min("date", cal.getTime()), TypeWithDate.class); TypeWithDate loaded = template.find(query(where("id").is(twd.id)), TypeWithDate.class).get(0); - assertThat(loaded.date, equalTo(cal.getTime())); + assertThat(loaded.date).isEqualTo(cal.getTime()); } @Test // DATAMONGO-1404 @@ -3055,13 +3297,13 @@ public void 
updatesNumericValueCorrectlyWhenUsingMinOperator() { template.updateFirst(query(where("id").is(twn.id)), update, TypeWithNumbers.class); TypeWithNumbers loaded = template.find(query(where("id").is(twn.id)), TypeWithNumbers.class).get(0); - assertThat(loaded.byteVal, equalTo(byteVal)); - assertThat(loaded.doubleVal, equalTo(190D)); - assertThat(loaded.floatVal, equalTo(290F)); - assertThat(loaded.intVal, equalTo(390)); - assertThat(loaded.longVal, equalTo(490L)); - assertThat(loaded.bigIntegerVal, equalTo(new BigInteger("590"))); - assertThat(loaded.bigDeciamVal, equalTo(new BigDecimal("690"))); + assertThat(loaded.byteVal).isEqualTo(byteVal); + assertThat(loaded.doubleVal).isEqualTo(190D); + assertThat(loaded.floatVal).isEqualTo(290F); + assertThat(loaded.intVal).isEqualTo(390); + assertThat(loaded.longVal).isEqualTo(490L); + assertThat(loaded.bigIntegerVal).isEqualTo(new BigInteger("590")); + assertThat(loaded.bigDeciamVal).isEqualTo(new BigDecimal("690")); } @Test // DATAMONGO-1404 @@ -3078,7 +3320,7 @@ public void updatesDateValueCorrectlyWhenUsingMaxOperator() { template.updateFirst(query(where("id").is(twd.id)), new Update().max("date", cal.getTime()), TypeWithDate.class); TypeWithDate loaded = template.find(query(where("id").is(twd.id)), TypeWithDate.class).get(0); - assertThat(loaded.date, equalTo(cal.getTime())); + assertThat(loaded.date).isEqualTo(cal.getTime()); } @Test // DATAMONGO-1404 @@ -3111,13 +3353,13 @@ public void updatesNumericValueCorrectlyWhenUsingMaxOperator() { template.updateFirst(query(where("id").is(twn.id)), update, TypeWithNumbers.class); TypeWithNumbers loaded = template.find(query(where("id").is(twn.id)), TypeWithNumbers.class).get(0); - assertThat(loaded.byteVal, equalTo(byteVal)); - assertThat(loaded.doubleVal, equalTo(290D)); - assertThat(loaded.floatVal, equalTo(390F)); - assertThat(loaded.intVal, equalTo(490)); - assertThat(loaded.longVal, equalTo(590L)); - assertThat(loaded.bigIntegerVal, equalTo(new BigInteger("690"))); - 
assertThat(loaded.bigDeciamVal, equalTo(new BigDecimal("790"))); + assertThat(loaded.byteVal).isEqualTo(byteVal); + assertThat(loaded.doubleVal).isEqualTo(290D); + assertThat(loaded.floatVal).isEqualTo(390F); + assertThat(loaded.intVal).isEqualTo(490); + assertThat(loaded.longVal).isEqualTo(590L); + assertThat(loaded.bigIntegerVal).isEqualTo(new BigInteger("690")); + assertThat(loaded.bigDeciamVal).isEqualTo(new BigDecimal("790")); } @Test // DATAMONGO-1404 @@ -3140,8 +3382,8 @@ public void updatesBigNumberValueUsingStringComparisonWhenUsingMaxOperator() { template.updateFirst(query(where("id").is(twn.id)), update, TypeWithNumbers.class); TypeWithNumbers loaded = template.find(query(where("id").is(twn.id)), TypeWithNumbers.class).get(0); - assertThat(loaded.bigIntegerVal, equalTo(new BigInteger("70"))); - assertThat(loaded.bigDeciamVal, equalTo(new BigDecimal("80"))); + assertThat(loaded.bigIntegerVal).isEqualTo(new BigInteger("70")); + assertThat(loaded.bigDeciamVal).isEqualTo(new BigDecimal("80")); } @Test // DATAMONGO-1404 @@ -3164,11 +3406,11 @@ public void updatesBigNumberValueUsingStringComparisonWhenUsingMinOperator() { template.updateFirst(query(where("id").is(twn.id)), update, TypeWithNumbers.class); TypeWithNumbers loaded = template.find(query(where("id").is(twn.id)), TypeWithNumbers.class).get(0); - assertThat(loaded.bigIntegerVal, equalTo(new BigInteger("700"))); - assertThat(loaded.bigDeciamVal, equalTo(new BigDecimal("800"))); + assertThat(loaded.bigIntegerVal).isEqualTo(new BigInteger("700")); + assertThat(loaded.bigDeciamVal).isEqualTo(new BigDecimal("800")); } - @Test // DATAMONGO-1431 + @Test // DATAMONGO-1431, DATAMONGO-2323 public void streamExecutionUsesExplicitCollectionName() { template.remove(new Query(), "some_special_collection"); @@ -3178,15 +3420,15 @@ public void streamExecutionUsesExplicitCollectionName() { template.insert(document, "some_special_collection"); - CloseableIterator stream = template.stream(new Query(), Document.class); + 
Stream stream = template.stream(new Query(), Document.class); + assertThat(stream).isEmpty(); - assertThat(stream.hasNext(), is(false)); + Iterator stream2 = template + .stream(new Query(where("_id").is(document.id)), org.bson.Document.class, "some_special_collection").iterator(); - stream = template.stream(new Query(), Document.class, "some_special_collection"); - - assertThat(stream.hasNext(), is(true)); - assertThat(stream.next().id, is(document.id)); - assertThat(stream.hasNext(), is(false)); + assertThat(stream2.hasNext()).isTrue(); + assertThat(stream2.next().get("_id")).isEqualTo(new ObjectId(document.id)); + assertThat(stream2.hasNext()).isFalse(); } @Test // DATAMONGO-1194 @@ -3203,7 +3445,7 @@ public void shouldFetchListOfReferencesCorrectly() { template.save(source); - assertThat(template.findOne(query(where("id").is(source.id)), DocumentWithDBRefCollection.class), is(source)); + assertThat(template.findOne(query(where("id").is(source.id)), DocumentWithDBRefCollection.class)).isEqualTo(source); } @Test // DATAMONGO-1194 @@ -3223,8 +3465,8 @@ public void shouldFetchListOfLazyReferencesCorrectly() { DocumentWithDBRefCollection target = template.findOne(query(where("id").is(source.id)), DocumentWithDBRefCollection.class); - assertThat(target.lazyDbRefAnnotatedList, instanceOf(LazyLoadingProxy.class)); - assertThat(target.getLazyDbRefAnnotatedList(), contains(two, one)); + assertThat(target.lazyDbRefAnnotatedList).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.getLazyDbRefAnnotatedList()).containsExactly(two, one); } @Test // DATAMONGO-1194 @@ -3237,7 +3479,7 @@ public void shouldFetchMapOfLazyReferencesCorrectly() { template.save(two); DocumentWithDBRefCollection source = new DocumentWithDBRefCollection(); - source.lazyDbRefAnnotatedMap = new LinkedHashMap(); + source.lazyDbRefAnnotatedMap = new LinkedHashMap<>(); source.lazyDbRefAnnotatedMap.put("tyrion", two); source.lazyDbRefAnnotatedMap.put("jon", one); template.save(source); @@ -3245,8 
+3487,75 @@ public void shouldFetchMapOfLazyReferencesCorrectly() { DocumentWithDBRefCollection target = template.findOne(query(where("id").is(source.id)), DocumentWithDBRefCollection.class); - assertThat(target.lazyDbRefAnnotatedMap, instanceOf(LazyLoadingProxy.class)); - assertThat(target.lazyDbRefAnnotatedMap.values(), contains(two, one)); + assertThat(target.lazyDbRefAnnotatedMap).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.lazyDbRefAnnotatedMap.values()).containsExactly(two, one); + } + + @Test // DATAMONGO-2004 + public void shouldFetchLazyReferenceWithConstructorCreationCorrectly() { + + Sample one = new Sample("1", "jon snow"); + + template.save(one); + + DocumentWithLazyDBRefsAndConstructorCreation source = new DocumentWithLazyDBRefsAndConstructorCreation(null, one, + null, null); + + template.save(source); + + DocumentWithLazyDBRefsAndConstructorCreation target = template.findOne(query(where("id").is(source.id)), + DocumentWithLazyDBRefsAndConstructorCreation.class); + + assertThat(target.lazyDbRefProperty).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.lazyDbRefProperty).isEqualTo(one); + } + + @Test // DATAMONGO-2004 + public void shouldFetchMapOfLazyReferencesWithConstructorCreationCorrectly() { + + Sample one = new Sample("1", "jon snow"); + Sample two = new Sample("2", "tyrion lannister"); + + template.save(one); + template.save(two); + + Map map = new LinkedHashMap<>(); + map.put("tyrion", two); + map.put("jon", one); + + DocumentWithLazyDBRefsAndConstructorCreation source = new DocumentWithLazyDBRefsAndConstructorCreation(null, null, + null, map); + + template.save(source); + + DocumentWithLazyDBRefsAndConstructorCreation target = template.findOne(query(where("id").is(source.id)), + DocumentWithLazyDBRefsAndConstructorCreation.class); + + assertThat(target.lazyDbRefAnnotatedMap).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.lazyDbRefAnnotatedMap.values()).containsExactly(two, one); + } + + @Test // 
DATAMONGO-2004 + public void shouldFetchListOfLazyReferencesWithConstructorCreationCorrectly() { + + Sample one = new Sample("1", "jon snow"); + Sample two = new Sample("2", "tyrion lannister"); + + template.save(one); + template.save(two); + + List list = Arrays.asList(two, one); + + DocumentWithLazyDBRefsAndConstructorCreation source = new DocumentWithLazyDBRefsAndConstructorCreation(null, null, + list, null); + + template.save(source); + + DocumentWithLazyDBRefsAndConstructorCreation target = template.findOne(query(where("id").is(source.id)), + DocumentWithLazyDBRefsAndConstructorCreation.class); + + assertThat(target.lazyDbRefAnnotatedList).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.getLazyDbRefAnnotatedList()).containsExactly(two, one); } @Test // DATAMONGO-1513 @@ -3265,7 +3574,36 @@ public void onBeforeSave(BeforeSaveEvent event) { template.insertAll(Collections.singletonList(document)); - assertThat(document.id, is(notNullValue())); + assertThat(document.id).isNotNull(); + } + + @Test // DATAMONGO-2189 + @DirtiesContext + public void afterSaveEventContainsSavedObjectUsingInsertAll() { + + AtomicReference saved = createAfterSaveReference(); + ImmutableVersioned source = new ImmutableVersioned(); + + template.insertAll(Collections.singletonList(source)); + + assertThat(saved.get()).isNotNull(); + assertThat(saved.get()).isNotSameAs(source); + assertThat(saved.get().id).isNotNull(); + + } + + @Test // DATAMONGO-2189 + @DirtiesContext + public void afterSaveEventContainsSavedObjectUsingInsert() { + + AtomicReference saved = createAfterSaveReference(); + ImmutableVersioned source = new ImmutableVersioned(); + + template.insert(source); + + assertThat(saved.get()).isNotNull(); + assertThat(saved.get()).isNotSameAs(source); + assertThat(saved.get().id).isNotNull(); } @Test // DATAMONGO-1509 @@ -3277,7 +3615,7 @@ public void findsByGenericNestedListElements() { template.insert(dwc); Query query = query(where("models").is(modelList)); - 
assertThat(template.findOne(query, DocumentWithCollection.class), is(equalTo(dwc))); + assertThat(template.findOne(query, DocumentWithCollection.class)).isEqualTo(dwc); } @Test // DATAMONGO-1517 @@ -3285,8 +3623,6 @@ public void findsByGenericNestedListElements() { public void decimal128TypeShouldBeSavedAndLoadedCorrectly() throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException { - assumeThat(MongoClientVersion.isMongo34Driver(), is(true)); - Class decimal128Type = ClassUtils.resolveClassName("org.bson.types.Decimal128", null); WithObjectTypeProperty source = new WithObjectTypeProperty(); @@ -3296,7 +3632,7 @@ public void decimal128TypeShouldBeSavedAndLoadedCorrectly() template.save(source); WithObjectTypeProperty loaded = template.findOne(query(where("id").is(source.id)), WithObjectTypeProperty.class); - assertThat(loaded.getValue(), instanceOf(decimal128Type)); + assertThat(loaded.getValue()).isInstanceOf(decimal128Type); } @Test // DATAMONGO-1718 @@ -3311,11 +3647,11 @@ public void findAndRemoveAllWithoutExplicitDomainTypeShouldRemoveAndReturnEntiti template.save(rickon); List result = template.findAllAndRemove(query(where("field").regex(".*stark$")), - template.determineCollectionName(Sample.class)); + template.getCollectionName(Sample.class)); - assertThat(result, hasSize(2)); - assertThat(result, containsInAnyOrder(bran, rickon)); - assertThat(template.count(new BasicQuery("{}"), template.determineCollectionName(Sample.class)), is(equalTo(1L))); + assertThat(result).hasSize(2); + assertThat(result).contains(bran, rickon); + assertThat(template.count(new BasicQuery("{}"), template.getCollectionName(Sample.class))).isEqualTo(1L); } @Test // DATAMONGO-1779 @@ -3336,7 +3672,7 @@ public void removeShouldConsiderLimit() { .mapToObj(i -> new Sample("id-" + i, i % 2 == 0 ? 
"stark" : "lannister")) // .collect(Collectors.toList()); - template.insertAll(samples); + template.bulkOps(BulkMode.UNORDERED, Sample.class).insert(samples).execute(); DeleteResult wr = template.remove(query(where("field").is("lannister")).limit(25), Sample.class); @@ -3351,7 +3687,7 @@ public void removeShouldConsiderSkipAndSort() { .mapToObj(i -> new Sample("id-" + i, i % 2 == 0 ? "stark" : "lannister")) // .collect(Collectors.toList()); - template.insertAll(samples); + template.bulkOps(BulkMode.UNORDERED, Sample.class).insert(samples).execute(); DeleteResult wr = template.remove(new Query().skip(25).with(Sort.by("field")), Sample.class); @@ -3361,46 +3697,442 @@ public void removeShouldConsiderSkipAndSort() { assertThat(template.count(query(where("field").is("stark")), Sample.class)).isEqualTo(0L); } - static class TypeWithNumbers { + @Test // DATAMONGO-1988 + public void findByNestedDocumentWithStringIdMappingToObjectIdMatchesDocumentsCorrectly() { - @Id String id; - Integer intVal; - Float floatVal; - Long longVal; - Double doubleVal; - BigDecimal bigDeciamVal; - BigInteger bigIntegerVal; - Byte byteVal; + DocumentWithNestedTypeHavingStringIdProperty source = new DocumentWithNestedTypeHavingStringIdProperty(); + source.id = "id-1"; + source.sample = new Sample(); + source.sample.id = new ObjectId().toHexString(); + + template.save(source); + + DocumentWithNestedTypeHavingStringIdProperty target = template + .query(DocumentWithNestedTypeHavingStringIdProperty.class) + .matching(query(where("sample.id").is(source.sample.id))).firstValue(); + + assertThat(target).isEqualTo(source); } - static class DoucmentWithNamedIdField { + @Test // DATAMONGO-1992 + public void writesAuditingMetadataForImmutableTypes() { - @Id String someIdKey; + ImmutableAudited source = new ImmutableAudited(null, null); + ImmutableAudited result = template.save(source); - @Field(value = "val") // - String value; + assertThat(result).isNotSameAs(source).describedAs("Expected a different 
instances to be returned"); + assertThat(result.modified).isNotNull().describedAs("Auditing field must not be null"); - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (someIdKey == null ? 0 : someIdKey.hashCode()); - result = prime * result + (value == null ? 0 : value.hashCode()); - return result; - } + ImmutableAudited read = template.findOne(query(where("id").is(result.getId())), ImmutableAudited.class); - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (!(obj instanceof DoucmentWithNamedIdField)) { - return false; - } - DoucmentWithNamedIdField other = (DoucmentWithNamedIdField) obj; + assertThat(read.modified).isEqualTo(result.modified.truncatedTo(ChronoUnit.MILLIS)) + .describedAs("Expected auditing information to be read"); + } + + @Test // DATAMONGO-1798 + public void saveAndLoadStringThatIsAnObjectIdAsString() { + + RawStringId source = new RawStringId(); + source.id = new ObjectId().toHexString(); + source.value = "new value"; + + template.save(source); + + org.bson.Document result = template + .execute(db -> (org.bson.Document) db.getCollection(template.getCollectionName(RawStringId.class)) + .find(Filters.eq("_id", source.id)).limit(1).into(new ArrayList()).iterator().next()); + + assertThat(result).isNotNull(); + assertThat(result.get("_id")).isEqualTo(source.id); + + RawStringId target = template.findOne(query(where("id").is(source.id)), RawStringId.class); + assertThat(target).isEqualTo(source); + } + + @Test // GH-4184 + void insertHonorsExistingRawId() { + + RawStringId source = new RawStringId(); + source.id = "abc"; + source.value = "new value"; + + template.insert(source); + + org.bson.Document result = template + .execute(db -> db.getCollection(template.getCollectionName(RawStringId.class)).find().limit(1).cursor().next()); + + assertThat(result).isNotNull(); + 
assertThat(result.get("_id")).isEqualTo("abc"); + + RawStringId target = template.findOne(query(where("id").is(source.id)), RawStringId.class); + assertThat(target).isEqualTo(source); + } + + @Test // GH-4026 + void saveShouldGenerateNewIdOfTypeIfExplicitlyDefined() { + + RawStringId source = new RawStringId(); + source.value = "new value"; + + template.save(source); + + template.execute(RawStringId.class, collection -> { + + org.bson.Document first = collection.find(new org.bson.Document()).first(); + assertThat(first.get("_id")).isInstanceOf(String.class); + return null; + }); + } + + @Test // GH-4026 + void insertShouldGenerateNewIdOfTypeIfExplicitlyDefined() { + + RawStringId source = new RawStringId(); + source.value = "new value"; + + template.insert(source); + + template.execute(RawStringId.class, collection -> { + + org.bson.Document first = collection.find(new org.bson.Document()).first(); + assertThat(first.get("_id")).isInstanceOf(String.class); + return null; + }); + } + + @Test // DATAMONGO-2193 + public void shouldNotConvertStringToObjectIdForNonIdField() { + + ObjectId outerId = new ObjectId(); + String innerId = new ObjectId().toHexString(); + + org.bson.Document source = new org.bson.Document() // + .append("_id", outerId) // + .append("inner", new org.bson.Document("id", innerId).append("value", "boooh")); + + template.getDb().getCollection(template.getCollectionName(Outer.class)).insertOne(source); + + Outer target = template.findOne(query(where("inner.id").is(innerId)), Outer.class); + assertThat(target).isNotNull(); + assertThat(target.id).isEqualTo(outerId); + assertThat(target.inner.id).isEqualTo(innerId); + } + + @Test // DATAMONGO-2294 + public void shouldProjectWithCollections() { + + MyPerson person = new MyPerson("Walter"); + person.address = new Address("TX", "Austin"); + template.save(person); + + Query queryByChainedInclude = query(where("name").is("Walter")); + queryByChainedInclude.fields().include("id").include("name"); + + Query 
queryByCollectionInclude = query(where("name").is("Walter")); + queryByCollectionInclude.fields().include("id", "name"); + + MyPerson first = template.findAndReplace(queryByChainedInclude, new MyPerson("Walter")); + MyPerson second = template.findAndReplace(queryByCollectionInclude, new MyPerson("Walter")); + + assertThat(first).isEqualTo(second); + assertThat(first.address).isNull(); + assertThat(second.address).isNull(); + } + + @Test // DATAMONGO-2451 + public void sortOnIdFieldWithExplicitTypeShouldWork() { + + template.dropCollection(WithIdAndFieldAnnotation.class); + + WithIdAndFieldAnnotation f = new WithIdAndFieldAnnotation(); + f.id = new ObjectId().toHexString(); + f.value = "value"; + + template.save(f); + + assertThat(template.find(new BasicQuery("{}").with(Sort.by("id")), WithIdAndFieldAnnotation.class)).isNotEmpty(); + } + + @Test // GH-3407 + void shouldWriteSubdocumentWithNullCorrectly() { + + template.dropCollection(WithSubdocument.class); + + WithSubdocument doc = new WithSubdocument(); + SubdocumentWithWriteNull subdoc = new SubdocumentWithWriteNull("Walter", "White"); + doc.subdocument = subdoc; + + template.save(doc); + + org.bson.Document loaded = template.findById(doc.id, org.bson.Document.class, "withSubdocument"); + + assertThat(loaded.get("subdocument", org.bson.Document.class)).hasSize(3).containsEntry("firstname", "Walter") + .containsEntry("nickname", null); + } + + @Test // GH-3407 + void shouldUpdateSubdocumentWithNullCorrectly() { + + template.dropCollection(WithSubdocument.class); + + WithSubdocument doc = new WithSubdocument(); + SubdocumentWithWriteNull subdoc = new SubdocumentWithWriteNull("Walter", "White"); + subdoc.nickname = "Heisenberg"; + doc.subdocument = subdoc; + + template.save(doc); + + String id = doc.id; + + doc.id = null; + subdoc.nickname = null; + template.update(WithSubdocument.class).replaceWith(doc).findAndReplaceValue(); + + org.bson.Document loaded = template.findById(id, org.bson.Document.class, 
"withSubdocument"); + + assertThat(loaded.get("subdocument", org.bson.Document.class)).hasSize(3).containsEntry("firstname", "Walter") + .containsEntry("nickname", null); + } + + @Test // GH-3407 + void shouldFindSubdocumentWithNullCorrectly() { + + template.dropCollection(WithSubdocument.class); + + WithSubdocument doc = new WithSubdocument(); + SubdocumentWithWriteNull subdoc = new SubdocumentWithWriteNull("Walter", "White"); + doc.subdocument = subdoc; + + template.save(doc); + + org.bson.Document loaded = template.findOne(query(where("subdocument").is(subdoc)), org.bson.Document.class, + "withSubdocument"); + + assertThat(loaded).isNotNull(); + } + + @Test // GH-3811 + public void sliceShouldLimitCollectionValues() { + + DocumentWithCollectionOfSimpleType source = new DocumentWithCollectionOfSimpleType(); + source.id = "id-1"; + source.values = Arrays.asList("spring", "data", "mongodb"); + + template.save(source); + + Criteria criteria = Criteria.where("id").is(source.id); + Query query = Query.query(criteria); + query.fields().slice("values", 0, 1); + DocumentWithCollectionOfSimpleType target = template.findOne(query, DocumentWithCollectionOfSimpleType.class); + + assertThat(target.values).containsExactly("spring"); + } + + @Test // GH-2750 + void shouldExecuteQueryWithExpression() { + + TypeWithFieldAnnotation source1 = new TypeWithFieldAnnotation(); + source1.emailAddress = "spring.data@pivotal.com"; + + TypeWithFieldAnnotation source2 = new TypeWithFieldAnnotation(); + source2.emailAddress = "spring.data@vmware.com"; + + template.insertAll(List.of(source1, source2)); + + TypeWithFieldAnnotation loaded = template.query(TypeWithFieldAnnotation.class) + .matching(expr(StringOperators.valueOf("emailAddress").regexFind(".*@vmware.com$", "i"))).firstValue(); + + assertThat(loaded).isEqualTo(source2); + } + + @Test // GH-4300 + public void replaceShouldReplaceDocument() { + + org.bson.Document doc = new org.bson.Document("foo", "bar"); + String collectionName = 
"replace"; + template.save(doc, collectionName); + + org.bson.Document replacement = new org.bson.Document("foo", "baz"); + UpdateResult updateResult = template.replace(query(where("foo").is("bar")), replacement, + ReplaceOptions.replaceOptions(), collectionName); + + assertThat(updateResult.wasAcknowledged()).isTrue(); + assertThat(template.findOne(query(where("foo").is("baz")), org.bson.Document.class, collectionName)).isNotNull(); + } + + @Test // GH-4464 + void saveEntityWithDotInFieldName() { + + WithFieldNameContainingDots source = new WithFieldNameContainingDots(); + source.id = "id-1"; + source.value = "v1"; + + template.save(source); + + org.bson.Document raw = template.execute(WithFieldNameContainingDots.class, collection -> collection.find(new org.bson.Document("_id", source.id)).first()); + assertThat(raw).containsEntry("field.name.with.dots", "v1"); + } + + @Test // GH-4464 + @EnableIfMongoServerVersion(isGreaterThanEqual = "5.0") + void queryEntityWithDotInFieldNameUsingExpr() { + + WithFieldNameContainingDots source = new WithFieldNameContainingDots(); + source.id = "id-1"; + source.value = "v1"; + + WithFieldNameContainingDots source2 = new WithFieldNameContainingDots(); + source2.id = "id-2"; + source2.value = "v2"; + + template.save(source); + template.save(source2); + + WithFieldNameContainingDots loaded = template.query(WithFieldNameContainingDots.class) // with property -> fieldname mapping + .matching(expr(ComparisonOperators.valueOf(ObjectOperators.getValueOf("value")).equalToValue("v1"))).firstValue(); + + assertThat(loaded).isEqualTo(source); + + loaded = template.query(WithFieldNameContainingDots.class) // using raw fieldname + .matching(expr(ComparisonOperators.valueOf(ObjectOperators.getValueOf("field.name.with.dots")).equalToValue("v1"))).firstValue(); + + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4464 + @EnableIfMongoServerVersion(isGreaterThanEqual = "5.0") + void updateEntityWithDotInFieldNameUsingAggregations() { + + 
WithFieldNameContainingDots source = new WithFieldNameContainingDots(); + source.id = "id-1"; + source.value = "v1"; + + template.save(source); + + template.update(WithFieldNameContainingDots.class) + .matching(where("id").is(source.id)) + .apply(AggregationUpdate.newUpdate(ReplaceWithOperation.replaceWithValue(ObjectOperators.setValueTo("value", "changed")))) + .first(); + + org.bson.Document raw = template.execute(WithFieldNameContainingDots.class, collection -> collection.find(new org.bson.Document("_id", source.id)).first()); + assertThat(raw).containsEntry("field.name.with.dots", "changed"); + + template.update(WithFieldNameContainingDots.class) + .matching(where("id").is(source.id)) + .apply(AggregationUpdate.newUpdate(ReplaceWithOperation.replaceWithValue(ObjectOperators.setValueTo("field.name.with.dots", "changed-again")))) + .first(); + + raw = template.execute(WithFieldNameContainingDots.class, collection -> collection.find(new org.bson.Document("_id", source.id)).first()); + assertThat(raw).containsEntry("field.name.with.dots", "changed-again"); + } + + @Test // GH-4464 + void savesMapWithDotInKey() { + + MongoTestUtils.flushCollection(DB_NAME, template.getCollectionName(WithFieldNameContainingDots.class), client); + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + template.getConverter().getMappingContext()); + converter.preserveMapKeys(true); + converter.afterPropertiesSet(); + + MongoTemplate template = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, DB_NAME), converter); + + WithFieldNameContainingDots source = new WithFieldNameContainingDots(); + source.id = "id-1"; + source.mapValue = Map.of("k1", "v1", "map.key.with.dot", "v2"); + + template.save(source); + + org.bson.Document raw = template.execute(WithFieldNameContainingDots.class, + collection -> collection.find(new org.bson.Document("_id", source.id)).first()); + + assertThat(raw.get("mapValue", org.bson.Document.class)) + 
.containsEntry("k1", "v1") + .containsEntry("map.key.with.dot", "v2"); + } + + @Test // GH-4464 + void readsMapWithDotInKey() { + + MongoTestUtils.flushCollection(DB_NAME, template.getCollectionName(WithFieldNameContainingDots.class), client); + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + template.getConverter().getMappingContext()); + converter.preserveMapKeys(true); + converter.afterPropertiesSet(); + + MongoTemplate template = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, DB_NAME), converter); + + Map sourceMap = Map.of("k1", "v1", "sourceMap.key.with.dot", "v2"); + template.execute(WithFieldNameContainingDots.class, + collection -> { + collection.insertOne(new org.bson.Document("_id", "id-1").append("mapValue", sourceMap)); + return null; + } + ); + + WithFieldNameContainingDots loaded = template.query(WithFieldNameContainingDots.class) + .matching(where("id").is("id-1")) + .firstValue(); + + assertThat(loaded.mapValue).isEqualTo(sourceMap); + } + + private AtomicReference createAfterSaveReference() { + + AtomicReference saved = new AtomicReference<>(); + context.addApplicationListener(new AbstractMongoEventListener() { + + @Override + public void onAfterSave(AfterSaveEvent event) { + saved.set(event.getSource()); + } + }); + + return saved; + } + + static class TypeWithNumbers { + + @Id String id; + Integer intVal; + Float floatVal; + Long longVal; + Double doubleVal; + BigDecimal bigDeciamVal; + BigInteger bigIntegerVal; + Byte byteVal; + } + + static class DoucmentWithNamedIdField { + + @Id String someIdKey; + + @Field(value = "val") // + String value; + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (someIdKey == null ? 0 : someIdKey.hashCode()); + result = prime * result + (value == null ? 
0 : value.hashCode()); + return result; + } + + @Override + public boolean equals(@Nullable Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof DoucmentWithNamedIdField)) { + return false; + } + DoucmentWithNamedIdField other = (DoucmentWithNamedIdField) obj; if (someIdKey == null) { if (other.someIdKey != null) { return false; @@ -3420,7 +4152,6 @@ public boolean equals(Object obj) { } - @Data static class DocumentWithDBRefCollection { @Id public String id; @@ -3438,16 +4169,159 @@ static class DocumentWithDBRefCollection { @Field("lazy_db_ref_map") // DATAMONGO-1194 @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) public Map lazyDbRefAnnotatedMap; + + public String getId() { + return this.id; + } + + public List getDbRefAnnotatedList() { + return this.dbRefAnnotatedList; + } + + public Sample getDbRefProperty() { + return this.dbRefProperty; + } + + public List getLazyDbRefAnnotatedList() { + return this.lazyDbRefAnnotatedList; + } + + public Map getLazyDbRefAnnotatedMap() { + return this.lazyDbRefAnnotatedMap; + } + + public void setId(String id) { + this.id = id; + } + + public void setDbRefAnnotatedList(List dbRefAnnotatedList) { + this.dbRefAnnotatedList = dbRefAnnotatedList; + } + + public void setDbRefProperty(Sample dbRefProperty) { + this.dbRefProperty = dbRefProperty; + } + + public void setLazyDbRefAnnotatedList(List lazyDbRefAnnotatedList) { + this.lazyDbRefAnnotatedList = lazyDbRefAnnotatedList; + } + + public void setLazyDbRefAnnotatedMap(Map lazyDbRefAnnotatedMap) { + this.lazyDbRefAnnotatedMap = lazyDbRefAnnotatedMap; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + DocumentWithDBRefCollection that = (DocumentWithDBRefCollection) o; + return Objects.equals(id, that.id) && Objects.equals(dbRefAnnotatedList, that.dbRefAnnotatedList) + && 
Objects.equals(dbRefProperty, that.dbRefProperty) + && Objects.equals(lazyDbRefAnnotatedList, that.lazyDbRefAnnotatedList) + && Objects.equals(lazyDbRefAnnotatedMap, that.lazyDbRefAnnotatedMap); + } + + @Override + public int hashCode() { + return Objects.hash(id, dbRefAnnotatedList, dbRefProperty, lazyDbRefAnnotatedList, lazyDbRefAnnotatedMap); + } + + public String toString() { + return "MongoTemplateTests.DocumentWithDBRefCollection(id=" + this.getId() + ", dbRefAnnotatedList=" + + this.getDbRefAnnotatedList() + ", dbRefProperty=" + this.getDbRefProperty() + ", lazyDbRefAnnotatedList=" + + this.getLazyDbRefAnnotatedList() + ", lazyDbRefAnnotatedMap=" + this.getLazyDbRefAnnotatedMap() + ")"; + } + } + + static class DocumentWithLazyDBRefsAndConstructorCreation { + + @Id public String id; + + @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) // + public Sample lazyDbRefProperty; + + @Field("lazy_db_ref_list") + @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) // + public List lazyDbRefAnnotatedList; + + @Field("lazy_db_ref_map") + @org.springframework.data.mongodb.core.mapping.DBRef(lazy = true) public Map lazyDbRefAnnotatedMap; + + public DocumentWithLazyDBRefsAndConstructorCreation(String id, Sample lazyDbRefProperty, + List lazyDbRefAnnotatedList, Map lazyDbRefAnnotatedMap) { + this.id = id; + this.lazyDbRefProperty = lazyDbRefProperty; + this.lazyDbRefAnnotatedList = lazyDbRefAnnotatedList; + this.lazyDbRefAnnotatedMap = lazyDbRefAnnotatedMap; + } + + public String getId() { + return this.id; + } + + public Sample getLazyDbRefProperty() { + return this.lazyDbRefProperty; + } + + public List getLazyDbRefAnnotatedList() { + return this.lazyDbRefAnnotatedList; + } + + public Map getLazyDbRefAnnotatedMap() { + return this.lazyDbRefAnnotatedMap; + } + + public void setId(String id) { + this.id = id; + } + + public void setLazyDbRefProperty(Sample lazyDbRefProperty) { + this.lazyDbRefProperty = lazyDbRefProperty; + } + + public void 
setLazyDbRefAnnotatedList(List lazyDbRefAnnotatedList) { + this.lazyDbRefAnnotatedList = lazyDbRefAnnotatedList; + } + + public void setLazyDbRefAnnotatedMap(Map lazyDbRefAnnotatedMap) { + this.lazyDbRefAnnotatedMap = lazyDbRefAnnotatedMap; + } + + public String toString() { + return "MongoTemplateTests.DocumentWithLazyDBRefsAndConstructorCreation(id=" + this.getId() + + ", lazyDbRefProperty=" + this.getLazyDbRefProperty() + ", lazyDbRefAnnotatedList=" + + this.getLazyDbRefAnnotatedList() + ", lazyDbRefAnnotatedMap=" + this.getLazyDbRefAnnotatedMap() + ")"; + } } - @EqualsAndHashCode static class DocumentWithCollection { @Id String id; List models; - DocumentWithCollection(List models) { - this.models = models; + DocumentWithCollection(List models) { + this.models = models; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + DocumentWithCollection that = (DocumentWithCollection) o; + return Objects.equals(id, that.id) && Objects.equals(models, that.models); + } + + @Override + public int hashCode() { + return Objects.hash(id, models); } } @@ -3462,6 +4336,29 @@ static class DocumentWithCollectionOfSamples { List samples; } + static class DocumentWithNestedTypeHavingStringIdProperty { + + @Id String id; + Sample sample; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + DocumentWithNestedTypeHavingStringIdProperty that = (DocumentWithNestedTypeHavingStringIdProperty) o; + return Objects.equals(id, that.id) && Objects.equals(sample, that.sample); + } + + @Override + public int hashCode() { + return Objects.hash(id, sample); + } + } + static class DocumentWithMultipleCollections { @Id String id; List string1; @@ -3470,12 +4367,12 @@ static class DocumentWithMultipleCollections { static class DocumentWithNestedCollection { @Id String id; - List> models = 
new ArrayList>(); + List> models = new ArrayList<>(); } static class DocumentWithNestedList { @Id String id; - List> models = new ArrayList>(); + List> models = new ArrayList<>(); } static class DocumentWithEmbeddedDocumentWithCollection { @@ -3493,7 +4390,6 @@ static interface Model { String id(); } - @EqualsAndHashCode static class ModelA implements Model { @Id String id; @@ -3512,6 +4408,23 @@ public String value() { public String id() { return id; } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ModelA modelA = (ModelA) o; + return Objects.equals(id, modelA.id) && Objects.equals(value, modelA.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } } static class Document { @@ -3524,15 +4437,17 @@ static class MyId { String first; String second; + Instant id; + + @Field("t") Instant time; } static class TypeWithMyId { @Id MyId id; + String value; } - @EqualsAndHashCode - @NoArgsConstructor static class Sample { @Id String id; @@ -3542,14 +4457,33 @@ public Sample(String id, String field) { this.id = id; this.field = field; } + + public Sample() {} + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Sample sample = (Sample) o; + return Objects.equals(id, sample.id) && Objects.equals(field, sample.field); + } + + @Override + public int hashCode() { + return Objects.hash(id, field); + } } static class TestClass { - DateTime myDate; + LocalDateTime myDate; @PersistenceConstructor - TestClass(DateTime myDate) { + TestClass(LocalDateTime myDate) { this.myDate = myDate; } } @@ -3560,21 +4494,21 @@ static class PersonWithConvertedId { String name; } - static enum DateTimeToDateConverter implements Converter { + static enum DateTimeToDateConverter implements Converter { INSTANCE; - public Date convert(DateTime source) { - 
return source == null ? null : source.toDate(); + public Date convert(LocalDateTime source) { + return source == null ? null : java.util.Date.from(source.atZone(ZoneId.systemDefault()).toInstant()); } } - static enum DateToDateTimeConverter implements Converter { + static enum DateToDateTimeConverter implements Converter { INSTANCE; - public DateTime convert(Date source) { - return source == null ? null : new DateTime(source.getTime()); + public LocalDateTime convert(Date source) { + return source == null ? null : LocalDateTime.ofInstant(source.toInstant(), ZoneId.systemDefault()); } } @@ -3584,9 +4518,62 @@ public static class MyPerson { String name; Address address; + public MyPerson() {} + + public MyPerson(String name) { + this.name = name; + } + + public MyPerson(String id, String name, Address address) { + this.id = id; + this.name = name; + this.address = address; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + public String getName() { return name; } + + public void setName(String name) { + this.name = name; + } + + public Address getAddress() { + return address; + } + + public void setAddress(Address address) { + this.address = address; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + MyPerson myPerson = (MyPerson) o; + return Objects.equals(id, myPerson.id) && Objects.equals(name, myPerson.name) + && Objects.equals(address, myPerson.address); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, address); + } + } + + interface MyPersonProjection { + + String getName(); } static class Address { @@ -3602,7 +4589,7 @@ static class Address { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (obj == this) { return true; @@ -3633,13 +4620,31 @@ public int hashCode() { static class VersionedPerson { @Version Long version; - String 
id, firstname, lastname; + String id, firstname; + @Field(write = Field.Write.ALWAYS) String lastname; } static class TypeWithFieldAnnotation { @Id ObjectId id; @Field("email") String emailAddress; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + TypeWithFieldAnnotation that = (TypeWithFieldAnnotation) o; + return Objects.equals(id, that.id) && Objects.equals(emailAddress, that.emailAddress); + } + + @Override + public int hashCode() { + return Objects.hash(id, emailAddress); + } } static class TypeWithDate { @@ -3680,7 +4685,6 @@ public SomeContent getContent() { } } - @EqualsAndHashCode public static class SomeContent { String id; @@ -3694,7 +4698,6 @@ public String getName() { public void setText(String text) { this.text = text; - } public String getId() { @@ -3704,6 +4707,24 @@ public String getId() { public String getText() { return text; } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SomeContent that = (SomeContent) o; + return Objects.equals(id, that.id) && Objects.equals(text, that.text) && Objects.equals(name, that.name) + && Objects.equals(dbrefMessage, that.dbrefMessage); + } + + @Override + public int hashCode() { + return Objects.hash(id, text, name, dbrefMessage); + } } static class SomeMessage { @@ -3734,11 +4755,28 @@ static class WithGeoJson { GeoJsonPoint point; } - @Data static class WithObjectTypeProperty { @Id String id; Object value; + + public WithObjectTypeProperty() {} + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public Object getValue() { + return value; + } + + public void setValue(Object value) { + this.value = value; + } } static class PersonWithIdPropertyOfTypeUUIDListener @@ -3809,4 +4847,234 @@ public String toString() { } } + + // DATAMONGO-1992 + static 
class ImmutableVersioned { + + final @Id String id; + final @Version Long version; + + public ImmutableVersioned() { + this(null, null); + } + + public ImmutableVersioned(String id, Long version) { + this.id = id; + this.version = version; + } + + ImmutableVersioned withVersion(Long version) { + return new ImmutableVersioned(id, version); + } + + ImmutableVersioned withId(String id) { + return new ImmutableVersioned(id, version); + } + } + + static class ImmutableAudited { + + final @Id String id; + final @LastModifiedDate Instant modified; + + ImmutableAudited(String id, Instant modified) { + this.id = id; + this.modified = modified; + } + + ImmutableAudited withId(String id) { + return new ImmutableAudited(id, modified); + } + + ImmutableAudited withModified(Instant modified) { + return new ImmutableAudited(id, modified); + } + + public String getId() { + return id; + } + + public Instant getModified() { + return modified; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ImmutableAudited that = (ImmutableAudited) o; + return Objects.equals(id, that.id) && Objects.equals(modified, that.modified); + } + + @Override + public int hashCode() { + return Objects.hash(id, modified); + } + } + + static class RawStringId { + + @MongoId String id; + String value; + + public RawStringId() {} + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RawStringId that = (RawStringId) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + } + + 
static class Outer { + + @Id ObjectId id; + Inner inner; + } + + static class Inner { + + @Field("id") String id; + String value; + } + + static class WithIdAndFieldAnnotation { + + @Id // + @Field(name = "_id") // + String id; + String value; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + } + + static class WithSubdocument { + + @Id // + @Field(name = "_id") // + String id; + SubdocumentWithWriteNull subdocument; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public SubdocumentWithWriteNull getSubdocument() { + return subdocument; + } + + public void setSubdocument(SubdocumentWithWriteNull subdocument) { + this.subdocument = subdocument; + } + } + + static class SubdocumentWithWriteNull { + + final String firstname, lastname; + + @Field(write = Field.Write.ALWAYS) String nickname; + + public SubdocumentWithWriteNull(String firstname, String lastname) { + this.firstname = firstname; + this.lastname = lastname; + } + + public String getFirstname() { + return firstname; + } + + public String getLastname() { + return lastname; + } + + public String getNickname() { + return nickname; + } + + public void setNickname(String nickname) { + this.nickname = nickname; + } + } + + static class WithFieldNameContainingDots { + + String id; + + @Field(value = "field.name.with.dots", nameType = Type.KEY) + String value; + + Map mapValue; + + @Override + public String toString() { + return "WithMap{" + "id='" + id + '\'' + ", value='" + value + '\'' + ", mapValue=" + mapValue + '}'; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithFieldNameContainingDots withFieldNameContainingDots = (WithFieldNameContainingDots) o; + return 
Objects.equals(id, withFieldNameContainingDots.id) && Objects.equals(value, withFieldNameContainingDots.value) + && Objects.equals(mapValue, withFieldNameContainingDots.mapValue); + } + + @Override + public int hashCode() { + return Objects.hash(id, value, mapValue); + } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTransactionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTransactionTests.java new file mode 100644 index 0000000000..ec0ab192fa --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTransactionTests.java @@ -0,0 +1,396 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static java.util.UUID.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.test.util.MongoTestUtils.*; + +import java.time.Duration; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junitpioneer.jupiter.SetSystemProperty; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Persistable; +import org.springframework.data.mongodb.CapturingTransactionOptionsResolver; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoTransactionManager; +import org.springframework.data.mongodb.MongoTransactionOptions; +import org.springframework.data.mongodb.MongoTransactionOptionsResolver; +import org.springframework.data.mongodb.UncategorizedMongoDbException; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.test.util.AfterTransactionAssertion; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.ReplSetClient; +import 
org.springframework.test.annotation.Rollback; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.transaction.AfterTransaction; +import org.springframework.test.context.transaction.BeforeTransaction; +import org.springframework.transaction.TransactionSystemException; +import org.springframework.transaction.annotation.EnableTransactionManagement; +import org.springframework.transaction.annotation.Propagation; +import org.springframework.transaction.annotation.Transactional; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadConcernLevel; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.Filters; + +/** + * @author Christoph Strobl + * @author Yan Kardziyaka + * @currentRead Shadow's Edge - Brent Weeks + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@EnableIfReplicaSetAvailable +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") +@ContextConfiguration +@Transactional(transactionManager = "txManager") +@SetSystemProperty(key = "tx.read.concern", value = "local") +public class MongoTemplateTransactionTests { + + static final String DB_NAME = "template-tx-tests"; + static final String COLLECTION_NAME = "assassins"; + + static @ReplSetClient MongoClient mongoClient; + + @Configuration + @EnableTransactionManagement + static class Config extends AbstractMongoClientConfiguration { + + @Bean + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return DB_NAME; + } + + @Override + protected boolean autoIndexCreation() { + return false; + } + + @Bean + CapturingTransactionOptionsResolver txOptionsResolver() { + return new CapturingTransactionOptionsResolver(MongoTransactionOptionsResolver.defaultResolver()); + } + + 
@Bean + MongoTransactionManager txManager(MongoDatabaseFactory dbFactory, + MongoTransactionOptionsResolver txOptionsResolver) { + return new MongoTransactionManager(dbFactory, txOptionsResolver, MongoTransactionOptions.NONE); + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } + + @Bean + public TransactionOptionsTestService transactionOptionsTestService(MongoOperations operations) { + return new TransactionOptionsTestService<>(operations, Assassin.class); + } + } + + @Autowired MongoTemplate template; + @Autowired MongoClient client; + @Autowired TransactionOptionsTestService transactionOptionsTestService; + @Autowired CapturingTransactionOptionsResolver transactionOptionsResolver; + + List>> assertionList; + + @BeforeEach + public void setUp() { + + template.setReadPreference(ReadPreference.primary()); + assertionList = new CopyOnWriteArrayList<>(); + transactionOptionsResolver.clear(); // clean out left overs from dirty context + } + + @BeforeTransaction + public void beforeTransaction() { + createOrReplaceCollection(DB_NAME, COLLECTION_NAME, client); + } + + @AfterTransaction + public void verifyDbState() { + + MongoCollection collection = client.getDatabase(DB_NAME).withReadPreference(ReadPreference.primary()) + .getCollection(COLLECTION_NAME); + + assertionList.forEach(it -> { + + boolean isPresent = collection.countDocuments(Filters.eq("_id", it.getId())) != 0; + + assertThat(isPresent).isEqualTo(it.shouldBePresent()) + .withFailMessage(String.format("After transaction entity %s should %s.", it.getPersistable(), + it.shouldBePresent() ? 
"be present" : "NOT be present")); + }); + } + + @Rollback(false) + @Test // DATAMONGO-1920 + public void shouldOperateCommitCorrectly() { + + Assassin hu = new Assassin("hu", "Hu Gibbet"); + template.save(hu); + + assertAfterTransaction(hu).isPresent(); + } + + @Test // DATAMONGO-1920 + public void shouldOperateRollbackCorrectly() { + + Assassin vi = new Assassin("vi", "Viridiana Sovari"); + template.save(vi); + + assertAfterTransaction(vi).isNotPresent(); + } + + @Test // DATAMONGO-1920 + public void shouldBeAbleToViewChangesDuringTransaction() throws InterruptedException { + + Assassin durzo = new Assassin("durzo", "Durzo Blint"); + template.save(durzo); + + Thread.sleep(100); + Assassin retrieved = template.findOne(query(where("id").is(durzo.getId())), Assassin.class); + + assertThat(retrieved).isEqualTo(durzo); + + assertAfterTransaction(durzo).isNotPresent(); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowIllegalArgumentExceptionOnTransactionWithInvalidMaxCommitTime() { + + Assassin assassin = new Assassin(randomUUID().toString(), randomUUID().toString()); + + assertThatThrownBy(() -> transactionOptionsTestService.saveWithInvalidMaxCommitTime(assassin)) // + .isInstanceOf(IllegalArgumentException.class); + + assertAfterTransaction(assassin).isNotPresent(); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldCommitOnTransactionWithinMaxCommitTime() { + + Assassin assassin = new Assassin(randomUUID().toString(), randomUUID().toString()); + + transactionOptionsTestService.saveWithinMaxCommitTime(assassin); + + assertThat(transactionOptionsResolver.getLastCapturedOption()).returns(Duration.ofMinutes(1), + MongoTransactionOptions::getMaxCommitTime); + + assertAfterTransaction(assassin).isPresent(); + } + + @Rollback(false) + @Test // GH-1628 + 
@Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowInvalidDataAccessApiUsageExceptionOnTransactionWithAvailableReadConcern() { + + assertThatThrownBy(() -> transactionOptionsTestService.availableReadConcernFind(randomUUID().toString())) // + .isInstanceOf(InvalidDataAccessApiUsageException.class); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowIllegalArgumentExceptionOnTransactionWithInvalidReadConcern() { + + assertThatThrownBy(() -> transactionOptionsTestService.invalidReadConcernFind(randomUUID().toString())) // + .isInstanceOf(IllegalArgumentException.class); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldReadTransactionOptionFromSystemProperty() { + + transactionOptionsTestService.environmentReadConcernFind(randomUUID().toString()); + + assertThat(transactionOptionsResolver.getLastCapturedOption()).returns( + new ReadConcern(ReadConcernLevel.fromString(System.getProperty("tx.read.concern"))), + MongoTransactionOptions::getReadConcern); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldNotThrowOnTransactionWithMajorityReadConcern() { + assertThatNoException() // + .isThrownBy(() -> transactionOptionsTestService.majorityReadConcernFind(randomUUID().toString())); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowUncategorizedMongoDbExceptionOnTransactionWithPrimaryPreferredReadPreference() { + + assertThatThrownBy(() -> transactionOptionsTestService.findFromPrimaryPreferredReplica(randomUUID().toString())) // + .isInstanceOf(UncategorizedMongoDbException.class); + } + + @Rollback(false) + @Test // GH-1628 + 
@Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowIllegalArgumentExceptionOnTransactionWithInvalidReadPreference() { + + assertThatThrownBy(() -> transactionOptionsTestService.findFromInvalidReplica(randomUUID().toString())) // + .isInstanceOf(IllegalArgumentException.class); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldNotThrowOnTransactionWithPrimaryReadPreference() { + + assertThatNoException() // + .isThrownBy(() -> transactionOptionsTestService.findFromPrimaryReplica(randomUUID().toString())); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowTransactionSystemExceptionOnTransactionWithUnacknowledgedWriteConcern() { + + Assassin assassin = new Assassin(randomUUID().toString(), randomUUID().toString()); + + assertThatThrownBy(() -> transactionOptionsTestService.unacknowledgedWriteConcernSave(assassin)) // + .isInstanceOf(TransactionSystemException.class); + + assertAfterTransaction(assassin).isNotPresent(); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldThrowIllegalArgumentExceptionOnTransactionWithInvalidWriteConcern() { + + Assassin assassin = new Assassin(randomUUID().toString(), randomUUID().toString()); + + assertThatThrownBy(() -> transactionOptionsTestService.invalidWriteConcernSave(assassin)) // + .isInstanceOf(IllegalArgumentException.class); + + assertAfterTransaction(assassin).isNotPresent(); + } + + @Rollback(false) + @Test // GH-1628 + @Transactional(transactionManager = "txManager", propagation = Propagation.NEVER) + public void shouldCommitOnTransactionWithAcknowledgedWriteConcern() { + + Assassin assassin = new Assassin(randomUUID().toString(), randomUUID().toString()); + + 
transactionOptionsTestService.acknowledgedWriteConcernSave(assassin); + + assertThat(transactionOptionsResolver.getLastCapturedOption()).returns(WriteConcern.ACKNOWLEDGED, + MongoTransactionOptions::getWriteConcern); + + assertAfterTransaction(assassin).isPresent(); + } + + // --- Just some helpers and tests entities + + private AfterTransactionAssertion assertAfterTransaction(Assassin assassin) { + + AfterTransactionAssertion assertion = new AfterTransactionAssertion<>(assassin); + assertionList.add(assertion); + return assertion; + } + + @org.springframework.data.mongodb.core.mapping.Document(COLLECTION_NAME) + static class Assassin implements Persistable { + + @Id String id; + String name; + + public Assassin(String id, String name) { + this.id = id; + this.name = name; + } + + @Override + public boolean isNew() { + return id == null; + } + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Assassin assassin = (Assassin) o; + return Objects.equals(id, assassin.id) && Objects.equals(name, assassin.name); + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + + public String toString() { + return "MongoTemplateTransactionTests.Assassin(id=" + this.getId() + ", name=" + this.getName() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index 5c9ffaa105..79a0bb1fcb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,59 +15,80 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import static org.mockito.Mockito.any; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; - -import lombok.Data; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.math.BigInteger; +import java.time.Duration; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; +import java.util.Objects; import java.util.Optional; +import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; +import org.assertj.core.api.Assertions; import org.bson.Document; import org.bson.conversions.Bson; import org.bson.types.ObjectId; -import org.hamcrest.collection.IsIterableContainingInOrder; -import org.hamcrest.core.Is; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import 
org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; import org.mockito.ArgumentMatcher; -import org.mockito.Matchers; import org.mockito.Mock; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationListener; import org.springframework.context.support.GenericApplicationContext; +import org.springframework.context.support.StaticApplicationContext; import org.springframework.core.convert.converter.Converter; import org.springframework.dao.DataAccessException; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Transient; import org.springframework.data.annotation.Version; import org.springframework.data.convert.CustomConversions; import org.springframework.data.domain.Sort; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.mapping.context.InvalidPersistentPropertyPath; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.aggregation.*; +import org.springframework.data.mongodb.core.aggregation.ComparisonOperators.Gte; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Switch.CaseOperator; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import 
org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Sharded; +import org.springframework.data.mongodb.core.mapping.TimeSeries; import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; -import org.springframework.data.mongodb.core.mapreduce.GroupBy; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Collation; @@ -75,23 +96,40 @@ import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.lang.Nullable; +import org.springframework.mock.env.MockEnvironment; import 
org.springframework.test.util.ReflectionTestUtils; +import org.springframework.util.CollectionUtils; -import com.mongodb.DB; -import com.mongodb.MongoClient; +import com.mongodb.MongoClientSettings; import com.mongodb.MongoException; +import com.mongodb.MongoNamespace; +import com.mongodb.ReadConcern; import com.mongodb.ReadPreference; +import com.mongodb.ServerAddress; +import com.mongodb.ServerCursor; +import com.mongodb.WriteConcern; import com.mongodb.client.AggregateIterable; +import com.mongodb.client.DistinctIterable; import com.mongodb.client.FindIterable; import com.mongodb.client.MapReduceIterable; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoDatabase; import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.CreateCollectionOptions; import com.mongodb.client.model.DeleteOptions; import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndReplaceOptions; import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.MapReduceAction; +import com.mongodb.client.model.TimeSeriesGranularity; import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.UpdateResult; /** @@ -100,102 +138,144 @@ * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Michael J. 
Simons + * @author Roman Puchkovskiy + * @author Yadhukrishna S Pai + * @author Jakub Zurawa + * @author Ben Foster */ -@RunWith(MockitoJUnitRunner.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class MongoTemplateUnitTests extends MongoOperationsUnitTests { - MongoTemplate template; + private MongoTemplate template; - @Mock MongoDbFactory factory; + @Mock MongoDatabaseFactory factory; @Mock MongoClient mongo; @Mock MongoDatabase db; @Mock MongoCollection collection; + @Mock MongoCollection collectionWithWriteConcern; @Mock MongoCursor cursor; @Mock FindIterable findIterable; @Mock AggregateIterable aggregateIterable; @Mock MapReduceIterable mapReduceIterable; + @Mock DistinctIterable distinctIterable; + @Mock UpdateResult updateResult; + @Mock DeleteResult deleteResult; - Document commandResultDocument = new Document(); + private Document commandResultDocument = new Document(); - MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator(); - MappingMongoConverter converter; - MongoMappingContext mappingContext; + private MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator(); + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; + private MockEnvironment environment = new MockEnvironment(); - @Before - public void setUp() { + @BeforeEach + void beforeEach() { when(findIterable.iterator()).thenReturn(cursor); - when(factory.getDb()).thenReturn(db); + when(factory.getMongoDatabase()).thenReturn(db); when(factory.getExceptionTranslator()).thenReturn(exceptionTranslator); - when(db.getCollection(Mockito.any(String.class), eq(Document.class))).thenReturn(collection); - when(db.runCommand(Mockito.any(), Mockito.any(Class.class))).thenReturn(commandResultDocument); - when(collection.find(Mockito.any(org.bson.Document.class))).thenReturn(findIterable); - when(collection.mapReduce(Mockito.any(), Mockito.any())).thenReturn(mapReduceIterable); - 
when(collection.count(any(Bson.class), any(CountOptions.class))).thenReturn(1L); + when(factory.getCodecRegistry()).thenReturn(MongoClientSettings.getDefaultCodecRegistry()); + when(db.getCollection(any(String.class), eq(Document.class))).thenReturn(collection); + when(db.runCommand(any(), any(Class.class))).thenReturn(commandResultDocument); + when(collection.find(any(org.bson.Document.class), any(Class.class))).thenReturn(findIterable); + when(collection.mapReduce(any(), any(), eq(Document.class))).thenReturn(mapReduceIterable); + when(collection.countDocuments(any(Bson.class), any(CountOptions.class))).thenReturn(1L); + when(collection.estimatedDocumentCount(any())).thenReturn(1L); + when(collection.getNamespace()).thenReturn(new MongoNamespace("db.mock-collection")); when(collection.aggregate(any(List.class), any())).thenReturn(aggregateIterable); + when(collection.withReadConcern(any())).thenReturn(collection); when(collection.withReadPreference(any())).thenReturn(collection); - when(findIterable.projection(Mockito.any())).thenReturn(findIterable); - when(findIterable.sort(Mockito.any(org.bson.Document.class))).thenReturn(findIterable); - when(findIterable.modifiers(Mockito.any(org.bson.Document.class))).thenReturn(findIterable); - when(findIterable.collation(Mockito.any())).thenReturn(findIterable); + when(collection.replaceOne(any(), any(), any(com.mongodb.client.model.ReplaceOptions.class))).thenReturn(updateResult); + when(collection.withWriteConcern(any())).thenReturn(collectionWithWriteConcern); + when(collection.distinct(anyString(), any(Document.class), any())).thenReturn(distinctIterable); + when(collectionWithWriteConcern.deleteOne(any(Bson.class), any())).thenReturn(deleteResult); + when(collectionWithWriteConcern.replaceOne(any(), any(), any(com.mongodb.client.model.ReplaceOptions.class))).thenReturn(updateResult); + when(findIterable.projection(any())).thenReturn(findIterable); + 
when(findIterable.sort(any(org.bson.Document.class))).thenReturn(findIterable); + when(findIterable.collation(any())).thenReturn(findIterable); when(findIterable.limit(anyInt())).thenReturn(findIterable); - when(mapReduceIterable.collation(Mockito.any())).thenReturn(mapReduceIterable); - when(mapReduceIterable.sort(Mockito.any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.collation(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.sort(any())).thenReturn(mapReduceIterable); when(mapReduceIterable.iterator()).thenReturn(cursor); when(mapReduceIterable.filter(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.collectionName(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.databaseName(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.action(any())).thenReturn(mapReduceIterable); when(aggregateIterable.collation(any())).thenReturn(aggregateIterable); when(aggregateIterable.allowDiskUse(any())).thenReturn(aggregateIterable); when(aggregateIterable.batchSize(anyInt())).thenReturn(aggregateIterable); when(aggregateIterable.map(any())).thenReturn(aggregateIterable); + when(aggregateIterable.maxTime(anyLong(), any())).thenReturn(aggregateIterable); when(aggregateIterable.into(any())).thenReturn(Collections.emptyList()); + when(aggregateIterable.hint(any())).thenReturn(aggregateIterable); + when(aggregateIterable.hintString(any())).thenReturn(aggregateIterable); + when(distinctIterable.collation(any())).thenReturn(distinctIterable); + when(distinctIterable.map(any())).thenReturn(distinctIterable); + when(distinctIterable.into(any())).thenReturn(Collections.emptyList()); this.mappingContext = new MongoMappingContext(); - this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext); + mappingContext.setAutoIndexCreation(true); + mappingContext.setEnvironment(environment); + mappingContext.setSimpleTypeHolder(new 
MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); + mappingContext.afterPropertiesSet(); + + this.converter = spy(new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext)); + when(this.converter.getEnvironment()).thenReturn(environment); + converter.afterPropertiesSet(); this.template = new MongoTemplate(factory, converter); } - @Test(expected = IllegalArgumentException.class) - public void rejectsNullDatabaseName() throws Exception { - new MongoTemplate(mongo, null); + @Test + void rejectsNullDatabaseName() { + assertThatIllegalArgumentException().isThrownBy(() -> new MongoTemplate(mongo, null)); + } + + @Test // DATAMONGO-1968 + void rejectsNullMongo() { + assertThatIllegalArgumentException().isThrownBy(() -> new MongoTemplate((MongoClient) null, "database")); } - @Test(expected = IllegalArgumentException.class) - public void rejectsNullMongo() throws Exception { - new MongoTemplate(null, "database"); + @Test // DATAMONGO-1968 + void rejectsNullMongoClient() { + assertThatIllegalArgumentException() + .isThrownBy(() -> new MongoTemplate((com.mongodb.client.MongoClient) null, "database")); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1870 - public void removeHandlesMongoExceptionProperly() throws Exception { + @Test // DATAMONGO-1870 + void removeHandlesMongoExceptionProperly() { MongoTemplate template = mockOutGetDb(); - template.remove(null, "collection"); + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> template.remove(null, "collection")); } @Test - public void defaultsConverterToMappingMongoConverter() throws Exception { + void defaultsConverterToMappingMongoConverter() { MongoTemplate template = new MongoTemplate(mongo, "database"); - assertTrue(ReflectionTestUtils.getField(template, "mongoConverter") instanceof MappingMongoConverter); + assertThat(ReflectionTestUtils.getField(template, "mongoConverter") instanceof MappingMongoConverter).isTrue(); } - @Test(expected = 
InvalidDataAccessApiUsageException.class) - public void rejectsNotFoundMapReduceResource() { + @Test + void rejectsNotFoundMapReduceResource() { GenericApplicationContext ctx = new GenericApplicationContext(); ctx.refresh(); template.setApplicationContext(ctx); - template.mapReduce("foo", "classpath:doesNotExist.js", "function() {}", Person.class); + + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> template.mapReduce("foo", "classpath:doesNotExist.js", "function() {}", Person.class)); } - @Test(expected = InvalidDataAccessApiUsageException.class) // DATAMONGO-322 - public void rejectsEntityWithNullIdIfNotSupportedIdType() { + @Test // DATAMONGO-322 + void rejectsEntityWithNullIdIfNotSupportedIdType() { Object entity = new NotAutogenerateableId(); - template.save(entity); + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class).isThrownBy(() -> template.save(entity)); } @Test // DATAMONGO-322 - public void storesEntityWithSetIdAlthoughNotAutogenerateable() { + void storesEntityWithSetIdAlthoughNotAutogenerateable() { NotAutogenerateableId entity = new NotAutogenerateableId(); entity.id = 1; @@ -204,22 +284,33 @@ public void storesEntityWithSetIdAlthoughNotAutogenerateable() { } @Test // DATAMONGO-322 - public void autogeneratesIdForEntityWithAutogeneratableId() { + void autogeneratesIdForEntityWithAutogeneratableId() { this.converter.afterPropertiesSet(); MongoTemplate template = spy(this.template); - doReturn(new ObjectId()).when(template).saveDocument(Mockito.any(String.class), Mockito.any(Document.class), - Mockito.any(Class.class)); + doReturn(new ObjectId()).when(template).saveDocument(any(String.class), any(Document.class), any(Class.class)); AutogenerateableId entity = new AutogenerateableId(); template.save(entity); - assertThat(entity.id, is(notNullValue())); + assertThat(entity.id).isNotNull(); + } + + @Test // DATAMONGO-1912 + void autogeneratesIdForMap() { + + MongoTemplate template = 
spy(this.template); + doReturn(new ObjectId()).when(template).saveDocument(any(String.class), any(Document.class), any(Class.class)); + + Map entity = new LinkedHashMap<>(); + template.save(entity, "foo"); + + assertThat(entity).containsKey("_id"); } @Test // DATAMONGO-374 - public void convertsUpdateConstraintsUsingConverters() { + void convertsUpdateConstraintsUsingConverters() { CustomConversions conversions = new MongoCustomConversions(Collections.singletonList(MyConverter.INSTANCE)); this.converter.setCustomConversions(conversions); @@ -233,32 +324,30 @@ public void convertsUpdateConstraintsUsingConverters() { QueryMapper queryMapper = new QueryMapper(converter); Document reference = queryMapper.getMappedObject(update.getUpdateObject(), Optional.empty()); - verify(collection, times(1)).updateOne(Mockito.any(org.bson.Document.class), eq(reference), - Mockito.any(UpdateOptions.class)); // .update(Mockito.any(Document.class), eq(reference), anyBoolean(), - // anyBoolean()); + verify(collection, times(1)).updateOne(any(org.bson.Document.class), eq(reference), any(UpdateOptions.class)); } @Test // DATAMONGO-474 - public void setsUnpopulatedIdField() { + void setsUnpopulatedIdField() { NotAutogenerateableId entity = new NotAutogenerateableId(); template.populateIdIfNecessary(entity, 5); - assertThat(entity.id, is(5)); + assertThat(entity.id).isEqualTo(5); } @Test // DATAMONGO-474 - public void doesNotSetAlreadyPopulatedId() { + void doesNotSetAlreadyPopulatedId() { NotAutogenerateableId entity = new NotAutogenerateableId(); entity.id = 5; template.populateIdIfNecessary(entity, 7); - assertThat(entity.id, is(5)); + assertThat(entity.id).isEqualTo(5); } @Test // DATAMONGO-868 - public void findAndModifyShouldBumpVersionByOneWhenVersionFieldNotIncludedInUpdate() { + void findAndModifyShouldBumpVersionByOneWhenVersionFieldNotIncludedInUpdate() { VersionedEntity v = new VersionedEntity(); v.id = 1; @@ -267,17 +356,14 @@ public void 
findAndModifyShouldBumpVersionByOneWhenVersionFieldNotIncludedInUpda ArgumentCaptor captor = ArgumentCaptor.forClass(org.bson.Document.class); template.findAndModify(new Query(), new Update().set("id", "10"), VersionedEntity.class); - // verify(collection, times(1)).findAndModify(Matchers.any(Document.class), - // org.mockito.Matchers.isNull(Document.class), org.mockito.Matchers.isNull(Document.class), eq(false), - // captor.capture(), eq(false), eq(false)); - verify(collection, times(1)).findOneAndUpdate(Matchers.any(org.bson.Document.class), captor.capture(), - Matchers.any(FindOneAndUpdateOptions.class)); - Assert.assertThat(captor.getValue().get("$inc"), Is. is(new org.bson.Document("version", 1L))); + verify(collection, times(1)).findOneAndUpdate(any(org.bson.Document.class), captor.capture(), + any(FindOneAndUpdateOptions.class)); + assertThat(captor.getValue().get("$inc")).isEqualTo(new Document("version", 1L)); } @Test // DATAMONGO-868 - public void findAndModifyShouldNotBumpVersionByOneWhenVersionFieldAlreadyIncludedInUpdate() { + void findAndModifyShouldNotBumpVersionByOneWhenVersionFieldAlreadyIncludedInUpdate() { VersionedEntity v = new VersionedEntity(); v.id = 1; @@ -287,18 +373,15 @@ public void findAndModifyShouldNotBumpVersionByOneWhenVersionFieldAlreadyInclude template.findAndModify(new Query(), new Update().set("version", 100), VersionedEntity.class); - verify(collection, times(1)).findOneAndUpdate(Matchers.any(org.bson.Document.class), captor.capture(), - Matchers.any(FindOneAndUpdateOptions.class)); - - // verify(collection, times(1)).findAndModify(Matchers.any(Document.class), isNull(Document.class), - // isNull(Document.class), eq(false), captor.capture(), eq(false), eq(false)); + verify(collection, times(1)).findOneAndUpdate(any(org.bson.Document.class), captor.capture(), + any(FindOneAndUpdateOptions.class)); - Assert.assertThat(captor.getValue().get("$set"), Is. 
is(new org.bson.Document("version", 100))); - Assert.assertThat(captor.getValue().get("$inc"), nullValue()); + assertThat(captor.getValue().get("$set")).isEqualTo(new Document("version", 100)); + assertThat(captor.getValue().get("$inc")).isNull(); } @Test // DATAMONGO-533 - public void registersDefaultEntityIndexCreatorIfApplicationContextHasOneForDifferentMappingContext() { + void registersDefaultEntityIndexCreatorIfApplicationContextHasOneForDifferentMappingContext() { GenericApplicationContext applicationContext = new GenericApplicationContext(); applicationContext.getBeanFactory().registerSingleton("foo", @@ -320,40 +403,53 @@ public boolean matches(MongoPersistentEntityIndexCreator argument) { } @Test // DATAMONGO-566 - public void findAllAndRemoveShouldRetrieveMatchingDocumentsPriorToRemoval() { + void findAllAndRemoveShouldRetrieveMatchingDocumentsPriorToRemoval() { BasicQuery query = new BasicQuery("{'foo':'bar'}"); template.findAllAndRemove(query, VersionedEntity.class); - verify(collection, times(1)).find(Matchers.eq(query.getQueryObject())); + verify(collection, times(1)).find(Mockito.eq(query.getQueryObject()), any(Class.class)); + } + + @Test // GH-3648 + void shouldThrowExceptionIfEntityReaderReturnsNull() { + + when(cursor.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false); + when(cursor.next()).thenReturn(new org.bson.Document("_id", Integer.valueOf(0))); + MappingMongoConverter converter = mock(MappingMongoConverter.class); + when(converter.getMappingContext()).thenReturn((MappingContext) mappingContext); + when(converter.getProjectionFactory()).thenReturn(new SpelAwareProxyProjectionFactory()); + template = new MongoTemplate(factory, converter); + + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> template.findAll(Person.class)) + .withMessageContaining("returned null"); } @Test // DATAMONGO-566 - public void findAllAndRemoveShouldRemoveDocumentsReturedByFindQuery() { + void 
findAllAndRemoveShouldRemoveDocumentsReturedByFindQuery() { - Mockito.when(cursor.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false); - Mockito.when(cursor.next()).thenReturn(new org.bson.Document("_id", Integer.valueOf(0))) + when(cursor.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false); + when(cursor.next()).thenReturn(new org.bson.Document("_id", Integer.valueOf(0))) .thenReturn(new org.bson.Document("_id", Integer.valueOf(1))); ArgumentCaptor queryCaptor = ArgumentCaptor.forClass(org.bson.Document.class); BasicQuery query = new BasicQuery("{'foo':'bar'}"); template.findAllAndRemove(query, VersionedEntity.class); - verify(collection, times(1)).deleteMany(queryCaptor.capture(), Mockito.any()); + verify(collection, times(1)).deleteMany(queryCaptor.capture(), any()); Document idField = DocumentTestUtils.getAsDocument(queryCaptor.getValue(), "_id"); - assertThat((List) idField.get("$in"), - IsIterableContainingInOrder. contains(Integer.valueOf(0), Integer.valueOf(1))); + assertThat((List) idField.get("$in")).containsExactly(Integer.valueOf(0), Integer.valueOf(1)); } @Test // DATAMONGO-566 - public void findAllAndRemoveShouldNotTriggerRemoveIfFindResultIsEmpty() { + void findAllAndRemoveShouldNotTriggerRemoveIfFindResultIsEmpty() { template.findAllAndRemove(new BasicQuery("{'foo':'bar'}"), VersionedEntity.class); - verify(collection, never()).deleteMany(Mockito.any(org.bson.Document.class)); + verify(collection, never()).deleteMany(any(org.bson.Document.class)); } @Test // DATAMONGO-948 - public void sortShouldBeTakenAsIsWhenExecutingQueryWithoutSpecificTypeInformation() { + void sortShouldBeTakenAsIsWhenExecutingQueryWithoutSpecificTypeInformation() { Query query = Query.query(Criteria.where("foo").is("bar")).with(Sort.by("foo")); template.executeQuery(query, "collection1", new DocumentCallbackHandler() { @@ -367,11 +463,11 @@ public void processDocument(Document document) throws MongoException, DataAccess ArgumentCaptor captor = 
ArgumentCaptor.forClass(org.bson.Document.class); verify(findIterable, times(1)).sort(captor.capture()); - assertThat(captor.getValue(), equalTo(new org.bson.Document("foo", 1))); + assertThat(captor.getValue()).isEqualTo(new Document("foo", 1)); } @Test // DATAMONGO-1166, DATAMONGO-1824 - public void aggregateShouldHonorReadPreferenceWhenSet() { + void aggregateShouldHonorReadPreferenceWhenSet() { template.setReadPreference(ReadPreference.secondary()); @@ -381,47 +477,172 @@ public void aggregateShouldHonorReadPreferenceWhenSet() { } @Test // DATAMONGO-1166, DATAMONGO-1824 - public void aggregateShouldIgnoreReadPreferenceWhenNotSet() { + void aggregateShouldIgnoreReadPreferenceWhenNotSet() { template.aggregate(newAggregation(Aggregation.unwind("foo")), "collection-1", Wrapper.class); verify(collection, never()).withReadPreference(any()); } - @Test // DATAMONGO-1166 - public void geoNearShouldHonorReadPreferenceWhenSet() { + @Test // GH-4277 + void aggregateShouldHonorOptionsReadConcernWhenSet() { + + AggregationOptions options = AggregationOptions.builder().readConcern(ReadConcern.SNAPSHOT).build(); + template.aggregate(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", Wrapper.class); + + verify(collection).withReadConcern(ReadConcern.SNAPSHOT); + } + + @Test // GH-4277 + void aggregateShouldHonorOptionsReadPreferenceWhenSet() { + + AggregationOptions options = AggregationOptions.builder().readPreference(ReadPreference.secondary()).build(); + template.aggregate(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", Wrapper.class); + + verify(collection).withReadPreference(ReadPreference.secondary()); + } + + @Test // GH-4277 + void aggregateStreamShouldHonorOptionsReadPreferenceWhenSet() { + + AggregationOptions options = AggregationOptions.builder().readPreference(ReadPreference.secondary()).build(); + template.aggregateStream(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", + 
Wrapper.class); + + verify(collection).withReadPreference(ReadPreference.secondary()); + } + + @Test // GH-4644 + void aggregateStreamShouldHonorMaxTimeIfSet() { + + AggregationOptions options = AggregationOptions.builder().maxTime(Duration.ofSeconds(20)).build(); + + template.aggregateStream(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", + Wrapper.class); + + verify(aggregateIterable).maxTime(20000, TimeUnit.MILLISECONDS); + } + + @Test // DATAMONGO-2153 + void aggregateShouldHonorOptionsComment() { + + AggregationOptions options = AggregationOptions.builder().comment("expensive").build(); + + template.aggregate(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", Wrapper.class); + + verify(aggregateIterable).comment("expensive"); + } + + @Test // DATAMONGO-1836 + void aggregateShouldHonorOptionsHint() { + + Document hint = new Document("dummyField", 1); + AggregationOptions options = AggregationOptions.builder().hint(hint).build(); + + template.aggregate(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", Wrapper.class); + + verify(aggregateIterable).hint(hint); + } + + @Test // GH-4238 + void aggregateShouldHonorOptionsHintString() { + + AggregationOptions options = AggregationOptions.builder().hint("index-1").build(); + + template.aggregate(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", Wrapper.class); + + verify(aggregateIterable).hintString("index-1"); + } + + @Test // GH-3542 + void aggregateShouldUseRelaxedMappingByDefault() { + + MongoTemplate template = new MongoTemplate(factory, converter) { + + @Override + protected AggregationResults doAggregate(Aggregation aggregation, String collectionName, + Class outputType, AggregationOperationContext context) { + + assertThat(ReflectionTestUtils.getField(context, "lookupPolicy")).isEqualTo(FieldLookupPolicy.relaxed()); + return super.doAggregate(aggregation, collectionName, outputType, 
context); + } + }; + + template.aggregate( + newAggregation(Jedi.class, Aggregation.unwind("foo")).withOptions(AggregationOptions.builder().build()), + Jedi.class); + } + + @Test // GH-3542 + void aggregateShouldUseStrictMappingIfOptionsIndicate() { + + MongoTemplate template = new MongoTemplate(factory, converter) { + + @Override + protected AggregationResults doAggregate(Aggregation aggregation, String collectionName, + Class outputType, AggregationOperationContext context) { + + assertThat(context).isInstanceOf(TypeBasedAggregationOperationContext.class); + return super.doAggregate(aggregation, collectionName, outputType, context); + } + }; + + assertThatExceptionOfType(InvalidPersistentPropertyPath.class) + .isThrownBy(() -> template.aggregate(newAggregation(Jedi.class, Aggregation.unwind("foo")) + .withOptions(AggregationOptions.builder().strictMapping().build()), Jedi.class)); + } + + @Test // DATAMONGO-1166, DATAMONGO-2264 + void geoNearShouldHonorReadPreferenceWhenSet() { - when(db.runCommand(Mockito.any(org.bson.Document.class), Mockito.any(ReadPreference.class), eq(Document.class))) - .thenReturn(mock(Document.class)); template.setReadPreference(ReadPreference.secondary()); NearQuery query = NearQuery.near(new Point(1, 1)); template.geoNear(query, Wrapper.class); - verify(this.db, times(1)).runCommand(Mockito.any(org.bson.Document.class), eq(ReadPreference.secondary()), - eq(Document.class)); + verify(collection).withReadPreference(eq(ReadPreference.secondary())); + } + + @Test // GH-4277 + void geoNearShouldHonorReadPreferenceFromQuery() { + + NearQuery query = NearQuery.near(new Point(1, 1)); + query.withReadPreference(ReadPreference.secondary()); + + template.geoNear(query, Wrapper.class); + + verify(collection).withReadPreference(eq(ReadPreference.secondary())); } - @Test // DATAMONGO-1166 - public void geoNearShouldIgnoreReadPreferenceWhenNotSet() { + @Test // GH-4277 + void geoNearShouldHonorReadConcernFromQuery() { + + NearQuery query = 
NearQuery.near(new Point(1, 1)); + query.withReadConcern(ReadConcern.SNAPSHOT); + + template.geoNear(query, Wrapper.class); + + verify(collection).withReadConcern(eq(ReadConcern.SNAPSHOT)); + } - when(db.runCommand(Mockito.any(Document.class), eq(Document.class))).thenReturn(mock(Document.class)); + @Test // DATAMONGO-1166, DATAMONGO-2264 + void geoNearShouldIgnoreReadPreferenceWhenNotSet() { NearQuery query = NearQuery.near(new Point(1, 1)); template.geoNear(query, Wrapper.class); - verify(this.db, times(1)).runCommand(Mockito.any(Document.class), eq(Document.class)); + verify(collection, never()).withReadPreference(any()); } @Test // DATAMONGO-1334 - @Ignore("TODO: mongo3 - a bit hard to tests with the immutable object stuff") - public void mapReduceShouldUseZeroAsDefaultLimit() { + @Disabled("TODO: mongo3 - a bit hard to tests with the immutable object stuff") + void mapReduceShouldUseZeroAsDefaultLimit() { MongoCursor cursor = mock(MongoCursor.class); MapReduceIterable output = mock(MapReduceIterable.class); when(output.limit(anyInt())).thenReturn(output); - when(output.sort(Mockito.any(Document.class))).thenReturn(output); - when(output.filter(Mockito.any(Document.class))).thenReturn(output); + when(output.sort(any(Document.class))).thenReturn(output); + when(output.filter(any(Document.class))).thenReturn(output); when(output.iterator()).thenReturn(cursor); when(cursor.hasNext()).thenReturn(false); @@ -435,17 +656,17 @@ public void mapReduceShouldUseZeroAsDefaultLimit() { } @Test // DATAMONGO-1334 - public void mapReduceShouldPickUpLimitFromQuery() { + void mapReduceShouldPickUpLimitFromQuery() { MongoCursor cursor = mock(MongoCursor.class); MapReduceIterable output = mock(MapReduceIterable.class); when(output.limit(anyInt())).thenReturn(output); when(output.sort(any())).thenReturn(output); - when(output.filter(Mockito.any(Document.class))).thenReturn(output); + when(output.filter(any(Document.class))).thenReturn(output); 
when(output.iterator()).thenReturn(cursor); when(cursor.hasNext()).thenReturn(false); - when(collection.mapReduce(anyString(), anyString())).thenReturn(output); + when(collection.mapReduce(anyString(), anyString(), eq(Document.class))).thenReturn(output); Query query = new BasicQuery("{'foo':'bar'}"); query.limit(100); @@ -456,17 +677,17 @@ public void mapReduceShouldPickUpLimitFromQuery() { } @Test // DATAMONGO-1334 - public void mapReduceShouldPickUpLimitFromOptions() { + void mapReduceShouldPickUpLimitFromOptions() { MongoCursor cursor = mock(MongoCursor.class); MapReduceIterable output = mock(MapReduceIterable.class); when(output.limit(anyInt())).thenReturn(output); when(output.sort(any())).thenReturn(output); - when(output.filter(Mockito.any(Document.class))).thenReturn(output); + when(output.filter(any(Document.class))).thenReturn(output); when(output.iterator()).thenReturn(cursor); when(cursor.hasNext()).thenReturn(false); - when(collection.mapReduce(anyString(), anyString())).thenReturn(output); + when(collection.mapReduce(anyString(), anyString(), eq(Document.class))).thenReturn(output); Query query = new BasicQuery("{'foo':'bar'}"); @@ -477,7 +698,7 @@ public void mapReduceShouldPickUpLimitFromOptions() { } @Test // DATAMONGO-1334 - public void mapReduceShouldPickUpLimitFromOptionsWhenQueryIsNotPresent() { + void mapReduceShouldPickUpLimitFromOptionsWhenQueryIsNotPresent() { MongoCursor cursor = mock(MongoCursor.class); MapReduceIterable output = mock(MapReduceIterable.class); @@ -487,7 +708,7 @@ public void mapReduceShouldPickUpLimitFromOptionsWhenQueryIsNotPresent() { when(output.iterator()).thenReturn(cursor); when(cursor.hasNext()).thenReturn(false); - when(collection.mapReduce(anyString(), anyString())).thenReturn(output); + when(collection.mapReduce(anyString(), anyString(), eq(Document.class))).thenReturn(output); template.mapReduce("collection", "function(){}", "function(key,values){}", new MapReduceOptions().limit(1000), Wrapper.class); @@ 
-496,17 +717,17 @@ public void mapReduceShouldPickUpLimitFromOptionsWhenQueryIsNotPresent() { } @Test // DATAMONGO-1334 - public void mapReduceShouldPickUpLimitFromOptionsEvenWhenQueryDefinesItDifferently() { + void mapReduceShouldPickUpLimitFromOptionsEvenWhenQueryDefinesItDifferently() { MongoCursor cursor = mock(MongoCursor.class); MapReduceIterable output = mock(MapReduceIterable.class); when(output.limit(anyInt())).thenReturn(output); when(output.sort(any())).thenReturn(output); - when(output.filter(Mockito.any(Document.class))).thenReturn(output); + when(output.filter(any(Document.class))).thenReturn(output); when(output.iterator()).thenReturn(cursor); when(cursor.hasNext()).thenReturn(false); - when(collection.mapReduce(anyString(), anyString())).thenReturn(output); + when(collection.mapReduce(anyString(), anyString(), eq(Document.class))).thenReturn(output); Query query = new BasicQuery("{'foo':'bar'}"); query.limit(100); @@ -518,7 +739,9 @@ public void mapReduceShouldPickUpLimitFromOptionsEvenWhenQueryDefinesItDifferent } @Test // DATAMONGO-1639 - public void beforeConvertEventForUpdateSeesNextVersion() { + void beforeConvertEventForUpdateSeesNextVersion() { + + when(updateResult.getModifiedCount()).thenReturn(1L); final VersionedEntity entity = new VersionedEntity(); entity.id = 1; @@ -530,25 +753,17 @@ public void beforeConvertEventForUpdateSeesNextVersion() { @Override public void onBeforeConvert(BeforeConvertEvent event) { - assertThat(event.getSource().version, is(1)); + assertThat(event.getSource().version).isEqualTo(1); } }); template.setApplicationContext(context); - MongoTemplate spy = Mockito.spy(template); - - UpdateResult result = mock(UpdateResult.class); - doReturn(1L).when(result).getModifiedCount(); - - doReturn(result).when(spy).doUpdate(anyString(), Mockito.any(Query.class), Mockito.any(Update.class), - Mockito.any(Class.class), anyBoolean(), anyBoolean()); - - spy.save(entity); + template.save(entity); } @Test // DATAMONGO-1447 - public 
void shouldNotAppend$isolatedToNonMulitUpdate() { + void shouldNotAppend$isolatedToNonMulitUpdate() { template.updateFirst(new Query(), new Update().isolated().set("jon", "snow"), Wrapper.class); @@ -557,12 +772,12 @@ public void onBeforeConvert(BeforeConvertEvent event) { verify(collection).updateOne(queryCaptor.capture(), updateCaptor.capture(), any()); - assertThat(queryCaptor.getValue(), isBsonObject().notContaining("$isolated")); - assertThat(updateCaptor.getValue(), isBsonObject().containing("$set.jon", "snow").notContaining("$isolated")); + assertThat((Document) queryCaptor.getValue()).doesNotContainKey("$isolated"); + assertThat((Document) updateCaptor.getValue()).containsEntry("$set.jon", "snow").doesNotContainKey("$isolated"); } @Test // DATAMONGO-1447 - public void shouldAppend$isolatedToUpdateMultiEmptyQuery() { + void shouldAppend$isolatedToUpdateMultiEmptyQuery() { template.updateMulti(new Query(), new Update().isolated().set("jon", "snow"), Wrapper.class); @@ -571,12 +786,12 @@ public void onBeforeConvert(BeforeConvertEvent event) { verify(collection).updateMany(queryCaptor.capture(), updateCaptor.capture(), any()); - assertThat(queryCaptor.getValue(), isBsonObject().withSize(1).containing("$isolated", 1)); - assertThat(updateCaptor.getValue(), isBsonObject().containing("$set.jon", "snow").notContaining("$isolated")); + assertThat((Document) queryCaptor.getValue()).hasSize(1).containsEntry("$isolated", 1); + assertThat((Document) updateCaptor.getValue()).containsEntry("$set.jon", "snow").doesNotContainKey("$isolated"); } @Test // DATAMONGO-1447 - public void shouldAppend$isolatedToUpdateMultiQueryIfNotPresentAndUpdateSetsValue() { + void shouldAppend$isolatedToUpdateMultiQueryIfNotPresentAndUpdateSetsValue() { Update update = new Update().isolated().set("jon", "snow"); Query query = new BasicQuery("{'eddard':'stark'}"); @@ -588,12 +803,12 @@ public void onBeforeConvert(BeforeConvertEvent event) { verify(collection).updateMany(queryCaptor.capture(), 
updateCaptor.capture(), any()); - assertThat(queryCaptor.getValue(), isBsonObject().containing("$isolated", 1).containing("eddard", "stark")); - assertThat(updateCaptor.getValue(), isBsonObject().containing("$set.jon", "snow").notContaining("$isolated")); + assertThat((Document) queryCaptor.getValue()).containsEntry("$isolated", 1).containsEntry("eddard", "stark"); + assertThat((Document) updateCaptor.getValue()).containsEntry("$set.jon", "snow").doesNotContainKey("$isolated"); } @Test // DATAMONGO-1447 - public void shouldNotAppend$isolatedToUpdateMultiQueryIfNotPresentAndUpdateDoesNotSetValue() { + void shouldNotAppend$isolatedToUpdateMultiQueryIfNotPresentAndUpdateDoesNotSetValue() { Update update = new Update().set("jon", "snow"); Query query = new BasicQuery("{'eddard':'stark'}"); @@ -605,12 +820,12 @@ public void onBeforeConvert(BeforeConvertEvent event) { verify(collection).updateMany(queryCaptor.capture(), updateCaptor.capture(), any()); - assertThat(queryCaptor.getValue(), isBsonObject().notContaining("$isolated").containing("eddard", "stark")); - assertThat(updateCaptor.getValue(), isBsonObject().containing("$set.jon", "snow").notContaining("$isolated")); + assertThat((Document) queryCaptor.getValue()).doesNotContainKey("$isolated").containsEntry("eddard", "stark"); + assertThat((Document) updateCaptor.getValue()).containsEntry("$set.jon", "snow").doesNotContainKey("$isolated"); } @Test // DATAMONGO-1447 - public void shouldNotOverwrite$isolatedToUpdateMultiQueryIfPresentAndUpdateDoesNotSetValue() { + void shouldNotOverwrite$isolatedToUpdateMultiQueryIfPresentAndUpdateDoesNotSetValue() { Update update = new Update().set("jon", "snow"); Query query = new BasicQuery("{'eddard':'stark', '$isolated' : 1}"); @@ -622,12 +837,12 @@ public void onBeforeConvert(BeforeConvertEvent event) { verify(collection).updateMany(queryCaptor.capture(), updateCaptor.capture(), any()); - assertThat(queryCaptor.getValue(), isBsonObject().containing("$isolated", 
1).containing("eddard", "stark")); - assertThat(updateCaptor.getValue(), isBsonObject().containing("$set.jon", "snow").notContaining("$isolated")); + assertThat((Document) queryCaptor.getValue()).containsEntry("$isolated", 1).containsEntry("eddard", "stark"); + assertThat((Document) updateCaptor.getValue()).containsEntry("$set.jon", "snow").doesNotContainKey("$isolated"); } @Test // DATAMONGO-1447 - public void shouldNotOverwrite$isolatedToUpdateMultiQueryIfPresentAndUpdateSetsValue() { + void shouldNotOverwrite$isolatedToUpdateMultiQueryIfPresentAndUpdateSetsValue() { Update update = new Update().isolated().set("jon", "snow"); Query query = new BasicQuery("{'eddard':'stark', '$isolated' : 0}"); @@ -639,12 +854,40 @@ public void onBeforeConvert(BeforeConvertEvent event) { verify(collection).updateMany(queryCaptor.capture(), updateCaptor.capture(), any()); - assertThat(queryCaptor.getValue(), isBsonObject().containing("$isolated", 0).containing("eddard", "stark")); - assertThat(updateCaptor.getValue(), isBsonObject().containing("$set.jon", "snow").notContaining("$isolated")); + assertThat((Document) queryCaptor.getValue()).containsEntry("$isolated", 0).containsEntry("eddard", "stark"); + assertThat((Document) updateCaptor.getValue()).containsEntry("$set.jon", "snow").doesNotContainKey("$isolated"); + } + + @Test // DATAMONGO-1311 + void executeQueryShouldUseBatchSizeWhenPresent() { + + when(findIterable.batchSize(anyInt())).thenReturn(findIterable); + + Query query = new Query().cursorBatchSize(1234); + template.find(query, Person.class); + + verify(findIterable).batchSize(1234); + } + + @Test // GH-4277 + void findShouldUseReadConcernWhenPresent() { + + template.find(new BasicQuery("{'foo' : 'bar'}").withReadConcern(ReadConcern.SNAPSHOT), AutogenerateableId.class); + + verify(collection).withReadConcern(ReadConcern.SNAPSHOT); + } + + @Test // GH-4277 + void findShouldUseReadPreferenceWhenPresent() { + + template.find(new BasicQuery("{'foo' : 
'bar'}").withReadPreference(ReadPreference.secondary()), + AutogenerateableId.class); + + verify(collection).withReadPreference(ReadPreference.secondary()); } @Test // DATAMONGO-1518 - public void executeQueryShouldUseCollationWhenPresent() { + void executeQueryShouldUseCollationWhenPresent() { template.executeQuery(new BasicQuery("{}").collation(Collation.of("fr")), "collection-1", val -> {}); @@ -652,15 +895,15 @@ public void executeQueryShouldUseCollationWhenPresent() { } @Test // DATAMONGO-1518 - public void streamQueryShouldUseCollationWhenPresent() { + void streamQueryShouldUseCollationWhenPresent() { - template.stream(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class).next(); + template.stream(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class); verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); } @Test // DATAMONGO-1518 - public void findShouldUseCollationWhenPresent() { + void findShouldUseCollationWhenPresent() { template.find(new BasicQuery("{'foo' : 'bar'}").collation(Collation.of("fr")), AutogenerateableId.class); @@ -668,7 +911,7 @@ public void findShouldUseCollationWhenPresent() { } @Test // DATAMONGO-1518 - public void findOneShouldUseCollationWhenPresent() { + void findOneShouldUseCollationWhenPresent() { template.findOne(new BasicQuery("{'foo' : 'bar'}").collation(Collation.of("fr")), AutogenerateableId.class); @@ -676,88 +919,124 @@ public void findOneShouldUseCollationWhenPresent() { } @Test // DATAMONGO-1518 - public void existsShouldUseCollationWhenPresent() { + void existsShouldUseCollationWhenPresent() { template.exists(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class); ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); - verify(collection).count(any(), options.capture()); + verify(collection).countDocuments(any(), options.capture()); - assertThat(options.getValue().getCollation(), - 
is(equalTo(com.mongodb.client.model.Collation.builder().locale("fr").build()))); + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build()); } @Test // DATAMONGO-1518 - public void findAndModfiyShoudUseCollationWhenPresent() { + void findAndModfiyShoudUseCollationWhenPresent() { template.findAndModify(new BasicQuery("{}").collation(Collation.of("fr")), new Update(), AutogenerateableId.class); ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndUpdateOptions.class); - verify(collection).findOneAndUpdate(Mockito.any(), Mockito.any(), options.capture()); + verify(collection).findOneAndUpdate(any(), any(Bson.class), options.capture()); - assertThat(options.getValue().getCollation().getLocale(), is("fr")); + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); } @Test // DATAMONGO-1518 - public void findAndRemoveShouldUseCollationWhenPresent() { + void findAndRemoveShouldUseCollationWhenPresent() { template.findAndRemove(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class); ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndDeleteOptions.class); - verify(collection).findOneAndDelete(Mockito.any(), options.capture()); + verify(collection).findOneAndDelete(any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-2196 + void removeShouldApplyWriteConcern() { - assertThat(options.getValue().getCollation().getLocale(), is("fr")); + Person person = new Person(); + person.id = "id-1"; + + template.setWriteConcern(WriteConcern.UNACKNOWLEDGED); + template.remove(person); + + verify(collection).withWriteConcern(eq(WriteConcern.UNACKNOWLEDGED)); + verify(collectionWithWriteConcern).deleteOne(any(Bson.class), any()); } @Test // DATAMONGO-1518 - public void findAndRemoveManyShouldUseCollationWhenPresent() { + void findAndRemoveManyShouldUseCollationWhenPresent() { - 
template.doRemove("collection-1", new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class); + template.doRemove("collection-1", new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class, + true); ArgumentCaptor options = ArgumentCaptor.forClass(DeleteOptions.class); - verify(collection).deleteMany(Mockito.any(), options.capture()); + verify(collection).deleteMany(any(), options.capture()); - assertThat(options.getValue().getCollation().getLocale(), is("fr")); + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); } @Test // DATAMONGO-1518 - public void updateOneShouldUseCollationWhenPresent() { + void updateOneShouldUseCollationWhenPresent() { template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), new Update().set("foo", "bar"), AutogenerateableId.class); ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); - verify(collection).updateOne(Mockito.any(), Mockito.any(), options.capture()); + verify(collection).updateOne(any(), any(Bson.class), options.capture()); - assertThat(options.getValue().getCollation().getLocale(), is("fr")); + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); } @Test // DATAMONGO-1518 - public void updateManyShouldUseCollationWhenPresent() { + void updateManyShouldUseCollationWhenPresent() { template.updateMulti(new BasicQuery("{}").collation(Collation.of("fr")), new Update().set("foo", "bar"), AutogenerateableId.class); ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); - verify(collection).updateMany(Mockito.any(), Mockito.any(), options.capture()); + verify(collection).updateMany(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // GH-3218 + void updateUsesHintStringFromQuery() { + + template.updateFirst(new Query().withHint("index-1"), new Update().set("spring", "data"), Human.class); + + ArgumentCaptor options = 
ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(Bson.class), any(Bson.class), options.capture()); - assertThat(options.getValue().getCollation().getLocale(), is("fr")); + assertThat(options.getValue().getHintString()).isEqualTo("index-1"); + } + + @Test // GH-3218 + void updateUsesHintDocumentFromQuery() { + + template.updateFirst(new Query().withHint("{ name : 1 }"), new Update().set("spring", "data"), Human.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(Bson.class), any(Bson.class), options.capture()); + assertThat(options.getValue().getHint()).isEqualTo(new Document("name", 1)); } @Test // DATAMONGO-1518 - public void replaceOneShouldUseCollationWhenPresent() { + void replaceOneShouldUseCollationWhenPresent() { template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), new Update(), AutogenerateableId.class); - ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); - verify(collection).replaceOne(Mockito.any(), Mockito.any(), options.capture()); + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(), any(), options.capture()); - assertThat(options.getValue().getCollation().getLocale(), is("fr")); + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); } @Test // DATAMONGO-1518, DATAMONGO-1824 - public void aggregateShouldUseCollationWhenPresent() { + void aggregateShouldUseCollationWhenPresent() { Aggregation aggregation = newAggregation(project("id")) .withOptions(newAggregationOptions().collation(Collation.of("fr")).build()); @@ -767,7 +1046,7 @@ public void aggregateShouldUseCollationWhenPresent() { } @Test // DATAMONGO-1824 - public void aggregateShouldUseBatchSizeWhenPresent() { + void aggregateShouldUseBatchSizeWhenPresent() { Aggregation aggregation = newAggregation(project("id")) 
.withOptions(newAggregationOptions().collation(Collation.of("fr")).cursorBatchSize(100).build()); @@ -777,191 +1056,1963 @@ public void aggregateShouldUseBatchSizeWhenPresent() { } @Test // DATAMONGO-1518 - public void mapReduceShouldUseCollationWhenPresent() { + void mapReduceShouldUseCollationWhenPresent() { template.mapReduce("", "", "", MapReduceOptions.options().collation(Collation.of("fr")), AutogenerateableId.class); verify(mapReduceIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); } - @Test // DATAMONGO-1518 - public void geoNearShouldUseCollationWhenPresent() { + @Test // DATAMONGO-2027 + void mapReduceShouldUseOutputCollectionWhenPresent() { + + template.mapReduce("", "", "", MapReduceOptions.options().outputCollection("out-collection"), + AutogenerateableId.class); + + verify(mapReduceIterable).collectionName(eq("out-collection")); + } + + @Test // DATAMONGO-2027 + void mapReduceShouldNotUseOutputCollectionForInline() { + + template.mapReduce("", "", "", MapReduceOptions.options().actionInline().outputCollection("out-collection"), + AutogenerateableId.class); + + verify(mapReduceIterable, never()).collectionName(any()); + } + + @Test // DATAMONGO-2027 + void mapReduceShouldUseOutputActionWhenPresent() { + + template.mapReduce("", "", "", MapReduceOptions.options().actionMerge().outputCollection("out-collection"), + AutogenerateableId.class); + + verify(mapReduceIterable).action(eq(MapReduceAction.MERGE)); + } + + @Test // DATAMONGO-2027 + void mapReduceShouldUseOutputDatabaseWhenPresent() { + + template.mapReduce("", "", "", + MapReduceOptions.options().outputDatabase("out-database").outputCollection("out-collection"), + AutogenerateableId.class); + + verify(mapReduceIterable).databaseName(eq("out-database")); + } + + @Test // DATAMONGO-2027 + void mapReduceShouldNotUseOutputDatabaseForInline() { + + template.mapReduce("", "", "", MapReduceOptions.options().outputDatabase("out-database"), 
AutogenerateableId.class); + + verify(mapReduceIterable, never()).databaseName(any()); + } + + @Test // DATAMONGO-1518, DATAMONGO-2264 + void geoNearShouldUseCollationWhenPresent() { NearQuery query = NearQuery.near(0D, 0D).query(new BasicQuery("{}").collation(Collation.of("fr"))); template.geoNear(query, AutogenerateableId.class); - ArgumentCaptor cmd = ArgumentCaptor.forClass(Document.class); - verify(db).runCommand(cmd.capture(), Mockito.any(Class.class)); + verify(aggregateIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-1880 + void countShouldUseCollationWhenPresent() { + + template.count(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); - assertThat(cmd.getValue().get("collation", Document.class), equalTo(new Document("locale", "fr"))); + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build()); } - @Test // DATAMONGO-1518 - public void groupShouldUseCollationWhenPresent() { + @Test // DATAMONGO-2360 + void countShouldApplyQueryHintIfPresent() { - commandResultDocument.append("retval", Collections.emptySet()); - template.group("collection-1", GroupBy.key("id").reduceFunction("bar").collation(Collation.of("fr")), - AutogenerateableId.class); + Document queryHint = new Document("age", 1); + template.count(new BasicQuery("{}").withHint(queryHint), AutogenerateableId.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getHint()).isEqualTo(queryHint); + } + + @Test // DATAMONGO-2365 + void countShouldApplyQueryHintAsIndexNameIfPresent() { - ArgumentCaptor cmd = ArgumentCaptor.forClass(Document.class); - verify(db).runCommand(cmd.capture(), 
Mockito.any(Class.class)); + template.count(new BasicQuery("{}").withHint("idx-1"), AutogenerateableId.class); - assertThat(cmd.getValue().get("group", Document.class).get("collation", Document.class), - equalTo(new Document("locale", "fr"))); + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getHintString()).isEqualTo("idx-1"); } @Test // DATAMONGO-1733 - public void appliesFieldsWhenInterfaceProjectionIsClosedAndQueryDoesNotDefineFields() { + void appliesFieldsWhenInterfaceProjectionIsClosedAndQueryDoesNotDefineFields() { - template.doFind("star-wars", new Document(), new Document(), Person.class, PersonProjection.class, null); + template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class, + PersonProjection.class, CursorPreparer.NO_OP_PREPARER); verify(findIterable).projection(eq(new Document("firstname", 1))); } @Test // DATAMONGO-1733 - public void doesNotApplyFieldsWhenInterfaceProjectionIsClosedAndQueryDefinesFields() { + void doesNotApplyFieldsWhenInterfaceProjectionIsClosedAndQueryDefinesFields() { - template.doFind("star-wars", new Document(), new Document("bar", 1), Person.class, PersonProjection.class, null); + template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document("bar", 1), Person.class, + PersonProjection.class, CursorPreparer.NO_OP_PREPARER); verify(findIterable).projection(eq(new Document("bar", 1))); } @Test // DATAMONGO-1733 - public void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() { + void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() { - template.doFind("star-wars", new Document(), new Document(), Person.class, PersonSpELProjection.class, null); + template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class, + PersonSpELProjection.class, CursorPreparer.NO_OP_PREPARER); - 
verify(findIterable).projection(eq(new Document())); + verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT)); } - @Test // DATAMONGO-1733 - public void doesNotApplyFieldsToDtoProjection() { + @Test // DATAMONGO-1733, DATAMONGO-2041 + void appliesFieldsToDtoProjection() { - template.doFind("star-wars", new Document(), new Document(), Person.class, Jedi.class, null); + template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class, + Jedi.class, CursorPreparer.NO_OP_PREPARER); - verify(findIterable).projection(eq(new Document())); + verify(findIterable).projection(eq(new Document("firstname", 1))); } @Test // DATAMONGO-1733 - public void doesNotApplyFieldsToDtoProjectionWhenQueryDefinesFields() { + void doesNotApplyFieldsToDtoProjectionWhenQueryDefinesFields() { - template.doFind("star-wars", new Document(), new Document("bar", 1), Person.class, Jedi.class, null); + template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document("bar", 1), Person.class, + Jedi.class, CursorPreparer.NO_OP_PREPARER); verify(findIterable).projection(eq(new Document("bar", 1))); } @Test // DATAMONGO-1733 - public void doesNotApplyFieldsWhenTargetIsNotAProjection() { + void doesNotApplyFieldsWhenTargetIsNotAProjection() { - template.doFind("star-wars", new Document(), new Document(), Person.class, Person.class, null); + template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class, + Person.class, CursorPreparer.NO_OP_PREPARER); - verify(findIterable).projection(eq(new Document())); + verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT)); } @Test // DATAMONGO-1733 - public void doesNotApplyFieldsWhenTargetExtendsDomainType() { + void doesNotApplyFieldsWhenTargetExtendsDomainType() { - template.doFind("star-wars", new Document(), new Document(), Person.class, PersonExtended.class, null); + template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), 
new Document(), Person.class, + PersonExtended.class, CursorPreparer.NO_OP_PREPARER); - verify(findIterable).projection(eq(new Document())); + verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT)); } - class AutogenerateableId { + @Test // DATAMONGO-1348, DATAMONGO-2264 + void geoNearShouldMapQueryCorrectly() { - @Id BigInteger id; + NearQuery query = NearQuery.near(new Point(1, 1)); + query.query(Query.query(Criteria.where("customName").is("rand al'thor"))); + + template.geoNear(query, WithNamedFields.class); + + ArgumentCaptor> capture = ArgumentCaptor.forClass(List.class); + + verify(collection).aggregate(capture.capture(), eq(Document.class)); + Document $geoNear = capture.getValue().iterator().next(); + + assertThat($geoNear).containsEntry("$geoNear.query.custom-named-field", "rand al'thor") + .doesNotContainKey("query.customName"); } - class NotAutogenerateableId { + @Test // DATAMONGO-1348, DATAMONGO-2264 + void geoNearShouldMapGeoJsonPointCorrectly() { - @Id Integer id; + NearQuery query = NearQuery.near(new GeoJsonPoint(1, 2)); + query.query(Query.query(Criteria.where("customName").is("rand al'thor"))); - public Pattern getId() { - return Pattern.compile("."); - } + template.geoNear(query, WithNamedFields.class); + + ArgumentCaptor> capture = ArgumentCaptor.forClass(List.class); + + verify(collection).aggregate(capture.capture(), eq(Document.class)); + Document $geoNear = capture.getValue().iterator().next(); + + assertThat($geoNear).containsEntry("$geoNear.near.type", "Point").containsEntry("$geoNear.near.coordinates.[0]", 1D) + .containsEntry("$geoNear.near.coordinates.[1]", 2D); } - static class VersionedEntity { + @Test // DATAMONGO-2155, GH-3407 + void saveVersionedEntityShouldCallUpdateCorrectly() { - @Id Integer id; - @Version Integer version; + when(updateResult.getModifiedCount()).thenReturn(1L); + + VersionedEntity entity = new VersionedEntity(); + entity.id = 1; + entity.version = 10; + + ArgumentCaptor queryCaptor = 
ArgumentCaptor.forClass(org.bson.Document.class); + ArgumentCaptor updateCaptor = ArgumentCaptor.forClass(org.bson.Document.class); + + template.save(entity); + + verify(collection, times(1)).replaceOne(queryCaptor.capture(), updateCaptor.capture(), any(com.mongodb.client.model.ReplaceOptions.class)); + + assertThat(queryCaptor.getValue()).isEqualTo(new Document("_id", 1).append("version", 10)); + assertThat(updateCaptor.getValue()) + .isEqualTo(new Document("version", 11).append("_class", VersionedEntity.class.getName()).append("name", null)); } - enum MyConverter implements Converter { + @Test // DATAMONGO-1783 + void usesQueryOffsetForCountOperation() { - INSTANCE; + template.count(new BasicQuery("{}").skip(100), AutogenerateableId.class); - public String convert(AutogenerateableId source) { - return source.toString(); - } + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getSkip()).isEqualTo(100); } - @Data - @org.springframework.data.mongodb.core.mapping.Document(collection = "star-wars") - static class Person { + @Test // DATAMONGO-1783 + void usesQueryLimitForCountOperation() { - @Id String id; - String firstname; + template.count(new BasicQuery("{}").limit(10), AutogenerateableId.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getLimit()).isEqualTo(10); } - static class PersonExtended extends Person { + @Test // DATAMONGO-2215 + void updateShouldApplyArrayFilters() { - String lastname; + template.updateFirst(new BasicQuery("{}"), + new Update().set("grades.$[element]", 100).filterArray(Criteria.where("element").gte(100)), + EntityWithListOfSimple.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(), any(Bson.class), options.capture()); + + 
Assertions.assertThat((List) options.getValue().getArrayFilters()) + .contains(new org.bson.Document("element", new Document("$gte", 100))); } - interface PersonProjection { - String getFirstname(); + @Test // DATAMONGO-2215 + void findAndModifyShouldApplyArrayFilters() { + + template.findAndModify(new BasicQuery("{}"), + new Update().set("grades.$[element]", 100).filterArray(Criteria.where("element").gte(100)), + EntityWithListOfSimple.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndUpdateOptions.class); + verify(collection).findOneAndUpdate(any(), any(Bson.class), options.capture()); + + Assertions.assertThat((List) options.getValue().getArrayFilters()) + .contains(new org.bson.Document("element", new Document("$gte", 100))); } - public interface PersonSpELProjection { + @Test // DATAMONGO-1854 + void streamQueryShouldUseDefaultCollationWhenPresent() { - @Value("#{target.firstname}") - String getName(); + template.stream(new BasicQuery("{}"), Sith.class); + + verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); } - @Data - static class Human { - @Id String id; + @Test // DATAMONGO-1854 + void findShouldNotUseCollationWhenNoDefaultPresent() { + + template.find(new BasicQuery("{'foo' : 'bar'}"), Jedi.class); + + verify(findIterable, never()).collation(any()); } - @Data - static class Jedi { + @Test // DATAMONGO-1854 + void findShouldUseDefaultCollationWhenPresent() { - @Field("firstname") String name; + template.find(new BasicQuery("{'foo' : 'bar'}"), Sith.class); + + verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); } - class Wrapper { + @Test // DATAMONGO-1854 + void findOneShouldUseDefaultCollationWhenPresent() { - AutogenerateableId foo; + template.findOne(new BasicQuery("{'foo' : 'bar'}"), Sith.class); + + verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); } - /** - * Mocks 
out the {@link MongoTemplate#getDb()} method to return the {@link DB} mock instead of executing the actual - * behaviour. - * - * @return - */ - private MongoTemplate mockOutGetDb() { + @Test // DATAMONGO-1854 + void existsShouldUseDefaultCollationWhenPresent() { - MongoTemplate template = spy(this.template); - when(template.getDb()).thenReturn(db); - return template; + template.exists(new BasicQuery("{}"), Sith.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoOperationsUnitTests#getOperations() - */ - @Override - protected MongoOperations getOperationsForExceptionHandling() { - MongoTemplate template = spy(this.template); - when(template.getDb()).thenThrow(new MongoException("Error!")); - return template; + @Test // DATAMONGO-1854 + void findAndModfiyShoudUseDefaultCollationWhenPresent() { + + template.findAndModify(new BasicQuery("{}"), new Update(), Sith.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndUpdateOptions.class); + verify(collection).findOneAndUpdate(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoOperationsUnitTests#getOperations() - */ - @Override - protected MongoOperations getOperations() { - return this.template; + @Test // DATAMONGO-1854 + void findAndRemoveShouldUseDefaultCollationWhenPresent() { + + template.findAndRemove(new BasicQuery("{}"), Sith.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndDeleteOptions.class); + verify(collection).findOneAndDelete(any(), options.capture()); + + 
assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void createCollectionShouldNotCollationIfNotPresent() { + + template.createCollection(AutogenerateableId.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + Assertions.assertThat(options.getValue().getCollation()).isNull(); + } + + @Test // DATAMONGO-1854 + void createCollectionShouldApplyDefaultCollation() { + + template.createCollection(Sith.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void createCollectionShouldFavorExplicitOptionsOverDefaultCollation() { + + template.createCollection(Sith.class, CollectionOptions.just(Collation.of("en_US"))); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("en_US").build()); + } + + @Test // DATAMONGO-1854 + void createCollectionShouldUseDefaultCollationIfCollectionOptionsAreNull() { + + template.createCollection(Sith.class, null); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void aggreateShouldUseDefaultCollationIfPresent() { + + template.aggregate(newAggregation(Sith.class, project("id")), AutogenerateableId.class, Document.class); 
+ + verify(aggregateIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // DATAMONGO-1854 + void aggreateShouldUseCollationFromOptionsEvenIfDefaultCollationIsPresent() { + + template.aggregateStream(newAggregation(Sith.class, project("id")).withOptions( + newAggregationOptions().collation(Collation.of("fr")).build()), AutogenerateableId.class, Document.class); + + verify(aggregateIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-1854 + void aggreateStreamShouldUseDefaultCollationIfPresent() { + + template.aggregate(newAggregation(Sith.class, project("id")), AutogenerateableId.class, Document.class); + + verify(aggregateIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // DATAMONGO-1854 + void aggreateStreamShouldUseCollationFromOptionsEvenIfDefaultCollationIsPresent() { + + template.aggregateStream(newAggregation(Sith.class, project("id")).withOptions( + newAggregationOptions().collation(Collation.of("fr")).build()), AutogenerateableId.class, Document.class); + + verify(aggregateIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-2390 + void aggregateShouldNoApplyZeroOrNegativeMaxTime() { + + template.aggregate( + newAggregation(Sith.class, project("id")).withOptions(newAggregationOptions().maxTime(Duration.ZERO).build()), + AutogenerateableId.class, Document.class); + template.aggregate(newAggregation(Sith.class, project("id")).withOptions( + newAggregationOptions().maxTime(Duration.ofSeconds(-1)).build()), AutogenerateableId.class, Document.class); + + verify(aggregateIterable, never()).maxTime(anyLong(), any()); + } + + @Test // DATAMONGO-2390 + void aggregateShouldApplyMaxTimeIfSet() { + + template.aggregate(newAggregation(Sith.class, project("id")).withOptions( + 
newAggregationOptions().maxTime(Duration.ofSeconds(10)).build()), AutogenerateableId.class, Document.class); + + verify(aggregateIterable).maxTime(eq(10000L), eq(TimeUnit.MILLISECONDS)); + } + + @Test // DATAMONGO-1854 + void findAndReplaceShouldUseCollationWhenPresent() { + + template.findAndReplace(new BasicQuery("{}").collation(Collation.of("fr")), new AutogenerateableId()); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndReplaceOptions.class); + verify(collection).findOneAndReplace(any(), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-1854 + void findOneWithSortShouldUseCollationWhenPresent() { + + template.findOne(new BasicQuery("{}").collation(Collation.of("fr")).with(Sort.by("id")), Sith.class); + + verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-1854 + void findOneWithSortShouldUseDefaultCollationWhenPresent() { + + template.findOne(new BasicQuery("{}").with(Sort.by("id")), Sith.class); + + verify(findIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // DATAMONGO-1854 + void findAndReplaceShouldUseDefaultCollationWhenPresent() { + + template.findAndReplace(new BasicQuery("{}"), new Sith()); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndReplaceOptions.class); + verify(collection).findOneAndReplace(any(), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("de_AT"); + } + + @Test // DATAMONGO-1854 + void findAndReplaceShouldUseCollationEvenIfDefaultCollationIsPresent() { + + template.findAndReplace(new BasicQuery("{}").collation(Collation.of("fr")), new Sith()); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndReplaceOptions.class); + verify(collection).findOneAndReplace(any(), any(), options.capture()); + + 
assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-1854 + void findDistinctShouldUseDefaultCollationWhenPresent() { + + template.findDistinct(new BasicQuery("{}"), "name", Sith.class, String.class); + + verify(distinctIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // DATAMONGO-1854 + void findDistinctPreferCollationFromQueryOverDefaultCollation() { + + template.findDistinct(new BasicQuery("{}").collation(Collation.of("fr")), "name", Sith.class, String.class); + + verify(distinctIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-1854 + void updateFirstShouldUseDefaultCollationWhenPresent() { + + template.updateFirst(new BasicQuery("{}"), Update.update("foo", "bar"), Sith.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void updateFirstShouldPreferExplicitCollationOverDefaultCollation() { + + template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), Update.update("foo", "bar"), Sith.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build()); + } + + @Test // DATAMONGO-1854 + void updateMultiShouldUseDefaultCollationWhenPresent() { + + template.updateMulti(new BasicQuery("{}"), Update.update("foo", "bar"), Sith.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateMany(any(), any(Bson.class), options.capture()); + + 
assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void updateMultiShouldPreferExplicitCollationOverDefaultCollation() { + + template.updateMulti(new BasicQuery("{}").collation(Collation.of("fr")), Update.update("foo", "bar"), Sith.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateMany(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build()); + } + + @Test // DATAMONGO-1854 + void removeShouldUseDefaultCollationWhenPresent() { + + template.remove(new BasicQuery("{}"), Sith.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(DeleteOptions.class); + verify(collection).deleteMany(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void removeShouldPreferExplicitCollationOverDefaultCollation() { + + template.remove(new BasicQuery("{}").collation(Collation.of("fr")), Sith.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(DeleteOptions.class); + verify(collection).deleteMany(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build()); + } + + @Test // DATAMONGO-1854 + void mapReduceShouldUseDefaultCollationWhenPresent() { + + template.mapReduce("", "", "", MapReduceOptions.options(), Sith.class); + + verify(mapReduceIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // DATAMONGO-1854 + void mapReduceShouldPreferExplicitCollationOverDefaultCollation() { + + template.mapReduce("", "", "", MapReduceOptions.options().collation(Collation.of("fr")), Sith.class); + + 
verify(mapReduceIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-2261 + void saveShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + template.save(entity); + + verify(beforeConvertCallback).onBeforeConvert(eq(entity), anyString()); + verify(beforeSaveCallback).onBeforeSave(eq(entity), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void insertShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + template.insert(entity); + + verify(beforeConvertCallback).onBeforeConvert(eq(entity), anyString()); + verify(beforeSaveCallback).onBeforeSave(eq(entity), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void insertAllShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity1 = new Person(); + entity1.id = "1"; + entity1.firstname = "luke"; + + Person entity2 = new Person(); + entity1.id = "2"; + entity1.firstname = "luke"; + + 
template.insertAll(Arrays.asList(entity1, entity2)); + + verify(beforeConvertCallback, times(2)).onBeforeConvert(any(), anyString()); + verify(beforeSaveCallback, times(2)).onBeforeSave(any(), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void findAndReplaceShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + template.findAndReplace(new Query(), entity); + + verify(beforeConvertCallback).onBeforeConvert(eq(entity), anyString()); + verify(beforeSaveCallback).onBeforeSave(eq(entity), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void publishesEventsAndEntityCallbacksInOrder() { + + BeforeConvertCallback beforeConvertCallback = new BeforeConvertCallback() { + + @Override + public Person onBeforeConvert(Person entity, String collection) { + + assertThat(entity.id).isEqualTo("before-convert-event"); + entity.id = "before-convert-callback"; + return entity; + } + }; + + BeforeSaveCallback beforeSaveCallback = new BeforeSaveCallback() { + + @Override + public Person onBeforeSave(Person entity, Document document, String collection) { + + assertThat(entity.id).isEqualTo("before-save-event"); + entity.id = "before-save-callback"; + return entity; + } + }; + + AbstractMongoEventListener eventListener = new AbstractMongoEventListener() { + + @Override + public void onBeforeConvert(BeforeConvertEvent event) { + + assertThat(event.getSource().id).isEqualTo("init"); + event.getSource().id = "before-convert-event"; + } + + @Override + public void onBeforeSave(BeforeSaveEvent event) { + + assertThat(event.getSource().id).isEqualTo("before-convert-callback"); + event.getSource().id = 
"before-save-event"; + } + }; + + StaticApplicationContext ctx = new StaticApplicationContext(); + ctx.registerBean(ApplicationListener.class, () -> eventListener); + ctx.registerBean(BeforeConvertCallback.class, () -> beforeConvertCallback); + ctx.registerBean(BeforeSaveCallback.class, () -> beforeSaveCallback); + ctx.refresh(); + + template.setApplicationContext(ctx); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + Person saved = template.save(entity); + + assertThat(saved.id).isEqualTo("before-save-callback"); + } + + @Test // DATAMONGO-2261 + void beforeSaveCallbackAllowsTargetDocumentModifications() { + + BeforeSaveCallback beforeSaveCallback = new BeforeSaveCallback() { + + @Override + public Person onBeforeSave(Person entity, Document document, String collection) { + + document.append("added-by", "callback"); + return entity; + } + }; + + StaticApplicationContext ctx = new StaticApplicationContext(); + ctx.registerBean(BeforeSaveCallback.class, () -> beforeSaveCallback); + ctx.refresh(); + + template.setApplicationContext(ctx); + + Person entity = new Person(); + entity.id = "luke-skywalker"; + entity.firstname = "luke"; + + template.save(entity); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Document.class); + + verify(collection).replaceOne(any(), captor.capture(), any(com.mongodb.client.model.ReplaceOptions.class)); + assertThat(captor.getValue()).containsEntry("added-by", "callback"); + } + + @Test // DATAMONGO-2307 + void beforeSaveCallbackAllowsTargetEntityModificationsUsingSave() { + + StaticApplicationContext ctx = new StaticApplicationContext(); + ctx.registerBean(BeforeSaveCallback.class, this::beforeSaveCallbackReturningNewPersonWithTransientAttribute); + ctx.refresh(); + + template.setApplicationContext(ctx); + + PersonWithTransientAttribute entity = new PersonWithTransientAttribute(); + entity.id = "luke-skywalker"; + entity.firstname = "luke"; + entity.isNew = true; + + PersonWithTransientAttribute 
savedPerson = template.save(entity); + assertThat(savedPerson.isNew).isFalse(); + } + + @Test // DATAMONGO-2307 + void beforeSaveCallbackAllowsTargetEntityModificationsUsingInsert() { + + StaticApplicationContext ctx = new StaticApplicationContext(); + ctx.registerBean(BeforeSaveCallback.class, this::beforeSaveCallbackReturningNewPersonWithTransientAttribute); + ctx.refresh(); + + template.setApplicationContext(ctx); + + PersonWithTransientAttribute entity = new PersonWithTransientAttribute(); + entity.id = "luke-skywalker"; + entity.firstname = "luke"; + entity.isNew = true; + + PersonWithTransientAttribute savedPerson = template.insert(entity); + assertThat(savedPerson.isNew).isFalse(); + } + + // TODO: additional tests for what is when saved. + + @Test // DATAMONGO-2261 + void entityCallbacksAreNotSetByDefault() { + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isNull(); + } + + @Test // DATAMONGO-2261 + void entityCallbacksShouldBeInitiatedOnSettingApplicationContext() { + + ApplicationContext ctx = new StaticApplicationContext(); + template.setApplicationContext(ctx); + + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isNotNull(); + } + + @Test // DATAMONGO-2261 + void setterForEntityCallbackOverridesContextInitializedOnes() { + + ApplicationContext ctx = new StaticApplicationContext(); + template.setApplicationContext(ctx); + + EntityCallbacks callbacks = EntityCallbacks.create(); + template.setEntityCallbacks(callbacks); + + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isSameAs(callbacks); + } + + @Test // DATAMONGO-2261 + void setterForApplicationContextShouldNotOverrideAlreadySetEntityCallbacks() { + + EntityCallbacks callbacks = EntityCallbacks.create(); + ApplicationContext ctx = new StaticApplicationContext(); + + template.setEntityCallbacks(callbacks); + template.setApplicationContext(ctx); + + 
Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isSameAs(callbacks); + } + + @Test // DATAMONGO-2344, DATAMONGO-2572 + void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFind() { + + template.find(new Query().allowSecondaryReads(), AutogenerateableId.class); + + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2344, DATAMONGO-2572 + void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFindOne() { + + template.findOne(new Query().allowSecondaryReads(), AutogenerateableId.class); + + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2344, DATAMONGO-2572 + void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFindDistinct() { + + template.findDistinct(new Query().allowSecondaryReads(), "name", AutogenerateableId.class, String.class); + + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2344, DATAMONGO-2572 + void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForStream() { + + template.stream(new Query().allowSecondaryReads(), AutogenerateableId.class); + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2331 + void updateShouldAllowAggregationExpressions() { + + AggregationUpdate update = AggregationUpdate.update().set("total") + .toValue(ArithmeticOperators.valueOf("val1").sum().and("val2")); + + template.updateFirst(new BasicQuery("{}"), update, Wrapper.class); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo( + Collections.singletonList(Document.parse("{ $set : { total : { $sum : [ \"$val1\",\"$val2\" ] } } }"))); + } + + @Test // 
DATAMONGO-2331 + void updateShouldAllowMultipleAggregationExpressions() { + + AggregationUpdate update = AggregationUpdate.update() // + .set("average").toValue(ArithmeticOperators.valueOf("tests").avg()) // + .set("grade").toValue(ConditionalOperators.switchCases( // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(90)).then("A"), // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(80)).then("B"), // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(70)).then("C"), // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(60)).then("D") // + ) // + .defaultTo("F"));// + + template.updateFirst(new BasicQuery("{}"), update, Wrapper.class); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).containsExactly(Document.parse("{ $set: { average : { $avg: \"$tests\" } } }"), + Document.parse("{ $set: { grade: { $switch: {\n" + " branches: [\n" + + " { case: { $gte: [ \"$average\", 90 ] }, then: \"A\" },\n" + + " { case: { $gte: [ \"$average\", 80 ] }, then: \"B\" },\n" + + " { case: { $gte: [ \"$average\", 70 ] }, then: \"C\" },\n" + + " { case: { $gte: [ \"$average\", 60 ] }, then: \"D\" }\n" + + " ],\n" + " default: \"F\"\n" + " } } } }")); + } + + @Test // DATAMONGO-2331 + void updateShouldMapAggregationExpressionToDomainType() { + + AggregationUpdate update = AggregationUpdate.update().set("name") + .toValue(ArithmeticOperators.valueOf("val1").sum().and("val2")); + + template.updateFirst(new BasicQuery("{}"), update, Jedi.class); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo( + Collections.singletonList(Document.parse("{ $set : { firstname : { $sum:[ 
\"$val1\",\"$val2\" ] } } }"))); + } + + @Test // DATAMONGO-2331 + void updateShouldPassOnUnsetCorrectly() { + + SetOperation setOperation = SetOperation.builder().set("status").toValue("Modified").and().set("comments") + .toValue(Fields.fields("misc1").and("misc2").asList()); + AggregationUpdate update = AggregationUpdate.update(); + update.set(setOperation); + update.unset("misc1", "misc2"); + + template.updateFirst(new BasicQuery("{}"), update, Wrapper.class); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo( + Arrays.asList(Document.parse("{ $set: { status: \"Modified\", comments: [ \"$misc1\", \"$misc2\" ] } }"), + Document.parse("{ $unset: [ \"misc1\", \"misc2\" ] }"))); + } + + @Test // DATAMONGO-2331 + void updateShouldMapAggregationUnsetToDomainType() { + + AggregationUpdate update = AggregationUpdate.update(); + update.unset("name"); + + template.updateFirst(new BasicQuery("{}"), update, Jedi.class); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo(Collections.singletonList(Document.parse("{ $unset : \"firstname\" }"))); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyIfNotPresentInFilter() { + + template.save(new ShardedEntityWithNonDefaultShardKey("id-1", "AT", 4230)); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("country", "AT").append("userid", 4230)); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyToVersionedEntityIfNotPresentInFilter() { + + when(collection.replaceOne(any(), any(), 
any(com.mongodb.client.model.ReplaceOptions.class))) + .thenReturn(UpdateResult.acknowledged(1, 1L, null)); + + template.save(new ShardedVersionedEntityWithNonDefaultShardKey("id-1", 1L, "AT", 4230)); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()) + .isEqualTo(new Document("_id", "id-1").append("version", 1L).append("country", "AT").append("userid", 4230)); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyFromExistingDocumentIfNotPresentInFilter() { + + when(findIterable.first()).thenReturn(new Document("_id", "id-1").append("country", "US").append("userid", 4230)); + + template.save(new ShardedEntityWithNonDefaultShardKey("id-1", "AT", 4230)); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + ArgumentCaptor replacement = ArgumentCaptor.forClass(Document.class); + + verify(collection).replaceOne(filter.capture(), replacement.capture(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("country", "US").append("userid", 4230)); + assertThat(replacement.getValue()).containsEntry("country", "AT").containsEntry("userid", 4230); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyFromGivenDocumentIfShardKeyIsImmutable() { + + template.save(new ShardedEntityWithNonDefaultImmutableShardKey("id-1", "AT", 4230)); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + ArgumentCaptor replacement = ArgumentCaptor.forClass(Document.class); + + verify(collection).replaceOne(filter.capture(), replacement.capture(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("country", "AT").append("userid", 4230)); + assertThat(replacement.getValue()).containsEntry("country", "AT").containsEntry("userid", 4230); + + verifyNoInteractions(findIterable); + } + + @Test // DATAMONGO-2341 + void 
saveShouldAppendDefaultShardKeyIfNotPresentInFilter() { + + template.save(new ShardedEntityWithDefaultShardKey("id-1", "AT", 4230)); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1")); + verify(findIterable, never()).first(); + } + + @Test // GH-3590 + void shouldIncludeValueFromNestedShardKeyPath() { + + WithShardKeyPointingToNested source = new WithShardKeyPointingToNested(); + source.id = "id-1"; + source.value = "v1"; + source.nested = new WithNamedFields(); + source.nested.customName = "cname"; + source.nested.name = "name"; + + template.save(source); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()) + .isEqualTo(new Document("_id", "id-1").append("value", "v1").append("nested.custom-named-field", "cname")); + } + + @Test // DATAMONGO-2341 + void saveShouldProjectOnShardKeyWhenLoadingExistingDocument() { + + when(findIterable.first()).thenReturn(new Document("_id", "id-1").append("country", "US").append("userid", 4230)); + + template.save(new ShardedEntityWithNonDefaultShardKey("id-1", "AT", 4230)); + + verify(findIterable).projection(new Document("country", 1).append("userid", 1)); + } + + @Test // DATAMONGO-2341 + void saveVersionedShouldProjectOnShardKeyWhenLoadingExistingDocument() { + + when(collection.replaceOne(any(), any(), any(com.mongodb.client.model.ReplaceOptions.class))) + .thenReturn(UpdateResult.acknowledged(1, 1L, null)); + when(findIterable.first()).thenReturn(new Document("_id", "id-1").append("country", "US").append("userid", 4230)); + + template.save(new ShardedVersionedEntityWithNonDefaultShardKey("id-1", 1L, "AT", 4230)); + + verify(findIterable).projection(new Document("country", 1).append("userid", 1)); + } + + @Test // DATAMONGO-2479 + void 
findShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(findIterable.iterator()).thenReturn(new OneElementCursor<>(document)); + + template.find(new Query(), Person.class); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findByIdShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(findIterable.first()).thenReturn(document); + + template.findById("init", Person.class); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findOneShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(findIterable.first()).thenReturn(document); + + template.findOne(new Query(), Person.class); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findAllShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", 
"init").append("firstname", "luke"); + when(findIterable.iterator()).thenReturn(new OneElementCursor<>(document)); + + template.findAll(Person.class); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findAndModifyShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndUpdate(any(Bson.class), any(Bson.class), any())).thenReturn(document); + + template.findAndModify(new Query(), new Update(), Person.class); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findAndRemoveShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndDelete(any(Bson.class), any())).thenReturn(document); + + template.findAndRemove(new Query(), Person.class); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findAllAndRemoveShouldInvokeAfterConvertCallback() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(findIterable.iterator()).thenReturn(new OneElementCursor<>(document)); + + template.findAllAndRemove(new Query(), Person.class); + + 
verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + } + + @Test // DATAMONGO-2479 + void findAndReplaceShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + Person entity = new Person("init", "luke"); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndReplace(any(Bson.class), any(Document.class), any())).thenReturn(document); + + Person saved = template.findAndReplace(new Query(), entity); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(saved.id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void saveShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterSaveCallback)); + + Person entity = new Person("init", "luke"); + + Person saved = template.save(entity); + + verify(afterSaveCallback).onAfterSave(eq(entity), any(), anyString()); + assertThat(saved.id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void insertShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterSaveCallback)); + + Person entity = new Person("init", "luke"); + + Person saved = template.insert(entity); + + verify(afterSaveCallback).onAfterSave(eq(entity), any(), anyString()); + assertThat(saved.id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void insertAllShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + 
template.setEntityCallbacks(EntityCallbacks.create(afterSaveCallback)); + + Person entity1 = new Person(); + entity1.id = "1"; + entity1.firstname = "luke"; + + Person entity2 = new Person(); + entity1.id = "2"; + entity1.firstname = "luke"; + + Collection saved = template.insertAll(Arrays.asList(entity1, entity2)); + + verify(afterSaveCallback, times(2)).onAfterSave(any(), any(), anyString()); + assertThat(saved.iterator().next().getId()).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void findAndReplaceShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(EntityCallbacks.create(afterSaveCallback)); + + Person entity = new Person("init", "luke"); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndReplace(any(Bson.class), any(Document.class), any())).thenReturn(document); + + Person saved = template.findAndReplace(new Query(), entity); + + verify(afterSaveCallback).onAfterSave(eq(new Person("init", "luke")), any(), anyString()); + assertThat(saved.id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void findAndReplaceShouldEmitAfterSaveEvent() { + + AbstractMongoEventListener eventListener = new AbstractMongoEventListener() { + + @Override + public void onAfterSave(AfterSaveEvent event) { + + assertThat(event.getSource().id).isEqualTo("init"); + event.getSource().id = "after-save-event"; + } + }; + + StaticApplicationContext ctx = new StaticApplicationContext(); + ctx.registerBean(ApplicationListener.class, () -> eventListener); + ctx.refresh(); + + template.setApplicationContext(ctx); + + Person entity = new Person("init", "luke"); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndReplace(any(Bson.class), any(Document.class), any())).thenReturn(document); + + Person saved = template.findAndReplace(new Query(), 
entity); + + assertThat(saved.id).isEqualTo("after-save-event"); + } + + @Test // DATAMONGO-2556 + void esitmatedCountShouldBeDelegatedCorrectly() { + + template.estimatedCount(Person.class); + + verify(db).getCollection("star-wars", Document.class); + verify(collection).estimatedDocumentCount(any()); + } + + @Test // GH-2911 + void insertErrorsOnCustomIteratorImplementation() { + + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> template.insert(new TypeImplementingIterator())); + } + + @Test // GH-3570 + void saveErrorsOnCollectionLikeObjects() { + + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> template.save(new ArrayList<>(Arrays.asList(1, 2, 3)), "myList")); + } + + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeriesWithDefaults() { + + template.createCollection(TimeSeriesTypeWithDefaults.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("timestamp").toString()); + } + + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeries() { + + template.createCollection(TimeSeriesType.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta") + .granularity(TimeSeriesGranularity.HOURS).toString()); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromString() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsPlainString.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + 
assertThat(options.getValue().getExpireAfter(TimeUnit.MINUTES)) + .isEqualTo(10); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromProperty() { + + environment.setProperty("my.timeout", "12m"); + + template.createCollection(TimeSeriesTypeWithExpireAfterFromProperty.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.MINUTES)) + .isEqualTo(12); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromIso8601String() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsIso8601Style.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.DAYS)) + .isEqualTo(1); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromExpression() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsExpression.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.SECONDS)) + .isEqualTo(11); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromExpressionReturningDuration() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsExpressionResultingInDuration.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.SECONDS)) + .isEqualTo(100); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithInvalidTimeoutExpiration() { + + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> + 
template.createCollection(TimeSeriesTypeWithInvalidExpireAfter.class) + ); + } + + @Test // GH-3522 + void usedCountDocumentsForEmptyQueryByDefault() { + + template.count(new Query(), Human.class); + + verify(collection).countDocuments(any(Document.class), any()); + } + + @Test // GH-3522 + void delegatesToEstimatedCountForEmptyQueryIfEnabled() { + + template.useEstimatedCount(true); + + template.count(new Query(), Human.class); + + verify(collection).estimatedDocumentCount(any()); + } + + @Test // GH-3522 + void stillUsesCountDocumentsForNonEmptyQueryEvenIfEstimationEnabled() { + + template.useEstimatedCount(true); + + template.count(new BasicQuery("{ 'spring' : 'data-mongodb' }"), Human.class); + + verify(collection).countDocuments(any(Document.class), any()); + } + + @Test // GH-4374 + void countConsidersMaxTimeMs() { + + template.count(new BasicQuery("{ 'spring' : 'data-mongodb' }").maxTimeMsec(5000), Human.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(Document.class), options.capture()); + assertThat(options.getValue().getMaxTime(TimeUnit.MILLISECONDS)).isEqualTo(5000); + } + + @Test // GH-4374 + void countPassesOnComment() { + + template.count(new BasicQuery("{ 'spring' : 'data-mongodb' }").comment("rocks!"), Human.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(Document.class), options.capture()); + assertThat(options.getValue().getComment()).isEqualTo(BsonUtils.simpleToBsonValue("rocks!")); + } + + @Test // GH-3984 + void templatePassesOnTimeSeriesOptionsWhenNoTypeGiven() { + + template.createCollection("time-series-collection", CollectionOptions.timeSeries("time_stamp")); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new 
com.mongodb.client.model.TimeSeriesOptions("time_stamp").toString()); + } + + @Test // GH-4300 + void findAndReplaceAllowsDocumentSourceType() { + + template.findAndReplace(new Query(), new Document("spring", "data"), FindAndReplaceOptions.options().upsert(), + Document.class, "coll-1", Person.class); + + verify(db).getCollection(eq("coll-1"), eq(Document.class)); + verify(collection).findOneAndReplace((Bson) any(Bson.class), eq(new Document("spring", "data")), + any(FindOneAndReplaceOptions.class)); + } + + @Test // GH-4462 + void replaceShouldUseCollationWhenPresent() { + + template.replace(new BasicQuery("{}").collation(Collation.of("fr")), new AutogenerateableId()); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(), any(), options.capture()); + + assertThat(options.getValue().isUpsert()).isFalse(); + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // GH-4462 + void replaceShouldNotUpsertByDefault() { + + template.replace(new BasicQuery("{}"), new Sith()); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(), any(), options.capture()); + + assertThat(options.getValue().isUpsert()).isFalse(); + } + + @Test // GH-4462 + void replaceShouldUpsert() { + + template.replace(new BasicQuery("{}"), new Sith(), ReplaceOptions.replaceOptions().upsert()); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(), any(), options.capture()); + + assertThat(options.getValue().isUpsert()).isTrue(); + } + + @Test // GH-4462 + void replaceShouldUseDefaultCollationWhenPresent() { + + template.replace(new BasicQuery("{}"), new Sith(), ReplaceOptions.replaceOptions()); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + 
verify(collection).replaceOne(any(), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("de_AT"); + } + + @Test // GH-4462 + void replaceShouldUseHintIfPresent() { + + template.replace(new BasicQuery("{}").withHint("index-to-use"), new Sith(), + ReplaceOptions.replaceOptions().upsert()); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(), any(), options.capture()); + + assertThat(options.getValue().getHintString()).isEqualTo("index-to-use"); + } + + @Test // GH-4462 + void replaceShouldApplyWriteConcern() { + + template.setWriteConcernResolver(new WriteConcernResolver() { + public WriteConcern resolve(MongoAction action) { + + assertThat(action.getMongoActionOperation()).isEqualTo(MongoActionOperation.REPLACE); + return WriteConcern.UNACKNOWLEDGED; + } + }); + + template.replace(new BasicQuery("{}").withHint("index-to-use"), new Sith(), + ReplaceOptions.replaceOptions().upsert()); + + verify(collection).withWriteConcern(eq(WriteConcern.UNACKNOWLEDGED)); + } + + @Test // GH-4099 + void passOnTimeSeriesExpireOption() { + + template.createCollection("time-series-collection", + CollectionOptions.timeSeries("time_stamp", options -> options.expireAfter(Duration.ofSeconds(10)))); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.SECONDS)).isEqualTo(10); + } + + @Test // GH-4099 + void doNotSetTimeSeriesExpireOptionForNegativeValue() { + + template.createCollection("time-series-collection", + CollectionOptions.timeSeries("time_stamp", options -> options.expireAfter(Duration.ofSeconds(-10)))); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + 
assertThat(options.getValue().getExpireAfter(TimeUnit.SECONDS)).isEqualTo(0L); + } + + + class AutogenerateableId { + + @Id BigInteger id; + } + + class NotAutogenerateableId { + + @Id Integer id; + + public Pattern getId() { + return Pattern.compile("."); + } + } + + static class VersionedEntity { + + @Id Integer id; + @Version Integer version; + + @Field(write = Field.Write.ALWAYS) String name; + } + + enum MyConverter implements Converter { + + INSTANCE; + + public String convert(AutogenerateableId source) { + return source.toString(); + } + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = "star-wars") + static class Person { + + @Id String id; + String firstname; + + public Person() {} + + public Person(String id, String firstname) { + this.id = id; + this.firstname = firstname; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getFirstname() { + return firstname; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname); + } + } + + static class PersonExtended extends Person { + + String lastname; + } + + static class PersonWithTransientAttribute extends Person { + + @Transient boolean isNew = true; + } + + interface PersonProjection { + String getFirstname(); + } + + public interface PersonSpELProjection { + + @Value("#{target.firstname}") + String getName(); + } + + static class Human { + @Id String id; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + } + + static class Jedi { + + @Field("firstname") String name; + + public String 
getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + } + + class Wrapper { + + AutogenerateableId foo; + } + + static class EntityWithListOfSimple { + List grades; + } + + static class WithNamedFields { + + @Id String id; + + String name; + @Field("custom-named-field") String customName; + } + + @org.springframework.data.mongodb.core.mapping.Document(collation = "de_AT") + static class Sith { + + @Field("firstname") String name; + } + + @Sharded(shardKey = { "value", "nested.customName" }) + static class WithShardKeyPointingToNested { + String id; + String value; + WithNamedFields nested; + } + + @TimeSeries(timeField = "timestamp") + static class TimeSeriesTypeWithDefaults { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularity.HOURS) + static class TimeSeriesType { + + String id; + + @Field("time_stamp") Instant timestamp; + Object meta; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "${my.timeout}") + static class TimeSeriesTypeWithExpireAfterFromProperty { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "10m") + static class TimeSeriesTypeWithExpireAfterAsPlainString { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "P1D") + static class TimeSeriesTypeWithExpireAfterAsIso8601Style { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "#{10 + 1 + 's'}") + static class TimeSeriesTypeWithExpireAfterAsExpression { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "#{T(java.time.Duration).ofSeconds(100)}") + static class TimeSeriesTypeWithExpireAfterAsExpressionResultingInDuration { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "123ops") + static class TimeSeriesTypeWithInvalidExpireAfter { + + String id; + 
Instant timestamp; + } + + static class TypeImplementingIterator implements Iterator { + + @Override + public boolean hasNext() { + return false; + } + + @Override + public Object next() { + return null; + } + } + + /** + * Mocks out the {@link MongoTemplate#getDb()} method to return the {@link DB} mock instead of executing the actual + * behaviour. + * + * @return + */ + private MongoTemplate mockOutGetDb() { + + MongoTemplate template = spy(this.template); + when(template.getDb()).thenReturn(db); + return template; + } + + @Override + protected MongoOperations getOperationsForExceptionHandling() { + when(template.getMongoDatabaseFactory().getMongoDatabase()).thenThrow(new MongoException("Error")); + return template; + } + + @Override + protected MongoOperations getOperations() { + return this.template; + } + + private BeforeSaveCallback beforeSaveCallbackReturningNewPersonWithTransientAttribute() { + return (entity, document, collection) -> { + + // Return a completely new instance, ie in case of an immutable entity; + PersonWithTransientAttribute newEntity = new PersonWithTransientAttribute(); + newEntity.id = entity.id; + newEntity.firstname = entity.firstname; + newEntity.isNew = false; + return newEntity; + }; + } + + static class ValueCapturingEntityCallback { + + private final List values = new ArrayList<>(1); + + protected void capture(T value) { + values.add(value); + } + + public List getValues() { + return values; + } + + @Nullable + public T getValue() { + return CollectionUtils.lastElement(values); + } + + } + + static class ValueCapturingBeforeConvertCallback extends ValueCapturingEntityCallback + implements BeforeConvertCallback { + + @Override + public Person onBeforeConvert(Person entity, String collection) { + + capture(entity); + return entity; + } + } + + static class ValueCapturingBeforeSaveCallback extends ValueCapturingEntityCallback + implements BeforeSaveCallback { + + @Override + public Person onBeforeSave(Person entity, Document 
document, String collection) { + + capture(entity); + return entity; + } + } + + static class ValueCapturingAfterSaveCallback extends ValueCapturingEntityCallback + implements AfterSaveCallback { + + @Override + public Person onAfterSave(Person entity, Document document, String collection) { + + capture(entity); + return new Person() { + { + id = "after-save"; + firstname = entity.firstname; + } + }; + } + } + + static class ValueCapturingAfterConvertCallback extends ValueCapturingEntityCallback + implements AfterConvertCallback { + + @Override + public Person onAfterConvert(Person entity, Document document, String collection) { + + capture(entity); + return new Person() { + { + id = "after-convert"; + firstname = entity.firstname; + } + }; + } + } + + static class OneElementCursor implements MongoCursor { + private final Iterator iterator; + + OneElementCursor(T element) { + iterator = Collections.singletonList(element).iterator(); + } + + @Override + public void close() { + // nothing to close + } + + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public T next() { + return iterator.next(); + } + + @Override + public int available() { + return 1; + } + + @Override + public T tryNext() { + if (iterator.hasNext()) { + return iterator.next(); + } else { + return null; + } + } + + @Override + public ServerCursor getServerCursor() { + throw new IllegalStateException("Not implemented"); + } + + @Override + public ServerAddress getServerAddress() { + throw new IllegalStateException("Not implemented"); + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnwrappedTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnwrappedTests.java new file mode 100644 index 0000000000..b8fc2986c2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnwrappedTests.java @@ -0,0 +1,197 @@ +/* + * 
Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.Unwrapped; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.Template; + +/** + * Integration tests for {@link Unwrapped}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MongoTemplateExtension.class) +class MongoTemplateUnwrappedTests { + + private static @Template MongoTemplate template; + + @Test // DATAMONGO-1902 + void readWrite() { + + WithUnwrapped source = new WithUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new UnwrappableType(); + source.embeddableValue.stringValue = "string-val"; + source.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.embeddableValue.atFieldAnnotatedValue = "@Field"; + + template.save(source); + + assertThat(template.findOne(query(where("id").is(source.id)), WithUnwrapped.class)).isEqualTo(source); + } + + @Test // DATAMONGO-1902 + void filterOnUnwrappedValue() { + + WithUnwrapped source = new WithUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new UnwrappableType(); + source.embeddableValue.stringValue = "string-val"; + source.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.embeddableValue.atFieldAnnotatedValue = "@Field"; + + template.save(source); + + assertThat(template.findOne( + Query.query(where("embeddableValue.stringValue").is(source.embeddableValue.stringValue)), WithUnwrapped.class)) + .isEqualTo(source); + } + + @Test // DATAMONGO-1902 + void readWritePrefixed() { + + WithPrefixedUnwrapped source = new WithPrefixedUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new UnwrappableType(); + source.embeddableValue.stringValue = "string-val"; + source.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.embeddableValue.atFieldAnnotatedValue = "@Field"; + + template.save(source); + + assertThat(template.findOne(query(where("id").is(source.id)), WithPrefixedUnwrapped.class)).isEqualTo(source); + } + + @Test // DATAMONGO-1902 + void filterOnPrefixedUnwrappedValue() { + + WithPrefixedUnwrapped source = new WithPrefixedUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new UnwrappableType(); + 
source.embeddableValue.stringValue = "string-val"; + source.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.embeddableValue.atFieldAnnotatedValue = "@Field"; + + template.save(source); + + assertThat( + template.findOne(Query.query(where("embeddableValue.stringValue").is(source.embeddableValue.stringValue)), + WithPrefixedUnwrapped.class)).isEqualTo(source); + } + + static class WithUnwrapped { + + String id; + + @Unwrapped.Nullable UnwrappableType embeddableValue; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithUnwrapped that = (WithUnwrapped) o; + return Objects.equals(id, that.id) && Objects.equals(embeddableValue, that.embeddableValue); + } + + @Override + public int hashCode() { + return Objects.hash(id, embeddableValue); + } + + public String toString() { + return "MongoTemplateUnwrappedTests.WithUnwrapped(id=" + this.id + ", embeddableValue=" + this.embeddableValue + + ")"; + } + } + + static class WithPrefixedUnwrapped { + + String id; + + @Unwrapped.Nullable("prefix-") UnwrappableType embeddableValue; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithPrefixedUnwrapped that = (WithPrefixedUnwrapped) o; + return Objects.equals(id, that.id) && Objects.equals(embeddableValue, that.embeddableValue); + } + + @Override + public int hashCode() { + return Objects.hash(id, embeddableValue); + } + + public String toString() { + return "MongoTemplateUnwrappedTests.WithPrefixedUnwrapped(id=" + this.id + ", embeddableValue=" + + this.embeddableValue + ")"; + } + } + + static class UnwrappableType { + + String stringValue; + List listValue; + + @Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null 
|| getClass() != o.getClass()) { + return false; + } + UnwrappableType that = (UnwrappableType) o; + return Objects.equals(stringValue, that.stringValue) && Objects.equals(listValue, that.listValue) + && Objects.equals(atFieldAnnotatedValue, that.atFieldAnnotatedValue); + } + + @Override + public int hashCode() { + return Objects.hash(stringValue, listValue, atFieldAnnotatedValue); + } + + public String toString() { + return "MongoTemplateUnwrappedTests.UnwrappableType(stringValue=" + this.stringValue + ", listValue=" + + this.listValue + ", atFieldAnnotatedValue=" + this.atFieldAnnotatedValue + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUpdateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUpdateTests.java new file mode 100644 index 0000000000..4249506d77 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUpdateTests.java @@ -0,0 +1,446 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Objects; +import java.util.stream.Stream; + +import com.mongodb.client.result.UpdateResult; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Version; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators; +import org.springframework.data.mongodb.core.aggregation.ReplaceWithOperation; +import org.springframework.data.mongodb.core.aggregation.SetOperation; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.BasicUpdate; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +import com.mongodb.client.MongoCollection; + +/** + * @author Christoph Strobl + */ +@ExtendWith({ MongoTemplateExtension.class }) 
+class MongoTemplateUpdateTests { + + @Template(initialEntitySet = { Score.class, Versioned.class, Book.class }) // + static MongoTestTemplate template; + + @BeforeEach + void setUp() { + template.flush(); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregateUpdateWithSet() { + + Score score1 = new Score(1, "Maya", Arrays.asList(10, 5, 10), Arrays.asList(10, 8), 0); + Score score2 = new Score(2, "Ryan", Arrays.asList(5, 6, 5), Arrays.asList(8, 8), 8); + + template.insertAll(Arrays.asList(score1, score2)); + + AggregationUpdate update = AggregationUpdate.update().set(SetOperation.builder() // + .set("totalHomework").toValueOf(ArithmeticOperators.valueOf("homework").sum()).and() // + .set("totalQuiz").toValueOf(ArithmeticOperators.valueOf("quiz").sum())) // + .set(SetOperation.builder() // + .set("totalScore") + .toValueOf(ArithmeticOperators.valueOf("totalHomework").add("totalQuiz").add("extraCredit"))); + + template.update(Score.class).apply(update).all(); + + assertThat(collection(Score.class).find(new org.bson.Document()).into(new ArrayList<>())).containsExactlyInAnyOrder( // + org.bson.Document.parse( + "{\"_id\" : 1, \"student\" : \"Maya\", \"homework\" : [ 10, 5, 10 ], \"quiz\" : [ 10, 8 ], \"extraCredit\" : 0, \"totalHomework\" : 25, \"totalQuiz\" : 18, \"totalScore\" : 43, \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Score\"}"), + org.bson.Document.parse( + "{ \"_id\" : 2, \"student\" : \"Ryan\", \"homework\" : [ 5, 6, 5 ], \"quiz\" : [ 8, 8 ], \"extraCredit\" : 8, \"totalHomework\" : 16, \"totalQuiz\" : 16, \"totalScore\" : 40, \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Score\"}")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregateUpdateWithSetToValue() { + + Book one = new Book(); + one.id = 1; + one.author = new Author("John", "Backus"); + + template.insertAll(Arrays.asList(one)); 
+ + AggregationUpdate update = AggregationUpdate.update().set("author").toValue(new Author("Ada", "Lovelace")); + + template.update(Book.class).matching(Query.query(Criteria.where("id").is(one.id))).apply(update).all(); + + assertThat(all(Book.class)).containsExactlyInAnyOrder(org.bson.Document.parse( + "{\"_id\" : 1, \"author\" : {\"first\" : \"Ada\", \"last\" : \"Lovelace\"}, \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\"}")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void versionedAggregateUpdateWithSet() { + + Versioned source = template.insert(Versioned.class).one(new Versioned("id-1", "value-0")); + + AggregationUpdate update = AggregationUpdate.update().set("value").toValue("changed"); + template.update(Versioned.class).matching(Query.query(Criteria.where("id").is(source.id))).apply(update).first(); + + assertThat( + collection(Versioned.class).find(new org.bson.Document("_id", source.id)).limit(1).into(new ArrayList<>())) + .containsExactly(new org.bson.Document("_id", source.id).append("version", 1L).append("value", "changed") + .append("_class", "org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Versioned")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void versionedAggregateUpdateTouchingVersionProperty() { + + Versioned source = template.insert(Versioned.class).one(new Versioned("id-1", "value-0")); + + AggregationUpdate update = AggregationUpdate.update() + .set(SetOperation.builder().set("value").toValue("changed").and().set("version").toValue(10L)); + template.update(Versioned.class).matching(Query.query(Criteria.where("id").is(source.id))).apply(update).first(); + + assertThat( + collection(Versioned.class).find(new org.bson.Document("_id", source.id)).limit(1).into(new ArrayList<>())) + .containsExactly(new org.bson.Document("_id", source.id).append("version", 10L).append("value", "changed") + 
.append("_class", "org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Versioned")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregateUpdateWithUnset() { + + Book antelopeAntics = new Book(); + antelopeAntics.id = 1; + antelopeAntics.title = "Antelope Antics"; + antelopeAntics.isbn = "0001122223334"; + antelopeAntics.author = new Author("Auntie", "An"); + antelopeAntics.stock = new ArrayList<>(); + antelopeAntics.stock.add(new Warehouse("A", 5)); + antelopeAntics.stock.add(new Warehouse("B", 15)); + + Book beesBabble = new Book(); + beesBabble.id = 2; + beesBabble.title = "Bees Babble"; + beesBabble.isbn = "999999999333"; + beesBabble.author = new Author("Bee", "Bumble"); + beesBabble.stock = new ArrayList<>(); + beesBabble.stock.add(new Warehouse("A", 2)); + beesBabble.stock.add(new Warehouse("B", 5)); + + template.insertAll(Arrays.asList(antelopeAntics, beesBabble)); + + AggregationUpdate update = AggregationUpdate.update().unset("isbn", "stock"); + template.update(Book.class).apply(update).all(); + + assertThat(all(Book.class)).containsExactlyInAnyOrder( // + org.bson.Document.parse( + "{ \"_id\" : 1, \"title\" : \"Antelope Antics\", \"author\" : { \"last\" : \"An\", \"first\" : \"Auntie\" }, \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\" }"), + org.bson.Document.parse( + "{ \"_id\" : 2, \"title\" : \"Bees Babble\", \"author\" : { \"last\" : \"Bumble\", \"first\" : \"Bee\" }, \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\" }")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregateUpdateWithReplaceWith() { + + Book one = new Book(); + one.id = 1; + one.author = new Author("John", "Backus"); + + Book two = new Book(); + two.id = 2; + two.author = new Author("Grace", "Hopper"); + + template.insertAll(Arrays.asList(one, two)); + + AggregationUpdate update = 
AggregationUpdate.update() + .replaceWith(ReplaceWithOperation.replaceWithValueOf("author")); + + template.update(Book.class).apply(update).all(); + + assertThat(all(Book.class)).containsExactlyInAnyOrder( + org.bson.Document.parse("{\"_id\" : 1, \"first\" : \"John\", \"last\" : \"Backus\"}"), + org.bson.Document.parse("{\"_id\" : 2, \"first\" : \"Grace\", \"last\" : \"Hopper\"}")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregateUpdateWithReplaceWithNewObject() { + + Book one = new Book(); + one.id = 1; + one.author = new Author("John", "Backus"); + + Book two = new Book(); + two.id = 2; + two.author = new Author("Grace", "Hopper"); + + template.insertAll(Arrays.asList(one, two)); + + AggregationUpdate update = AggregationUpdate.update().replaceWith(new Author("Ada", "Lovelace")); + + template.update(Book.class).matching(Query.query(Criteria.where("id").is(one.id))).apply(update).all(); + + assertThat(all(Book.class)).containsExactlyInAnyOrder(org.bson.Document.parse( + "{\"_id\" : 1, \"first\" : \"Ada\", \"last\" : \"Lovelace\", \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Author\"}"), + org.bson.Document.parse( + "{\"_id\" : 2, \"author\" : {\"first\" : \"Grace\", \"last\" : \"Hopper\"}, \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\"}")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregationUpdateUpsertsCorrectly() { + + AggregationUpdate update = AggregationUpdate.update().set("title").toValue("The Burning White"); + + template.update(Book.class).matching(Query.query(Criteria.where("id").is(1))).apply(update).upsert(); + + assertThat(all(Book.class)) + .containsExactly(org.bson.Document.parse("{\"_id\" : 1, \"title\" : \"The Burning White\" }")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void aggregateUpdateFirstMatch() { + + Book 
one = new Book(); + one.id = 1; + one.title = "The Blood Mirror"; + + Book two = new Book(); + two.id = 2; + two.title = "The Broken Eye"; + + template.insertAll(Arrays.asList(one, two)); + + template.update(Book.class).apply(AggregationUpdate.update().set("title").toValue("The Blinding Knife")).first(); + + assertThat(all(Book.class)).containsExactly(org.bson.Document.parse( + "{\"_id\" : 1, \"title\" : \"The Blinding Knife\", \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\"}"), + org.bson.Document.parse( + "{\"_id\" : 2, \"title\" : \"The Broken Eye\", \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\"}")); + } + + @Test // DATAMONGO-2331 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void findAndModifyAppliesAggregationUpdateCorrectly() { + + Book one = new Book(); + one.id = 1; + one.title = "The Blood Mirror"; + + Book two = new Book(); + two.id = 2; + two.title = "The Broken Eye"; + + template.insertAll(Arrays.asList(one, two)); + + Book retrieved = template.update(Book.class).matching(Query.query(Criteria.where("id").is(one.id))) + .apply(AggregationUpdate.update().set("title").toValue("The Blinding Knife")).findAndModifyValue(); + assertThat(retrieved).isEqualTo(one); + + assertThat(all(Book.class)).containsExactly(org.bson.Document.parse( + "{\"_id\" : 1, \"title\" : \"The Blinding Knife\", \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\"}"), + org.bson.Document.parse( + "{\"_id\" : 2, \"title\" : \"The Broken Eye\", \"_class\" : \"org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Book\"}")); + + } + + @Test // DATAMMONGO-2423 + void nullValueShouldBePropagatedToDatabase() { + + Book currentRead = new Book(); + currentRead.id = 1; + currentRead.author = new Author("Brent", "Weeks"); + currentRead.title = "The Burning White"; + + template.save(currentRead); + + template.update(Book.class).apply(new Update().set("title", 
null)).first(); + + assertThat(collection(Book.class).find(new org.bson.Document("_id", currentRead.id)).first()).containsEntry("title", + null); + } + + @ParameterizedTest // GH-4797 + @EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") + @MethodSource("sortedUpdateBookArgs") + void updateFirstWithSort(Class domainType, Sort sort, UpdateDefinition update) { + + Book one = new Book(); + one.id = 1; + one.isbn = "001 001 300"; + one.title = "News isn't fake"; + one.author = new Author("John", "Backus"); + + Book two = new Book(); + two.id = 2; + two.title = "love is love"; + two.isbn = "001 001 100"; + two.author = new Author("Grace", "Hopper"); + + template.insertAll(Arrays.asList(one, two)); + + UpdateResult result = template.update(domainType) // + .inCollection(template.getCollectionName(Book.class))// + .matching(new Query().with(sort)).apply(update) // + .first(); + + assertThat(result.getModifiedCount()).isOne(); + assertThat(collection(Book.class).find(new org.bson.Document("_id", two.id)).first()).containsEntry("title", + "Science is real!"); + } + + @Test // GH-4918 + void updateShouldHonorVersionProvided() { + + Versioned source = template.insert(Versioned.class).one(new Versioned("id-1", "value-0")); + + Update update = new BasicUpdate("{ '$set' : { 'value' : 'changed' }, '$inc' : { 'version' : 10 } }"); + template.update(Versioned.class).matching(Query.query(Criteria.where("id").is(source.id))).apply(update).first(); + + assertThat( + collection(Versioned.class).find(new org.bson.Document("_id", source.id)).limit(1).into(new ArrayList<>())) + .containsExactly(new org.bson.Document("_id", source.id).append("version", 10L).append("value", "changed") + .append("_class", "org.springframework.data.mongodb.core.MongoTemplateUpdateTests$Versioned")); + } + + private List all(Class type) { + return collection(type).find(new org.bson.Document()).into(new ArrayList<>()); + } + + private MongoCollection collection(Class type) { + return 
template.getCollection(template.getCollectionName(type)); + } + + private static Stream sortedUpdateBookArgs() { + + Update update = new Update().set("title", "Science is real!"); + AggregationUpdate aggUpdate = AggregationUpdate.update().set("title").toValue("Science is real!"); + + return Stream.of( // + Arguments.of(Book.class, Sort.by(Direction.ASC, "isbn"), update), // typed, no field mapping + Arguments.of(Book.class, Sort.by(Direction.DESC, "author.lastname"), update), // typed, map `lastname` + Arguments.of(Book.class, Sort.by(Direction.DESC, "author.last"), update), // typed, raw field name + Arguments.of(Object.class, Sort.by(Direction.ASC, "isbn"), update), // untyped, requires raw field name + Arguments.of(Book.class, Sort.by(Direction.ASC, "isbn"), aggUpdate), // aggregation, no field mapping + Arguments.of(Book.class, Sort.by(Direction.DESC, "author.last"), aggUpdate) // aggregation, map `lastname` + ); + } + + @Document("scores") + static class Score { + + Integer id; + String student; + List homework; + List quiz; + Integer extraCredit; + + public Score(Integer id, String student, List homework, List quiz, Integer extraCredit) { + + this.id = id; + this.student = student; + this.homework = homework; + this.quiz = quiz; + this.extraCredit = extraCredit; + } + } + + static class Versioned { + + String id; + @Version Long version; + String value; + + public Versioned(String id, String value) { + this.id = id; + this.value = value; + } + } + + static class Book { + + @Id Integer id; + String title; + String isbn; + Author author; + @Field("copies") Collection stock; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Book book = (Book) o; + return Objects.equals(id, book.id) && Objects.equals(title, book.title) && Objects.equals(isbn, book.isbn) + && Objects.equals(author, book.author) && Objects.equals(stock, book.stock); + } + + @Override + 
public int hashCode() { + return Objects.hash(id, title, isbn, author, stock); + } + } + + static class Author { + + @Field("first") String firstname; + @Field("last") String lastname; + + public Author(String firstname, String lastname) { + this.firstname = firstname; + this.lastname = lastname; + } + } + + static class Warehouse { + + public Warehouse(String location, Integer qty) { + this.location = location; + this.qty = qty; + } + + @Field("warehouse") String location; + Integer qty; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateValidationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateValidationTests.java index cbf428f494..18da8c516d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateValidationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateValidationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,64 +19,76 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.validation.Validator.*; -import lombok.AllArgsConstructor; -import lombok.Data; - +import java.util.Collections; import java.util.List; +import java.util.Objects; +import java.util.Set; import org.bson.Document; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.dao.DataIntegrityViolationException; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; import org.springframework.data.mongodb.core.CollectionOptions.ValidationOptions; +import org.springframework.data.mongodb.core.mapping.Encrypted; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.query.Criteria; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.Version; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; import org.springframework.lang.Nullable; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; 
-import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; import com.mongodb.client.model.ValidationAction; import com.mongodb.client.model.ValidationLevel; /** * Integration tests for {@link CollectionOptions#validation(ValidationOptions)} using - * {@link org.springframework.data.mongodb.core.validation.CriteriaValidator} and - * {@link org.springframework.data.mongodb.core.validation.DocumentValidator}. + * {@link org.springframework.data.mongodb.core.validation.CriteriaValidator}, + * {@link org.springframework.data.mongodb.core.validation.DocumentValidator} and + * {@link org.springframework.data.mongodb.core.validation.JsonSchemaValidator}. * * @author Andreas Zink * @author Christoph Strobl + * @author Julia Lee */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) public class MongoTemplateValidationTests { - public static @ClassRule MongoVersionRule REQUIRES_AT_LEAST_3_2_0 = MongoVersionRule.atLeast(Version.parse("3.2.0")); - static final String COLLECTION_NAME = "validation-1"; + static @Client MongoClient mongoClient; @Configuration - static class Config extends AbstractMongoConfiguration { + static class Config extends AbstractMongoClientConfiguration { @Override public MongoClient mongoClient() { - return new MongoClient(); + return mongoClient; } @Override protected String getDatabaseName() { return "validation-tests"; } + + @Override + protected boolean autoIndexCreation() { + return false; + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } } @Autowired MongoTemplate template; - @Before + @BeforeEach public void setUp() { template.dropCollection(COLLECTION_NAME); } @@ -178,6 +190,20 @@ public void mapsDocumentValidatorFieldsCorrectly() { assertThat(getValidatorInfo(COLLECTION_NAME)).isEqualTo(new Document("customName", new Document("$type", "bool"))); } + @Test // GH-4454 + public void 
failsJsonSchemaValidationForEncryptedDomainEntityProperty() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create().createSchemaFor(BeanWithEncryptedDomainEntity.class); + template.createCollection(COLLECTION_NAME, CollectionOptions.empty().schema(schema)); + + BeanWithEncryptedDomainEntity person = new BeanWithEncryptedDomainEntity(); + person.encryptedDomainEntity = new SimpleBean("some string", 100, null); + + assertThatExceptionOfType(DataIntegrityViolationException.class) + .isThrownBy(() -> template.save(person)) + .withMessageContaining("Document failed validation"); + } + private Document getCollectionOptions(String collectionName) { return getCollectionInfo(collectionName).get("options", Document.class); } @@ -203,13 +229,70 @@ private Document getCollectionInfo(String collectionName) { }); } - @Data - @AllArgsConstructor @org.springframework.data.mongodb.core.mapping.Document(collection = COLLECTION_NAME) static class SimpleBean { private @Nullable String nonNullString; private @Nullable Integer rangedInteger; private @Field("customName") Object customFieldName; + + public SimpleBean(@Nullable String nonNullString, @Nullable Integer rangedInteger, Object customFieldName) { + this.nonNullString = nonNullString; + this.rangedInteger = rangedInteger; + this.customFieldName = customFieldName; + } + + @Nullable + public String getNonNullString() { + return this.nonNullString; + } + + @Nullable + public Integer getRangedInteger() { + return this.rangedInteger; + } + + public Object getCustomFieldName() { + return this.customFieldName; + } + + public void setNonNullString(@Nullable String nonNullString) { + this.nonNullString = nonNullString; + } + + public void setRangedInteger(@Nullable Integer rangedInteger) { + this.rangedInteger = rangedInteger; + } + + public void setCustomFieldName(Object customFieldName) { + this.customFieldName = customFieldName; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o 
== null || getClass() != o.getClass()) { + return false; + } + SimpleBean that = (SimpleBean) o; + return Objects.equals(nonNullString, that.nonNullString) && Objects.equals(rangedInteger, that.rangedInteger) && Objects.equals(customFieldName, that.customFieldName); + } + + @Override + public int hashCode() { + return Objects.hash(nonNullString, rangedInteger, customFieldName); + } + + public String toString() { + return "MongoTemplateValidationTests.SimpleBean(nonNullString=" + this.getNonNullString() + ", rangedInteger=" + this.getRangedInteger() + ", customFieldName=" + this.getCustomFieldName() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = COLLECTION_NAME) + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") + static class BeanWithEncryptedDomainEntity { + @Encrypted SimpleBean encryptedDomainEntity; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateViewTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateViewTests.java new file mode 100644 index 0000000000..15fe90a34a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateViewTests.java @@ -0,0 +1,211 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.CollectionInfo; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; + +import com.mongodb.client.MongoClient; + +/** + * Integration tests for Views. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +public class MongoTemplateViewTests { + + static @Client MongoClient client; + static final String DB_NAME = "mongo-template-view-tests"; + + private MongoTemplate template; + + Student alex = new Student(22001L, "Alex", 1, 4.0D); + Student bernie = new Student(21001L, "bernie", 2, 3.7D); + Student chris = new Student(20010L, "Chris", 3, 2.5D); + Student drew = new Student(22021L, "Drew", 1, 3.2D); + Student harley1 = new Student(17301L, "harley", 6, 3.1D); + Student farmer = new Student(21022L, "Farmer", 1, 2.2D); + Student george = new Student(20020L, "george", 3, 2.8D); + Student harley2 = new Student(18020, "Harley", 5, 2.8D); + + List students = Arrays.asList(alex, bernie, chris, drew, harley1, farmer, george, harley2); + + @BeforeEach + void beforeEach() { + template = new MongoTemplate(client, DB_NAME); + } + + @AfterEach + void afterEach() { + client.getDatabase(DB_NAME).drop(); + } + + @Test // GH-2594 + void createsViewFromPipeline() { + + template.insertAll(students); + + template.createView("firstYears", Student.class, match(where("year").is(1))); + + CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "firstYears"); + assertThat(collectionInfo.isView()).isTrue(); + assertThat(collectionInfo.getViewTarget()).isEqualTo("student"); + assertThat(collectionInfo.getViewPipeline()).containsExactly(new Document("$match", new Document("year", 1))); + } + + @Test // GH-2594 + void mapsPipelineAgainstDomainObject() { + + template.insertAll(students); + + template.createView("fakeStudents", Student.class, match(where("studentID").gte("22"))); + + CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "fakeStudents"); + assertThat(collectionInfo.isView()).isTrue(); + assertThat(collectionInfo.getViewPipeline()) + .containsExactly(new Document("$match", new Document("sID", new Document("$gte", "22")))); + 
} + + @Test // GH-2594 + void takesPipelineAsIsIfNoTypeDefined() { + + template.insertAll(students); + + template.createView("fakeStudents", "student", AggregationPipeline.of(match(where("studentID").gte("22"))), + ViewOptions.none()); + + CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "fakeStudents"); + assertThat(collectionInfo.isView()).isTrue(); + assertThat(collectionInfo.getViewPipeline()) + .containsExactly(new Document("$match", new Document("studentID", new Document("$gte", "22")))); + } + + @Test // GH-2594 + void readsFromView() { + + template.insertAll(students); + client.getDatabase(DB_NAME).createView("firstYears", "student", + Arrays.asList(new Document("$match", new Document("year", 1)))); + + assertThat(template.query(Student.class).inCollection("firstYears").all()).containsExactlyInAnyOrder(alex, drew, + farmer); + } + + @Test // GH-2594 + void appliesCollationToView() { + + template.insertAll(students); + + template.createView("firstYears", Student.class, AggregationPipeline.of(match(where("year").is(1))), + new ViewOptions().collation(Collation.of("en_US"))); + + CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "firstYears"); + assertThat(collectionInfo.isView()).isTrue(); + assertThat(collectionInfo.getCollation().getLocale()).isEqualTo("en_US"); + } + + private static class Student { + + @Field("sID") Long studentID; + + int year; + + double score; + + String name; + + public Student(long studentID, String name, int year, double score) { + this.studentID = studentID; + this.name = name; + this.year = year; + this.score = score; + } + + public Long getStudentID() { + return this.studentID; + } + + public int getYear() { + return this.year; + } + + public double getScore() { + return this.score; + } + + public String getName() { + return this.name; + } + + public void setStudentID(Long studentID) { + this.studentID = studentID; + } + + public void 
setYear(int year) { + this.year = year; + } + + public void setScore(double score) { + this.score = score; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Student student = (Student) o; + return year == student.year && Double.compare(student.score, score) == 0 + && Objects.equals(studentID, student.studentID) && Objects.equals(name, student.name); + } + + @Override + public int hashCode() { + return Objects.hash(studentID, year, score, name); + } + + public String toString() { + return "MongoTemplateViewTests.Student(studentID=" + this.getStudentID() + ", year=" + this.getYear() + ", score=" + + this.getScore() + ", name=" + this.getName() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/NoExplicitIdTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/NoExplicitIdTests.java index 5c026f683a..8604fd960d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/NoExplicitIdTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/NoExplicitIdTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,26 +15,31 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.core.Is.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; +import java.util.Collections; import java.util.Map; import java.util.Optional; +import java.util.Set; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Configuration; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; +import org.springframework.context.annotation.FilterType; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; import org.springframework.data.mongodb.repository.MongoRepository; import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * Integration tests for DATAMONGO-1289. 
@@ -42,13 +47,15 @@ * @author Christoph Strobl * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) @ContextConfiguration public class NoExplicitIdTests { + static @Client MongoClient mongoClient; + @Configuration - @EnableMongoRepositories(considerNestedRepositories = true) - static class Config extends AbstractMongoConfiguration { + @EnableMongoRepositories(considerNestedRepositories = true, includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = TypeWithoutExplicitIdPropertyRepository.class)) + static class Config extends AbstractMongoClientConfiguration { @Override protected String getDatabaseName() { @@ -57,14 +64,24 @@ protected String getDatabaseName() { @Override public MongoClient mongoClient() { - return new MongoClient(); + return mongoClient; + } + + @Override + protected boolean autoIndexCreation() { + return false; + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); } } @Autowired MongoOperations mongoOps; @Autowired TypeWithoutExplicitIdPropertyRepository repo; - @Before + @BeforeEach public void setUp() { mongoOps.dropCollection(TypeWithoutIdProperty.class); } @@ -80,7 +97,7 @@ public void saveAndRetrieveTypeWithoutIdPropertyViaTemplate() { TypeWithoutIdProperty retrieved = mongoOps.findOne(query(where("someString").is(noid.someString)), TypeWithoutIdProperty.class); - assertThat(retrieved.someString, is(noid.someString)); + assertThat(retrieved.someString).isEqualTo(noid.someString); } @Test // DATAMONGO-1289 @@ -92,7 +109,7 @@ public void saveAndRetrieveTypeWithoutIdPropertyViaRepository() { repo.save(noid); TypeWithoutIdProperty retrieved = repo.findBySomeString(noid.someString); - assertThat(retrieved.someString, is(noid.someString)); + assertThat(retrieved.someString).isEqualTo(noid.someString); } @Test // DATAMONGO-1289 @@ -108,7 +125,7 @@ public void 
saveAndRetrieveTypeWithoutIdPropertyViaRepositoryFindOne() { "typeWithoutIdProperty"); Optional retrieved = repo.findById(map.get("_id").toString()); - assertThat(retrieved.get().someString, is(noid.someString)); + assertThat(retrieved.get().someString).isEqualTo(noid.someString); } static class TypeWithoutIdProperty { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Person.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Person.java index 7dec8c06fc..bc126e05f0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Person.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Person.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,10 +16,11 @@ package org.springframework.data.mongodb.core; import org.bson.types.ObjectId; +import org.springframework.lang.Nullable; public class Person { - private final ObjectId id; + private ObjectId id; private String firstName; @@ -89,13 +90,8 @@ public boolean isActive() { return active; } - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (obj == this) { return true; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExample.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExample.java index 
3cd0046c99..cacd564056 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExample.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExample.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,8 +17,9 @@ import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.support.AbstractApplicationContext; @@ -29,12 +30,13 @@ */ public class PersonExample { - private static final Logger LOGGER = LoggerFactory.getLogger(PersonExample.class); + private static final Log LOGGER = LogFactory.getLog(PersonExample.class); @Autowired private MongoOperations mongoOps; public static void main(String[] args) { - AbstractApplicationContext applicationContext = new AnnotationConfigApplicationContext(PersonExampleAppConfig.class); + AbstractApplicationContext applicationContext = new AnnotationConfigApplicationContext( + PersonExampleAppConfig.class); PersonExample example = applicationContext.getBean(PersonExample.class); example.doWork(); applicationContext.close(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExampleAppConfig.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExampleAppConfig.java index 6c60b22ff5..31afdb91b6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExampleAppConfig.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonExampleAppConfig.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,15 +17,16 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.test.util.MongoTestUtils; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; @Configuration public class PersonExampleAppConfig { @Bean public MongoClient mongoClient() { - return new MongoClient("localhost"); + return MongoTestUtils.client(); } @Bean diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithAList.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithAList.java index 84b3767377..cb347aa3e7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithAList.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithAList.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveInt.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveInt.java index 0ddcaa51e9..dd397643ec 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveInt.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveInt.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveLong.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveLong.java index 07c3762702..0cc26b2419 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveLong.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfPrimitiveLong.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeBigInteger.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeBigInteger.java index f853ab6a7f..36c8ea56ed 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeBigInteger.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeBigInteger.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeInteger.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeInteger.java index 254e984306..bfdb86b9af 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeInteger.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeInteger.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeLong.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeLong.java index 5623f2239c..f8eb961f27 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeLong.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeLong.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeObjectId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeObjectId.java index 8c46c010c5..47caf5db27 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeObjectId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeObjectId.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeString.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeString.java index b006d910e3..fa5bb5d8ea 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeString.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeString.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeUUID.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeUUID.java index 1096fb2198..f9db1e4d04 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeUUID.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithIdPropertyOfTypeUUID.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,58 @@ */ package org.springframework.data.mongodb.core; -import lombok.Data; - +import java.util.Objects; import java.util.UUID; -@Data public class PersonWithIdPropertyOfTypeUUID { private UUID id; private String firstName; private int age; + + public UUID getId() { + return this.id; + } + + public String getFirstName() { + return this.firstName; + } + + public int getAge() { + return this.age; + } + + public void setId(UUID id) { + this.id = id; + } + + public void setFirstName(String firstName) { + this.firstName = firstName; + } + + public void setAge(int age) { + this.age = age; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + PersonWithIdPropertyOfTypeUUID that = (PersonWithIdPropertyOfTypeUUID) o; + return age == that.age && Objects.equals(id, that.id) && Objects.equals(firstName, that.firstName); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstName, age); + } + + public String toString() { + return "PersonWithIdPropertyOfTypeUUID(id=" + this.getId() + ", firstName=" + this.getFirstName() + ", age=" + + this.getAge() + ")"; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeInteger.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeInteger.java index 0fddccba80..132f0830d4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeInteger.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeInteger.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,8 +23,7 @@ public class PersonWithVersionPropertyOfTypeInteger { String firstName; int age; - @Version - Integer version; + @Version Integer version; @Override public String toString() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeLong.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeLong.java index dfc7d309bf..41ef862dd0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeLong.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWithVersionPropertyOfTypeLong.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -27,7 +27,7 @@ public class PersonWithVersionPropertyOfTypeLong { @Override public String toString() { - return "PersonWithVersionPropertyOfTypeInteger [id=" + id + ", firstName=" + firstName + ", age=" + age - + ", version=" + version + "]"; + return "PersonWithVersionPropertyOfTypeLong [id=" + id + ", firstName=" + firstName + ", age=" + age + ", version=" + + version + "]"; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeObjectId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeObjectId.java index 1d070b965d..8f2ca7d9ff 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeObjectId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeObjectId.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeString.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeString.java index a9f3b52b02..43bfe53dc0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeString.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWith_idPropertyOfTypeString.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWriteConverter.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWriteConverter.java index 08c2f3babb..5017a6947e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWriteConverter.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/PersonWriteConverter.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Portfolio.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Portfolio.java index c640db1dfb..22ba43f9be 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Portfolio.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Portfolio.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryByExampleTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryByExampleTests.java index 41dd5765fa..52ee79aa1f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryByExampleTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryByExampleTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,14 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import lombok.EqualsAndHashCode; -import lombok.ToString; - -import java.net.UnknownHostException; import java.util.List; +import java.util.Objects; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.annotation.Id; import org.springframework.data.domain.Example; import org.springframework.data.domain.ExampleMatcher; @@ -34,8 +31,9 @@ import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.UntypedExampleMatcher; - -import com.mongodb.MongoClient; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; /** * Integration tests for Query-by-example. 
@@ -44,16 +42,18 @@ * @author Mark Paluch * @author Oliver Gierke */ +@ExtendWith(MongoTemplateExtension.class) public class QueryByExampleTests { - MongoOperations operations; + @Template(initialEntitySet = Person.class) // + static MongoTestTemplate operations; + Person p1, p2, p3; - @Before - public void setUp() throws UnknownHostException { + @BeforeEach + public void setUp() { - operations = new MongoTemplate(new MongoClient(), "query-by-example"); - operations.remove(new Query(), Person.class); + operations.flush(); p1 = new Person(); p1.firstname = "bran"; @@ -82,8 +82,7 @@ public void findByExampleShouldWorkForSimpleProperty() { Query query = new Query(new Criteria().alike(Example.of(sample))); List result = operations.find(query, Person.class); - assertThat(result, hasSize(2)); - assertThat(result, hasItems(p1, p3)); + assertThat(result).containsExactlyInAnyOrder(p1, p3); } @Test // DATAMONGO-1245 @@ -96,8 +95,7 @@ public void findByExampleShouldWorkForMultipleProperties() { Query query = new Query(new Criteria().alike(Example.of(sample))); List result = operations.find(query, Person.class); - assertThat(result, hasSize(1)); - assertThat(result, hasItem(p3)); + assertThat(result).containsExactly(p3); } @Test // DATAMONGO-1245 @@ -112,8 +110,7 @@ public void findByExampleShouldWorkForIdProperty() { Query query = new Query(new Criteria().alike(Example.of(sample))); List result = operations.find(query, Person.class); - assertThat(result, hasSize(1)); - assertThat(result, hasItem(p4)); + assertThat(result).containsExactly(p4); } @Test // DATAMONGO-1245 @@ -126,7 +123,7 @@ public void findByExampleShouldReturnEmptyListIfNotMatching() { Query query = new Query(new Criteria().alike(Example.of(sample))); List result = operations.find(query, Person.class); - assertThat(result, is(empty())); + assertThat(result).isEmpty(); } @Test // DATAMONGO-1245 @@ -137,20 +134,17 @@ public void findByExampleShouldReturnEverythingWhenSampleIsEmpty() { Query query = new Query(new 
Criteria().alike(Example.of(sample))); List result = operations.find(query, Person.class); - assertThat(result, hasSize(3)); - assertThat(result, hasItems(p1, p2, p3)); + assertThat(result).containsExactlyInAnyOrder(p1, p2, p3); } - @Test // DATAMONGO-1245 + @Test // DATAMONGO-1245, GH-3544 public void findByExampleWithCriteria() { Person sample = new Person(); sample.lastname = "stark"; - Query query = new Query(new Criteria().alike(Example.of(sample)).and("firstname").regex("^ary*")); - - List result = operations.find(query, Person.class); - assertThat(result.size(), is(1)); + Query query = new Query(new Criteria().alike(Example.of(sample)).and("firstname").regex(".*n.*")); + assertThat(operations.find(query, Person.class)).containsExactly(p1); } @Test // DATAMONGO-1459 @@ -163,8 +157,7 @@ public void findsExampleUsingAnyMatch() { Query query = Query.query(Criteria.byExample(Example.of(probe, ExampleMatcher.matchingAny()))); List result = operations.find(query, Person.class); - assertThat(result, hasSize(2)); - assertThat(result, hasItems(p1, p2)); + assertThat(result).containsExactlyInAnyOrder(p1, p2); } @Test // DATAMONGO-1768 @@ -176,7 +169,7 @@ public void typedExampleMatchesNothingIfTypesDoNotMatch() { Query query = new Query(new Criteria().alike(Example.of(probe))); List result = operations.find(query, Person.class); - assertThat(result, hasSize(0)); + assertThat(result).isEmpty(); } @Test // DATAMONGO-1768 @@ -189,8 +182,7 @@ public void exampleIgnoringClassTypeKeyMatchesCorrectly() { new Criteria().alike(Example.of(probe, ExampleMatcher.matching().withIgnorePaths("_class")))); List result = operations.find(query, Person.class); - assertThat(result, hasSize(2)); - assertThat(result, hasItems(p1, p3)); + assertThat(result).containsExactlyInAnyOrder(p1, p3); } @Test // DATAMONGO-1768 @@ -202,25 +194,115 @@ public void untypedExampleMatchesCorrectly() { Query query = new Query(new Criteria().alike(Example.of(probe, UntypedExampleMatcher.matching()))); List 
result = operations.find(query, Person.class); - assertThat(result, hasSize(2)); - assertThat(result, hasItems(p1, p3)); + assertThat(result).containsExactlyInAnyOrder(p1, p3); + } + + @Test // DATAMONGO-2314 + public void alikeShouldWorkOnNestedProperties() { + + PersonWrapper source1 = new PersonWrapper(); + source1.id = "with-child-doc-1"; + source1.child = p1; + + PersonWrapper source2 = new PersonWrapper(); + source2.id = "with-child-doc-2"; + source2.child = p2; + + operations.save(source1); + operations.save(source2); + + Query query = new Query( + new Criteria("child").alike(Example.of(p1, ExampleMatcher.matching().withIgnorePaths("_class")))); + List result = operations.find(query, PersonWrapper.class); + + assertThat(result).containsExactly(source1); } @Document("dramatis-personae") - @EqualsAndHashCode - @ToString static class Person { @Id String id; String firstname, middlename; @Field("last_name") String lastname; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(middlename, person.middlename) && Objects.equals(lastname, person.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, middlename, lastname); + } + + public String toString() { + return "QueryByExampleTests.Person(id=" + this.id + ", firstname=" + this.firstname + ", middlename=" + + this.middlename + ", lastname=" + this.lastname + ")"; + } } - @EqualsAndHashCode - @ToString static class NotAPersonButStillMatchingFields { String firstname, middlename; @Field("last_name") String lastname; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + NotAPersonButStillMatchingFields that = 
(NotAPersonButStillMatchingFields) o; + return Objects.equals(firstname, that.firstname) && Objects.equals(middlename, that.middlename) + && Objects.equals(lastname, that.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(firstname, middlename, lastname); + } + + public String toString() { + return "QueryByExampleTests.NotAPersonButStillMatchingFields(firstname=" + this.firstname + ", middlename=" + + this.middlename + ", lastname=" + this.lastname + ")"; + } + } + + @Document("dramatis-personae") + static class PersonWrapper { + + @Id String id; + Person child; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + PersonWrapper that = (PersonWrapper) o; + return Objects.equals(id, that.id) && Objects.equals(child, that.child); + } + + @Override + public int hashCode() { + return Objects.hash(id, child); + } + + public String toString() { + return "QueryByExampleTests.PersonWrapper(id=" + this.id + ", child=" + this.child + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java index 9ab53dbed8..8c1ef8348b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,25 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.core.IsEqual.*; -import static org.junit.Assert.*; import static org.mockito.Mockito.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import java.util.concurrent.TimeUnit; - import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.ArgumentCaptor; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.data.mongodb.MongoDbFactory; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.MongoTemplate.QueryCursorPreparer; -import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.BasicQuery; -import org.springframework.data.mongodb.core.query.Meta; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Query; +import com.mongodb.MongoClientSettings; import com.mongodb.client.FindIterable; /** @@ -45,98 +42,77 @@ * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Anton Barkan */ -@RunWith(MockitoJUnitRunner.class) -public class QueryCursorPreparerUnitTests { +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = 
Strictness.LENIENT) +class QueryCursorPreparerUnitTests { - @Mock MongoDbFactory factory; + @Mock MongoDatabaseFactory factory; @Mock MongoExceptionTranslator exceptionTranslatorMock; @Mock FindIterable cursor; - @Mock FindIterable cursorToUse; - - @Before - public void setUp() { + @BeforeEach + void setUp() { when(factory.getExceptionTranslator()).thenReturn(exceptionTranslatorMock); - when(cursor.modifiers(any(Document.class))).thenReturn(cursor); + when(factory.getCodecRegistry()).thenReturn(MongoClientSettings.getDefaultCodecRegistry()); + when(cursor.batchSize(anyInt())).thenReturn(cursor); + when(cursor.comment(anyString())).thenReturn(cursor); + when(cursor.allowDiskUse(anyBoolean())).thenReturn(cursor); + when(cursor.maxTime(anyLong(), any())).thenReturn(cursor); + when(cursor.hint(any())).thenReturn(cursor); when(cursor.noCursorTimeout(anyBoolean())).thenReturn(cursor); when(cursor.collation(any())).thenReturn(cursor); } @Test // DATAMONGO-185 - public void appliesHintsCorrectly() { - - Query query = query(where("foo").is("bar")).withHint("hint"); + void appliesHintsCorrectly() { + Query query = query(where("foo").is("bar")).withHint("{ age: 1 }"); prepare(query); - ArgumentCaptor captor = ArgumentCaptor.forClass(Document.class); - verify(cursor).modifiers(captor.capture()); - assertThat(captor.getValue(), equalTo(new Document("$hint", "hint"))); + verify(cursor).hint(new Document("age", 1)); } - @Test // DATAMONGO-957 - public void doesNotApplyMetaWhenEmpty() { - - Query query = query(where("foo").is("bar")); - query.setMeta(new Meta()); + @Test // DATAMONGO-2365 + void appliesIndexNameAsHintCorrectly() { + Query query = query(where("foo").is("bar")).withHint("idx-1"); prepare(query); - verify(cursorToUse, never()).modifiers(any(Document.class)); + verify(cursor).hintString("idx-1"); } - @Test // DATAMONGO-957 - public void appliesMaxScanCorrectly() { - - Query query = query(where("foo").is("bar")).maxScan(100); + @Test // DATAMONGO-2319 + void 
appliesDocumentHintsCorrectly() { + Query query = query(where("foo").is("bar")).withHint(Document.parse("{ age: 1 }")); prepare(query); - ArgumentCaptor captor = ArgumentCaptor.forClass(Document.class); - verify(cursor).modifiers(captor.capture()); - assertThat(captor.getValue(), equalTo(new Document("$maxScan", 100L))); + verify(cursor).hint(new Document("age", 1)); } @Test // DATAMONGO-957 - public void appliesMaxTimeCorrectly() { - - Query query = query(where("foo").is("bar")).maxTime(1, TimeUnit.SECONDS); - - prepare(query); - - ArgumentCaptor captor = ArgumentCaptor.forClass(Document.class); - verify(cursor).modifiers(captor.capture()); - assertThat(captor.getValue(), equalTo(new Document("$maxTimeMS", 1000L))); - } - - @Test // DATAMONGO-957 - public void appliesCommentCorrectly() { + void appliesCommentCorrectly() { Query query = query(where("foo").is("bar")).comment("spring data"); - prepare(query); - ArgumentCaptor captor = ArgumentCaptor.forClass(Document.class); - verify(cursor).modifiers(captor.capture()); - assertThat(captor.getValue(), equalTo(new Document("$comment", "spring data"))); + verify(cursor).comment("spring data"); } - @Test // DATAMONGO-957 - public void appliesSnapshotCorrectly() { - - Query query = query(where("foo").is("bar")).useSnapshot(); + @Test // DATAMONGO-2659 + void appliesAllowDiskUseCorrectly() { + Query query = query(where("foo").is("bar")).allowDiskUse(true); prepare(query); - ArgumentCaptor captor = ArgumentCaptor.forClass(Document.class); - verify(cursor).modifiers(captor.capture()); - assertThat(captor.getValue(), equalTo(new Document("$snapshot", true))); + verify(cursor).allowDiskUse(true); } @Test // DATAMONGO-1480 - public void appliesNoCursorTimeoutCorrectly() { + void appliesNoCursorTimeoutCorrectly() { Query query = query(where("foo").is("bar")).noCursorTimeout(); @@ -146,13 +122,21 @@ public void appliesNoCursorTimeoutCorrectly() { } @Test // DATAMONGO-1518 - public void appliesCollationCorrectly() { + void 
appliesCollationCorrectly() { prepare(new BasicQuery("{}").collation(Collation.of("fr"))); verify(cursor).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); } + @Test // DATAMONGO-1311 + void appliesBatchSizeCorrectly() { + + prepare(new BasicQuery("{}").cursorBatchSize(100)); + + verify(cursor).batchSize(100); + } + private FindIterable prepare(Query query) { CursorPreparer preparer = new MongoTemplate(factory).new QueryCursorPreparer(query, null); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryOperationsUnitTests.java new file mode 100644 index 0000000000..4e103c17be --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryOperationsUnitTests.java @@ -0,0 +1,229 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.FieldLookupPolicy; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.test.util.ReflectionTestUtils; + +/** + * Unit tests for {@link QueryOperations}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +class QueryOperationsUnitTests { + + static final AggregationOptions NO_MAPPING = AggregationOptions.builder().noMapping().build(); + static final AggregationOptions STRICT_MAPPING = AggregationOptions.builder().strictMapping().build(); + + @Mock QueryMapper queryMapper; + @Mock UpdateMapper updateMapper; + @Mock EntityOperations entityOperations; + @Mock PropertyOperations propertyOperations; + @Mock MongoDatabaseFactory mongoDbFactory; + @Mock MongoMappingContext mappingContext; + + QueryOperations queryOperations; + + @BeforeEach + void beforeEach() { + + when(queryMapper.getMappingContext()).thenReturn((MappingContext) mappingContext); + + queryOperations = new QueryOperations(queryMapper, updateMapper, entityOperations, propertyOperations, + mongoDbFactory); + } + + @Test // GH-3542 + void createAggregationContextUsesRelaxedOneForUntypedAggregationsWhenNoInputTypeProvided() { + + Aggregation aggregation = Aggregation.newAggregation(Aggregation.project("name")); + AggregationDefinition def = queryOperations.createAggregation(aggregation, (Class) null); + TypeBasedAggregationOperationContext ctx = (TypeBasedAggregationOperationContext) def + .getAggregationOperationContext(); + + assertThat(ReflectionTestUtils.getField(ctx, "lookupPolicy")).isEqualTo(FieldLookupPolicy.relaxed()); + } + + @Test // GH-3542 + void createAggregationContextUsesRelaxedOneForTypedAggregationsWhenNoInputTypeProvided() { + + Aggregation aggregation = Aggregation.newAggregation(Person.class, Aggregation.project("name")); + AggregationDefinition def = queryOperations.createAggregation(aggregation, Person.class); + TypeBasedAggregationOperationContext ctx = (TypeBasedAggregationOperationContext) def + .getAggregationOperationContext(); + + assertThat(ReflectionTestUtils.getField(ctx, "lookupPolicy")).isEqualTo(FieldLookupPolicy.relaxed()); + } + + @Test // GH-3542 + void 
createAggregationContextUsesRelaxedOneForUntypedAggregationsWhenInputTypeProvided() { + + Aggregation aggregation = Aggregation.newAggregation(Aggregation.project("name")); + AggregationDefinition def = queryOperations.createAggregation(aggregation, Person.class); + TypeBasedAggregationOperationContext ctx = (TypeBasedAggregationOperationContext) def + .getAggregationOperationContext(); + + assertThat(ReflectionTestUtils.getField(ctx, "lookupPolicy")).isEqualTo(FieldLookupPolicy.relaxed()); + } + + @Test // GH-3542 + void createAggregationContextUsesDefaultIfNoMappingDesired() { + + Aggregation aggregation = Aggregation.newAggregation(Aggregation.project("name")).withOptions(NO_MAPPING); + AggregationDefinition ctx = queryOperations.createAggregation(aggregation, Person.class); + + assertThat(ctx.getAggregationOperationContext()).isEqualTo(Aggregation.DEFAULT_CONTEXT); + } + + @Test // GH-3542 + void createAggregationContextUsesStrictlyTypedContextForTypedAggregationsWhenRequested() { + + Aggregation aggregation = Aggregation.newAggregation(Person.class, Aggregation.project("name")) + .withOptions(STRICT_MAPPING); + AggregationDefinition ctx = queryOperations.createAggregation(aggregation, (Class) null); + + assertThat(ctx.getAggregationOperationContext()).isInstanceOf(TypeBasedAggregationOperationContext.class); + } + + @Test // GH-4026 + void insertContextDoesNotAddIdIfNoPersistentEntityCanBeFound() { + + assertThat(queryOperations.createInsertContext(new Document("value", "one")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + assertThat(result).isEqualTo(new Document("value", "one")); + }); + } + + @Test // GH-4026 + void insertContextDoesNotAddIdIfNoIdPropertyCanBeFound() { + + MongoPersistentEntity entity = mock(MongoPersistentEntity.class); + when(entity.getIdProperty()).thenReturn(null); + when(mappingContext.getPersistentEntity(eq(Person.class))).thenReturn((MongoPersistentEntity) entity); + + 
assertThat(queryOperations.createInsertContext(new Document("value", "one")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + assertThat(result).isEqualTo(new Document("value", "one")); + }); + } + + @Test // GH-4026 + void insertContextDoesNotAddConvertedIdForNonExplicitFieldTypes() { + + MongoPersistentEntity entity = mock(MongoPersistentEntity.class); + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + when(entity.getIdProperty()).thenReturn(property); + when(property.hasExplicitWriteTarget()).thenReturn(false); + doReturn(entity).when(mappingContext).getPersistentEntity(eq(Person.class)); + + assertThat(queryOperations.createInsertContext(new Document("value", "one")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + assertThat(result).isEqualTo(new Document("value", "one")); + }); + } + + @Test // GH-4026 + void insertContextAddsConvertedIdForExplicitFieldTypes() { + + MongoPersistentEntity entity = mock(MongoPersistentEntity.class); + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + when(entity.getIdProperty()).thenReturn(property); + when(property.hasExplicitWriteTarget()).thenReturn(true); + doReturn(String.class).when(property).getFieldType(); + doReturn(entity).when(mappingContext).getPersistentEntity(eq(Person.class)); + + when(queryMapper.convertId(any(), eq(String.class))).thenReturn("☮"); + + assertThat(queryOperations.createInsertContext(new Document("value", "one")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + assertThat(result).isEqualTo(new Document("value", "one").append("_id", "☮")); + }); + } + + @Test // GH-4026 + void insertContextAddsConvertedIdForMongoIdTypes() { + + MongoPersistentEntity entity = mock(MongoPersistentEntity.class); + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + when(entity.getIdProperty()).thenReturn(property); + when(property.hasExplicitWriteTarget()).thenReturn(false); + 
when(property.isAnnotationPresent(eq(MongoId.class))).thenReturn(true); + doReturn(String.class).when(property).getFieldType(); + doReturn(entity).when(mappingContext).getPersistentEntity(eq(Person.class)); + + when(queryMapper.convertId(any(), eq(String.class))).thenReturn("☮"); + + assertThat(queryOperations.createInsertContext(new Document("value", "one")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + assertThat(result).isEqualTo(new Document("value", "one").append("_id", "☮")); + }); + } + + @Test // GH-4026 + void insertContextDoesNotAddConvertedIdForMongoIdTypesTargetingObjectId() { + + MongoPersistentEntity entity = mock(MongoPersistentEntity.class); + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + when(entity.getIdProperty()).thenReturn(property); + when(property.hasExplicitWriteTarget()).thenReturn(false); + when(property.isAnnotationPresent(eq(MongoId.class))).thenReturn(true); + doReturn(ObjectId.class).when(property).getFieldType(); + doReturn(entity).when(mappingContext).getPersistentEntity(eq(Person.class)); + + assertThat(queryOperations.createInsertContext(new Document("value", "one")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + assertThat(result).isEqualTo(new Document("value", "one")); + }); + } + + @Test // GH-4184 + void insertContextDoesNotOverrideExistingId() { + + assertThat(queryOperations.createInsertContext(new Document("_id", "abc")).prepareId(Person.class).getDocument())// + .satisfies(result -> { + assertThat(result).isEqualTo(new Document("_id", "abc")); + }); + } + + static class Person { + + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupportUnitTests.java index 16d7fb96af..9d4ed339b5 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupportUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupportUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,17 +16,17 @@ package org.springframework.data.mongodb.core; import static org.assertj.core.api.Assertions.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.data.mongodb.core.aggregation.Aggregation; /** @@ -34,39 +34,40 @@ * * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class ReactiveAggregationOperationSupportUnitTests { @Mock ReactiveMongoTemplate template; - ReactiveAggregationOperationSupport opSupport; + private ReactiveAggregationOperationSupport opSupport; - @Before - public void setUp() { + @BeforeEach + 
void setUp() { opSupport = new ReactiveAggregationOperationSupport(template); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1719 - public void throwsExceptionOnNullDomainType() { - opSupport.aggregateAndReturn(null); + @Test // DATAMONGO-1719 + void throwsExceptionOnNullDomainType() { + assertThatIllegalArgumentException().isThrownBy(() -> opSupport.aggregateAndReturn(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1719 - public void throwsExceptionOnNullCollectionWhenUsed() { - opSupport.aggregateAndReturn(Person.class).inCollection(null); + @Test // DATAMONGO-1719 + void throwsExceptionOnNullCollectionWhenUsed() { + assertThatIllegalArgumentException() + .isThrownBy(() -> opSupport.aggregateAndReturn(Person.class).inCollection(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1719 - public void throwsExceptionOnEmptyCollectionWhenUsed() { - opSupport.aggregateAndReturn(Person.class).inCollection(""); + @Test // DATAMONGO-1719 + void throwsExceptionOnEmptyCollectionWhenUsed() { + assertThatIllegalArgumentException().isThrownBy(() -> opSupport.aggregateAndReturn(Person.class).inCollection("")); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1719 - public void throwsExceptionOnNullAggregation() { - opSupport.aggregateAndReturn(Person.class).by(null); + @Test // DATAMONGO-1719 + void throwsExceptionOnNullAggregation() { + assertThatIllegalArgumentException().isThrownBy(() -> opSupport.aggregateAndReturn(Person.class).by(null)); } @Test // DATAMONGO-1719 - public void aggregateWithUntypedAggregationAndExplicitCollection() { + void aggregateWithUntypedAggregationAndExplicitCollection() { opSupport.aggregateAndReturn(Person.class).inCollection("star-wars").by(newAggregation(project("foo"))).all(); @@ -76,30 +77,30 @@ public void aggregateWithUntypedAggregationAndExplicitCollection() { } @Test // DATAMONGO-1719 - public void aggregateWithUntypedAggregation() { + void 
aggregateWithUntypedAggregation() { - when(template.determineCollectionName(any(Class.class))).thenReturn("person"); + when(template.getCollectionName(any(Class.class))).thenReturn("person"); opSupport.aggregateAndReturn(Person.class).by(newAggregation(project("foo"))).all(); ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); - verify(template).determineCollectionName(captor.capture()); + verify(template).getCollectionName(captor.capture()); verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture()); assertThat(captor.getAllValues()).containsExactly(Person.class, Person.class); } @Test // DATAMONGO-1719 - public void aggregateWithTypeAggregation() { + void aggregateWithTypeAggregation() { - when(template.determineCollectionName(any(Class.class))).thenReturn("person"); + when(template.getCollectionName(any(Class.class))).thenReturn("person"); opSupport.aggregateAndReturn(Jedi.class).by(newAggregation(Person.class, project("foo"))).all(); ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); - verify(template).determineCollectionName(captor.capture()); + verify(template).getCollectionName(captor.capture()); verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture()); assertThat(captor.getAllValues()).containsExactly(Person.class, Jedi.class); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupportTests.java new file mode 100644 index 0000000000..23d1d03b43 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupportTests.java @@ -0,0 +1,169 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import reactor.core.Disposable; +import reactor.core.publisher.Flux; +import reactor.test.StepVerifier; + +import java.time.Duration; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.data.mongodb.test.util.ReplSetClient; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Tests for {@link ReactiveChangeStreamOperation}. 
+ * + * @author Christoph Strobl + * @currentRead Dawn Cook - The Decoy Princess + */ +@ExtendWith(MongoClientExtension.class) +@EnableIfReplicaSetAvailable +public class ReactiveChangeStreamOperationSupportTests { + + static final String DATABASE_NAME = "rx-change-stream"; + static @ReplSetClient MongoClient mongoClient; + + ReactiveMongoTemplate template; + + @BeforeEach + public void setUp() { + + template = new ReactiveMongoTemplate(mongoClient, DATABASE_NAME); + + MongoTestUtils.createOrReplaceCollectionNow(DATABASE_NAME, "person", mongoClient); + } + + @AfterEach + public void tearDown() { + MongoTestUtils.dropCollectionNow(DATABASE_NAME, "person", mongoClient); + } + + @Test // DATAMONGO-2089 + public void changeStreamEventsShouldBeEmittedCorrectly() throws InterruptedException { + + BlockingQueue> documents = new LinkedBlockingQueue<>(100); + + Disposable disposable = template.changeStream(Document.class) // + .watchCollection("person") // + .listen() // + .doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection. 
+ + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 39); + Person person3 = new Person("MongoDB", 37); + + Flux.merge(template.insert(person1).delayElement(Duration.ofMillis(2)), + template.insert(person2).delayElement(Duration.ofMillis(2)), + template.insert(person3).delayElement(Duration.ofMillis(2))) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to link receive all events + + try { + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())).hasSize(3) + .allMatch(Document.class::isInstance); + } finally { + disposable.dispose(); + } + } + + @Test // DATAMONGO-1803 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + public void changeStreamEventsShouldBeConvertedCorrectly() throws InterruptedException { + + BlockingQueue> documents = new LinkedBlockingQueue<>(100); + + Disposable disposable = template.changeStream(Person.class).listen() // + .doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection. 
+ + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 39); + Person person3 = new Person("MongoDB", 37); + + Flux.merge(template.insert(person1).delayElement(Duration.ofMillis(2)), + template.insert(person2).delayElement(Duration.ofMillis(2)), + template.insert(person3).delayElement(Duration.ofMillis(2))) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to link receive all events + + try { + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())).containsOnly(person1, + person2, person3); + } finally { + disposable.dispose(); + } + } + + @Test // DATAMONGO-1803 + public void changeStreamEventsShouldBeFilteredCorrectly() throws InterruptedException { + + BlockingQueue> documents = new LinkedBlockingQueue<>(100); + + Disposable disposable = template.changeStream(Person.class) // + .watchCollection(Person.class) // + .filter(where("age").gte(38)) // + .listen() // + .doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection. 
+ + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 37); + Person person3 = new Person("MongoDB", 39); + + Flux.merge(template.save(person1), template.save(person2).delayElement(Duration.ofMillis(50)), + template.save(person3).delayElement(Duration.ofMillis(100))) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to link receive all events + + try { + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())).containsOnly(person1, + person3); + } finally { + disposable.dispose(); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupportUnitTests.java new file mode 100644 index 0000000000..46838d6da9 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupportUnitTests.java @@ -0,0 +1,165 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import reactor.core.publisher.Flux; + +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.query.Criteria; + +/** + * Unit tests for {@link ReactiveChangeStreamOperationSupport}. + * + * @author Christoph Strobl + * @currentRead Dawn Cook - The Decoy Princess + */ +@ExtendWith(MockitoExtension.class) +class ReactiveChangeStreamOperationSupportUnitTests { + + @Mock ReactiveMongoTemplate template; + private ReactiveChangeStreamOperationSupport changeStreamSupport; + + @BeforeEach + void setUp() { + when(template.changeStream(any(), any(), any())).thenReturn(Flux.empty()); + changeStreamSupport = new ReactiveChangeStreamOperationSupport(template); + } + + @Test // DATAMONGO-2089 + void listenWithoutDomainTypeUsesDocumentAsDefault() { + + changeStreamSupport.changeStream(Document.class).listen().subscribe(); + + verify(template).changeStream(isNull(), eq(ChangeStreamOptions.empty()), eq(Document.class)); + } + + @Test // DATAMONGO-2089 + void listenWithDomainTypeUsesSourceAsTarget() { + + changeStreamSupport.changeStream(Person.class).listen().subscribe(); + + verify(template).changeStream(isNull(), eq(ChangeStreamOptions.empty()), eq(Person.class)); + } + + @Test // DATAMONGO-2089 + void 
collectionNameIsPassedOnCorrectly() { + + changeStreamSupport.changeStream(Person.class).watchCollection("star-wars").listen().subscribe(); + + verify(template).changeStream(eq("star-wars"), eq(ChangeStreamOptions.empty()), eq(Person.class)); + } + + @Test // DATAMONGO-2089 + void listenWithDomainTypeCreatesTypedAggregation() { + + Criteria criteria = where("operationType").is("insert"); + changeStreamSupport.changeStream(Person.class).filter(criteria).listen().subscribe(); + + ArgumentCaptor optionsArgumentCaptor = ArgumentCaptor.forClass(ChangeStreamOptions.class); + verify(template).changeStream(isNull(), optionsArgumentCaptor.capture(), eq(Person.class)); + + assertThat(optionsArgumentCaptor.getValue().getFilter()).hasValueSatisfying(it -> { + + assertThat(it).isInstanceOf(TypedAggregation.class); + TypedAggregation aggregation = (TypedAggregation) it; + + assertThat(aggregation.getInputType()).isEqualTo(Person.class); + assertThat(extractPipeline(aggregation)) + .containsExactly(new Document("$match", new Document("operationType", "insert"))); + }); + } + + @Test // DATAMONGO-2089 + void listenWithoutDomainTypeCreatesUntypedAggregation() { + + Criteria criteria = where("operationType").is("insert"); + changeStreamSupport.changeStream(Document.class).filter(criteria).listen().subscribe(); + + ArgumentCaptor optionsArgumentCaptor = ArgumentCaptor.forClass(ChangeStreamOptions.class); + verify(template).changeStream(isNull(), optionsArgumentCaptor.capture(), eq(Document.class)); + + assertThat(optionsArgumentCaptor.getValue().getFilter()).hasValueSatisfying(it -> { + + assertThat(it).isInstanceOf(Aggregation.class); + assertThat(it).isNotInstanceOf(TypedAggregation.class); + + Aggregation aggregation = (Aggregation) it; + + assertThat(extractPipeline(aggregation)) + .containsExactly(new Document("$match", new Document("operationType", "insert"))); + }); + } + + @Test // DATAMONGO-2089 + void optionsShouldBePassedOnCorrectly() { + + Document filter = new 
Document("$match", new Document("operationType", "insert")); + + changeStreamSupport.changeStream(Document.class).withOptions(options -> { + options.filter(filter); + }).listen().subscribe(); + + ArgumentCaptor optionsArgumentCaptor = ArgumentCaptor.forClass(ChangeStreamOptions.class); + verify(template).changeStream(isNull(), optionsArgumentCaptor.capture(), eq(Document.class)); + + assertThat(optionsArgumentCaptor.getValue()).satisfies(it -> { + assertThat(it.getFilter().get()).isEqualTo(Collections.singletonList(filter)); + }); + } + + @Test // DATAMONGO-2089 + void optionsShouldBeCombinedCorrectly() { + + Document filter = new Document("$match", new Document("operationType", "insert")); + Instant resumeTimestamp = Instant.now(); + + changeStreamSupport.changeStream(Document.class).withOptions(options -> { + options.filter(filter); + }).resumeAt(resumeTimestamp).listen().subscribe(); + + ArgumentCaptor optionsArgumentCaptor = ArgumentCaptor.forClass(ChangeStreamOptions.class); + verify(template).changeStream(isNull(), optionsArgumentCaptor.capture(), eq(Document.class)); + + assertThat(optionsArgumentCaptor.getValue()).satisfies(it -> { + + assertThat(it.getFilter().get()).isEqualTo(Collections.singletonList(filter)); + assertThat(it.getResumeTimestamp()).contains(resumeTimestamp); + }); + } + + private static List extractPipeline(Aggregation aggregation) { + return aggregation.toDocument("person", Aggregation.DEFAULT_CONTEXT).get("pipeline", ArrayList.class); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveClientSessionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveClientSessionTests.java new file mode 100644 index 0000000000..9c49a3a743 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveClientSessionTests.java @@ -0,0 +1,190 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * @author Christoph Strobl + * @author Mark Paluch + * @currentRead Beyond the Shadows - Brent Weeks + */ +@ExtendWith(MongoClientExtension.class) +@EnableIfReplicaSetAvailable +public class ReactiveClientSessionTests { + + static final String DATABASE_NAME = 
"reflective-client-session-tests"; + static final String COLLECTION_NAME = "test"; + + static @Client MongoClient client; + + ReactiveMongoTemplate template; + + @BeforeEach + public void setUp() { + + template = new ReactiveMongoTemplate(client, DATABASE_NAME); + + MongoTestUtils.createOrReplaceCollection(DATABASE_NAME, COLLECTION_NAME, client) // + .as(StepVerifier::create) // + .verifyComplete(); + + template.insert(new Document("_id", "id-1").append("value", "spring"), COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-1880 + public void shouldApplyClientSession() { + + ClientSession session = Mono + .from(client.startSession(ClientSessionOptions.builder().causallyConsistent(true).build())).block(); + + assertThat(session.getOperationTime()).isNull(); + + template.withSession(() -> session) // + .execute(action -> action.findAll(Document.class, COLLECTION_NAME)) // + .as(StepVerifier::create) // + .expectNextCount(1).verifyComplete(); + + assertThat(session.getOperationTime()).isNotNull(); + assertThat(session.getServerSession().isClosed()).isFalse(); + + session.close(); + } + + @Test // DATAMONGO-1880 + public void useMonoInCallback() { + + ClientSession session = Mono + .from(client.startSession(ClientSessionOptions.builder().causallyConsistent(true).build())).block(); + + assertThat(session.getOperationTime()).isNull(); + + template.withSession(() -> session).execute(action -> action.findOne(new Query(), Document.class, COLLECTION_NAME)) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(session.getOperationTime()).isNotNull(); + assertThat(session.getServerSession().isClosed()).isFalse(); + + session.close(); + } + + @Test // DATAMONGO-1880 + public void reusesClientSessionInSessionScopedCallback() { + + ClientSession session = Mono + .from(client.startSession(ClientSessionOptions.builder().causallyConsistent(true).build())).block(); + 
CountingSessionSupplier sessionSupplier = new CountingSessionSupplier(session); + + ReactiveSessionScoped sessionScoped = template.withSession(sessionSupplier); + + sessionScoped.execute(action -> action.findOne(new Query(), Document.class, COLLECTION_NAME)).blockFirst(); + assertThat(sessionSupplier.getInvocationCount()).isEqualTo(1); + + sessionScoped.execute(action -> action.findOne(new Query(), Document.class, COLLECTION_NAME)).blockFirst(); + assertThat(sessionSupplier.getInvocationCount()).isEqualTo(1); + } + + @Test // DATAMONGO-1970 + public void addsClientSessionToContext() { + + template.withSession(client.startSession(ClientSessionOptions.builder().causallyConsistent(true).build())) + .execute(action -> ReactiveMongoContext.getSession()) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-2001 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + public void countInTransactionShouldReturnCount() { + + ClientSession session = Mono + .from(client.startSession(ClientSessionOptions.builder().causallyConsistent(true).build())).block(); + + template.withSession(() -> session).execute(action -> { + + session.startTransaction(); + + return action.insert(new Document("_id", "id-2").append("value", "in transaction"), COLLECTION_NAME) // + .then(action.count(query(where("value").is("in transaction")), Document.class, COLLECTION_NAME)) // + .flatMap(it -> Mono.from(session.commitTransaction()).then(Mono.just(it))); + + }).as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + + template.withSession(() -> session).execute(action -> { + + session.startTransaction(); + + return action.insert(new Document("value", "in transaction"), COLLECTION_NAME) // + .then(action.count(query(where("value").is("foo")), Document.class, COLLECTION_NAME)) // + .flatMap(it -> Mono.from(session.commitTransaction()).then(Mono.just(it))); + + }).as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); 
+ } + + static class CountingSessionSupplier implements Supplier { + + AtomicInteger invocationCount = new AtomicInteger(0); + final ClientSession session; + + public CountingSessionSupplier(ClientSession session) { + this.session = session; + } + + @Override + public ClientSession get() { + + invocationCount.incrementAndGet(); + return session; + } + + int getInvocationCount() { + return invocationCount.get(); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupportTests.java index a3218392b8..f23e973202 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupportTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupportTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,20 +18,25 @@ import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.test.util.DirtiesStateExtension.*; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; +import reactor.core.Disposable; +import reactor.core.publisher.Flux; import reactor.test.StepVerifier; import java.util.Date; +import java.util.Objects; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import org.bson.BsonString; import org.bson.BsonValue; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Value; import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.dao.InvalidDataAccessApiUsageException; @@ -41,31 +46,57 @@ import org.springframework.data.mongodb.core.index.GeospatialIndex; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension; +import org.springframework.data.mongodb.test.util.MongoClientExtension; -import 
com.mongodb.MongoClient; -import com.mongodb.reactivestreams.client.MongoClients; +import com.mongodb.client.MongoClient; /** * Integration tests for {@link ReactiveFindOperationSupport}. * * @author Mark Paluch * @author Christoph Strobl + * @author Juergen Zimmermann */ -public class ReactiveFindOperationSupportTests { +@ExtendWith({ MongoClientExtension.class, DirtiesStateExtension.class }) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class ReactiveFindOperationSupportTests implements StateFunctions { private static final String STAR_WARS = "star-wars"; - MongoTemplate blocking; - ReactiveMongoTemplate template; + private MongoTemplate blocking; + private ReactiveMongoTemplate template; - Person han; - Person luke; + private static @Client MongoClient client; + private static @Client com.mongodb.reactivestreams.client.MongoClient reactiveClient; - @Before - public void setUp() { + private Person han; + private Person luke; - blocking = new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "ExecutableFindOperationSupportTests")); - blocking.dropCollection(STAR_WARS); + void setUp() { + blocking = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, "ExecutableFindOperationSupportTests")); + template = new ReactiveMongoTemplate(reactiveClient, "ExecutableFindOperationSupportTests"); + } + + @Override + public void clear() { + if (blocking == null) { + setUp(); + } + recreateCollection(STAR_WARS, false); + } + + @Override + public void setupState() { + if (blocking == null) { + setUp(); + } + insertObjects(); + } + + void insertObjects() { han = new Person(); han.firstname = "han"; @@ -79,88 +110,108 @@ public void setUp() { blocking.save(han); blocking.save(luke); + } + + void recreateCollection(String collectionName, boolean capped) { + + blocking.dropCollection(STAR_WARS); + + CollectionOptions options = CollectionOptions.empty(); + if (capped) { + options = options.capped().size(1024 * 1024); + } - template = new 
ReactiveMongoTemplate(MongoClients.create(), "ExecutableFindOperationSupportTests"); + blocking.createCollection(STAR_WARS, options); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1719 - public void domainTypeIsRequired() { - template.query(null); + @Test // DATAMONGO-1719 + void domainTypeIsRequired() { + assertThatIllegalArgumentException().isThrownBy(() -> template.query(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1719 - public void returnTypeIsRequiredOnSet() { - template.query(Person.class).as(null); + @Test // DATAMONGO-1719 + void returnTypeIsRequiredOnSet() { + assertThatIllegalArgumentException().isThrownBy(() -> template.query(Person.class).as(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1719 - public void collectionIsRequiredOnSet() { - template.query(Person.class).inCollection(null); + @Test // DATAMONGO-1719 + void collectionIsRequiredOnSet() { + assertThatIllegalArgumentException().isThrownBy(() -> template.query(Person.class).inCollection(null)); } @Test // DATAMONGO-1719 - public void findAll() { + void findAll() { - StepVerifier.create(template.query(Person.class).all().collectList()).consumeNextWith(actual -> { + template.query(Person.class).all().collectList().as(StepVerifier::create).consumeNextWith(actual -> { assertThat(actual).containsExactlyInAnyOrder(han, luke); }).verifyComplete(); } @Test // DATAMONGO-1719 - public void findAllWithCollection() { - StepVerifier.create(template.query(Human.class).inCollection(STAR_WARS).all()).expectNextCount(2).verifyComplete(); + void findAllWithCollection() { + template.query(Human.class).inCollection(STAR_WARS).all().as(StepVerifier::create).expectNextCount(2) + .verifyComplete(); + } + + @Test // DATAMONGO-2323 + void findAllAsDocumentDocument() { + template.query(Document.class).inCollection(STAR_WARS).all().as(StepVerifier::create).expectNextCount(2) + .verifyComplete(); } @Test // DATAMONGO-1719 - public void 
findAllWithProjection() { + void findAllWithProjection() { - StepVerifier.create(template.query(Person.class).as(Jedi.class).all().map(it -> it.getClass().getName())) // + template.query(Person.class).as(Jedi.class).all().map(it -> it.getClass().getName()).as(StepVerifier::create) // .expectNext(Jedi.class.getName(), Jedi.class.getName()) // .verifyComplete(); } @Test // DATAMONGO-1719 - public void findAllBy() { + void findAllBy() { + + template.query(Person.class).matching(query(where("firstname").is("luke"))).all().as(StepVerifier::create) // + .expectNext(luke) // + .verifyComplete(); + } + + @Test // DATAMONGO-2416 + void findAllByCriteria() { - StepVerifier.create(template.query(Person.class).matching(query(where("firstname").is("luke"))).all()) // + template.query(Person.class).matching(where("firstname").is("luke")).all().as(StepVerifier::create) // .expectNext(luke) // .verifyComplete(); } @Test // DATAMONGO-1719 - public void findAllByWithCollectionUsingMappingInformation() { + void findAllByWithCollectionUsingMappingInformation() { - StepVerifier - .create(template.query(Jedi.class).inCollection(STAR_WARS).matching(query(where("name").is("luke"))).all()) - .consumeNextWith(it -> assertThat(it).isInstanceOf(Jedi.class)) // + template.query(Jedi.class).inCollection(STAR_WARS).matching(query(where("name").is("luke"))).all() + .as(StepVerifier::create).consumeNextWith(it -> assertThat(it).isInstanceOf(Jedi.class)) // .verifyComplete(); } @Test // DATAMONGO-1719 - public void findAllByWithCollection() { + void findAllByWithCollection() { - StepVerifier - .create( - template.query(Human.class).inCollection(STAR_WARS).matching(query(where("firstname").is("luke"))).all()) - .expectNextCount(1) // + template.query(Human.class).inCollection(STAR_WARS).matching(query(where("firstname").is("luke"))).all() + .as(StepVerifier::create).expectNextCount(1) // .verifyComplete(); } @Test // DATAMONGO-1719 - public void findAllByWithProjection() { + void 
findAllByWithProjection() { - StepVerifier - .create(template.query(Person.class).as(Jedi.class).matching(query(where("firstname").is("luke"))).all()) - .consumeNextWith(it -> assertThat(it).isInstanceOf(Jedi.class)) // + template.query(Person.class).as(Jedi.class).matching(query(where("firstname").is("luke"))).all() + .as(StepVerifier::create).consumeNextWith(it -> assertThat(it).isInstanceOf(Jedi.class)) // .verifyComplete(); } @Test // DATAMONGO-1719 - public void findAllByWithClosedInterfaceProjection() { + void findAllByWithClosedInterfaceProjection() { - StepVerifier.create( - template.query(Person.class).as(PersonProjection.class).matching(query(where("firstname").is("luke"))).all()) - .consumeNextWith(it -> { + template.query(Person.class).as(PersonProjection.class).matching(query(where("firstname").is("luke"))).all() + .as(StepVerifier::create).consumeNextWith(it -> { assertThat(it).isInstanceOf(PersonProjection.class); assertThat(it.getFirstname()).isEqualTo("luke"); @@ -169,10 +220,10 @@ public void findAllByWithClosedInterfaceProjection() { } @Test // DATAMONGO-1719 - public void findAllByWithOpenInterfaceProjection() { + void findAllByWithOpenInterfaceProjection() { - StepVerifier.create(template.query(Person.class).as(PersonSpELProjection.class) - .matching(query(where("firstname").is("luke"))).all()).consumeNextWith(it -> { + template.query(Person.class).as(PersonSpELProjection.class).matching(query(where("firstname").is("luke"))).all() + .as(StepVerifier::create).consumeNextWith(it -> { assertThat(it).isInstanceOf(PersonSpELProjection.class); assertThat(it.getName()).isEqualTo("luke"); @@ -181,30 +232,31 @@ public void findAllByWithOpenInterfaceProjection() { } @Test // DATAMONGO-1719 - public void findBy() { + void findBy() { - StepVerifier.create(template.query(Person.class).matching(query(where("firstname").is("luke"))).one()) + template.query(Person.class).matching(query(where("firstname").is("luke"))).one().as(StepVerifier::create) 
.expectNext(luke) // .verifyComplete(); } @Test // DATAMONGO-1719 - public void findByNoMatch() { + void findByNoMatch() { - StepVerifier.create(template.query(Person.class).matching(query(where("firstname").is("spock"))).one()) + template.query(Person.class).matching(query(where("firstname").is("spock"))).one().as(StepVerifier::create) .verifyComplete(); } @Test // DATAMONGO-1719 - public void findByTooManyResults() { + void findByTooManyResults() { - StepVerifier.create(template.query(Person.class).matching(query(where("firstname").in("han", "luke"))).one()) + template.query(Person.class).matching(query(where("firstname").in("han", "luke"))).one().as(StepVerifier::create) .expectError(IncorrectResultSizeDataAccessException.class) // .verify(); } @Test // DATAMONGO-1719 - public void findAllNearBy() { + @DirtiesState + void findAllNearBy() { blocking.indexOps(Planet.class).ensureIndex( new GeospatialIndex("coordinates").typed(GeoSpatialIndexType.GEO_2DSPHERE).named("planet-coordinate-idx")); @@ -215,7 +267,7 @@ public void findAllNearBy() { blocking.save(alderan); blocking.save(dantooine); - StepVerifier.create(template.query(Planet.class).near(NearQuery.near(-73.9667, 40.78).spherical(true)).all()) + template.query(Planet.class).near(NearQuery.near(-73.9667, 40.78).spherical(true)).all().as(StepVerifier::create) .consumeNextWith(actual -> { assertThat(actual.getDistance()).isNotNull(); }) // @@ -224,7 +276,8 @@ public void findAllNearBy() { } @Test // DATAMONGO-1719 - public void findAllNearByWithCollectionAndProjection() { + @DirtiesState + void findAllNearByWithCollectionAndProjection() { blocking.indexOps(Planet.class).ensureIndex( new GeospatialIndex("coordinates").typed(GeoSpatialIndexType.GEO_2DSPHERE).named("planet-coordinate-idx")); @@ -235,8 +288,9 @@ public void findAllNearByWithCollectionAndProjection() { blocking.save(alderan); blocking.save(dantooine); - StepVerifier.create(template.query(Object.class).inCollection(STAR_WARS).as(Human.class) - 
.near(NearQuery.near(-73.9667, 40.78).spherical(true)).all()).consumeNextWith(actual -> { + template.query(Object.class).inCollection(STAR_WARS).as(Human.class) + .near(NearQuery.near(-73.9667, 40.78).spherical(true)).all().as(StepVerifier::create) + .consumeNextWith(actual -> { assertThat(actual.getDistance()).isNotNull(); assertThat(actual.getContent()).isInstanceOf(Human.class); assertThat(actual.getContent().getId()).isEqualTo("alderan"); @@ -246,7 +300,8 @@ public void findAllNearByWithCollectionAndProjection() { } @Test // DATAMONGO-1719 - public void findAllNearByReturningGeoResultContentAsClosedInterfaceProjection() { + @DirtiesState + void findAllNearByReturningGeoResultContentAsClosedInterfaceProjection() { blocking.indexOps(Planet.class).ensureIndex( new GeospatialIndex("coordinates").typed(GeoSpatialIndexType.GEO_2DSPHERE).named("planet-coordinate-idx")); @@ -257,8 +312,8 @@ public void findAllNearByReturningGeoResultContentAsClosedInterfaceProjection() blocking.save(alderan); blocking.save(dantooine); - StepVerifier.create(template.query(Planet.class).as(PlanetProjection.class) - .near(NearQuery.near(-73.9667, 40.78).spherical(true)).all()).consumeNextWith(it -> { + template.query(Planet.class).as(PlanetProjection.class).near(NearQuery.near(-73.9667, 40.78).spherical(true)).all() + .as(StepVerifier::create).consumeNextWith(it -> { assertThat(it.getDistance()).isNotNull(); assertThat(it.getContent()).isInstanceOf(PlanetProjection.class); @@ -269,7 +324,8 @@ public void findAllNearByReturningGeoResultContentAsClosedInterfaceProjection() } @Test // DATAMONGO-1719 - public void findAllNearByReturningGeoResultContentAsOpenInterfaceProjection() { + @DirtiesState + void findAllNearByReturningGeoResultContentAsOpenInterfaceProjection() { blocking.indexOps(Planet.class).ensureIndex( new GeospatialIndex("coordinates").typed(GeoSpatialIndexType.GEO_2DSPHERE).named("planet-coordinate-idx")); @@ -280,8 +336,8 @@ public void 
findAllNearByReturningGeoResultContentAsOpenInterfaceProjection() { blocking.save(alderan); blocking.save(dantooine); - StepVerifier.create(template.query(Planet.class).as(PlanetSpELProjection.class) - .near(NearQuery.near(-73.9667, 40.78).spherical(true)).all()).consumeNextWith(it -> { + template.query(Planet.class).as(PlanetSpELProjection.class).near(NearQuery.near(-73.9667, 40.78).spherical(true)) + .all().as(StepVerifier::create).consumeNextWith(it -> { assertThat(it.getDistance()).isNotNull(); assertThat(it.getContent()).isInstanceOf(PlanetSpELProjection.class); @@ -291,64 +347,143 @@ public void findAllNearByReturningGeoResultContentAsOpenInterfaceProjection() { .verifyComplete(); } + @Test // DATAMONGO-2080 + @ProvidesState + void tail() throws InterruptedException { + + recreateCollection(STAR_WARS, true); + insertObjects(); + + BlockingQueue collector = new LinkedBlockingQueue<>(); + Flux tail = template.query(Person.class) + .matching(query(new Criteria().orOperator(where("firstname").is("chewbacca"), where("firstname").is("luke")))) + .tail().doOnNext(collector::add); + + Disposable subscription = tail.subscribe(); + + assertThat(collector.poll(1, TimeUnit.SECONDS)).isEqualTo(luke); + assertThat(collector).isEmpty(); + + Person chewbacca = new Person(); + chewbacca.firstname = "chewbacca"; + chewbacca.lastname = "chewie"; + chewbacca.id = "id-3"; + + blocking.save(chewbacca); + + assertThat(collector.poll(1, TimeUnit.SECONDS)).isEqualTo(chewbacca); + + subscription.dispose(); + } + + @Test // DATAMONGO-2080 + @ProvidesState + void tailWithProjection() { + + recreateCollection(STAR_WARS, true); + insertObjects(); + + template.query(Person.class).as(Jedi.class).matching(query(where("firstname").is("luke"))).tail() + .as(StepVerifier::create) // + .consumeNextWith(it -> assertThat(it).isInstanceOf(Jedi.class)) // + .thenCancel() // + .verify(); + } + + @Test // DATAMONGO-2080 + @ProvidesState + void tailWithClosedInterfaceProjection() { + + 
recreateCollection(STAR_WARS, true); + insertObjects(); + + template.query(Person.class).as(PersonProjection.class).matching(query(where("firstname").is("luke"))).tail() + .as(StepVerifier::create) // + .consumeNextWith(it -> { + + assertThat(it).isInstanceOf(PersonProjection.class); + assertThat(it.getFirstname()).isEqualTo("luke"); + }) // + .thenCancel() // + .verify(); + } + + @Test // DATAMONGO-2080 + @ProvidesState + void tailWithOpenInterfaceProjection() { + + recreateCollection(STAR_WARS, true); + insertObjects(); + + template.query(Person.class).as(PersonSpELProjection.class).matching(query(where("firstname").is("luke"))).tail() + .as(StepVerifier::create) // + .consumeNextWith(it -> { + + assertThat(it).isInstanceOf(PersonSpELProjection.class); + assertThat(it.getName()).isEqualTo("luke"); + }) // + .thenCancel() // + .verify(); + } + @Test // DATAMONGO-1719 - public void firstShouldReturnFirstEntryInCollection() { - StepVerifier.create(template.query(Person.class).first()).expectNextCount(1).verifyComplete(); + void firstShouldReturnFirstEntryInCollection() { + template.query(Person.class).first().as(StepVerifier::create).expectNextCount(1).verifyComplete(); } @Test // DATAMONGO-1719 - public void countShouldReturnNrOfElementsInCollectionWhenNoQueryPresent() { - StepVerifier.create(template.query(Person.class).count()).expectNext(2L).verifyComplete(); + void countShouldReturnNrOfElementsInCollectionWhenNoQueryPresent() { + template.query(Person.class).count().as(StepVerifier::create).expectNext(2L).verifyComplete(); } @Test // DATAMONGO-1719 - public void countShouldReturnNrOfElementsMatchingQuery() { + void countShouldReturnNrOfElementsMatchingQuery() { - StepVerifier - .create(template.query(Person.class).matching(query(where("firstname").is(luke.getFirstname()))).count()) - .expectNext(1L) // + template.query(Person.class).matching(query(where("firstname").is(luke.getFirstname()))).count() + .as(StepVerifier::create).expectNext(1L) // 
.verifyComplete(); } @Test // DATAMONGO-1719 - public void existsShouldReturnTrueIfAtLeastOneElementExistsInCollection() { - StepVerifier.create(template.query(Person.class).exists()).expectNext(true).verifyComplete(); + void existsShouldReturnTrueIfAtLeastOneElementExistsInCollection() { + template.query(Person.class).exists().as(StepVerifier::create).expectNext(true).verifyComplete(); } @Test // DATAMONGO-1719 - public void existsShouldReturnFalseIfNoElementExistsInCollection() { + @DirtiesState + void existsShouldReturnFalseIfNoElementExistsInCollection() { blocking.remove(new BasicQuery("{}"), STAR_WARS); - StepVerifier.create(template.query(Person.class).exists()).expectNext(false).verifyComplete(); + template.query(Person.class).exists().as(StepVerifier::create).expectNext(false).verifyComplete(); } @Test // DATAMONGO-1719 - public void existsShouldReturnTrueIfAtLeastOneElementMatchesQuery() { + void existsShouldReturnTrueIfAtLeastOneElementMatchesQuery() { - StepVerifier - .create(template.query(Person.class).matching(query(where("firstname").is(luke.getFirstname()))).exists()) - .expectNext(true) // + template.query(Person.class).matching(query(where("firstname").is(luke.getFirstname()))).exists() + .as(StepVerifier::create).expectNext(true) // .verifyComplete(); } @Test // DATAMONGO-1719 - public void existsShouldReturnFalseWhenNoElementMatchesQuery() { + void existsShouldReturnFalseWhenNoElementMatchesQuery() { - StepVerifier.create(template.query(Person.class).matching(query(where("firstname").is("spock"))).exists()) + template.query(Person.class).matching(query(where("firstname").is("spock"))).exists().as(StepVerifier::create) .expectNext(false) // .verifyComplete(); } @Test // DATAMONGO-1761 - public void distinctReturnsEmptyListIfNoMatchFound() { + void distinctReturnsEmptyListIfNoMatchFound() { - StepVerifier.create(template.query(Person.class).distinct("actually-not-property-in-use").as(String.class).all()) - .verifyComplete(); + 
template.query(Person.class).distinct("actually-not-property-in-use").as(String.class).all() + .as(StepVerifier::create).verifyComplete(); } @Test // DATAMONGO-1761 - public void distinctReturnsSimpleFieldValuesCorrectlyForCollectionHavingReturnTypeSpecifiedThatCanBeConvertedDirectlyByACodec() { + @DirtiesState + void distinctReturnsSimpleFieldValuesCorrectlyForCollectionHavingReturnTypeSpecifiedThatCanBeConvertedDirectlyByACodec() { Person anakin = new Person(); anakin.firstname = "anakin"; @@ -356,13 +491,14 @@ public void distinctReturnsSimpleFieldValuesCorrectlyForCollectionHavingReturnTy blocking.save(anakin); - StepVerifier.create(template.query(Person.class).distinct("lastname").as(String.class).all()) + template.query(Person.class).distinct("lastname").as(String.class).all().as(StepVerifier::create) .assertNext(in("solo", "skywalker")).assertNext(in("solo", "skywalker")) // .verifyComplete(); } @Test // DATAMONGO-1761 - public void distinctReturnsSimpleFieldValuesCorrectly() { + @DirtiesState + void distinctReturnsSimpleFieldValuesCorrectly() { Person anakin = new Person(); anakin.firstname = "anakin"; @@ -382,7 +518,7 @@ public void distinctReturnsSimpleFieldValuesCorrectly() { Consumer containedInAbilities = in(anakin.ability, padme.ability, jaja.ability); - StepVerifier.create(template.query(Person.class).distinct("ability").all()) // + template.query(Person.class).distinct("ability").all().as(StepVerifier::create) // .assertNext(containedInAbilities) // .assertNext(containedInAbilities) // .assertNext(containedInAbilities) // @@ -390,7 +526,8 @@ public void distinctReturnsSimpleFieldValuesCorrectly() { } @Test // DATAMONGO-1761 - public void distinctReturnsComplexValuesCorrectly() { + @DirtiesState + void distinctReturnsComplexValuesCorrectly() { Sith sith = new Sith(); sith.rank = "lord"; @@ -401,13 +538,14 @@ public void distinctReturnsComplexValuesCorrectly() { blocking.save(anakin); - 
StepVerifier.create(template.query(Person.class).distinct("ability").all()) // + template.query(Person.class).distinct("ability").all().as(StepVerifier::create) // .expectNext(anakin.ability) // .verifyComplete(); } @Test // DATAMONGO-1761 - public void distinctReturnsComplexValuesCorrectlyHavingReturnTypeSpecified() { + @DirtiesState + void distinctReturnsComplexValuesCorrectlyHavingReturnTypeSpecified() { Sith sith = new Sith(); sith.rank = "lord"; @@ -418,13 +556,14 @@ public void distinctReturnsComplexValuesCorrectlyHavingReturnTypeSpecified() { blocking.save(anakin); - StepVerifier.create(template.query(Person.class).distinct("ability").as(Sith.class).all()) // + template.query(Person.class).distinct("ability").as(Sith.class).all().as(StepVerifier::create) // .expectNext(sith) // .verifyComplete(); } @Test // DATAMONGO-1761 - public void distinctReturnsComplexValuesCorrectlyReturnTypeDocumentSpecified() { + @DirtiesState + void distinctReturnsComplexValuesCorrectlyReturnTypeDocumentSpecified() { Sith sith = new Sith(); sith.rank = "lord"; @@ -435,67 +574,71 @@ public void distinctReturnsComplexValuesCorrectlyReturnTypeDocumentSpecified() { blocking.save(anakin); - StepVerifier.create(template.query(Person.class).distinct("ability").as(Document.class).all()) + template.query(Person.class).distinct("ability").as(Document.class).all().as(StepVerifier::create) .expectNext(new Document("rank", "lord").append("_class", Sith.class.getName())) // .verifyComplete(); } @Test // DATAMONGO-1761 - public void distinctMapsFieldNameCorrectly() { + void distinctMapsFieldNameCorrectly() { - StepVerifier.create(template.query(Jedi.class).inCollection(STAR_WARS).distinct("name").as(String.class).all()) + template.query(Jedi.class).inCollection(STAR_WARS).distinct("name").as(String.class).all().as(StepVerifier::create) .assertNext(in("han", "luke")).assertNext(in("han", "luke")) // .verifyComplete(); } @Test // DATAMONGO-1761 - public void 
distinctReturnsRawValuesIfReturnTypeIsBsonValue() { + void distinctReturnsRawValuesIfReturnTypeIsBsonValue() { Consumer inValues = in(new BsonString("solo"), new BsonString("skywalker")); - StepVerifier.create(template.query(Person.class).distinct("lastname").as(BsonValue.class).all()) + template.query(Person.class).distinct("lastname").as(BsonValue.class).all().as(StepVerifier::create) .assertNext(inValues) // .assertNext(inValues) // .verifyComplete(); } @Test // DATAMONGO-1761 - public void distinctReturnsValuesMappedToTheirJavaTypeEvenWhenNotExplicitlyDefinedByTheDomainType() { + @DirtiesState + void distinctReturnsValuesMappedToTheirJavaTypeEvenWhenNotExplicitlyDefinedByTheDomainType() { blocking.save(new Document("darth", "vader"), STAR_WARS); - StepVerifier.create(template.query(Person.class).distinct("darth").all()) // + template.query(Person.class).distinct("darth").all().as(StepVerifier::create) // .expectNext("vader") // .verifyComplete(); } @Test // DATAMONGO-1761 - public void distinctReturnsMappedDomainTypeForProjections() { + @DirtiesState + void distinctReturnsMappedDomainTypeForProjections() { luke.father = new Person(); luke.father.firstname = "anakin"; blocking.save(luke); - StepVerifier.create(template.query(Person.class).distinct("father").as(Jedi.class).all()) + template.query(Person.class).distinct("father").as(Jedi.class).all().as(StepVerifier::create) .expectNext(new Jedi("anakin")) // .verifyComplete(); } @Test // DATAMONGO-1761 - public void distinctAlllowsQueryUsingObjectSourceType() { + @DirtiesState + void distinctAlllowsQueryUsingObjectSourceType() { luke.father = new Person(); luke.father.firstname = "anakin"; blocking.save(luke); - StepVerifier.create(template.query(Object.class).inCollection(STAR_WARS).distinct("father").as(Jedi.class).all()) - .expectNext(new Jedi("anakin")) // + template.query(Object.class).inCollection(STAR_WARS).distinct("father").as(Jedi.class).all() + .as(StepVerifier::create).expectNext(new Jedi("anakin")) 
// .verifyComplete(); } @Test // DATAMONGO-1761 - public void distinctReturnsMappedDomainTypeExtractedFromPropertyWhenNoExplicitTypePresent() { + @DirtiesState + void distinctReturnsMappedDomainTypeExtractedFromPropertyWhenNoExplicitTypePresent() { luke.father = new Person(); luke.father.firstname = "anakin"; @@ -505,22 +648,32 @@ public void distinctReturnsMappedDomainTypeExtractedFromPropertyWhenNoExplicitTy Person expected = new Person(); expected.firstname = luke.father.firstname; - StepVerifier.create(template.query(Person.class).distinct("father").all()) // + template.query(Person.class).distinct("father").all().as(StepVerifier::create) // .expectNext(expected) // .verifyComplete(); } @Test // DATAMONGO-1761 - public void distinctThrowsExceptionWhenExplicitMappingTypeCannotBeApplied() { + void distinctThrowsExceptionWhenExplicitMappingTypeCannotBeApplied() { - StepVerifier.create(template.query(Person.class).distinct("firstname").as(Long.class).all()) + template.query(Person.class).distinct("firstname").as(Long.class).all().as(StepVerifier::create) .expectError(InvalidDataAccessApiUsageException.class) // .verify(); } + @Test // DATAMONGO-2507 + void distinctAppliesFilterQuery() { + + template.query(Person.class).inCollection(STAR_WARS).distinct("firstname") // + .matching(where("lastname").is(luke.lastname)) // + .as(String.class) // + .all() // + .as(StepVerifier::create).consumeNextWith(it -> assertThat(it).isEqualTo("luke")) // + .verifyComplete(); + } + interface Contact {} - @Data @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) static class Person implements Contact { @@ -529,6 +682,71 @@ static class Person implements Contact { String lastname; Object ability; Person father; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public Object getAbility() { + return this.ability; + } + + public 
Person getFather() { + return this.father; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAbility(Object ability) { + this.ability = ability; + } + + public void setFather(Person father) { + this.father = father; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(lastname, person.lastname) && Objects.equals(ability, person.ability) + && Objects.equals(father, person.father); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname, ability, father); + } + + public String toString() { + return "ReactiveFindOperationSupportTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ", lastname=" + this.getLastname() + ", ability=" + this.getAbility() + ", father=" + this.getFather() + + ")"; + } } interface PersonProjection { @@ -541,32 +759,128 @@ public interface PersonSpELProjection { String getName(); } - @Data static class Human { + @Id String id; + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public String toString() { + return "ReactiveFindOperationSupportTests.Human(id=" + this.getId() + ")"; + } } - @Data - @NoArgsConstructor - @AllArgsConstructor static class Jedi { @Field("firstname") String name; + + public Jedi() {} + + public Jedi(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != 
o.getClass()) { + return false; + } + Jedi jedi = (Jedi) o; + return Objects.equals(name, jedi.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + public String toString() { + return "ReactiveFindOperationSupportTests.Jedi(name=" + this.getName() + ")"; + } } - @Data static class Sith { String rank; + + public String getRank() { + return this.rank; + } + + public void setRank(String rank) { + this.rank = rank; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Sith sith = (Sith) o; + return Objects.equals(rank, sith.rank); + } + + @Override + public int hashCode() { + return Objects.hash(rank); + } + + public String toString() { + return "ReactiveFindOperationSupportTests.Sith(rank=" + this.getRank() + ")"; + } } - @Data - @AllArgsConstructor @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) static class Planet { @Id String name; Point coordinates; + + public Planet(String name, Point coordinates) { + this.name = name; + this.coordinates = coordinates; + } + + public String getName() { + return this.name; + } + + public Point getCoordinates() { + return this.coordinates; + } + + public void setName(String name) { + this.name = name; + } + + public void setCoordinates(Point coordinates) { + this.coordinates = coordinates; + } + + public String toString() { + return "ReactiveFindOperationSupportTests.Planet(name=" + this.getName() + ", coordinates=" + + this.getCoordinates() + ")"; + } } interface PlanetProjection { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupportUnitTests.java index 52541efaad..b417430934 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupportUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupportUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,21 +16,18 @@ package org.springframework.data.mongodb.core; import static org.assertj.core.api.Assertions.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; -import static org.mockito.Mockito.anyList; - -import lombok.Data; import java.util.Arrays; +import java.util.Objects; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.data.annotation.Id; /** @@ -38,21 +35,19 @@ * * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class ReactiveInsertOperationSupportUnitTests { private static final String STAR_WARS = "star-wars"; @Mock ReactiveMongoTemplate template; - ReactiveInsertOperationSupport ops; - - Person luke, han; + private ReactiveInsertOperationSupport ops; 
- @Before - public void setUp() { + private Person luke, han; - when(template.determineCollectionName(any(Class.class))).thenReturn(STAR_WARS); + @BeforeEach + void setUp() { ops = new ReactiveInsertOperationSupport(template); @@ -65,46 +60,88 @@ public void setUp() { han.id = "id-2"; } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1719 - public void nullCollectionShouldThrowException() { - ops.insert(Person.class).inCollection(null); + @Test // DATAMONGO-1719 + void nullCollectionShouldThrowException() { + assertThatIllegalArgumentException().isThrownBy(() -> ops.insert(Person.class).inCollection(null)); } @Test // DATAMONGO-1719 - public void insertShouldUseDerivedCollectionName() { + void insertShouldUseDerivedCollectionName() { + + when(template.getCollectionName(any(Class.class))).thenReturn(STAR_WARS); ops.insert(Person.class).one(luke); ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); - verify(template).determineCollectionName(captor.capture()); + verify(template).getCollectionName(captor.capture()); verify(template).insert(eq(luke), eq(STAR_WARS)); assertThat(captor.getAllValues()).containsExactly(Person.class); } @Test // DATAMONGO-1719 - public void insertShouldUseExplicitCollectionName() { + void insertShouldUseExplicitCollectionName() { ops.insert(Person.class).inCollection(STAR_WARS).one(luke); - verify(template, never()).determineCollectionName(any(Class.class)); + verify(template, never()).getCollectionName(any(Class.class)); verify(template).insert(eq(luke), eq(STAR_WARS)); } @Test // DATAMONGO-1719 - public void insertCollectionShouldDelegateCorrectly() { + void insertCollectionShouldDelegateCorrectly() { + + when(template.getCollectionName(any(Class.class))).thenReturn(STAR_WARS); ops.insert(Person.class).all(Arrays.asList(luke, han)); - verify(template).determineCollectionName(any(Class.class)); + verify(template).getCollectionName(any(Class.class)); verify(template).insert(anyList(), eq(STAR_WARS)); } - @Data 
@org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) static class Person { + @Id String id; String firstname; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname); + } + + public String toString() { + return "ReactiveInsertOperationSupportUnitTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupportUnitTests.java new file mode 100644 index 0000000000..609a456912 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupportUnitTests.java @@ -0,0 +1,237 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import java.util.Objects; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Unit tests for {@link ReactiveMapReduceOperationSupport}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @currentRead Beyond the Shadows - Brent Weeks + */ +@ExtendWith(MockitoExtension.class) +public class ReactiveMapReduceOperationSupportUnitTests { + + private static final String STAR_WARS = "star-wars"; + private static final String MAP_FUNCTION = "function() { emit(this.id, this.firstname) }"; + private static final String REDUCE_FUNCTION = "function(id, name) { return sum(id, name); }"; + + @Mock ReactiveMongoTemplate template; + + private ReactiveMapReduceOperationSupport mapReduceOpsSupport; + + @BeforeEach + void setUp() { + mapReduceOpsSupport = new ReactiveMapReduceOperationSupport(template); + } + + @Test // DATAMONGO-1929 + void throwsExceptionOnNullTemplate() { + assertThatIllegalArgumentException().isThrownBy(() -> new ExecutableMapReduceOperationSupport(null)); + } + + @Test // DATAMONGO-1929 + void throwsExceptionOnNullDomainType() { + assertThatIllegalArgumentException().isThrownBy(() -> mapReduceOpsSupport.mapReduce(null)); + 
} + + @Test // DATAMONGO-1929 + void usesExtractedCollectionName() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq(STAR_WARS), eq(Person.class), eq(MAP_FUNCTION), + eq(REDUCE_FUNCTION), isNull()); + } + + @Test // DATAMONGO-1929 + void usesExplicitCollectionName() { + + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION) + .inCollection("the-night-angel").all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq("the-night-angel"), eq(Person.class), + eq(MAP_FUNCTION), eq(REDUCE_FUNCTION), isNull()); + } + + @Test // DATAMONGO-1929 + void usesMapReduceOptionsWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + + MapReduceOptions options = MapReduceOptions.options(); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).with(options).all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq(STAR_WARS), eq(Person.class), eq(MAP_FUNCTION), + eq(REDUCE_FUNCTION), eq(options)); + } + + @Test // DATAMONGO-1929 + void usesQueryWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + + Query query = new BasicQuery("{ 'lastname' : 'skywalker' }"); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).matching(query).all(); + + verify(template).mapReduce(eq(query), eq(Person.class), eq(STAR_WARS), eq(Person.class), eq(MAP_FUNCTION), + eq(REDUCE_FUNCTION), isNull()); + } + + @Test // DATAMONGO-2416 + void usesCriteriaWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + + Query query = Query.query(where("lastname").is("skywalker")); + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION) + 
.matching(where("lastname").is("skywalker")).all(); + + verify(template).mapReduce(eq(query), eq(Person.class), eq(STAR_WARS), eq(Person.class), eq(MAP_FUNCTION), + eq(REDUCE_FUNCTION), isNull()); + } + + @Test // DATAMONGO-1929 + void usesProjectionWhenPresent() { + + when(template.getCollectionName(eq(Person.class))).thenReturn(STAR_WARS); + + mapReduceOpsSupport.mapReduce(Person.class).map(MAP_FUNCTION).reduce(REDUCE_FUNCTION).as(Jedi.class).all(); + + verify(template).mapReduce(any(Query.class), eq(Person.class), eq(STAR_WARS), eq(Jedi.class), eq(MAP_FUNCTION), + eq(REDUCE_FUNCTION), isNull()); + } + + interface Contact {} + + @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) + static class Person implements Contact { + + @Id String id; + String firstname; + String lastname; + Object ability; + Person father; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public Object getAbility() { + return this.ability; + } + + public Person getFather() { + return this.father; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAbility(Object ability) { + this.ability = ability; + } + + public void setFather(Person father) { + this.father = father; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(lastname, person.lastname) && Objects.equals(ability, person.ability) + && Objects.equals(father, person.father); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, 
lastname, ability, father); + } + + public String toString() { + return "ReactiveMapReduceOperationSupportUnitTests.Person(id=" + this.getId() + ", firstname=" + + this.getFirstname() + ", lastname=" + this.getLastname() + ", ability=" + this.getAbility() + ", father=" + + this.getFather() + ")"; + } + } + + static class Jedi { + + @Field("firstname") String name; + + public Jedi() {} + + public Jedi(String name) { + this.name = name; + } + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public String toString() { + return "ReactiveMapReduceOperationSupportUnitTests.Jedi(name=" + this.getName() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateCollationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateCollationTests.java index 7b50ab6544..effdc931df 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateCollationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateCollationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,66 +21,71 @@ import reactor.core.publisher.Mono; import reactor.test.StepVerifier; +import java.util.Collections; import java.util.List; +import java.util.Set; import org.bson.Document; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration; import org.springframework.data.mongodb.core.query.Collation; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.Version; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; import com.mongodb.reactivestreams.client.MongoClient; -import com.mongodb.reactivestreams.client.MongoClients; /** * @author Mark Paluch * @author Christoph Strobl */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) @ContextConfiguration public class ReactiveMongoTemplateCollationTests { - public static @ClassRule MongoVersionRule REQUIRES_AT_LEAST_3_4_0 = MongoVersionRule.atLeast(Version.parse("3.4.0")); public static final String COLLECTION_NAME = "collation-1"; + static @Client MongoClient 
mongoClient; @Configuration static class Config extends AbstractReactiveMongoConfiguration { @Override public MongoClient reactiveMongoClient() { - return MongoClients.create(); + return mongoClient; } @Override protected String getDatabaseName() { return "collation-tests"; } + + @Override + protected Set> getInitialEntitySet() { + return Collections.emptySet(); + } } @Autowired ReactiveMongoTemplate template; - @Before + @BeforeEach public void setUp() { - StepVerifier.create(template.dropCollection(COLLECTION_NAME)).verifyComplete(); + template.dropCollection(COLLECTION_NAME).as(StepVerifier::create).verifyComplete(); } @Test // DATAMONGO-1693 public void createCollectionWithCollation() { - StepVerifier.create(template.createCollection(COLLECTION_NAME, CollectionOptions.just(Collation.of("en_US")))) // + template.createCollection(COLLECTION_NAME, CollectionOptions.just(Collation.of("en_US"))).as(StepVerifier::create) // .expectNextCount(1) // .verifyComplete(); Mono collation = getCollationInfo(COLLECTION_NAME); - StepVerifier.create(collation) // + collation.as(StepVerifier::create) // .consumeNextWith(document -> assertThat(document.get("locale")).isEqualTo("en_US")) // .verifyComplete(); @@ -98,10 +103,9 @@ private Mono getCollectionInfo(String collectionName) { return template.execute(db -> { - return Flux - .from(db.runCommand(new Document() // - .append("listCollections", 1) // - .append("filter", new Document("name", collectionName)))) // + return Flux.from(db.runCommand(new Document() // + .append("listCollections", 1) // + .append("filter", new Document("name", collectionName)))) // .map(it -> it.get("cursor", Document.class)) .flatMapIterable(it -> (List) it.get("firstBatch", List.class)); }).next(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateExecuteTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateExecuteTests.java index 
26ce47c793..3bf9035a44 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateExecuteTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateExecuteTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,8 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.data.Offset.offset; import static org.junit.Assume.*; import reactor.core.publisher.Flux; @@ -24,16 +24,15 @@ import org.bson.Document; import org.junit.Before; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.mongodb.UncategorizedMongoDbException; import org.springframework.data.util.Version; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; import com.mongodb.MongoException; import com.mongodb.ReadPreference; @@ -45,14 +44,12 @@ * * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) 
@ContextConfiguration("classpath:reactive-infrastructure.xml") public class ReactiveMongoTemplateExecuteTests { private static final Version THREE = Version.parse("3.0"); - @Rule public ExpectedException thrown = ExpectedException.none(); - @Autowired SimpleReactiveMongoDatabaseFactory factory; @Autowired ReactiveMongoOperations operations; @@ -66,7 +63,7 @@ public void setUp() { .mergeWith(operations.dropCollection("execute_test1")) // .mergeWith(operations.dropCollection("execute_test2")); - StepVerifier.create(cleanup).verifyComplete(); + cleanup.as(StepVerifier::create).verifyComplete(); if (mongoVersion == null) { mongoVersion = operations.executeCommand("{ buildInfo: 1 }") // @@ -79,18 +76,18 @@ public void setUp() { @Test // DATAMONGO-1444 public void executeCommandJsonCommandShouldReturnSingleResponse() { - StepVerifier.create(operations.executeCommand("{ buildInfo: 1 }")).consumeNextWith(actual -> { + operations.executeCommand("{ buildInfo: 1 }").as(StepVerifier::create).consumeNextWith(actual -> { - assertThat(actual, hasKey("version")); + assertThat(actual).containsKey("version"); }).verifyComplete(); } @Test // DATAMONGO-1444 public void executeCommandDocumentCommandShouldReturnSingleResponse() { - StepVerifier.create(operations.executeCommand(new Document("buildInfo", 1))).consumeNextWith(actual -> { + operations.executeCommand(new Document("buildInfo", 1)).as(StepVerifier::create).consumeNextWith(actual -> { - assertThat(actual, hasKey("version")); + assertThat(actual).containsKey("version"); }).verifyComplete(); } @@ -99,14 +96,14 @@ public void executeCommandJsonCommandShouldReturnMultipleResponses() { assumeTrue(mongoVersion.isGreaterThan(THREE)); - StepVerifier.create(operations.executeCommand("{ insert: 'execute_test', documents: [{},{},{}]}")) + operations.executeCommand("{ insert: 'execute_test', documents: [{},{},{}]}").as(StepVerifier::create) .expectNextCount(1).verifyComplete(); - StepVerifier.create(operations.executeCommand("{ find: 
'execute_test'}")) // + operations.executeCommand("{ find: 'execute_test'}").as(StepVerifier::create) // .consumeNextWith(actual -> { - assertThat(actual.get("ok", Double.class), is(closeTo(1D, 0D))); - assertThat(actual, hasKey("cursor")); + assertThat(actual.get("ok", Double.class)).isCloseTo(1D, offset(0D)); + assertThat(actual).containsKey("cursor"); }) // .verifyComplete(); } @@ -114,7 +111,7 @@ public void executeCommandJsonCommandShouldReturnMultipleResponses() { @Test // DATAMONGO-1444 public void executeCommandJsonCommandShouldTranslateExceptions() { - StepVerifier.create(operations.executeCommand("{ unknown: 1 }")) // + operations.executeCommand("{ unknown: 1 }").as(StepVerifier::create) // .expectError(InvalidDataAccessApiUsageException.class) // .verify(); } @@ -122,7 +119,7 @@ public void executeCommandJsonCommandShouldTranslateExceptions() { @Test // DATAMONGO-1444 public void executeCommandDocumentCommandShouldTranslateExceptions() { - StepVerifier.create(operations.executeCommand(new Document("unknown", 1))) // + operations.executeCommand(new Document("unknown", 1)).as(StepVerifier::create) // .expectError(InvalidDataAccessApiUsageException.class) // .verify(); @@ -131,7 +128,7 @@ public void executeCommandDocumentCommandShouldTranslateExceptions() { @Test // DATAMONGO-1444 public void executeCommandWithReadPreferenceCommandShouldTranslateExceptions() { - StepVerifier.create(operations.executeCommand(new Document("unknown", 1), ReadPreference.nearest())) // + operations.executeCommand(new Document("unknown", 1), ReadPreference.nearest()).as(StepVerifier::create) // .expectError(InvalidDataAccessApiUsageException.class) // .verify(); } @@ -143,11 +140,11 @@ public void executeOnDatabaseShouldExecuteCommand() { .mergeWith(operations.executeCommand("{ insert: 'execute_test1', documents: [{},{},{}]}")) .mergeWith(operations.executeCommand("{ insert: 'execute_test2', documents: [{},{},{}]}")); - 
StepVerifier.create(documentFlux).expectNextCount(3).verifyComplete(); + documentFlux.as(StepVerifier::create).expectNextCount(3).verifyComplete(); Flux execute = operations.execute(MongoDatabase::listCollections); - StepVerifier.create(execute.filter(document -> document.getString("name").startsWith("execute_test"))) // + execute.filter(document -> document.getString("name").startsWith("execute_test")).as(StepVerifier::create) // .expectNextCount(3) // .verifyComplete(); } @@ -169,27 +166,28 @@ public void executeOnDatabaseShouldShouldTranslateExceptions() { throw new MongoException(50, "hi there"); }); - StepVerifier.create(execute).expectError(UncategorizedMongoDbException.class).verify(); + execute.as(StepVerifier::create).expectError(UncategorizedMongoDbException.class).verify(); } @Test // DATAMONGO-1444 public void executeOnCollectionWithTypeShouldReturnFindResults() { - StepVerifier.create(operations.executeCommand("{ insert: 'person', documents: [{},{},{}]}")) // + operations.executeCommand("{ insert: 'person', documents: [{},{},{}]}").as(StepVerifier::create) // .expectNextCount(1) // .verifyComplete(); - StepVerifier.create(operations.execute(Person.class, MongoCollection::find)).expectNextCount(3).verifyComplete(); + operations.execute(Person.class, MongoCollection::find).as(StepVerifier::create).expectNextCount(3) + .verifyComplete(); } @Test // DATAMONGO-1444 public void executeOnCollectionWithNameShouldReturnFindResults() { - StepVerifier.create(operations.executeCommand("{ insert: 'execute_test', documents: [{},{},{}]}")) // + operations.executeCommand("{ insert: 'execute_test', documents: [{},{},{}]}").as(StepVerifier::create) // .expectNextCount(1) // .verifyComplete(); - StepVerifier.create(operations.execute("execute_test", MongoCollection::find)) // + operations.execute("execute_test", MongoCollection::find).as(StepVerifier::create) // .expectNextCount(3) // .verifyComplete(); } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateIndexTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateIndexTests.java index 78e8746834..75b38390cb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateIndexTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateIndexTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,59 +15,73 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.data.Index.atIndex; +import static org.springframework.data.mongodb.test.util.Assertions.*; -import lombok.Data; import reactor.core.publisher.Flux; import reactor.test.StepVerifier; -import java.util.List; +import java.time.Duration; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; import org.bson.Document; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import 
org.junitpioneer.jupiter.RepeatFailedTest; +import org.springframework.dao.DataIntegrityViolationException; import org.springframework.data.annotation.Id; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.index.Index; import org.springframework.data.mongodb.core.index.IndexField; import org.springframework.data.mongodb.core.index.IndexInfo; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -import com.mongodb.reactivestreams.client.ListIndexesPublisher; +import org.springframework.data.mongodb.core.index.Indexed; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; + +import com.mongodb.client.model.IndexOptions; +import com.mongodb.reactivestreams.client.MongoClient; import com.mongodb.reactivestreams.client.MongoCollection; /** - * Integration test for {@link MongoTemplate}. + * Integration test for index creation via {@link ReactiveMongoTemplate}. 
* * @author Mark Paluch * @author Christoph Strobl + * @author Mathieu Ouellet */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:reactive-infrastructure.xml") +@ExtendWith(MongoClientExtension.class) public class ReactiveMongoTemplateIndexTests { - @Rule public ExpectedException thrown = ExpectedException.none(); + private static @Client MongoClient client; + + private SimpleReactiveMongoDatabaseFactory factory; + private ReactiveMongoTemplate template; + + @BeforeEach + void setUp() { - @Autowired SimpleReactiveMongoDatabaseFactory factory; - @Autowired ReactiveMongoTemplate template; + factory = new SimpleReactiveMongoDatabaseFactory(client, "reactive-template-index-tests"); + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setAutoIndexCreation(true); + template = new ReactiveMongoTemplate(factory, new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)); - @Before - public void setUp() { - StepVerifier.create(template.dropCollection(Person.class)).verifyComplete(); + MongoTestUtils.dropCollectionNow("reactive-template-index-tests", "person", client); + MongoTestUtils.dropCollectionNow("reactive-template-index-tests", "indexfail", client); + MongoTestUtils.dropCollectionNow("reactive-template-index-tests", "indexedSample", client); } - @After - public void cleanUp() {} + @AfterEach + void cleanUp() {} - @Test // DATAMONGO-1444 - public void testEnsureIndexShouldCreateIndex() { + @RepeatFailedTest(3) // DATAMONGO-1444 + void testEnsureIndexShouldCreateIndex() { Person p1 = new Person("Oliver"); p1.setAge(25); @@ -76,106 +90,152 @@ public void testEnsureIndexShouldCreateIndex() { p2.setAge(40); template.insert(p2); - StepVerifier - .create(template.indexOps(Person.class) // - .ensureIndex(new Index().on("age", Direction.DESC).unique())) // + template.indexOps(Person.class) // + .ensureIndex(new Index().on("age", Direction.DESC).unique()) // + .as(StepVerifier::create) // .expectNextCount(1) 
// .verifyComplete(); - MongoCollection coll = template.getCollection(template.getCollectionName(Person.class)); - StepVerifier.create(Flux.from(coll.listIndexes()).collectList()).consumeNextWith(indexInfo -> { - - assertThat(indexInfo.size(), is(2)); - Object indexKey = null; - boolean unique = false; - for (Document ix : indexInfo) { - - if ("age_-1".equals(ix.get("name"))) { - indexKey = ix.get("key"); - unique = (Boolean) ix.get("unique"); - } - } - assertThat(((Document) indexKey), hasEntry("age", -1)); - assertThat(unique, is(true)); - }).verifyComplete(); + template.getCollection(template.getCollectionName(Person.class)).flatMapMany(MongoCollection::listIndexes) + .collectList() // + .as(StepVerifier::create) // + .consumeNextWith(indexInfo -> { + + assertThat(indexInfo).hasSize(2); + Object indexKey = null; + boolean unique = false; + for (Document ix : indexInfo) { + + if ("age_-1".equals(ix.get("name"))) { + indexKey = ix.get("key"); + unique = (Boolean) ix.get("unique"); + } + } + assertThat((Document) indexKey).containsEntry("age", -1); + assertThat(unique).isTrue(); + }).verifyComplete(); } - @Test // DATAMONGO-1444 - public void getIndexInfoShouldReturnCorrectIndex() { + @RepeatFailedTest(3) // DATAMONGO-1444 + void getIndexInfoShouldReturnCorrectIndex() { Person p1 = new Person("Oliver"); p1.setAge(25); - StepVerifier.create(template.insert(p1)).expectNextCount(1).verifyComplete(); - - StepVerifier - .create(template.indexOps(Person.class) // - .ensureIndex(new Index().on("age", Direction.DESC).unique())) // + template.insert(p1) // + .as(StepVerifier::create) // .expectNextCount(1) // .verifyComplete(); - StepVerifier.create(template.indexOps(Person.class).getIndexInfo().collectList()).consumeNextWith(indexInfos -> { + template.indexOps(Person.class) // + .ensureIndex(new Index().on("age", Direction.DESC).unique()) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - assertThat(indexInfos.size(), is(2)); + 
template.indexOps(Person.class).getIndexInfo().collectList() // + .as(StepVerifier::create) // + .consumeNextWith(indexInfos -> { - IndexInfo ii = indexInfos.get(1); - assertThat(ii.isUnique(), is(true)); - assertThat(ii.isSparse(), is(false)); + assertThat(indexInfos).hasSize(2); - List indexFields = ii.getIndexFields(); - IndexField field = indexFields.get(0); + IndexInfo ii = indexInfos.get(1); + assertThat(ii.isUnique()).isTrue(); + assertThat(ii.isSparse()).isFalse(); - assertThat(field, is(IndexField.create("age", Direction.DESC))); - }).verifyComplete(); + assertThat(ii.getIndexFields()).contains(IndexField.create("age", Direction.DESC), atIndex(0)); + }).verifyComplete(); } - @Test // DATAMONGO-1444 - public void testReadIndexInfoForIndicesCreatedViaMongoShellCommands() { + @RepeatFailedTest(3) // DATAMONGO-1444, DATAMONGO-2264 + void testReadIndexInfoForIndicesCreatedViaMongoShellCommands() { - String command = "db." + template.getCollectionName(Person.class) - + ".createIndex({'age':-1}, {'unique':true, 'sparse':true}), 1"; - StepVerifier.create(template.indexOps(Person.class).dropAllIndexes()).verifyComplete(); + template.indexOps(Person.class).dropAllIndexes() // + .as(StepVerifier::create) // + .verifyComplete(); - StepVerifier.create(template.indexOps(Person.class).getIndexInfo()).verifyComplete(); + template.indexOps(Person.class).getIndexInfo() // + .as(StepVerifier::create) // + .verifyComplete(); - StepVerifier.create(factory.getMongoDatabase().runCommand(new org.bson.Document("eval", command))) // + factory.getMongoDatabase() // + .flatMapMany(db -> db.getCollection(template.getCollectionName(Person.class)) + .createIndex(new Document("age", -1), new IndexOptions().unique(true).sparse(true))) + .as(StepVerifier::create) // .expectNextCount(1) // .verifyComplete(); - ListIndexesPublisher listIndexesPublisher = template - .getCollection(template.getCollectionName(Person.class)).listIndexes(); - - 
StepVerifier.create(Flux.from(listIndexesPublisher).collectList()).consumeNextWith(indexInfos -> { + template.getCollection(template.getCollectionName(Person.class)).flatMapMany(MongoCollection::listIndexes) + .collectList() // + .as(StepVerifier::create) // + .consumeNextWith(indexInfos -> { - Document indexKey = null; - boolean unique = false; + Document indexKey = null; + boolean unique = false; - for (Document document : indexInfos) { + for (Document document : indexInfos) { - if ("age_-1".equals(document.get("name"))) { - indexKey = (org.bson.Document) document.get("key"); - unique = (Boolean) document.get("unique"); - } - } + if ("age_-1".equals(document.get("name"))) { + indexKey = (org.bson.Document) document.get("key"); + unique = (Boolean) document.get("unique"); + } + } - assertThat(indexKey, hasEntry("age", -1D)); - assertThat(unique, is(true)); - }).verifyComplete(); + assertThat(indexKey).containsEntry("age", -1); + assertThat(unique).isTrue(); + }).verifyComplete(); - StepVerifier.create(Flux.from(template.indexOps(Person.class).getIndexInfo().collectList())) + Flux.from(template.indexOps(Person.class).getIndexInfo().collectList()) // + .as(StepVerifier::create) // .consumeNextWith(indexInfos -> { IndexInfo info = indexInfos.get(1); - assertThat(info.isUnique(), is(true)); - assertThat(info.isSparse(), is(true)); - - List indexFields = info.getIndexFields(); - IndexField field = indexFields.get(0); + assertThat(info.isUnique()).isTrue(); + assertThat(info.isSparse()).isTrue(); - assertThat(field, is(IndexField.create("age", Direction.DESC))); + assertThat(info.getIndexFields()).contains(IndexField.create("age", Direction.DESC), atIndex(0)); }).verifyComplete(); } - @Data + @RepeatFailedTest(3) // DATAMONGO-1928 + void shouldCreateIndexOnAccess() { + + template.getCollection("indexedSample").flatMapMany(it -> it.listIndexes(Document.class)) // + .as(StepVerifier::create) // + .expectNextCount(0) // + .verifyComplete(); + + 
template.findAll(IndexedSample.class).defaultIfEmpty(new IndexedSample()) // + .delayElements(Duration.ofMillis(500)) // TODO: check if 4.2.0 server GA still requires this timeout + .then() + .as(StepVerifier::create) // + .verifyComplete(); + + template.getCollection("indexedSample").flatMapMany(it -> it.listIndexes(Document.class)) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + } + + @RepeatFailedTest(3) // DATAMONGO-1928, DATAMONGO-2264 + void indexCreationShouldFail() throws InterruptedException { + + factory.getMongoDatabase() // + .flatMapMany(db -> db.getCollection("indexfail") // + .createIndex(new Document("field", 1), new IndexOptions().name("foo").unique(true).sparse(true))) + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + BlockingQueue queue = new LinkedBlockingQueue<>(); + ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory, this.template.getConverter(), queue::add); + + template.findAll(IndexCreationShouldFail.class).subscribe(); + + Throwable failure = queue.poll(10, TimeUnit.SECONDS); + + assertThat(failure).isNotNull().isInstanceOf(DataIntegrityViolationException.class); + } + static class Sample { @Id String id; @@ -187,5 +247,80 @@ public Sample(String id, String field) { this.id = id; this.field = field; } + + public String getId() { + return this.id; + } + + public String getField() { + return this.field; + } + + public void setId(String id) { + this.id = id; + } + + public void setField(String field) { + this.field = field; + } + + public String toString() { + return "ReactiveMongoTemplateIndexTests.Sample(id=" + this.getId() + ", field=" + this.getField() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document + static class IndexedSample { + + @Id String id; + @Indexed String field; + + public String getId() { + return this.id; + } + + public String getField() { + return this.field; + } + + public void setId(String id) { + this.id = id; + } + 
+ public void setField(String field) { + this.field = field; + } + + public String toString() { + return "ReactiveMongoTemplateIndexTests.IndexedSample(id=" + this.getId() + ", field=" + this.getField() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document("indexfail") + static class IndexCreationShouldFail { + + @Id String id; + @Indexed(name = "foo") String field; + + public String getId() { + return this.id; + } + + public String getField() { + return this.field; + } + + public void setId(String id) { + this.id = id; + } + + public void setField(String field) { + this.field = field; + } + + public String toString() { + return "ReactiveMongoTemplateIndexTests.IndexCreationShouldFail(id=" + this.getId() + ", field=" + this.getField() + + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateReplaceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateReplaceTests.java new file mode 100644 index 0000000000..86433ab338 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateReplaceTests.java @@ -0,0 +1,329 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.ReplaceOptions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.List; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.bson.BsonInt64; +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.reactivestreams.Publisher; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; + +import com.mongodb.client.model.Filters; +import com.mongodb.client.result.UpdateResult; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +public class ReactiveMongoTemplateReplaceTests { + + static final String DB_NAME = "mongo-template-replace-tests"; + static final String RESTAURANT_COLLECTION = "restaurant"; + + static @Client MongoClient client; + private ReactiveMongoTemplate template; + + @BeforeEach + void beforeEach() { + + template = new ReactiveMongoTemplate(client, DB_NAME); + 
template.setEntityLifecycleEventsEnabled(false); + + initTestData(); + } + + @AfterEach() + void afterEach() { + clearTestData(); + } + + @Test // GH-4462 + void replacesExistingDocument() { + + Mono result = template.replace(query(where("name").is("Central Perk Cafe")), + new Restaurant("Central Pork Cafe", "Manhattan")); + + result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(1); + assertThat(it.getModifiedCount()).isEqualTo(1); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + }).verifyComplete(); + } + + @Test // GH-4462 + void replacesFirstOnMoreThanOneMatch() { + + Mono result = template.replace(query(where("violations").exists(true)), + new Restaurant("Central Pork Cafe", "Manhattan")); + + result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(1); + assertThat(it.getModifiedCount()).isEqualTo(1); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 2)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + }).verifyComplete(); + } + + @Test // GH-4462 + void replacesExistingDocumentWithRawDoc() { + + Mono result = template.replace(query(where("r-name").is("Central Perk Cafe")), + Document.parse("{ 'r-name' : 'Central Pork Cafe', 'Borough' : 'Manhattan' }"), + template.getCollectionName(Restaurant.class)); + + result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(1); + assertThat(it.getModifiedCount()).isEqualTo(1); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "Central Pork 
Cafe"); + }).verifyComplete(); + } + + @Test // GH-4462 + void replacesExistingDocumentWithRawDocMappingQueryAgainstDomainType() { + + Mono result = template.replace(query(where("name").is("Central Perk Cafe")), Restaurant.class, + Document.parse("{ 'r-name' : 'Central Pork Cafe', 'Borough' : 'Manhattan' }"), ReplaceOptions.none(), template.getCollectionName(Restaurant.class)); + + result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(1); + assertThat(it.getModifiedCount()).isEqualTo(1); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + }).verifyComplete(); + } + + @Test // GH-4462 + void replacesExistingDocumentWithMatchingId() { + + Mono result = template.replace(query(where("name").is("Central Perk Cafe")), + new Restaurant(1L, "Central Pork Cafe", "Manhattan", 0)); + + result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(1); + assertThat(it.getModifiedCount()).isEqualTo(1); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 1)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "Central Pork Cafe"); + }).verifyComplete(); + } + + @Test // GH-4462 + void replacesExistingDocumentWithNewIdThrowsDataIntegrityViolationException() { + + template.replace(query(where("name").is("Central Perk Cafe")), + new Restaurant(4L, "Central Pork Cafe", "Manhattan", 0)) + .as(StepVerifier::create) + .expectError(DataIntegrityViolationException.class) + .verify(); + } + + @Test // GH-4462 + void doesNothingIfNoMatchFoundAndUpsertSetToFalse/* by default */() { + + Mono result = template.replace(query(where("name").is("Pizza Rat's Pizzaria")), + new Restaurant(null, "Pizza Rat's Pizzaria", "Manhattan", 8)); + + 
result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(0); + assertThat(it.getModifiedCount()).isEqualTo(0); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("r-name", "Pizza Rat's Pizzaria")).first()) + .as(StepVerifier::create).verifyComplete(); + } + + @Test // GH-4462 + void insertsIfNoMatchFoundAndUpsertSetToTrue() { + + Mono result = template.replace(query(where("name").is("Pizza Rat's Pizzaria")), + new Restaurant(4L, "Pizza Rat's Pizzaria", "Manhattan", 8), replaceOptions().upsert()); + + result.as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getMatchedCount()).isEqualTo(0); + assertThat(it.getModifiedCount()).isEqualTo(0); + assertThat(it.getUpsertedId()).isEqualTo(new BsonInt64(4L)); + }).verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 4)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "Pizza Rat's Pizzaria"); + }) + .verifyComplete(); + } + + @Test // GH-4797 + @EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") + void replaceConsidersSort() { + + template.replace(new Query().with(Sort.by(Direction.DESC, "name")), new Restaurant("resist", "Manhattan")) // + .as(StepVerifier::create) // + .consumeNextWith(result -> assertThat(result.getModifiedCount()).isOne()) // + .verifyComplete(); + + retrieve(collection -> collection.find(Filters.eq("_id", 2)).first()).as(StepVerifier::create) + .consumeNextWith(document -> { + assertThat(document).containsEntry("r-name", "resist"); + }).verifyComplete(); + } + + void initTestData() { + + List testData = Stream.of( // + "{ '_id' : 1, 'r-name' : 'Central Perk Cafe', 'Borough' : 'Manhattan' }", + "{ '_id' : 2, 'r-name' : 'Rock A Feller Bar and Grill', 'Borough' : 'Queens', 'violations' : 2 }", + "{ '_id' : 3, 'r-name' : 'Empire State Pub', 'Borough' : 'Brooklyn', 'violations' : 0 }") // + 
.map(Document::parse).collect(Collectors.toList()); + + doInCollection(collection -> collection.insertMany(testData)); + } + + void clearTestData() { + doInCollection(collection -> collection.deleteMany(new Document())); + } + + void doInCollection(Function, Publisher> fkt) { + retrieve(collection -> Mono.from(fkt.apply(collection))).then().as(StepVerifier::create).verifyComplete(); + } + + Mono retrieve(Function, Publisher> fkt) { + return Mono.from(fkt.apply(client.getDatabase(DB_NAME).getCollection(RESTAURANT_COLLECTION))); + } + + @org.springframework.data.mongodb.core.mapping.Document(RESTAURANT_COLLECTION) + static class Restaurant { + + Long id; + + @Field("r-name") String name; + String borough; + Integer violations; + + Restaurant() {} + + Restaurant(String name, String borough) { + + this.name = name; + this.borough = borough; + } + + Restaurant(Long id, String name, String borough, Integer violations) { + + this.id = id; + this.name = name; + this.borough = borough; + this.violations = violations; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getRName() { + return name; + } + + public void setRName(String rName) { + this.name = rName; + } + + public String getBorough() { + return borough; + } + + public void setBorough(String borough) { + this.borough = borough; + } + + public int getViolations() { + return violations; + } + + public void setViolations(int violations) { + this.violations = violations; + } + + @Override + public String toString() { + return "Restaurant{" + "id=" + id + ", name='" + name + '\'' + ", borough='" + borough + '\'' + ", violations=" + + violations + '}'; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Restaurant that = (Restaurant) o; + return violations == that.violations && Objects.equals(id, that.id) && Objects.equals(name, that.name) + && 
Objects.equals(borough, that.borough); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, borough, violations); + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateScrollTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateScrollTests.java new file mode 100644 index 0000000000..0e6e94bdf7 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateScrollTests.java @@ -0,0 +1,255 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import reactor.test.StepVerifier; + +import java.time.Duration; +import java.util.Arrays; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Stream; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.context.support.GenericApplicationContext; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.ReactiveMongoTestTemplate; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Integration tests for {@link Window} queries. 
+ * + * @author Mark Paluch + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +class ReactiveMongoTemplateScrollTests { + + static @Client MongoClient client; + + public static final String DB_NAME = "mongo-template-scroll-tests"; + + ConfigurableApplicationContext context = new GenericApplicationContext(); + + private ReactiveMongoTestTemplate template = new ReactiveMongoTestTemplate(cfg -> { + + cfg.configureDatabaseFactory(it -> { + + it.client(client); + it.defaultDb(DB_NAME); + }); + + cfg.configureApplicationContext(it -> { + it.applicationContext(context); + }); + }); + + @BeforeEach + void setUp() { + + template.remove(Person.class).all() // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.remove(WithRenamedField.class).all() // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @ParameterizedTest // GH-4308 + @MethodSource("positions") + public void shouldApplyCursoringCorrectly(ScrollPosition scrollPosition, Class resultType, + Function assertionConverter) { + + Person john20 = new Person("John", 20); + Person john40_1 = new Person("John", 40); + Person john40_2 = new Person("John", 40); + Person jane_20 = new Person("Jane", 20); + Person jane_40 = new Person("Jane", 40); + Person jane_42 = new Person("Jane", 42); + + template.insertAll(Arrays.asList(john20, john40_1, john40_2, jane_20, jane_40, jane_42)) // + .as(StepVerifier::create) // + .expectNextCount(6) // + .verifyComplete(); + + Query q = new Query(where("firstName").regex("J.*")).with(Sort.by("firstName", "age")).limit(2); + q.with(scrollPosition); + + Window window = template.scroll(q, resultType, "person").block(Duration.ofSeconds(10)); + + assertThat(window.hasNext()).isTrue(); + assertThat(window.isLast()).isFalse(); + assertThat(window).hasSize(2); + assertThat(window).containsOnly(assertionConverter.apply(jane_20), assertionConverter.apply(jane_40)); + + window = 
template.scroll(q.limit(3).with(window.positionAt(window.size() - 1)), resultType, "person") + .block(Duration.ofSeconds(10)); + + assertThat(window.hasNext()).isTrue(); + assertThat(window.isLast()).isFalse(); + assertThat(window).hasSize(3); + assertThat(window).contains(assertionConverter.apply(jane_42), assertionConverter.apply(john20)); + assertThat(window).containsAnyOf(assertionConverter.apply(john40_1), assertionConverter.apply(john40_2)); + + window = template.scroll(q.limit(1).with(window.positionAt(window.size() - 1)), resultType, "person") + .block(Duration.ofSeconds(10)); + + assertThat(window.hasNext()).isFalse(); + assertThat(window.isLast()).isTrue(); + assertThat(window).hasSize(1); + assertThat(window).containsAnyOf(assertionConverter.apply(john40_1), assertionConverter.apply(john40_2)); + } + + @ParameterizedTest // GH-4308 + @MethodSource("renamedFieldProjectTargets") + void scrollThroughResultsWithRenamedField(Class resultType, Function assertionConverter) { + + WithRenamedField one = new WithRenamedField("id-1", "v1", null); + WithRenamedField two = new WithRenamedField("id-2", "v2", null); + WithRenamedField three = new WithRenamedField("id-3", "v3", null); + + template.insertAll(Arrays.asList(one, two, three)).as(StepVerifier::create).expectNextCount(3).verifyComplete(); + + Query q = new Query(where("value").regex("v.*")).with(Sort.by(Sort.Direction.DESC, "value")).limit(2); + q.with(ScrollPosition.keyset()); + + Window window = template.query(WithRenamedField.class).as(resultType).matching(q) + .scroll(ScrollPosition.keyset()).block(Duration.ofSeconds(10)); + + assertThat(window.hasNext()).isTrue(); + assertThat(window.isLast()).isFalse(); + assertThat(window).hasSize(2); + assertThat(window).containsOnly(assertionConverter.apply(three), assertionConverter.apply(two)); + + window = template.query(WithRenamedField.class).as(resultType).matching(q) + .scroll(window.positionAt(window.size() - 1)).block(Duration.ofSeconds(10)); + + 
assertThat(window.hasNext()).isFalse(); + assertThat(window.isLast()).isTrue(); + assertThat(window).hasSize(1); + assertThat(window).containsOnly(assertionConverter.apply(one)); + } + + static Stream positions() { + + return Stream.of(args(ScrollPosition.keyset(), Person.class, Function.identity()), // + args(ScrollPosition.keyset(), Document.class, ReactiveMongoTemplateScrollTests::toDocument), // + args(ScrollPosition.offset(), Person.class, Function.identity())); + } + + static Stream renamedFieldProjectTargets() { + return Stream.of(Arguments.of(WithRenamedField.class, Function.identity()), + Arguments.of(Document.class, new Function() { + @Override + public Document apply(WithRenamedField withRenamedField) { + return new Document("_id", withRenamedField.getId()).append("_val", withRenamedField.getValue()) + .append("_class", WithRenamedField.class.getName()); + } + })); + } + + private static Arguments args(ScrollPosition scrollPosition, Class resultType, + Function assertionConverter) { + return Arguments.of(scrollPosition, resultType, assertionConverter); + } + + static Document toDocument(Person person) { + return new Document("_class", person.getClass().getName()).append("_id", person.getId()).append("active", true) + .append("firstName", person.getFirstName()).append("age", person.getAge()); + } + + static class WithRenamedField { + + String id; + + @Field("_val") String value; + + WithRenamedField nested; + + public WithRenamedField() {} + + public WithRenamedField(String id, String value, WithRenamedField nested) { + this.id = id; + this.value = value; + this.nested = nested; + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public WithRenamedField getNested() { + return this.nested; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + public void setNested(WithRenamedField nested) { + this.nested = nested; + } + 
+ @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithRenamedField that = (WithRenamedField) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value) && Objects.equals(nested, that.nested); + } + + @Override + public int hashCode() { + return Objects.hash(id, value, nested); + } + + public String toString() { + return "ReactiveMongoTemplateScrollTests.WithRenamedField(id=" + this.getId() + ", value=" + this.getValue() + + ", nested=" + this.getNested() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java index a702633053..f87227cdde 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,64 +15,77 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import lombok.Data; import reactor.core.Disposable; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; +import java.time.Duration; +import java.time.Instant; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import java.util.stream.IntStream; -import org.assertj.core.api.Assertions; -import org.assertj.core.api.Assumptions; import org.bson.BsonDocument; +import org.bson.BsonTimestamp; import org.bson.Document; import org.bson.types.ObjectId; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.context.ConfigurableApplicationContext; 
+import org.springframework.context.support.GenericApplicationContext; import org.springframework.dao.DataIntegrityViolationException; import org.springframework.dao.DuplicateKeyException; +import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Version; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.geo.Metrics; import org.springframework.data.mapping.MappingException; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoTemplateTests.Address; import org.springframework.data.mongodb.core.MongoTemplateTests.PersonWithConvertedId; import org.springframework.data.mongodb.core.MongoTemplateTests.VersionedPerson; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import org.springframework.data.mongodb.core.index.GeospatialIndex; import org.springframework.data.mongodb.core.index.Index; import org.springframework.data.mongodb.core.index.IndexOperationsAdapter; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; -import org.springframework.data.mongodb.test.util.ReplicaSet; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.test.util.Client; +import 
org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoServerCondition; +import org.springframework.data.mongodb.test.util.ReactiveMongoTestTemplate; import com.mongodb.WriteConcern; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; /** * Integration test for {@link MongoTemplate}. @@ -80,274 +93,394 @@ * @author Mark Paluch * @author Christoph Strobl */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:reactive-infrastructure.xml") +@ExtendWith({ MongoClientExtension.class, MongoServerCondition.class }) public class ReactiveMongoTemplateTests { - @Rule public ExpectedException thrown = ExpectedException.none(); + private static final String DB_NAME = "reactive-mongo-template-tests"; + private static @Client MongoClient client; + + private ConfigurableApplicationContext context = new GenericApplicationContext(); + private ReactiveMongoTestTemplate template = new ReactiveMongoTestTemplate(cfg -> { + + cfg.configureDatabaseFactory(it -> { + + it.client(client); + it.defaultDb(DB_NAME); + }); - @Autowired SimpleReactiveMongoDatabaseFactory factory; - @Autowired ReactiveMongoTemplate template; + cfg.configureApplicationContext(it -> { + it.applicationContext(context); + }); + }); - @Before - public void setUp() { + @BeforeEach + void setUp() { - StepVerifier - .create(template.dropCollection("people") // - .mergeWith(template.dropCollection("personX")) // - .mergeWith(template.dropCollection("collection")) // - .mergeWith(template.dropCollection(Person.class)) // - .mergeWith(template.dropCollection(Venue.class)) // - .mergeWith(template.dropCollection(PersonWithAList.class)) // - .mergeWith(template.dropCollection(PersonWithIdPropertyOfTypeObjectId.class)) 
// - .mergeWith(template.dropCollection(PersonWithVersionPropertyOfTypeInteger.class)) // - .mergeWith(template.dropCollection(Sample.class))) // + template + .flush(Person.class, MyPerson.class, Sample.class, Venue.class, PersonWithVersionPropertyOfTypeInteger.class, + RawStringId.class) // + .as(StepVerifier::create) // .verifyComplete(); + + template.flush("people", "collection", "personX", "unique_person").as(StepVerifier::create).verifyComplete(); } - @After - public void cleanUp() {} + private ReactiveMongoDatabaseFactory factory = template.getDatabaseFactory(); @Test // DATAMONGO-1444 - public void insertSetsId() { + void insertSetsId() { PersonWithAList person = new PersonWithAList(); assert person.getId() == null; - StepVerifier.create(template.insert(person)).expectNextCount(1).verifyComplete(); + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - assertThat(person.getId(), is(notNullValue())); + assertThat(person.getId()).isNotNull(); } @Test // DATAMONGO-1444 - public void insertAllSetsId() { + void insertAllSetsId() { PersonWithAList person = new PersonWithAList(); - StepVerifier.create(template.insertAll(Collections.singleton(person))).expectNextCount(1).verifyComplete(); + template.insertAll(Collections.singleton(person)) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - assertThat(person.getId(), is(notNullValue())); + assertThat(person.getId()).isNotNull(); } @Test // DATAMONGO-1444 - public void insertCollectionSetsId() { + void insertCollectionSetsId() { PersonWithAList person = new PersonWithAList(); - StepVerifier.create(template.insert(Collections.singleton(person), PersonWithAList.class)).expectNextCount(1) + template.insert(Collections.singleton(person), PersonWithAList.class) // + .as(StepVerifier::create) // + .expectNextCount(1) // .verifyComplete(); - assertThat(person.getId(), is(notNullValue())); + assertThat(person.getId()).isNotNull(); + } + + 
@Test // GH-4944 + void insertAllShouldConvertIdToTargetTypeBeforeSave() { + + RawStringId walter = new RawStringId(); + walter.value = "walter"; + + RawStringId returned = template.insertAll(List.of(walter)).blockLast(); + template.execute(RawStringId.class, MongoCollection::find) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> assertThat(returned.id).isEqualTo(actual.get("_id"))) // + .verifyComplete(); } @Test // DATAMONGO-1444 - public void saveSetsId() { + void saveSetsId() { PersonWithAList person = new PersonWithAList(); assert person.getId() == null; - StepVerifier.create(template.save(person)).expectNextCount(1).verifyComplete(); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(person.getId()).isNotNull(); + } + + @Test // GH-4026 + void saveShouldGenerateNewIdOfTypeIfExplicitlyDefined() { + + RawStringId source = new RawStringId(); + source.value = "new value"; + + template.save(source).then().as(StepVerifier::create).verifyComplete(); + + template.execute(RawStringId.class, collection -> { + return collection.find(new org.bson.Document()).first(); + }) // + .map(it -> it.get("_id")) // + .as(StepVerifier::create) // + .consumeNextWith(id -> { + assertThat(id).isInstanceOf(String.class); + }).verifyComplete(); + } + + @Test // GH-4026 + void insertShouldGenerateNewIdOfTypeIfExplicitlyDefined() { + + RawStringId source = new RawStringId(); + source.value = "new value"; + + template.insert(source).then().as(StepVerifier::create).verifyComplete(); + + template.execute(RawStringId.class, collection -> { + return collection.find(new org.bson.Document()).first(); + }) // + .map(it -> it.get("_id")) // + .as(StepVerifier::create) // + .consumeNextWith(id -> { + assertThat(id).isInstanceOf(String.class); + }).verifyComplete(); + } + + @Test // GH-4184 + void insertHonorsExistingRawId() { - assertThat(person.getId(), is(notNullValue())); + MongoTemplateTests.RawStringId source = 
new MongoTemplateTests.RawStringId(); + source.id = "abc"; + source.value = "new value"; + + template.insert(source) + .then(template.execute(db -> Flux.from( + db.getCollection(template.getCollectionName(MongoTemplateTests.RawStringId.class)).find().limit(1).first())) + .next()) + .as(StepVerifier::create).consumeNextWith(result -> { + assertThat(result).isNotNull(); + assertThat(result.get("_id")).isEqualTo("abc"); + }); } @Test // DATAMONGO-1444 - public void insertsSimpleEntityCorrectly() { + void insertsSimpleEntityCorrectly() { Person person = new Person("Mark"); person.setAge(35); - StepVerifier.create(template.insert(person)).expectNextCount(1).verifyComplete(); + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - StepVerifier.create(template.find(new Query(where("_id").is(person.getId())), Person.class)) // + template.find(new Query(where("_id").is(person.getId())), Person.class) // + .as(StepVerifier::create) // .expectNext(person) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void simpleInsertDoesNotAllowArrays() { - - thrown.expect(IllegalArgumentException.class); + void simpleInsertDoesNotAllowArrays() { Person person = new Person("Mark"); person.setAge(35); - template.insert(new Person[] { person }); + + assertThatIllegalArgumentException().isThrownBy(() -> template.insert(new Person[] { person })); } @Test // DATAMONGO-1444 - public void simpleInsertDoesNotAllowCollections() { - - thrown.expect(IllegalArgumentException.class); + void simpleInsertDoesNotAllowCollections() { Person person = new Person("Mark"); person.setAge(35); - template.insert(Collections.singletonList(person)); + + assertThatIllegalArgumentException().isThrownBy(() -> template.insert(Collections.singletonList(person))); } @Test // DATAMONGO-1444 - public void insertsSimpleEntityWithSuppliedCollectionNameCorrectly() { + void insertsSimpleEntityWithSuppliedCollectionNameCorrectly() { Person person = new 
Person("Homer"); person.setAge(35); - StepVerifier.create(template.insert(person, "people")).expectNextCount(1).verifyComplete(); + template.insert(person, "people") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - StepVerifier.create(template.find(new Query(where("_id").is(person.getId())), Person.class, "people")) // + template.find(new Query(where("_id").is(person.getId())), Person.class, "people") // + .as(StepVerifier::create) // .expectNext(person) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void insertBatchCorrectly() { + void insertBatchCorrectly() { List people = Arrays.asList(new Person("Dick", 22), new Person("Harry", 23), new Person("Tom", 21)); - StepVerifier.create(template.insertAll(people)).expectNextCount(3).verifyComplete(); + template.insertAll(people) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); - StepVerifier.create(template.find(new Query().with(Sort.by("firstname")), Person.class)) // - .expectNextSequence(people) // + template.find(new Query().with(Sort.by("firstname")), Person.class) // + .as(StepVerifier::create) // + .expectNextCount(3) /// .verifyComplete(); } @Test // DATAMONGO-1444 - public void insertBatchWithSuppliedCollectionNameCorrectly() { + void insertBatchWithSuppliedCollectionNameCorrectly() { List people = Arrays.asList(new Person("Dick", 22), new Person("Harry", 23), new Person("Tom", 21)); - StepVerifier.create(template.insert(people, "people")).expectNextCount(3).verifyComplete(); + template.insert(people, "people") // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); - StepVerifier.create(template.find(new Query().with(Sort.by("firstname")), Person.class, "people")) // - .expectNextSequence(people) // + template.find(new Query().with(Sort.by("firstname")), Person.class, "people") // + .as(StepVerifier::create) // + .expectNextCount(3) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void 
insertBatchWithSuppliedEntityTypeCorrectly() { + void insertBatchWithSuppliedEntityTypeCorrectly() { List people = Arrays.asList(new Person("Dick", 22), new Person("Harry", 23), new Person("Tom", 21)); - StepVerifier.create(template.insert(people, Person.class)).expectNextCount(3).verifyComplete(); + template.insert(people, Person.class) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); - StepVerifier.create(template.find(new Query().with(Sort.by("firstname")), Person.class)) // - .expectNextSequence(people) // + template.find(new Query().with(Sort.by("firstname")), Person.class) // + .as(StepVerifier::create) // + .expectNextCount(3) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void testAddingToList() { + void testAddingToList() { PersonWithAList person = createPersonWithAList("Sven", 22); - StepVerifier.create(template.insert(person)).expectNextCount(1).verifyComplete(); + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); Query query = new Query(where("id").is(person.getId())); - StepVerifier.create(template.findOne(query, PersonWithAList.class)).consumeNextWith(actual -> { + template.findOne(query, PersonWithAList.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { - assertThat(actual.getWishList().size(), is(0)); - }).verifyComplete(); + assertThat(actual.getWishList()).isEmpty(); + }).verifyComplete(); - person.addToWishList("please work!"); + person.addToWishList("please work"); - StepVerifier.create(template.save(person)).expectNextCount(1).verifyComplete(); + template.save(person).as(StepVerifier::create).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.findOne(query, PersonWithAList.class)).consumeNextWith(actual -> { + template.findOne(query, PersonWithAList.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { - assertThat(actual.getWishList().size(), is(1)); - }).verifyComplete(); + 
assertThat(actual.getWishList()).hasSize(1); + }).verifyComplete(); Friend friend = new Friend(); person.setFirstName("Erik"); person.setAge(21); person.addFriend(friend); - StepVerifier.create(template.save(person)).expectNextCount(1).verifyComplete(); + template.save(person).as(StepVerifier::create).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.findOne(query, PersonWithAList.class)).consumeNextWith(actual -> { + template.findOne(query, PersonWithAList.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { - assertThat(actual.getWishList().size(), is(1)); - assertThat(actual.getFriends().size(), is(1)); - }).verifyComplete(); + assertThat(actual.getWishList()).hasSize(1); + assertThat(actual.getFriends()).hasSize(1); + }).verifyComplete(); } @Test // DATAMONGO-1444 - public void testFindOneWithSort() { + void testFindOneWithSort() { PersonWithAList sven = createPersonWithAList("Sven", 22); PersonWithAList erik = createPersonWithAList("Erik", 21); PersonWithAList mark = createPersonWithAList("Mark", 40); - StepVerifier.create(template.insertAll(Arrays.asList(sven, erik, mark))).expectNextCount(3).verifyComplete(); + template.insertAll(Arrays.asList(sven, erik, mark)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); // test query with a sort Query query = new Query(where("age").gt(10)); query.with(Sort.by(Direction.DESC, "age")); - StepVerifier.create(template.findOne(query, PersonWithAList.class)).consumeNextWith(actual -> { + template.findOne(query, PersonWithAList.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { - assertThat(actual.getFirstName(), is("Mark")); - }).verifyComplete(); + assertThat(actual.getFirstName()).isEqualTo("Mark"); + }).verifyComplete(); } @Test // DATAMONGO-1444 - public void bogusUpdateDoesNotTriggerException() { + void bogusUpdateDoesNotTriggerException() { ReactiveMongoTemplate mongoTemplate = new ReactiveMongoTemplate(factory); 
mongoTemplate.setWriteResultChecking(WriteResultChecking.EXCEPTION); Person oliver = new Person("Oliver2", 25); - StepVerifier.create(template.insert(oliver)).expectNextCount(1).verifyComplete(); + template.insert(oliver) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); Query q = new Query(where("BOGUS").gt(22)); Update u = new Update().set("firstName", "Sven"); - StepVerifier.create(mongoTemplate.updateFirst(q, u, Person.class)).expectNextCount(1).verifyComplete(); + mongoTemplate.updateFirst(q, u, Person.class) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); } @Test // DATAMONGO-1444 - public void updateFirstByEntityTypeShouldUpdateObject() { + void updateFirstByEntityTypeShouldUpdateObject() { Person person = new Person("Oliver2", 25); - StepVerifier - .create(template.insert(person) // - .then(template.updateFirst(new Query(where("age").is(25)), new Update().set("firstName", "Sven"), - Person.class)) // - .flatMapMany(p -> template.find(new Query(where("age").is(25)), Person.class))) + template.insert(person) // + .then(template.updateFirst(new Query(where("age").is(25)), new Update().set("firstName", "Sven"), Person.class)) // + .flatMapMany(p -> template.find(new Query(where("age").is(25)), Person.class)) // + .as(StepVerifier::create) // .consumeNextWith(actual -> { - assertThat(actual.getFirstName(), is(equalTo("Sven"))); + assertThat(actual.getFirstName()).isEqualTo("Sven"); }).verifyComplete(); } @Test // DATAMONGO-1444 - public void updateFirstByCollectionNameShouldUpdateObjects() { + void updateFirstByCollectionNameShouldUpdateObjects() { Person person = new Person("Oliver2", 25); - StepVerifier - .create(template.insert(person, "people") // - .then(template.updateFirst(new Query(where("age").is(25)), new Update().set("firstName", "Sven"), "people")) // - .flatMapMany(p -> template.find(new Query(where("age").is(25)), Person.class, "people"))) + template.insert(person, "people") // + 
.then(template.updateFirst(new Query(where("age").is(25)), new Update().set("firstName", "Sven"), "people")) // + .flatMapMany(p -> template.find(new Query(where("age").is(25)), Person.class, "people")) // + .as(StepVerifier::create) // .consumeNextWith(actual -> { - assertThat(actual.getFirstName(), is(equalTo("Sven"))); + assertThat(actual.getFirstName()).isEqualTo("Sven"); }).verifyComplete(); } @Test // DATAMONGO-1444 - public void updateMultiByEntityTypeShouldUpdateObjects() { + void updateMultiByEntityTypeShouldUpdateObjects() { Query query = new Query( new Criteria().orOperator(where("firstName").is("Walter Jr"), where("firstName").is("Walter"))); - StepVerifier - .create(template - .insertAll(Mono - .just(Arrays.asList(new Person("Walter", 50), new Person("Skyler", 43), new Person("Walter Jr", 16)))) // - .flatMap(a -> template.updateMulti(query, new Update().set("firstName", "Walt"), Person.class)) // - .thenMany(template.find(new Query(where("firstName").is("Walt")), Person.class))) // + template + .insertAll( + Mono.just(Arrays.asList(new Person("Walter", 50), new Person("Skyler", 43), new Person("Walter Jr", 16)))) // + .flatMap(a -> template.updateMulti(query, new Update().set("firstName", "Walt"), Person.class)) // + .thenMany(template.find(new Query(where("firstName").is("Walt")), Person.class)) // + .as(StepVerifier::create) // .expectNextCount(2) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void updateMultiByCollectionNameShouldUpdateObject() { + void updateMultiByCollectionNameShouldUpdateObject() { Query query = new Query( new Criteria().orOperator(where("firstName").is("Walter Jr"), where("firstName").is("Walter"))); @@ -361,13 +494,14 @@ public void updateMultiByCollectionNameShouldUpdateObject() { .flatMap(a -> template.updateMulti(query, new Update().set("firstName", "Walt"), Person.class, "people")) // .flatMapMany(p -> template.find(new Query(where("firstName").is("Walt")), Person.class, "people")); - 
StepVerifier.create(personFlux) // + personFlux // + .as(StepVerifier::create) // .expectNextCount(2) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void throwsExceptionForDuplicateIds() { + void throwsExceptionForDuplicateIds() { ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory); template.setWriteResultChecking(WriteResultChecking.EXCEPTION); @@ -375,13 +509,19 @@ public void throwsExceptionForDuplicateIds() { Person person = new Person(new ObjectId(), "Amol"); person.setAge(28); - StepVerifier.create(template.insert(person)).expectNextCount(1).verifyComplete(); + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - StepVerifier.create(template.insert(person)).expectError(DataIntegrityViolationException.class).verify(); + template.insert(person) // + .as(StepVerifier::create) // + .expectError(DataIntegrityViolationException.class) // + .verify(); } @Test // DATAMONGO-1444 - public void throwsExceptionForUpdateWithInvalidPushOperator() { + void throwsExceptionForUpdateWithInvalidPushOperator() { ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory); template.setWriteResultChecking(WriteResultChecking.EXCEPTION); @@ -390,18 +530,21 @@ public void throwsExceptionForUpdateWithInvalidPushOperator() { Person person = new Person(id, "Amol"); person.setAge(28); - StepVerifier.create(template.insert(person)).expectNextCount(1).verifyComplete(); + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); Query query = new Query(where("firstName").is("Amol")); Update upd = new Update().push("age", 29); - StepVerifier.create(template.updateFirst(query, upd, Person.class)) // - .expectError(DataIntegrityViolationException.class) // - .verify(); + template.updateFirst(query, upd, Person.class) // + .as(StepVerifier::create) // + .verifyError(DataIntegrityViolationException.class); } @Test // DATAMONGO-1444 - public void 
rejectsDuplicateIdInInsertAll() { + void rejectsDuplicateIdInInsertAll() { ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory); template.setWriteResultChecking(WriteResultChecking.EXCEPTION); @@ -410,17 +553,16 @@ public void rejectsDuplicateIdInInsertAll() { Person person = new Person(id, "Amol"); person.setAge(28); - StepVerifier.create(template.insertAll(Arrays.asList(person, person))) // - .expectError(DataIntegrityViolationException.class) // - .verify(); + template.insertAll(Arrays.asList(person, person)) // + .as(StepVerifier::create) // + .verifyError(DataIntegrityViolationException.class); } @Test // DATAMONGO-1444 - public void testFindAndUpdate() { + void testFindAndUpdate() { - StepVerifier - .create( - template.insertAll(Arrays.asList(new Person("Tom", 21), new Person("Dick", 22), new Person("Harry", 23)))) // + template.insertAll(Arrays.asList(new Person("Tom", 21), new Person("Dick", 22), new Person("Harry", 23))) // + .as(StepVerifier::create) // .expectNextCount(3) // .verifyComplete(); @@ -428,464 +570,828 @@ public void testFindAndUpdate() { Update update = new Update().inc("age", 1); Person p = template.findAndModify(query, update, Person.class).block(); // return old - assertThat(p.getFirstName(), is("Harry")); - assertThat(p.getAge(), is(23)); + assertThat(p.getFirstName()).isEqualTo("Harry"); + assertThat(p.getAge()).isEqualTo(23); p = template.findOne(query, Person.class).block(); - assertThat(p.getAge(), is(24)); + assertThat(p.getAge()).isEqualTo(24); p = template.findAndModify(query, update, Person.class, "person").block(); - assertThat(p.getAge(), is(24)); + assertThat(p.getAge()).isEqualTo(24); p = template.findOne(query, Person.class).block(); - assertThat(p.getAge(), is(25)); + assertThat(p.getAge()).isEqualTo(25); p = template.findAndModify(query, update, new FindAndModifyOptions().returnNew(true), Person.class).block(); - assertThat(p.getAge(), is(26)); + assertThat(p.getAge()).isEqualTo(26); - p = 
template.findAndModify(query, update, null, Person.class, "person").block(); - assertThat(p.getAge(), is(26)); + p = template.findAndModify(query, update, FindAndModifyOptions.none(), Person.class, "person").block(); + assertThat(p.getAge()).isEqualTo(26); p = template.findOne(query, Person.class).block(); - assertThat(p.getAge(), is(27)); + assertThat(p.getAge()).isEqualTo(27); Query query2 = new Query(where("firstName").is("Mary")); p = template.findAndModify(query2, update, new FindAndModifyOptions().returnNew(true).upsert(true), Person.class) .block(); - assertThat(p.getFirstName(), is("Mary")); - assertThat(p.getAge(), is(1)); + assertThat(p.getFirstName()).isEqualTo("Mary"); + assertThat(p.getAge()).isEqualTo(1); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldReplaceDocument() { + + org.bson.Document doc = new org.bson.Document("foo", "bar"); + template.save(doc, "findandreplace").as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + org.bson.Document replacement = new org.bson.Document("foo", "baz"); + template + .findAndReplace(query(where("foo").is("bar")), replacement, FindAndReplaceOptions.options(), + org.bson.Document.class, "findandreplace") // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual).containsEntry("foo", "bar"); + }).verifyComplete(); + + template.findOne(query(where("foo").is("baz")), org.bson.Document.class, "findandreplace") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldErrorOnIdPresent() { + + template.save(new MyPerson("Walter")).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + MyPerson replacement = new MyPerson("Heisenberg"); + replacement.id = "invalid-id"; + + template.findAndReplace(query(where("name").is("Walter")), replacement) // + .as(StepVerifier::create) // + .expectError(InvalidDataAccessApiUsageException.class); + } + + @Test // DATAMONGO-1827 + void 
findAndReplaceShouldErrorOnSkip() { + + assertThatIllegalArgumentException().isThrownBy(() -> template + .findAndReplace(query(where("name").is("Walter")).skip(10), new MyPerson("Heisenberg")).subscribe()); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldErrorOnLimit() { + + assertThatIllegalArgumentException().isThrownBy(() -> template + .findAndReplace(query(where("name").is("Walter")).limit(10), new MyPerson("Heisenberg")).subscribe()); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldConsiderSortAndUpdateFirstIfMultipleFound() { + + MyPerson walter1 = new MyPerson("Walter 1"); + MyPerson walter2 = new MyPerson("Walter 2"); + + template.save(walter1).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + template.save(walter2).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + MyPerson replacement = new MyPerson("Heisenberg"); + + template.findAndReplace(query(where("name").regex("Walter.*")).with(Sort.by(Direction.DESC, "name")), replacement) + .as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.findAll(MyPerson.class).buffer(10).as(StepVerifier::create) + .consumeNextWith(it -> assertThat(it).hasSize(2).contains(walter1).doesNotContain(walter2)).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldReplaceObject() { + + MyPerson person = new MyPerson("Walter"); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg")) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.getName()).isEqualTo("Walter"); + }).verifyComplete(); + + template.findOne(query(where("name").is("Heisenberg")), MyPerson.class) // + .as(StepVerifier::create).expectNextCount(1).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldConsiderFields() { + + MyPerson person = new MyPerson("Walter"); + person.address = 
new Address("TX", "Austin"); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Query query = query(where("name").is("Walter")); + query.fields().include("address"); + + template.findAndReplace(query, new MyPerson("Heisenberg")) // + .as(StepVerifier::create) // + .consumeNextWith(it -> { + + assertThat(it.getName()).isNull(); + assertThat(it.getAddress()).isEqualTo(person.address); + }).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceNonExistingWithUpsertFalse() { + + template.findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg")) // + .as(StepVerifier::create) // + .verifyComplete(); + + template.findAll(MyPerson.class).as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceNonExistingWithUpsertTrue() { + + template + .findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg"), + FindAndReplaceOptions.options().upsert()) // + .as(StepVerifier::create) // + .verifyComplete(); + + template.findAll(MyPerson.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldProjectReturnedObjectCorrectly() { + + MyPerson person = new MyPerson("Walter"); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template + .findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg"), FindAndReplaceOptions.empty(), + MyPerson.class, MyPersonProjection.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.getName()).isEqualTo("Walter"); + }).verifyComplete(); + } + + @Test // GH-4300 + public void findAndReplaceShouldAllowNativeDomainTypesAndReturnAProjection() { + + MongoTemplateTests.MyPerson person = new MongoTemplateTests.MyPerson("Walter"); + person.address = new Address("TX", "Austin"); + template.save(person) // + .as(StepVerifier::create) // 
+ .expectNextCount(1) // + .verifyComplete(); + + template + .findAndReplace(query(where("name").is("Walter")), new org.bson.Document("name", "Heisenberg"), + FindAndReplaceOptions.options(), org.bson.Document.class, "myPerson", MongoTemplateTests.MyPerson.class) + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.getAddress()).isEqualTo(person.address); + }).verifyComplete(); + + template.execute(MongoTemplateTests.MyPerson.class, collection -> { + return collection.find(new org.bson.Document("name", "Heisenberg")).first(); + }).as(StepVerifier::create) // + .consumeNextWith(loaded -> { + assertThat(loaded.get("_id")).isEqualTo(new ObjectId(person.id)); + }).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceShouldReplaceObjectReturingNew() { + + MyPerson person = new MyPerson("Walter"); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template + .findAndReplace(query(where("name").is("Walter")), new MyPerson("Heisenberg"), + FindAndReplaceOptions.options().returnNew()) + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.getName()).isEqualTo("Heisenberg"); + }).verifyComplete(); } @Test // DATAMONGO-1444 - public void testFindAllAndRemoveFullyReturnsAndRemovesDocuments() { + void testFindAllAndRemoveFullyReturnsAndRemovesDocuments() { Sample spring = new Sample("100", "spring"); Sample data = new Sample("200", "data"); Sample mongodb = new Sample("300", "mongodb"); - StepVerifier.create(template.insert(Arrays.asList(spring, data, mongodb), Sample.class)) // + template.insert(Arrays.asList(spring, data, mongodb), Sample.class) // + .as(StepVerifier::create) // .expectNextCount(3) // .verifyComplete(); Query qry = query(where("field").in("spring", "mongodb")); - StepVerifier.create(template.findAllAndRemove(qry, Sample.class)).expectNextCount(2).verifyComplete(); + template.findAllAndRemove(qry, Sample.class) // + 
.as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + + template.findOne(new Query(), Sample.class) // + .as(StepVerifier::create) // + .expectNext(data) // + .verifyComplete(); + } + + @Test // DATAMONGO-2219 + void testFindAllAndRemoveReturnsEmptyWithoutMatches() { - StepVerifier.create(template.findOne(new Query(), Sample.class)).expectNext(data).verifyComplete(); + Query qry = query(where("field").in("spring", "mongodb")); + template.findAllAndRemove(qry, Sample.class) // + .as(StepVerifier::create) // + .verifyComplete(); + + template.count(new Query(), Sample.class) // + .as(StepVerifier::create) // + .expectNext(0L).verifyComplete(); } @Test // DATAMONGO-1774 - public void testFindAllAndRemoveByCollectionReturnsAndRemovesDocuments() { + void testFindAllAndRemoveByCollectionReturnsAndRemovesDocuments() { Sample spring = new Sample("100", "spring"); Sample data = new Sample("200", "data"); Sample mongodb = new Sample("300", "mongodb"); - StepVerifier.create(template.insert(Arrays.asList(spring, data, mongodb), Sample.class)) // + template.insert(Arrays.asList(spring, data, mongodb), Sample.class) // + .as(StepVerifier::create) // .expectNextCount(3) // .verifyComplete(); Query qry = query(where("field").in("spring", "mongodb")); - StepVerifier.create(template.findAllAndRemove(qry, "sample")).expectNextCount(2).verifyComplete(); + template.findAllAndRemove(qry, "sample") // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); - StepVerifier.create(template.findOne(new Query(), Sample.class)).expectNext(data).verifyComplete(); + template.findOne(new Query(), Sample.class) // + .as(StepVerifier::create) // + .expectNext(data) // + .verifyComplete(); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1774 - public void removeWithNullShouldThrowError() { - template.remove((Object) null).subscribe(); + @Test // DATAMONGO-1774 + void removeWithNullShouldThrowError() { + 
assertThatIllegalArgumentException().isThrownBy(() -> template.remove((Object) null).subscribe()); } @Test // DATAMONGO-1774 - public void removeWithEmptyMonoShouldDoNothing() { + void removeWithEmptyMonoShouldDoNothing() { Sample spring = new Sample("100", "spring"); Sample data = new Sample("200", "data"); Sample mongodb = new Sample("300", "mongodb"); - StepVerifier.create(template.insert(Arrays.asList(spring, data, mongodb), Sample.class)) // + template.insert(Arrays.asList(spring, data, mongodb), Sample.class) // + .as(StepVerifier::create) // .expectNextCount(3) // .verifyComplete(); - StepVerifier.create(template.remove(Mono.empty())).verifyComplete(); - StepVerifier.create(template.count(new Query(), Sample.class)).expectNext(3L).verifyComplete(); + template.remove(Mono.empty()).as(StepVerifier::create).verifyComplete(); + template.count(new Query(), Sample.class) // + .as(StepVerifier::create) // + .expectNext(3L) // + .verifyComplete(); } @Test // DATAMONGO-1774 - public void removeWithMonoShouldDeleteElement() { + void removeWithMonoShouldDeleteElement() { Sample spring = new Sample("100", "spring"); Sample data = new Sample("200", "data"); Sample mongodb = new Sample("300", "mongodb"); - StepVerifier.create(template.insert(Arrays.asList(spring, data, mongodb), Sample.class)) // + template.insert(Arrays.asList(spring, data, mongodb), Sample.class) // + .as(StepVerifier::create) // .expectNextCount(3) // .verifyComplete(); - StepVerifier.create(template.remove(Mono.just(spring))).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.count(new Query(), Sample.class)).expectNext(2L).verifyComplete(); + template.remove(Mono.just(spring)).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + template.count(new Query(), Sample.class).as(StepVerifier::create).expectNext(2L).verifyComplete(); } @Test // DATAMONGO-1774 - public void removeWithMonoAndCollectionShouldDeleteElement() { + void removeWithMonoAndCollectionShouldDeleteElement() { 
Sample spring = new Sample("100", "spring"); Sample data = new Sample("200", "data"); Sample mongodb = new Sample("300", "mongodb"); - StepVerifier.create(template.insert(Arrays.asList(spring, data, mongodb), Sample.class)) // + template.insert(Arrays.asList(spring, data, mongodb), Sample.class) // + .as(StepVerifier::create) // .expectNextCount(3) // .verifyComplete(); - StepVerifier.create(template.remove(Mono.just(spring), template.determineCollectionName(Sample.class))) + template.remove(Mono.just(spring), template.getCollectionName(Sample.class)) // + .as(StepVerifier::create) // .expectNextCount(1).verifyComplete(); - StepVerifier.create(template.count(new Query(), Sample.class)).expectNext(2L).verifyComplete(); + template.count(new Query(), Sample.class).as(StepVerifier::create).expectNext(2L).verifyComplete(); + } + + @Test // DATAMONGO-2195 + void removeVersionedEntityConsidersVersion() { + + PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); + person.firstName = "Dave"; + + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + assertThat(person.version).isZero(); + + template.update(PersonWithVersionPropertyOfTypeInteger.class).matching(query(where("id").is(person.id))) + .apply(new Update().set("firstName", "Walter")).first() // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.remove(person).as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.wasAcknowledged()).isTrue(); + assertThat(actual.getDeletedCount()).isZero(); + }).verifyComplete(); + template.count(new Query(), PersonWithVersionPropertyOfTypeInteger.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); } @Test // DATAMONGO-1444 - public void optimisticLockingHandling() { + void optimisticLockingHandling() { // Init version PersonWithVersionPropertyOfTypeInteger person = new 
PersonWithVersionPropertyOfTypeInteger(); person.age = 29; person.firstName = "Patryk"; - StepVerifier.create(template.save(person)).expectNextCount(1).verifyComplete(); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - StepVerifier.create(template.findAll(PersonWithVersionPropertyOfTypeInteger.class)).consumeNextWith(actual -> { + template.findAll(PersonWithVersionPropertyOfTypeInteger.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { - assertThat(actual.version, is(0)); - }).verifyComplete(); + assertThat(actual.version).isZero(); + }).verifyComplete(); - StepVerifier.create(template.findAll(PersonWithVersionPropertyOfTypeInteger.class).flatMap(p -> { + template.findAll(PersonWithVersionPropertyOfTypeInteger.class).flatMap(p -> { // Version change person.firstName = "Patryk2"; return template.save(person); - })).expectNextCount(1).verifyComplete(); + }) // + .as(StepVerifier::create) // + .expectNextCount(1).verifyComplete(); - assertThat(person.version, is(1)); + assertThat(person.version).isOne(); - StepVerifier.create(template.findAll(PersonWithVersionPropertyOfTypeInteger.class)).consumeNextWith(actual -> { + template.findAll(PersonWithVersionPropertyOfTypeInteger.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { - assertThat(actual.version, is(1)); - }).verifyComplete(); + assertThat(actual.version).isOne(); + }).verifyComplete(); // Optimistic lock exception person.version = 0; person.firstName = "Patryk3"; - StepVerifier.create(template.save(person)).expectError(OptimisticLockingFailureException.class).verify(); + template.save(person).as(StepVerifier::create).expectError(OptimisticLockingFailureException.class).verify(); } @Test // DATAMONGO-1444 - public void doesNotFailOnVersionInitForUnversionedEntity() { + void doesNotFailOnVersionInitForUnversionedEntity() { Document dbObject = new Document(); dbObject.put("firstName", "Oliver"); - StepVerifier - 
.create(template.insert(dbObject, // - template.determineCollectionName(PersonWithVersionPropertyOfTypeInteger.class))) // + template.insert(dbObject, // + template.getCollectionName(PersonWithVersionPropertyOfTypeInteger.class)) // + .as(StepVerifier::create) // .expectNextCount(1) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void removesObjectFromExplicitCollection() { + void removesObjectFromExplicitCollection() { String collectionName = "explicit"; - StepVerifier.create(template.remove(new Query(), collectionName)).expectNextCount(1).verifyComplete(); + template.remove(new Query(), collectionName).as(StepVerifier::create).expectNextCount(1).verifyComplete(); PersonWithConvertedId person = new PersonWithConvertedId(); person.name = "Dave"; - StepVerifier.create(template.save(person, collectionName)).expectNextCount(1).verifyComplete(); - - StepVerifier.create(template.findAll(PersonWithConvertedId.class, collectionName)).expectNextCount(1) + template.save(person, collectionName) // + .as(StepVerifier::create) // + .expectNextCount(1) // .verifyComplete(); - StepVerifier.create(template.remove(person, collectionName)).expectNextCount(1).verifyComplete(); + template.findAll(PersonWithConvertedId.class, collectionName) // + .as(StepVerifier::create) // + .expectNextCount(1).verifyComplete(); - StepVerifier.create(template.findAll(PersonWithConvertedId.class, collectionName)).verifyComplete(); + template.remove(person, collectionName).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.findAll(PersonWithConvertedId.class, collectionName).as(StepVerifier::create).verifyComplete(); } @Test // DATAMONGO-1444 - public void savesMapCorrectly() { + void savesMapCorrectly() { Map map = new HashMap<>(); map.put("key", "value"); - StepVerifier.create(template.save(map, "maps")).expectNextCount(1).verifyComplete(); + template.save(map, "maps") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); } - @Test // 
DATAMONGO-1444, DATAMONGO-1730 - public void savesMongoPrimitiveObjectCorrectly() { - - StepVerifier.create(template.save(new Object(), "collection")) // - .expectError(MappingException.class) // - .verify(); + @Test + // DATAMONGO-1444, DATAMONGO-1730, DATAMONGO-2150 + void savesMongoPrimitiveObjectCorrectly() { + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> template.save(new Object(), "collection")); } @Test // DATAMONGO-1444 - public void savesPlainDbObjectCorrectly() { + void savesPlainDbObjectCorrectly() { Document dbObject = new Document("foo", "bar"); - StepVerifier.create(template.save(dbObject, "collection")).expectNextCount(1).verifyComplete(); + template.save(dbObject, "collection") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - assertThat(dbObject.containsKey("_id"), is(true)); + assertThat(dbObject.containsKey("_id")).isTrue(); } - @Test(expected = MappingException.class) // DATAMONGO-1444, DATAMONGO-1730 - public void rejectsPlainObjectWithOutExplicitCollection() { + @Test // DATAMONGO-1444, DATAMONGO-1730 + void rejectsPlainObjectWithOutExplicitCollection() { Document dbObject = new Document("foo", "bar"); - StepVerifier.create(template.save(dbObject, "collection")).expectNextCount(1).verifyComplete(); + template.save(dbObject, "collection") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - StepVerifier.create(template.findById(dbObject.get("_id"), Document.class)) // - .expectError(MappingException.class) // - .verify(); + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> template.findById(dbObject.get("_id"), Document.class)); } @Test // DATAMONGO-1444 - public void readsPlainDbObjectById() { + void readsPlainDbObjectById() { Document dbObject = new Document("foo", "bar"); - StepVerifier.create(template.save(dbObject, "collection")).expectNextCount(1).verifyComplete(); + template.save(dbObject, "collection") // + .as(StepVerifier::create) // 
+ .expectNextCount(1) // + .verifyComplete(); - StepVerifier.create(template.findById(dbObject.get("_id"), Document.class, "collection")) // + template.findById(dbObject.get("_id"), Document.class, "collection") // + .as(StepVerifier::create) // .consumeNextWith(actual -> { - assertThat(actual.get("foo"), is(dbObject.get("foo"))); - assertThat(actual.get("_id"), is(dbObject.get("_id"))); + assertThat(actual.get("foo")).isEqualTo(dbObject.get("foo")); + assertThat(actual.get("_id")).isEqualTo(dbObject.get("_id")); }).verifyComplete(); } @Test // DATAMONGO-1444 - public void geoNear() { + void geoNear() { - List venues = Arrays.asList(new Venue("Penn Station", -73.99408, 40.75057), // - new Venue("10gen Office", -73.99171, 40.738868), // - new Venue("Flatiron Building", -73.988135, 40.741404), // - new Venue("Maplewood, NJ", -74.2713, 40.73137)); + List venues = Arrays.asList(TestEntities.geolocation().pennStation(), // + TestEntities.geolocation().tenGenOffice(), // + TestEntities.geolocation().flatironBuilding(), // + TestEntities.geolocation().maplewoodNJ()); - StepVerifier.create(template.insertAll(venues)).expectNextCount(4).verifyComplete(); + template.insertAll(venues) // + .as(StepVerifier::create) // + .expectNextCount(4) // + .verifyComplete(); IndexOperationsAdapter.blocking(template.indexOps(Venue.class)) .ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2D)); - NearQuery geoFar = NearQuery.near(-73, 40, Metrics.KILOMETERS).num(10).maxDistance(150, Metrics.KILOMETERS); + NearQuery geoFar = NearQuery.near(-73, 40, Metrics.KILOMETERS).limit(10).maxDistance(150, Metrics.KILOMETERS); - StepVerifier.create(template.geoNear(geoFar, Venue.class)) // + template.geoNear(geoFar, Venue.class) // + .as(StepVerifier::create) // .expectNextCount(4) // .verifyComplete(); - NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).num(10).maxDistance(120, Metrics.KILOMETERS); + NearQuery geoNear = NearQuery.near(-73, 40, 
Metrics.KILOMETERS).limit(10).maxDistance(120, Metrics.KILOMETERS); - StepVerifier.create(template.geoNear(geoNear, Venue.class)) // + template.geoNear(geoNear, Venue.class) // + .as(StepVerifier::create) // .expectNextCount(3) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void writesPlainString() { + void writesPlainString() { - StepVerifier.create(template.save("{ 'foo' : 'bar' }", "collection")) // + template.save("{ 'foo' : 'bar' }", "collection") // + .as(StepVerifier::create) // .expectNextCount(1) // .verifyComplete(); } - @Test // DATAMONGO-1444 - public void rejectsNonJsonStringForSave() { - - StepVerifier.create(template.save("Foobar!", "collection")) // - .expectError(MappingException.class) // - .verify(); + @Test // DATAMONGO-1444, DATAMONGO-2150 + void rejectsNonJsonStringForSave() { + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> template.save("Foobar", "collection")); } @Test // DATAMONGO-1444 - public void initializesVersionOnInsert() { + void initializesVersionOnInsert() { PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); person.firstName = "Dave"; - StepVerifier.create(template.insert(person)).expectNextCount(1).verifyComplete(); + template.insert(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - assertThat(person.version, is(0)); + assertThat(person.version).isZero(); } @Test // DATAMONGO-1444 - public void initializesVersionOnBatchInsert() { + void initializesVersionOnBatchInsert() { PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); person.firstName = "Dave"; - StepVerifier.create(template.insertAll(Collections.singleton(person))).expectNextCount(1).verifyComplete(); + template.insertAll(Collections.singleton(person)) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(person.version).isZero(); + } + + @Test // DATAMONGO-1992 + void 
initializesIdAndVersionAndOfImmutableObject() { + + ImmutableVersioned versioned = new ImmutableVersioned(); + + template.insert(versioned) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { - assertThat(person.version, is(0)); + assertThat(actual).isNotSameAs(versioned); + assertThat(versioned.id).isNull(); + assertThat(versioned.version).isNull(); + + assertThat(actual.id).isNotNull(); + assertThat(actual.version).isEqualTo(0); + + }).verifyComplete(); } @Test // DATAMONGO-1444 - public void queryCanBeNull() { + void queryCanBeNull() { - StepVerifier.create(template.findAll(PersonWithIdPropertyOfTypeObjectId.class)).verifyComplete(); + template.findAll(PersonWithIdPropertyOfTypeObjectId.class) // + .as(StepVerifier::create) // + .verifyComplete(); - StepVerifier.create(template.find(null, PersonWithIdPropertyOfTypeObjectId.class)).verifyComplete(); + template.find(null, PersonWithIdPropertyOfTypeObjectId.class) // + .as(StepVerifier::create) // + .verifyComplete(); } @Test // DATAMONGO-1444 - public void versionsObjectIntoDedicatedCollection() { + void versionsObjectIntoDedicatedCollection() { PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); person.firstName = "Dave"; - StepVerifier.create(template.save(person, "personX")).expectNextCount(1).verifyComplete(); - assertThat(person.version, is(0)); + template.save(person, "personX") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + assertThat(person.version).isZero(); - StepVerifier.create(template.save(person, "personX")).expectNextCount(1).verifyComplete(); - assertThat(person.version, is(1)); + template.save(person, "personX") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + assertThat(person.version).isOne(); } @Test // DATAMONGO-1444 - public void correctlySetsLongVersionProperty() { + void correctlySetsLongVersionProperty() { PersonWithVersionPropertyOfTypeLong person = new 
PersonWithVersionPropertyOfTypeLong(); person.firstName = "Dave"; - StepVerifier.create(template.save(person, "personX")).expectNextCount(1).verifyComplete(); - assertThat(person.version, is(0L)); + template.save(person, "personX") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + assertThat(person.version).isZero(); } @Test // DATAMONGO-1444 - public void throwsExceptionForIndexViolationIfConfigured() { + void throwsExceptionForIndexViolationIfConfigured() { ReactiveMongoTemplate template = new ReactiveMongoTemplate(factory); template.setWriteResultChecking(WriteResultChecking.EXCEPTION); - StepVerifier - .create(template.indexOps(Person.class) // - .ensureIndex(new Index().on("firstName", Direction.DESC).unique())) // + template.indexOps("unique_person") // + .ensureIndex(new Index().on("firstName", Direction.DESC).unique()) // + .as(StepVerifier::create) // .expectNextCount(1) // .verifyComplete(); Person person = new Person(new ObjectId(), "Amol"); person.setAge(28); - StepVerifier.create(template.save(person)).expectNextCount(1).verifyComplete(); + template.save(person, "unique_person") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); person = new Person(new ObjectId(), "Amol"); person.setAge(28); - StepVerifier.create(template.save(person)).expectError(DataIntegrityViolationException.class).verify(); + template.save(person, "unique_person") // + .as(StepVerifier::create) // + .verifyError(DataIntegrityViolationException.class); + + // safeguard to clean up previous state + template.dropCollection(Person.class).as(StepVerifier::create).verifyComplete(); } @Test // DATAMONGO-1444 - public void preventsDuplicateInsert() { + void preventsDuplicateInsert() { template.setWriteConcern(WriteConcern.MAJORITY); PersonWithVersionPropertyOfTypeInteger person = new PersonWithVersionPropertyOfTypeInteger(); person.firstName = "Dave"; - 
StepVerifier.create(template.save(person)).expectNextCount(1).verifyComplete(); - assertThat(person.version, is(0)); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + assertThat(person.version).isZero(); person.version = null; - StepVerifier.create(template.save(person)).expectError(DuplicateKeyException.class).verify(); + template.save(person) // + .as(StepVerifier::create) // + .verifyError(DuplicateKeyException.class); } @Test // DATAMONGO-1444 - public void countAndFindWithoutTypeInformation() { + void countAndFindWithoutTypeInformation() { Person person = new Person(); - StepVerifier.create(template.save(person)).expectNextCount(1).verifyComplete(); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); Query query = query(where("_id").is(person.getId())); String collectionName = template.getCollectionName(Person.class); - StepVerifier.create(template.find(query, HashMap.class, collectionName)).expectNextCount(1).verifyComplete(); + template.find(query, HashMap.class, collectionName) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - StepVerifier.create(template.count(query, collectionName)).expectNext(1L).verifyComplete(); + template.count(query, collectionName) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); } @Test // DATAMONGO-1444 - public void nullsPropertiesForVersionObjectUpdates() { + void nullsPropertiesForVersionObjectUpdates() { VersionedPerson person = new VersionedPerson(); person.firstname = "Dave"; person.lastname = "Matthews"; - StepVerifier.create(template.save(person)).expectNextCount(1).verifyComplete(); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - assertThat(person.id, is(notNullValue())); + assertThat(person.id).isNotNull(); person.lastname = null; - 
StepVerifier.create(template.save(person)).expectNextCount(1).verifyComplete(); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - StepVerifier.create(template.findOne(query(where("id").is(person.id)), VersionedPerson.class)) // + template.findOne(query(where("id").is(person.id)), VersionedPerson.class) // + .as(StepVerifier::create) // .consumeNextWith(actual -> { - assertThat(actual.lastname, is(nullValue())); + assertThat(actual.lastname).isNull(); }) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void nullsValuesForUpdatesOfUnversionedEntity() { + void nullsValuesForUpdatesOfUnversionedEntity() { Person person = new Person("Dave"); - StepVerifier.create(template.save(person)).expectNextCount(1).verifyComplete(); + template.save(person). // + as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); person.setFirstName(null); - StepVerifier.create(template.save(person)).expectNextCount(1).verifyComplete(); + template.save(person) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - StepVerifier.create(template.findOne(query(where("id").is(person.getId())), Person.class)) // + template.findOne(query(where("id").is(person.getId())), Person.class) // + .as(StepVerifier::create) // .consumeNextWith(actual -> { - assertThat(actual.getFirstName(), is(nullValue())); + assertThat(actual.getFirstName()).isNull(); }) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void savesJsonStringCorrectly() { + void savesJsonStringCorrectly() { Document dbObject = new Document().append("first", "first").append("second", "second"); - StepVerifier.create(template.save(dbObject, "collection")).expectNextCount(1).verifyComplete(); + template.save(dbObject, "collection") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); - StepVerifier.create(template.findAll(Document.class, "collection")) // + template.findAll(Document.class, "collection") // 
+ .as(StepVerifier::create) // .consumeNextWith(actual -> { - assertThat(actual.containsKey("first"), is(true)); + assertThat(actual.containsKey("first")).isTrue(); }) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void executesExistsCorrectly() { + void executesExistsCorrectly() { Sample sample = new Sample(); - StepVerifier.create(template.save(sample)).expectNextCount(1).verifyComplete(); + template.save(sample).as(StepVerifier::create).expectNextCount(1).verifyComplete(); Query query = query(where("id").is(sample.id)); - StepVerifier.create(template.exists(query, Sample.class)).expectNext(true).verifyComplete(); + template.exists(query, Sample.class) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); - StepVerifier.create(template.exists(query(where("_id").is(sample.id)), template.getCollectionName(Sample.class))) - .expectNext(true).verifyComplete(); + template.exists(query(where("_id").is(sample.id)), template.getCollectionName(Sample.class)) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); - StepVerifier.create(template.exists(query, Sample.class, template.getCollectionName(Sample.class))).expectNext(true) + template.exists(query, Sample.class, template.getCollectionName(Sample.class)) // + .as(StepVerifier::create).expectNext(true) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void tailStreamsData() throws InterruptedException { + void tailStreamsData() throws InterruptedException { - StepVerifier.create(template.dropCollection("capped") - .then(template.createCollection("capped", // - CollectionOptions.empty().size(1000).maxDocuments(10).capped())) + template.dropCollection("capped").then(template.createCollection("capped", // + CollectionOptions.empty().size(1000).maxDocuments(10).capped())) .then(template.insert(new Document("random", Math.random()).append("key", "value"), // - "capped"))) + "capped")) // + .as(StepVerifier::create) // .expectNextCount(1).verifyComplete(); 
BlockingQueue documents = new LinkedBlockingQueue<>(1000); @@ -894,20 +1400,20 @@ public void tailStreamsData() throws InterruptedException { Disposable disposable = capped.doOnNext(documents::add).subscribe(); - assertThat(documents.poll(5, TimeUnit.SECONDS), is(notNullValue())); - assertThat(documents.isEmpty(), is(true)); + assertThat(documents.poll(5, TimeUnit.SECONDS)).isNotNull(); + assertThat(documents).isEmpty(); disposable.dispose(); } @Test // DATAMONGO-1444 - public void tailStreamsDataUntilCancellation() throws InterruptedException { + void tailStreamsDataUntilCancellation() throws InterruptedException { - StepVerifier.create(template.dropCollection("capped") - .then(template.createCollection("capped", // - CollectionOptions.empty().size(1000).maxDocuments(10).capped())) + template.dropCollection("capped").then(template.createCollection("capped", // + CollectionOptions.empty().size(1000).maxDocuments(10).capped())) .then(template.insert(new Document("random", Math.random()).append("key", "value"), // - "capped"))) + "capped")) // + .as(StepVerifier::create) // .expectNextCount(1).verifyComplete(); BlockingQueue documents = new LinkedBlockingQueue<>(1000); @@ -916,49 +1422,53 @@ public void tailStreamsDataUntilCancellation() throws InterruptedException { Disposable disposable = capped.doOnNext(documents::add).subscribe(); - assertThat(documents.poll(5, TimeUnit.SECONDS), is(notNullValue())); - assertThat(documents.isEmpty(), is(true)); + assertThat(documents.poll(5, TimeUnit.SECONDS)).isNotNull(); + assertThat(documents).isEmpty(); - StepVerifier.create(template.insert(new Document("random", Math.random()).append("key", "value"), "capped")) // + template.insert(new Document("random", Math.random()).append("key", "value"), "capped") // + .as(StepVerifier::create) // .expectNextCount(1) // .verifyComplete(); - assertThat(documents.poll(5, TimeUnit.SECONDS), is(notNullValue())); + assertThat(documents.poll(5, TimeUnit.SECONDS)).isNotNull(); 
disposable.dispose(); - StepVerifier.create(template.insert(new Document("random", Math.random()).append("key", "value"), "capped")) // + template.insert(new Document("random", Math.random()).append("key", "value"), "capped") // + .as(StepVerifier::create) // .expectNextCount(1) // .verifyComplete(); - assertThat(documents.poll(1, TimeUnit.SECONDS), is(nullValue())); + assertThat(documents.poll(1, TimeUnit.SECONDS)).isNull(); } @Test // DATAMONGO-1761 - public void testDistinct() { + void testDistinct() { Person person1 = new Person("Christoph", 38); Person person2 = new Person("Christine", 39); Person person3 = new Person("Christoph", 37); - StepVerifier.create(template.save(person1)).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.save(person2)).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.save(person3)).expectNextCount(1).verifyComplete(); + template.insertAll(Arrays.asList(person1, person2, person3)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); - StepVerifier.create(template.findDistinct("firstName", Person.class, String.class)).expectNextCount(2) + template.findDistinct("firstName", Person.class, String.class) // + .as(StepVerifier::create) // + .expectNextCount(2) // .verifyComplete(); } @Test // DATAMONGO-1803 - public void changeStreamEventsShouldBeEmittedCorrectly() throws InterruptedException { - - Assumptions.assumeThat(ReplicaSet.required().runsAsReplicaSet()).isTrue(); + @Disabled("Heavily relying on timing assumptions; Cannot test message resumption properly; Too much race for too little time in between.") + @EnableIfReplicaSetAvailable + void changeStreamEventsShouldBeEmittedCorrectly() throws InterruptedException { - StepVerifier.create(template.createCollection(Person.class)).expectNextCount(1).verifyComplete(); + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); BlockingQueue> documents = new LinkedBlockingQueue<>(100); 
- Disposable disposable = template - .changeStream(Collections.emptyList(), Document.class, ChangeStreamOptions.empty(), "person") + Disposable disposable = template.changeStream("person", ChangeStreamOptions.empty(), Document.class) .doOnNext(documents::add).subscribe(); Thread.sleep(500); // just give it some time to link to the collection. @@ -967,14 +1477,15 @@ public void changeStreamEventsShouldBeEmittedCorrectly() throws InterruptedExcep Person person2 = new Person("Data", 39); Person person3 = new Person("MongoDB", 37); - StepVerifier.create(template.save(person1)).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.save(person2)).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.save(person3)).expectNextCount(1).verifyComplete(); + Flux.merge(template.insert(person1), template.insert(person2), template.insert(person3)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); Thread.sleep(500); // just give it some time to link receive all events try { - Assertions.assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())).hasSize(3) + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())).hasSize(3) .allMatch(val -> val instanceof Document); } finally { disposable.dispose(); @@ -982,15 +1493,14 @@ public void changeStreamEventsShouldBeEmittedCorrectly() throws InterruptedExcep } @Test // DATAMONGO-1803 - public void changeStreamEventsShouldBeConvertedCorrectly() throws InterruptedException { - - Assumptions.assumeThat(ReplicaSet.required().runsAsReplicaSet()).isTrue(); + @Disabled("Heavily relying on timing assumptions; Cannot test message resumption properly; Too much race for too little time in between.") + @EnableIfReplicaSetAvailable + void changeStreamEventsShouldBeConvertedCorrectly() throws InterruptedException { - StepVerifier.create(template.createCollection(Person.class)).expectNextCount(1).verifyComplete(); + 
template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); BlockingQueue> documents = new LinkedBlockingQueue<>(100); - Disposable disposable = template - .changeStream(Collections.emptyList(), Person.class, ChangeStreamOptions.empty(), "person") + Disposable disposable = template.changeStream("person", ChangeStreamOptions.empty(), Person.class) .doOnNext(documents::add).subscribe(); Thread.sleep(500); // just give it some time to link to the collection. @@ -999,14 +1509,15 @@ public void changeStreamEventsShouldBeConvertedCorrectly() throws InterruptedExc Person person2 = new Person("Data", 39); Person person3 = new Person("MongoDB", 37); - StepVerifier.create(template.save(person1)).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.save(person2)).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.save(person3)).expectNextCount(1).verifyComplete(); + Flux.merge(template.insert(person1), template.insert(person2), template.insert(person3)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); Thread.sleep(500); // just give it some time to link receive all events try { - Assertions.assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) .containsExactly(person1, person2, person3); } finally { disposable.dispose(); @@ -1014,15 +1525,16 @@ public void changeStreamEventsShouldBeConvertedCorrectly() throws InterruptedExc } @Test // DATAMONGO-1803 - public void changeStreamEventsShouldBeFilteredCorrectly() throws InterruptedException { - - Assumptions.assumeThat(ReplicaSet.required().runsAsReplicaSet()).isTrue(); + @Disabled("Heavily relying on timing assumptions; Cannot test message resumption properly; Too much race for too little time in between.") + @EnableIfReplicaSetAvailable + void changeStreamEventsShouldBeFilteredCorrectly() throws 
InterruptedException { - StepVerifier.create(template.createCollection(Person.class)).expectNextCount(1).verifyComplete(); + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); BlockingQueue> documents = new LinkedBlockingQueue<>(100); - Disposable disposable = template.changeStream(newAggregation(Person.class, match(where("age").gte(38))), - Person.class, ChangeStreamOptions.empty(), "person").doOnNext(documents::add).subscribe(); + Disposable disposable = template.changeStream("person", + ChangeStreamOptions.builder().filter(newAggregation(Person.class, match(where("age").gte(38)))).build(), + Person.class).doOnNext(documents::add).subscribe(); Thread.sleep(500); // just give it some time to link to the collection. @@ -1030,14 +1542,15 @@ public void changeStreamEventsShouldBeFilteredCorrectly() throws InterruptedExce Person person2 = new Person("Data", 37); Person person3 = new Person("MongoDB", 39); - StepVerifier.create(template.save(person1)).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.save(person2)).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.save(person3)).expectNextCount(1).verifyComplete(); + Flux.merge(template.save(person1), template.save(person2), template.save(person3)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); Thread.sleep(500); // just give it some time to link receive all events try { - Assertions.assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) .containsExactly(person1, person3); } finally { disposable.dispose(); @@ -1045,16 +1558,18 @@ public void changeStreamEventsShouldBeFilteredCorrectly() throws InterruptedExce } @Test // DATAMONGO-1803 - public void mapsReservedWordsCorrectly() throws InterruptedException { + @EnableIfReplicaSetAvailable + void mapsReservedWordsCorrectly() 
throws InterruptedException { - Assumptions.assumeThat(ReplicaSet.required().runsAsReplicaSet()).isTrue(); - - StepVerifier.create(template.createCollection(Person.class)).expectNextCount(1).verifyComplete(); + template.dropCollection(Person.class).onErrorResume(it -> Mono.empty()).as(StepVerifier::create).verifyComplete(); + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); BlockingQueue> documents = new LinkedBlockingQueue<>(100); Disposable disposable = template - .changeStream(newAggregation(Person.class, match(where("operationType").is("replace"))), Person.class, - ChangeStreamOptions.empty(), "person") + .changeStream("person", + ChangeStreamOptions.builder() + .filter(newAggregation(Person.class, match(where("operationType").is("replace")))).build(), + Person.class) .doOnNext(documents::add).subscribe(); Thread.sleep(500); // just give it some time to link to the collection. @@ -1062,18 +1577,23 @@ public void mapsReservedWordsCorrectly() throws InterruptedException { Person person1 = new Person("Spring", 38); Person person2 = new Person("Data", 37); - StepVerifier.create(template.save(person1)).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.save(person2)).expectNextCount(1).verifyComplete(); + Flux.merge(template.insert(person1), template.insert(person2)) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); Person replacement = new Person(person2.getId(), "BDognoM"); replacement.setAge(person2.getAge()); - StepVerifier.create(template.save(replacement)).expectNextCount(1).verifyComplete(); + template.save(replacement) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); Thread.sleep(500); // just give it some time to link receive all events try { - Assertions.assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) + 
assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) .containsExactly(replacement); } finally { disposable.dispose(); @@ -1081,15 +1601,14 @@ public void mapsReservedWordsCorrectly() throws InterruptedException { } @Test // DATAMONGO-1803 - public void changeStreamEventsShouldBeResumedCorrectly() throws InterruptedException { + @Disabled("Heavily relying on timing assumptions; Cannot test message resumption properly; Too much race for too little time in between.") + @EnableIfReplicaSetAvailable + void changeStreamEventsShouldBeResumedCorrectly() throws InterruptedException { - Assumptions.assumeThat(ReplicaSet.required().runsAsReplicaSet()).isTrue(); - - StepVerifier.create(template.createCollection(Person.class)).expectNextCount(1).verifyComplete(); + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); BlockingQueue> documents = new LinkedBlockingQueue<>(100); - Disposable disposable = template - .changeStream(Collections.emptyList(), Person.class, ChangeStreamOptions.empty(), "person") + Disposable disposable = template.changeStream("person", ChangeStreamOptions.empty(), Person.class) .doOnNext(documents::add).subscribe(); Thread.sleep(500); // just give it some time to link to the collection. 
@@ -1098,9 +1617,10 @@ public void changeStreamEventsShouldBeResumedCorrectly() throws InterruptedExcep Person person2 = new Person("Data", 37); Person person3 = new Person("MongoDB", 39); - StepVerifier.create(template.save(person1)).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.save(person2)).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.save(person3)).expectNextCount(1).verifyComplete(); + Flux.merge(template.insert(person1), template.insert(person2), template.insert(person3)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); Thread.sleep(500); // just give it some time to link receive all events @@ -1109,15 +1629,13 @@ public void changeStreamEventsShouldBeResumedCorrectly() throws InterruptedExcep BsonDocument resumeToken = documents.take().getRaw().getResumeToken(); BlockingQueue> resumeDocuments = new LinkedBlockingQueue<>(100); - template - .changeStream(Collections.emptyList(), Person.class, - ChangeStreamOptions.builder().resumeToken(resumeToken).build(), "person") + template.changeStream("person", ChangeStreamOptions.builder().resumeToken(resumeToken).build(), Person.class) .doOnNext(resumeDocuments::add).subscribe(); Thread.sleep(500); // just give it some time to link receive all events try { - Assertions.assertThat(resumeDocuments.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) + assertThat(resumeDocuments.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) .containsExactly(person2, person3); } finally { disposable.dispose(); @@ -1126,34 +1644,221 @@ public void changeStreamEventsShouldBeResumedCorrectly() throws InterruptedExcep } @Test // DATAMONGO-1870 - public void removeShouldConsiderLimit() { + void removeShouldConsiderLimit() { List samples = IntStream.range(0, 100) // .mapToObj(i -> new Sample("id-" + i, i % 2 == 0 ? 
"stark" : "lannister")) // .collect(Collectors.toList()); - StepVerifier.create(template.insertAll(samples)).expectNextCount(100).verifyComplete(); + template.insertAll(samples) // + .as(StepVerifier::create) // + .expectNextCount(100) // + .verifyComplete(); - StepVerifier.create(template.remove(query(where("field").is("lannister")).limit(25), Sample.class)) - .assertNext(wr -> Assertions.assertThat(wr.getDeletedCount()).isEqualTo(25L)).verifyComplete(); + template.remove(query(where("field").is("lannister")).limit(25), Sample.class) // + .as(StepVerifier::create) // + .assertNext(wr -> assertThat(wr.getDeletedCount()).isEqualTo(25L)).verifyComplete(); } @Test // DATAMONGO-1870 - public void removeShouldConsiderSkipAndSort() { + void removeShouldConsiderSkipAndSort() { List samples = IntStream.range(0, 100) // .mapToObj(i -> new Sample("id-" + i, i % 2 == 0 ? "stark" : "lannister")) // .collect(Collectors.toList()); - StepVerifier.create(template.insertAll(samples)).expectNextCount(100).verifyComplete(); + template.insertAll(samples).as(StepVerifier::create).expectNextCount(100).verifyComplete(); + + template.remove(new Query().skip(25).with(Sort.by("field")), Sample.class) // + .as(StepVerifier::create) // + .assertNext(wr -> assertThat(wr.getDeletedCount()).isEqualTo(75L)).verifyComplete(); + + template.count(query(where("field").is("lannister")), Sample.class).as(StepVerifier::create).expectNext(25L) + .verifyComplete(); + template.count(query(where("field").is("stark")), Sample.class).as(StepVerifier::create).expectNext(0L) + .verifyComplete(); + } + + @Test // DATAMONGO-2189 + void afterSaveEventContainsSavedObjectUsingInsert() { + + AtomicReference saved = createAfterSaveReference(); + ImmutableVersioned source = new ImmutableVersioned(); + + template.insert(source) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(saved.get()).isNotNull().isNotSameAs(source); + assertThat(saved.get().id).isNotNull(); + } + + 
@Test // DATAMONGO-2189 + void afterSaveEventContainsSavedObjectUsingInsertAll() { + + AtomicReference saved = createAfterSaveReference(); + ImmutableVersioned source = new ImmutableVersioned(); + + template.insertAll(Collections.singleton(new ImmutableVersioned())) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(saved.get()).isNotNull().isNotSameAs(source); + assertThat(saved.get().id).isNotNull(); + } + + @Test // GH-4107 + void afterSaveEventCanBeDisabled() { + + AtomicReference saved = createAfterSaveReference(); + ImmutableVersioned source = new ImmutableVersioned(); + + template.setEntityLifecycleEventsEnabled(false); + template.insertAll(Collections.singleton(new ImmutableVersioned())) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + assertThat(saved).hasValue(null); + } + + @Test // DATAMONGO-2012 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + @EnableIfReplicaSetAvailable + void watchesDatabaseCorrectly() throws InterruptedException { + + template.dropCollection(Person.class).onErrorResume(it -> Mono.empty()).as(StepVerifier::create).verifyComplete(); + template.dropCollection("personX").onErrorResume(it -> Mono.empty()).as(StepVerifier::create).verifyComplete(); + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + template.createCollection("personX").as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue> documents = new LinkedBlockingQueue<>(100); + Disposable disposable = template.changeStream(ChangeStreamOptions.empty(), Person.class).doOnNext(documents::add) + .subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection. 
+ + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 37); + Person person3 = new Person("MongoDB", 39); + + template.save(person1) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + template.save(person2) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + template.save(person3, "personX") // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to link receive all events + + try { + assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) + .containsExactly(person1, person2, person3); + } finally { + disposable.dispose(); + } + } + + @Test // DATAMONGO-2012, DATAMONGO-2113 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + @EnableIfReplicaSetAvailable + void resumesAtTimestampCorrectly() throws InterruptedException { + + template.dropCollection(Person.class).onErrorResume(it -> Mono.empty()).as(StepVerifier::create).verifyComplete(); + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue> documents = new LinkedBlockingQueue<>(100); + Disposable disposable = template.changeStream("person", ChangeStreamOptions.empty(), Person.class) + .doOnNext(documents::add).subscribe(); + + Thread.sleep(500); // just give it some time to link to the collection. 
+ + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 37); + Person person3 = new Person("MongoDB", 39); + + template.save(person1).delayElement(Duration.ofSeconds(1)) // + .as(StepVerifier::create) // + .expectNextCount(1) // + + .verifyComplete(); + template.save(person2) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Thread.sleep(500); // just give it some time to link receive all events + + disposable.dispose(); + + documents.take(); // skip first + Instant resumeAt = documents.take().getTimestamp(); // take 2nd + + template.save(person3).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + BlockingQueue> resumeDocuments = new LinkedBlockingQueue<>(100); + template.changeStream("person", ChangeStreamOptions.builder().resumeAt(resumeAt).build(), Person.class) + .doOnNext(resumeDocuments::add).subscribe(); + + Thread.sleep(500); // just give it some time to link receive all events + + try { + assertThat(resumeDocuments.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())) + .containsExactly(person2, person3); + } finally { + disposable.dispose(); + } + } + + @Test // DATAMONGO-2115 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + @EnableIfReplicaSetAvailable + void resumesAtBsonTimestampCorrectly() throws InterruptedException { + + template.createCollection(Person.class).as(StepVerifier::create).expectNextCount(1).verifyComplete(); - StepVerifier.create(template.remove(new Query().skip(25).with(Sort.by("field")), Sample.class)) - .assertNext(wr -> Assertions.assertThat(wr.getDeletedCount()).isEqualTo(75L)).verifyComplete(); + BlockingQueue> documents = new LinkedBlockingQueue<>(100); + Disposable disposable = template.changeStream("person", ChangeStreamOptions.empty(), Person.class) + .doOnNext(documents::add).subscribe(); - StepVerifier.create(template.count(query(where("field").is("lannister")), Sample.class)).expectNext(25L) + Thread.sleep(500); // just 
give it some time to link to the collection. + + Person person1 = new Person("Spring", 38); + Person person2 = new Person("Data", 37); + Person person3 = new Person("MongoDB", 39); + + template.save(person1).delayElement(Duration.ofSeconds(1)) // + .as(StepVerifier::create) // + .expectNextCount(1) // .verifyComplete(); - StepVerifier.create(template.count(query(where("field").is("stark")), Sample.class)).expectNext(0L) + template.save(person2) // + .as(StepVerifier::create) // + .expectNextCount(1) // .verifyComplete(); + + documents.take(); // skip first + BsonTimestamp resumeAt = documents.take().getBsonTimestamp(); // take 2nd + + disposable.dispose(); + + template.save(person3).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.changeStream("person", ChangeStreamOptions.builder().resumeAt(resumeAt).build(), Person.class) + .map(ChangeStreamEvent::getBody) // + .buffer(2) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual).containsExactly(person2, person3); + }).thenCancel() // + .verify(); } private PersonWithAList createPersonWithAList(String firstname, int age) { @@ -1165,17 +1870,204 @@ private PersonWithAList createPersonWithAList(String firstname, int age) { return p; } - @Data + private AtomicReference createAfterSaveReference() { + + AtomicReference saved = new AtomicReference<>(); + context.addApplicationListener(new AbstractMongoEventListener() { + + @Override + public void onAfterSave(AfterSaveEvent event) { + saved.set(event.getSource()); + } + }); + + return saved; + } + + static class ImmutableVersioned { + + final @Id String id; + final @Version Long version; + + ImmutableVersioned() { + id = null; + version = null; + } + + public ImmutableVersioned(String id, Long version) { + this.id = id; + this.version = version; + } + + public ImmutableVersioned withId(String id) { + return this.id == id ? 
this : new ImmutableVersioned(id, this.version); + } + + public ImmutableVersioned withVersion(Long version) { + return this.version == version ? this : new ImmutableVersioned(this.id, version); + } + } + static class Sample { @Id String id; String field; - public Sample() {} + Sample() {} - public Sample(String id, String field) { + Sample(String id, String field) { this.id = id; this.field = field; } + + public String getId() { + return this.id; + } + + public String getField() { + return this.field; + } + + public void setId(String id) { + this.id = id; + } + + public void setField(String field) { + this.field = field; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Sample sample = (Sample) o; + return Objects.equals(id, sample.id) && Objects.equals(field, sample.field); + } + + @Override + public int hashCode() { + return Objects.hash(id, field); + } + + public String toString() { + return "ReactiveMongoTemplateTests.Sample(id=" + this.getId() + ", field=" + this.getField() + ")"; + } + } + + public static class MyPerson { + + String id; + String name; + Address address; + + public MyPerson() {} + + MyPerson(String name) { + this.name = name; + } + + public MyPerson(String id, String name, Address address) { + this.id = id; + this.name = name; + this.address = address; + } + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public Address getAddress() { + return this.address; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setAddress(Address address) { + this.address = address; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + MyPerson myPerson = (MyPerson) o; + return Objects.equals(id, 
myPerson.id) && Objects.equals(name, myPerson.name) + && Objects.equals(address, myPerson.address); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, address); + } + + public String toString() { + return "ReactiveMongoTemplateTests.MyPerson(id=" + this.getId() + ", name=" + this.getName() + ", address=" + + this.getAddress() + ")"; + } + } + + interface MyPersonProjection { + String getName(); + } + + static class RawStringId { + + @MongoId String id; + String value; + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RawStringId that = (RawStringId) o; + return Objects.equals(id, that.id) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "ReactiveMongoTemplateTests.RawStringId(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTransactionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTransactionTests.java new file mode 100644 index 0000000000..5a7271e2b4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTransactionTests.java @@ -0,0 +1,317 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.Arrays; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.reactivestreams.Publisher; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.ReactiveMongoTransactionManager; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.transaction.ReactiveTransaction; +import org.springframework.transaction.reactive.TransactionCallback; +import org.springframework.transaction.reactive.TransactionalOperator; +import org.springframework.transaction.support.DefaultTransactionDefinition; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * 
Integration tests for Mongo Transactions using {@link ReactiveMongoTemplate}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Mathieu Ouellet + * @currentRead The Core - Peter V. Brett + */ +@ExtendWith(MongoClientExtension.class) +@EnableIfReplicaSetAvailable +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") +public class ReactiveMongoTemplateTransactionTests { + + static final String DATABASE_NAME = "reactive-template-tx-tests"; + static final String COLLECTION_NAME = "test"; + static final Document DOCUMENT = new Document("_id", "id-1").append("value", "spring"); + static final Query ID_QUERY = query(where("_id").is("id-1")); + + static final Person AHMANN = new Person("ahmann", 32); + static final Person ARLEN = new Person("arlen", 24); + static final Person LEESHA = new Person("leesha", 22); + static final Person RENNA = new Person("renna", 22); + + static @Client MongoClient client; + ReactiveMongoTemplate template; + + @BeforeEach + public void setUp() { + + template = new ReactiveMongoTemplate(client, DATABASE_NAME); + + MongoTestUtils.createOrReplaceCollection(DATABASE_NAME, COLLECTION_NAME, client).as(StepVerifier::create) // + .verifyComplete(); + + MongoTestUtils.createOrReplaceCollection(DATABASE_NAME, "person", client).as(StepVerifier::create).verifyComplete(); + + MongoTestUtils.createOrReplaceCollection(DATABASE_NAME, "personWithVersionPropertyOfTypeInteger", client) + .as(StepVerifier::create) // + .verifyComplete(); + + template.insert(DOCUMENT, COLLECTION_NAME).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.insertAll(Arrays.asList(AHMANN, ARLEN, LEESHA, RENNA)) // + .as(StepVerifier::create) // + .expectNextCount(4) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void reactiveTransactionWithExplicitTransactionStart() { + + Publisher sessionPublisher = client + .startSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + + ClientSession clientSession = 
Mono.from(sessionPublisher).block(); + + template.withSession(Mono.just(clientSession)) + .execute(action -> ReactiveMongoContext.getSession().flatMap(session -> { + + session.startTransaction(); + return action.remove(ID_QUERY, Document.class, COLLECTION_NAME); + + })).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); + + assertThat(clientSession.hasActiveTransaction()).isTrue(); + StepVerifier.create(clientSession.commitTransaction()).verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(false) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void reactiveTransactionsCommitOnComplete() { + + initTx().transactional(template.remove(ID_QUERY, Document.class, COLLECTION_NAME)).as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(false) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void reactiveTransactionsAbortOnError() { + + initTx().transactional( + template.remove(ID_QUERY, Document.class, COLLECTION_NAME).flatMap(result -> Mono.fromSupplier(() -> { + throw new RuntimeException("¯\\_(ツ)_/¯"); + }))).as(StepVerifier::create) // + .expectError() // + .verify(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void withSessionDoesNotManageTransactions() { + + Mono.from(client.startSession()).flatMap(session -> { + + session.startTransaction(); + return template.withSession(session).remove(ID_QUERY, Document.class, COLLECTION_NAME); + }).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(true) // + 
.verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void changesNotVisibleOutsideTransaction() { + + initTx().execute(new TransactionCallback<>() { + @Override + public Publisher doInTransaction(ReactiveTransaction status) { + return template.remove(ID_QUERY, Document.class, COLLECTION_NAME).flatMapMany(val -> { + + // once we use the collection directly we're no longer participating in the tx + return client.getDatabase(DATABASE_NAME).getCollection(COLLECTION_NAME).find(ID_QUERY.getQueryObject()) + .first(); + }); + } + }).as(StepVerifier::create).expectNext(DOCUMENT).verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(false) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void executeCreatesNewTransaction() { + + ReactiveSessionScoped sessionScoped = template.withSession(client.startSession()); + + sessionScoped.execute(action -> { + return action.remove(ID_QUERY, Document.class, COLLECTION_NAME); + }) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(false) // + .verifyComplete(); + + sessionScoped.execute(action -> { + return action.insert(DOCUMENT, COLLECTION_NAME); + }) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + template.exists(ID_QUERY, COLLECTION_NAME) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + public void takeDoesNotAbortTransaction() { + + initTx() + .transactional(template.find(query(where("age").exists(true)).with(Sort.by("age")), Person.class).take(3) + .flatMap(template::remove)) // + .as(StepVerifier::create) // + .expectNextCount(3) // + .verifyComplete(); + + template.count(query(where("age").exists(true)), Person.class) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + } + + @Test // DATAMONGO-1970 + 
public void errorInFlowOutsideTransactionDoesNotAbortIt() { + + initTx().execute(new TransactionCallback<>() { + @Override + public Publisher doInTransaction(ReactiveTransaction status) { + return template.find(query(where("age").is(22)).with(Sort.by("age")), Person.class).buffer(2) + .flatMap(values -> { + + return template + .remove(query(where("id").in(values.stream().map(Person::getId).collect(Collectors.toList()))), + Person.class) + .then(Mono.just(values)); + }); + } + }).collectList() // completes the above computation + .flatMap(deleted -> { + throw new RuntimeException("error outside the transaction does not influence it."); + }).as(StepVerifier::create) // + .verifyError(); + + template.count(query(where("age").exists(true)), Person.class) // + .as(StepVerifier::create) // + .expectNext(2L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2195 + public void deleteWithMatchingVersion() { + + PersonWithVersionPropertyOfTypeInteger rojer = new PersonWithVersionPropertyOfTypeInteger(); + rojer.firstName = "rojer"; + + PersonWithVersionPropertyOfTypeInteger saved = template.insert(rojer).block(); + + initTx().transactional(template.remove(saved)) // + .as(StepVerifier::create) // + .consumeNextWith(result -> assertThat(result.getDeletedCount()).isOne()) // + .verifyComplete(); + } + + @Test // DATAMONGO-2195 + public void deleteWithVersionMismatch() { + + PersonWithVersionPropertyOfTypeInteger rojer = new PersonWithVersionPropertyOfTypeInteger(); + rojer.firstName = "rojer"; + + PersonWithVersionPropertyOfTypeInteger saved = template.insert(rojer).block(); + saved.version = 5; + + initTx().transactional(template.remove(saved)) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + + assertThat(actual.wasAcknowledged()).isTrue(); + assertThat(actual.getDeletedCount()).isZero(); + }).verifyComplete(); + } + + @Test // DATAMONGO-2195 + public void deleteNonExistingWithVersion() { + + PersonWithVersionPropertyOfTypeInteger rojer = new 
PersonWithVersionPropertyOfTypeInteger(); + rojer.id = "deceased"; + rojer.firstName = "rojer"; + rojer.version = 5; + + initTx().transactional(template.remove(rojer)) // + .as(StepVerifier::create) // + .consumeNextWith(result -> assertThat(result.getDeletedCount()).isZero()) // + .verifyComplete(); + } + + TransactionalOperator initTx() { + + ReactiveMongoTransactionManager txmgr = new ReactiveMongoTransactionManager(template.getMongoDatabaseFactory()); + return TransactionalOperator.create(txmgr, new DefaultTransactionDefinition()); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java index ff3a311794..f89b2fa8c1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,45 +15,111 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; -import static org.mockito.Mockito.any; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; -import lombok.Data; +import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; - +import reactor.test.StepVerifier; + +import java.time.Duration; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; + +import org.assertj.core.api.Assertions; +import org.bson.BsonDocument; +import org.bson.BsonString; import org.bson.Document; import org.bson.conversions.Bson; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; import org.reactivestreams.Publisher; +import 
org.reactivestreams.Subscriber; import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationListener; +import org.springframework.context.support.StaticApplicationContext; import org.springframework.data.annotation.Id; +import org.springframework.data.geo.Point; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; +import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.MongoTemplateUnitTests.AutogenerateableId; -import org.springframework.data.mongodb.core.ReactiveMongoTemplate.NoOpDbRefResolver; -import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators; +import org.springframework.data.mongodb.core.aggregation.ComparisonOperators.Gte; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Switch.CaseOperator; +import org.springframework.data.mongodb.core.aggregation.Fields; +import org.springframework.data.mongodb.core.aggregation.SetOperation; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener; +import 
org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.lang.Nullable; import org.springframework.test.util.ReflectionTestUtils; - +import org.springframework.util.CollectionUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.CreateCollectionOptions; import com.mongodb.client.model.DeleteOptions; import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndReplaceOptions; import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.TimeSeriesGranularity; import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; +import com.mongodb.client.result.DeleteResult; +import 
com.mongodb.client.result.InsertManyResult; +import com.mongodb.client.result.InsertOneResult; +import com.mongodb.client.result.UpdateResult; +import com.mongodb.reactivestreams.client.AggregatePublisher; +import com.mongodb.reactivestreams.client.ChangeStreamPublisher; +import com.mongodb.reactivestreams.client.DistinctPublisher; import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MapReducePublisher; import com.mongodb.reactivestreams.client.MongoClient; import com.mongodb.reactivestreams.client.MongoCollection; import com.mongodb.reactivestreams.client.MongoDatabase; @@ -63,62 +129,141 @@ * * @author Mark Paluch * @author Christoph Strobl + * @author Roman Puchkovskiy + * @author Mathieu Ouellet + * @author Yadhukrishna S Pai + * @author Ben Foster */ -@RunWith(MockitoJUnitRunner.Silent.class) +@SuppressWarnings({ "unchecked", "rawtypes" }) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class ReactiveMongoTemplateUnitTests { - ReactiveMongoTemplate template; + private ReactiveMongoTemplate template; @Mock SimpleReactiveMongoDatabaseFactory factory; @Mock MongoClient mongoClient; @Mock MongoDatabase db; @Mock MongoCollection collection; @Mock FindPublisher findPublisher; + @Mock AggregatePublisher aggregatePublisher; @Mock Publisher runCommandPublisher; + @Mock Publisher updateResultPublisher; + @Mock Publisher findAndUpdatePublisher; + @Mock Publisher successPublisher; + @Mock DistinctPublisher distinctPublisher; + @Mock Publisher deletePublisher; + @Mock MapReducePublisher mapReducePublisher; + @Mock ChangeStreamPublisher changeStreamPublisher; - MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator(); - MappingMongoConverter converter; - MongoMappingContext mappingContext; + private MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator(); + private MappingMongoConverter converter; + private MongoMappingContext 
mappingContext; - @Before - public void setUp() { + @BeforeEach + void beforeEach() { when(factory.getExceptionTranslator()).thenReturn(exceptionTranslator); - when(factory.getMongoDatabase()).thenReturn(db); + when(factory.getCodecRegistry()).thenReturn(MongoClientSettings.getDefaultCodecRegistry()); + when(factory.getMongoDatabase()).thenReturn(Mono.just(db)); when(db.getCollection(any())).thenReturn(collection); when(db.getCollection(any(), any())).thenReturn(collection); when(db.runCommand(any(), any(Class.class))).thenReturn(runCommandPublisher); - when(collection.find()).thenReturn(findPublisher); - when(collection.find(Mockito.any(Document.class))).thenReturn(findPublisher); + when(db.createCollection(any(), any(CreateCollectionOptions.class))).thenReturn(runCommandPublisher); + when(collection.withReadPreference(any())).thenReturn(collection); + when(collection.withReadConcern(any())).thenReturn(collection); + when(collection.find(any(Class.class))).thenReturn(findPublisher); + when(collection.find(any(Document.class), any(Class.class))).thenReturn(findPublisher); + when(collection.aggregate(anyList())).thenReturn(aggregatePublisher); + when(collection.aggregate(anyList(), any(Class.class))).thenReturn(aggregatePublisher); + when(collection.countDocuments(any(), any(CountOptions.class))).thenReturn(Mono.just(0L)); + when(collection.estimatedDocumentCount(any())).thenReturn(Mono.just(0L)); + when(collection.updateOne(any(), any(Bson.class), any(UpdateOptions.class))).thenReturn(updateResultPublisher); + when(collection.updateMany(any(Bson.class), any(Bson.class), any())).thenReturn(updateResultPublisher); + when(collection.updateOne(any(), anyList(), any())).thenReturn(updateResultPublisher); + when(collection.updateMany(any(), anyList(), any())).thenReturn(updateResultPublisher); + when(collection.findOneAndUpdate(any(), any(Bson.class), any(FindOneAndUpdateOptions.class))) + .thenReturn(findAndUpdatePublisher); + 
when(collection.findOneAndReplace(any(Bson.class), any(), any())).thenReturn(findPublisher); + when(collection.findOneAndDelete(any(), any(FindOneAndDeleteOptions.class))).thenReturn(findPublisher); + when(collection.distinct(anyString(), any(Document.class), any())).thenReturn(distinctPublisher); + when(collection.deleteMany(any(Bson.class), any())).thenReturn(deletePublisher); + when(collection.findOneAndUpdate(any(), any(Bson.class), any(FindOneAndUpdateOptions.class))) + .thenReturn(findAndUpdatePublisher); + when(collection.mapReduce(anyString(), anyString(), any())).thenReturn(mapReducePublisher); + when(collection.replaceOne(any(Bson.class), any(), any(ReplaceOptions.class))).thenReturn(updateResultPublisher); + when(collection.insertOne(any(Bson.class))).thenReturn(successPublisher); + when(collection.insertMany(anyList())).thenReturn(successPublisher); when(findPublisher.projection(any())).thenReturn(findPublisher); when(findPublisher.limit(anyInt())).thenReturn(findPublisher); when(findPublisher.collation(any())).thenReturn(findPublisher); when(findPublisher.first()).thenReturn(findPublisher); + when(findPublisher.allowDiskUse(anyBoolean())).thenReturn(findPublisher); + when(aggregatePublisher.allowDiskUse(anyBoolean())).thenReturn(aggregatePublisher); + when(aggregatePublisher.collation(any())).thenReturn(aggregatePublisher); + when(aggregatePublisher.maxTime(anyLong(), any())).thenReturn(aggregatePublisher); + when(aggregatePublisher.first()).thenReturn(findPublisher); this.mappingContext = new MongoMappingContext(); - this.converter = new MappingMongoConverter(new NoOpDbRefResolver(), mappingContext); + this.mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); this.template = new ReactiveMongoTemplate(factory, converter); - } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1444 - public void 
rejectsNullDatabaseName() throws Exception { - new ReactiveMongoTemplate(mongoClient, null); + @Test // DATAMONGO-1444 + void rejectsNullDatabaseName() { + assertThatIllegalArgumentException().isThrownBy(() -> new ReactiveMongoTemplate(mongoClient, null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1444 - public void rejectsNullMongo() throws Exception { - new ReactiveMongoTemplate(null, "database"); + @Test // DATAMONGO-1444 + void rejectsNullMongo() { + assertThatIllegalArgumentException().isThrownBy(() -> new ReactiveMongoTemplate(null, "database")); } @Test // DATAMONGO-1444 - public void defaultsConverterToMappingMongoConverter() throws Exception { + void defaultsConverterToMappingMongoConverter() throws Exception { ReactiveMongoTemplate template = new ReactiveMongoTemplate(mongoClient, "database"); - assertTrue(ReflectionTestUtils.getField(template, "mongoConverter") instanceof MappingMongoConverter); + assertThat(ReflectionTestUtils.getField(template, "mongoConverter") instanceof MappingMongoConverter).isTrue(); + } + + @Test // DATAMONGO-1912 + void autogeneratesIdForMap() { + + ReactiveMongoTemplate template = spy(this.template); + doReturn(Mono.just(new ObjectId())).when(template).saveDocument(any(String.class), any(Document.class), + any(Class.class)); + + Map entity = new LinkedHashMap<>(); + template.save(entity, "foo").as(StepVerifier::create).consumeNextWith(actual -> { + + assertThat(entity).containsKey("_id"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1311 + void executeQueryShouldUseBatchSizeWhenPresent() { + + when(findPublisher.batchSize(anyInt())).thenReturn(findPublisher); + + Query query = new Query().cursorBatchSize(1234); + template.find(query, Person.class).subscribe(); + + verify(findPublisher).batchSize(1234); + } + + @Test // DATAMONGO-2659 + void executeQueryShouldUseAllowDiskSizeWhenPresent() { + + when(findPublisher.batchSize(anyInt())).thenReturn(findPublisher); + + Query query = new 
Query().allowDiskUse(true); + template.find(query, Person.class).subscribe(); + + verify(findPublisher).allowDiskUse(true); } @Test // DATAMONGO-1518 - public void findShouldUseCollationWhenPresent() { + void findShouldUseCollationWhenPresent() { template.find(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class).subscribe(); @@ -127,7 +272,7 @@ public void findShouldUseCollationWhenPresent() { // @Test // DATAMONGO-1518 - public void findOneShouldUseCollationWhenPresent() { + void findOneShouldUseCollationWhenPresent() { template.findOne(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class).subscribe(); @@ -135,7 +280,7 @@ public void findOneShouldUseCollationWhenPresent() { } @Test // DATAMONGO-1518 - public void existsShouldUseCollationWhenPresent() { + void existsShouldUseCollationWhenPresent() { template.exists(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class).subscribe(); @@ -143,34 +288,34 @@ public void existsShouldUseCollationWhenPresent() { } @Test // DATAMONGO-1518 - public void findAndModfiyShoudUseCollationWhenPresent() { + void findAndModfiyShoudUseCollationWhenPresent() { - when(collection.findOneAndUpdate(any(Bson.class), any(), any())).thenReturn(Mono.empty()); + when(collection.findOneAndUpdate(any(Bson.class), any(Bson.class), any())).thenReturn(Mono.empty()); template.findAndModify(new BasicQuery("{}").collation(Collation.of("fr")), new Update(), AutogenerateableId.class) .subscribe(); ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndUpdateOptions.class); - verify(collection).findOneAndUpdate(Mockito.any(), Mockito.any(), options.capture()); + verify(collection).findOneAndUpdate(any(), any(Bson.class), options.capture()); - assertThat(options.getValue().getCollation().getLocale(), is("fr")); + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); } @Test // DATAMONGO-1518 - public void findAndRemoveShouldUseCollationWhenPresent() { + void 
findAndRemoveShouldUseCollationWhenPresent() { when(collection.findOneAndDelete(any(Bson.class), any())).thenReturn(Mono.empty()); template.findAndRemove(new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class).subscribe(); ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndDeleteOptions.class); - verify(collection).findOneAndDelete(Mockito.any(), options.capture()); + verify(collection).findOneAndDelete(any(), options.capture()); - assertThat(options.getValue().getCollation().getLocale(), is("fr")); + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); } @Test // DATAMONGO-1518 - public void findAndRemoveManyShouldUseCollationWhenPresent() { + void findAndRemoveManyShouldUseCollationWhenPresent() { when(collection.deleteMany(any(Bson.class), any())).thenReturn(Mono.empty()); @@ -178,175 +323,1733 @@ public void findAndRemoveManyShouldUseCollationWhenPresent() { .subscribe(); ArgumentCaptor options = ArgumentCaptor.forClass(DeleteOptions.class); - verify(collection).deleteMany(Mockito.any(), options.capture()); + verify(collection).deleteMany(any(), options.capture()); - assertThat(options.getValue().getCollation().getLocale(), is("fr")); + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); } @Test // DATAMONGO-1518 - public void updateOneShouldUseCollationWhenPresent() { + void updateOneShouldUseCollationWhenPresent() { - when(collection.updateOne(any(Bson.class), any(), any())).thenReturn(Mono.empty()); + when(collection.updateOne(any(Bson.class), any(Bson.class), any())).thenReturn(Mono.empty()); template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), new Update().set("foo", "bar"), AutogenerateableId.class).subscribe(); ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); - verify(collection).updateOne(Mockito.any(), Mockito.any(), options.capture()); + verify(collection).updateOne(any(), any(Bson.class), options.capture()); - 
assertThat(options.getValue().getCollation().getLocale(), is("fr")); + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); } @Test // DATAMONGO-1518 - public void updateManyShouldUseCollationWhenPresent() { + void updateManyShouldUseCollationWhenPresent() { - when(collection.updateMany(any(Bson.class), any(), any())).thenReturn(Mono.empty()); + when(collection.updateMany(any(Bson.class), any(Bson.class), any())).thenReturn(Mono.empty()); template.updateMulti(new BasicQuery("{}").collation(Collation.of("fr")), new Update().set("foo", "bar"), AutogenerateableId.class).subscribe(); ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); - verify(collection).updateMany(Mockito.any(), Mockito.any(), options.capture()); - - assertThat(options.getValue().getCollation().getLocale(), is("fr")); + verify(collection).updateMany(any(), any(Bson.class), options.capture()); + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); } - @Test // DATAMONGO-1518 - public void replaceOneShouldUseCollationWhenPresent() { + @Test // GH-3218 + void updateUsesHintStringFromQuery() { - when(collection.replaceOne(any(Bson.class), any(), any())).thenReturn(Mono.empty()); + template.updateFirst(new Query().withHint("index-1"), new Update().set("spring", "data"), Person.class).subscribe(); - template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), new Update(), AutogenerateableId.class) + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(Bson.class), any(Bson.class), options.capture()); + + assertThat(options.getValue().getHintString()).isEqualTo("index-1"); + } + + @Test // GH-3218 + void updateUsesHintDocumentFromQuery() { + + template.updateFirst(new Query().withHint("{ firstname : 1 }"), new Update().set("spring", "data"), Person.class) .subscribe(); ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); - 
verify(collection).replaceOne(Mockito.any(Bson.class), Mockito.any(), options.capture()); + verify(collection).updateOne(any(Bson.class), any(Bson.class), options.capture()); - assertThat(options.getValue().getCollation().getLocale(), is("fr")); + assertThat(options.getValue().getHint()).isEqualTo(new Document("firstname", 1)); } - @Ignore("currently no aggregation") @Test // DATAMONGO-1518 - public void aggregateShouldUseCollationWhenPresent() { + void replaceOneShouldUseCollationWhenPresent() { - Aggregation aggregation = newAggregation(project("id")) - .withOptions(newAggregationOptions().collation(Collation.of("fr")).build()); - // template.aggregate(aggregation, AutogenerateableId.class, Document.class).subscribe(); + when(collection.replaceOne(any(Bson.class), any(), any(ReplaceOptions.class))).thenReturn(Mono.empty()); - ArgumentCaptor cmd = ArgumentCaptor.forClass(Document.class); - verify(db).runCommand(cmd.capture(), Mockito.any(Class.class)); + template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), new Update(), AutogenerateableId.class) + .subscribe(); - assertThat(cmd.getValue().get("collation", Document.class), equalTo(new Document("locale", "fr"))); + ArgumentCaptor options = ArgumentCaptor.forClass(ReplaceOptions.class); + verify(collection).replaceOne(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); } - @Ignore("currently no mapReduce") - @Test // DATAMONGO-1518 - public void mapReduceShouldUseCollationWhenPresent() { + @Test // DATAMONGO-1518, DATAMONGO-2257 + void mapReduceShouldUseCollationWhenPresent() { - // template.mapReduce("", "", "", MapReduceOptions.options().collation(Collation.of("fr")), - // AutogenerateableId.class).subscribe(); - // - // verify(mapReduceIterable).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + template.mapReduce(new BasicQuery("{}"), AutogenerateableId.class, AutogenerateableId.class, "", 
"", + MapReduceOptions.options().collation(Collation.of("fr"))).subscribe(); + + verify(mapReducePublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); } - @Test // DATAMONGO-1518 - public void geoNearShouldUseCollationWhenPresent() { + @Test // DATAMONGO-1518, DATAMONGO-2264 + void geoNearShouldUseCollationWhenPresent() { NearQuery query = NearQuery.near(0D, 0D).query(new BasicQuery("{}").collation(Collation.of("fr"))); template.geoNear(query, AutogenerateableId.class).subscribe(); - ArgumentCaptor cmd = ArgumentCaptor.forClass(Document.class); - verify(db).runCommand(cmd.capture(), Mockito.any(Class.class)); + verify(aggregatePublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // GH-4277 + void geoNearShouldHonorReadPreferenceFromQuery() { + + NearQuery query = NearQuery.near(new Point(1, 1)); + query.withReadPreference(ReadPreference.secondary()); + + template.geoNear(query, Wrapper.class).subscribe(); + + verify(collection).withReadPreference(eq(ReadPreference.secondary())); + } + + @Test // GH-4277 + void geoNearShouldHonorReadConcernFromQuery() { + + NearQuery query = NearQuery.near(new Point(1, 1)); + query.withReadConcern(ReadConcern.SNAPSHOT); - assertThat(cmd.getValue().get("collation", Document.class), equalTo(new Document("locale", "fr"))); + template.geoNear(query, Wrapper.class).subscribe(); + + verify(collection).withReadConcern(eq(ReadConcern.SNAPSHOT)); } @Test // DATAMONGO-1719 - public void appliesFieldsWhenInterfaceProjectionIsClosedAndQueryDoesNotDefineFields() { + void appliesFieldsWhenInterfaceProjectionIsClosedAndQueryDoesNotDefineFields() { - template.doFind("star-wars", new Document(), new Document(), Person.class, PersonProjection.class, null) - .subscribe(); + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, + PersonProjection.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); 
verify(findPublisher).projection(eq(new Document("firstname", 1))); } @Test // DATAMONGO-1719 - public void doesNotApplyFieldsWhenInterfaceProjectionIsClosedAndQueryDefinesFields() { + void doesNotApplyFieldsWhenInterfaceProjectionIsClosedAndQueryDefinesFields() { - template.doFind("star-wars", new Document(), new Document("bar", 1), Person.class, PersonProjection.class, null) - .subscribe(); + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document("bar", 1), Person.class, + PersonProjection.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); verify(findPublisher).projection(eq(new Document("bar", 1))); } @Test // DATAMONGO-1719 - public void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() { + void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() { - template.doFind("star-wars", new Document(), new Document(), Person.class, PersonSpELProjection.class, null) - .subscribe(); + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, + PersonSpELProjection.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); verify(findPublisher, never()).projection(any()); } - @Test // DATAMONGO-1719 - public void doesNotApplyFieldsToDtoProjection() { + @Test // DATAMONGO-1719, DATAMONGO-2041 + void appliesFieldsToDtoProjection() { - template.doFind("star-wars", new Document(), new Document(), Person.class, Jedi.class, null).subscribe(); + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, + Jedi.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); - verify(findPublisher, never()).projection(any()); + verify(findPublisher).projection(eq(new Document("firstname", 1))); } @Test // DATAMONGO-1719 - public void doesNotApplyFieldsToDtoProjectionWhenQueryDefinesFields() { + void doesNotApplyFieldsToDtoProjectionWhenQueryDefinesFields() { - template.doFind("star-wars", new Document(), new Document("bar", 1), Person.class, Jedi.class, 
null).subscribe(); + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document("bar", 1), Person.class, + Jedi.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); verify(findPublisher).projection(eq(new Document("bar", 1))); } @Test // DATAMONGO-1719 - public void doesNotApplyFieldsWhenTargetIsNotAProjection() { + void doesNotApplyFieldsWhenTargetIsNotAProjection() { - template.doFind("star-wars", new Document(), new Document(), Person.class, Person.class, null).subscribe(); + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, + Person.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); verify(findPublisher, never()).projection(any()); } @Test // DATAMONGO-1719 - public void doesNotApplyFieldsWhenTargetExtendsDomainType() { + void doesNotApplyFieldsWhenTargetExtendsDomainType() { - template.doFind("star-wars", new Document(), new Document(), Person.class, PersonExtended.class, null).subscribe(); + template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, + PersonExtended.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); verify(findPublisher, never()).projection(any()); } - @Data - @org.springframework.data.mongodb.core.mapping.Document(collection = "star-wars") - static class Person { + @Test // DATAMONGO-1783 + void countShouldUseSkipFromQuery() { - @Id String id; - String firstname; + template.count(new Query().skip(10), Person.class, "star-wars").subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getSkip()).isEqualTo(10); } - static class PersonExtended extends Person { + @Test // DATAMONGO-1783 + void countShouldUseLimitFromQuery() { - String lastname; + template.count(new Query().limit(100), Person.class, "star-wars").subscribe(); + + ArgumentCaptor options = 
ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getLimit()).isEqualTo(100); } - interface PersonProjection { - String getFirstname(); + @Test // DATAMONGO-2360 + void countShouldApplyQueryHintIfPresent() { + + Document queryHint = new Document("age", 1); + template.count(new Query().withHint(queryHint), Person.class, "star-wars").subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getHint()).isEqualTo(queryHint); } - public interface PersonSpELProjection { + @Test // DATAMONGO-2365 + void countShouldApplyQueryHintAsIndexNameIfPresent() { - @Value("#{target.firstname}") - String getName(); + template.count(new Query().withHint("idx-1"), Person.class, "star-wars").subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(), options.capture()); + + assertThat(options.getValue().getHintString()).isEqualTo("idx-1"); } - @Data - static class Jedi { + @Test // DATAMONGO-2215 + void updateShouldApplyArrayFilters() { - @Field("firstname") String name; + template.updateFirst(new BasicQuery("{}"), + new Update().set("grades.$[element]", 100).filterArray(Criteria.where("element").gte(100)), + EntityWithListOfSimple.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(), any(Bson.class), options.capture()); + + Assertions.assertThat((List) options.getValue().getArrayFilters()) + .contains(new org.bson.Document("element", new Document("$gte", 100))); + } + + @Test // DATAMONGO-2215 + void findAndModifyShouldApplyArrayFilters() { + + template.findAndModify(new BasicQuery("{}"), + new Update().set("grades.$[element]", 100).filterArray(Criteria.where("element").gte(100)), + EntityWithListOfSimple.class).subscribe(); 
+ + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndUpdateOptions.class); + verify(collection).findOneAndUpdate(any(), any(Bson.class), options.capture()); + + Assertions.assertThat((List) options.getValue().getArrayFilters()) + .contains(new org.bson.Document("element", new Document("$gte", 100))); + } + + @Test // DATAMONGO-1854 + void findShouldNotUseCollationWhenNoDefaultPresent() { + + template.find(new BasicQuery("{'foo' : 'bar'}"), Jedi.class).subscribe(); + + verify(findPublisher, never()).collation(any()); + } + + @Test // DATAMONGO-1854 + void findShouldUseDefaultCollationWhenPresent() { + + template.find(new BasicQuery("{'foo' : 'bar'}"), Sith.class).subscribe(); + + verify(findPublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // DATAMONGO-1854 + void findOneShouldUseDefaultCollationWhenPresent() { + + template.findOne(new BasicQuery("{'foo' : 'bar'}"), Sith.class).subscribe(); + + verify(findPublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // DATAMONGO-1854 + void existsShouldUseDefaultCollationWhenPresent() { + + template.exists(new BasicQuery("{}"), Sith.class).subscribe(); + + verify(findPublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // DATAMONGO-1854 + void findAndModfiyShoudUseDefaultCollationWhenPresent() { + + template.findAndModify(new BasicQuery("{}"), new Update(), Sith.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndUpdateOptions.class); + verify(collection).findOneAndUpdate(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void findAndRemoveShouldUseDefaultCollationWhenPresent() { + + template.findAndRemove(new BasicQuery("{}"), Sith.class).subscribe(); + + 
ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndDeleteOptions.class); + verify(collection).findOneAndDelete(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void createCollectionShouldNotCollationIfNotPresent() { + + template.createCollection(AutogenerateableId.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + Assertions.assertThat(options.getValue().getCollation()).isNull(); + } + + @Test // DATAMONGO-1854 + void createCollectionShouldApplyDefaultCollation() { + + template.createCollection(Sith.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void createCollectionShouldFavorExplicitOptionsOverDefaultCollation() { + + template.createCollection(Sith.class, CollectionOptions.just(Collation.of("en_US"))).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("en_US").build()); + } + + @Test // DATAMONGO-1854 + void createCollectionShouldUseDefaultCollationIfCollectionOptionsAreNull() { + + template.createCollection(Sith.class, null).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + 
.isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void aggreateShouldUseDefaultCollationIfPresent() { + + template.aggregate(newAggregation(Sith.class, project("id")), AutogenerateableId.class, Document.class).subscribe(); + + verify(aggregatePublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // GH-4277 + void aggreateShouldUseReadConcern() { + + AggregationOptions options = AggregationOptions.builder().readConcern(ReadConcern.SNAPSHOT).build(); + template.aggregate(newAggregation(Sith.class, project("id")).withOptions(options), AutogenerateableId.class, + Document.class).subscribe(); + + verify(collection).withReadConcern(ReadConcern.SNAPSHOT); + } + + @Test // GH-4286 + void aggreateShouldUseReadReadPreference() { + + AggregationOptions options = AggregationOptions.builder().readPreference(ReadPreference.primaryPreferred()).build(); + template.aggregate(newAggregation(Sith.class, project("id")).withOptions(options), AutogenerateableId.class, + Document.class).subscribe(); + + verify(collection).withReadPreference(ReadPreference.primaryPreferred()); + } + + @Test // GH-4543 + void aggregateDoesNotLimitBackpressure() { + + reset(collection); + + AtomicLong request = new AtomicLong(); + Publisher realPublisher = Flux.just(new Document()).doOnRequest(request::addAndGet); + + doAnswer(invocation -> { + Subscriber subscriber = invocation.getArgument(0); + realPublisher.subscribe(subscriber); + return null; + }).when(aggregatePublisher).subscribe(any()); + + when(collection.aggregate(anyList())).thenReturn(aggregatePublisher); + when(collection.aggregate(anyList(), any(Class.class))).thenReturn(aggregatePublisher); + + template.aggregate(newAggregation(Sith.class, project("id")), AutogenerateableId.class, Document.class).subscribe(); + + assertThat(request).hasValueGreaterThan(128); + } + + @Test // DATAMONGO-1854 + void 
aggreateShouldUseCollationFromOptionsEvenIfDefaultCollationIsPresent() { + + template + .aggregate( + newAggregation(Sith.class, project("id")) + .withOptions(newAggregationOptions().collation(Collation.of("fr")).build()), + AutogenerateableId.class, Document.class) + .subscribe(); + + verify(aggregatePublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-2153 + void aggregateShouldHonorOptionsComment() { + + AggregationOptions options = AggregationOptions.builder().comment("expensive").build(); + + template.aggregate(newAggregation(Sith.class, project("id")).withOptions(options), AutogenerateableId.class, + Document.class).subscribe(); + + verify(aggregatePublisher).comment("expensive"); + } + + @Test // DATAMONGO-1836 + void aggregateShouldHonorOptionsHint() { + + Document hint = new Document("dummyHint", 1); + AggregationOptions options = AggregationOptions.builder().hint(hint).build(); + + template.aggregate(newAggregation(Sith.class, project("id")).withOptions(options), AutogenerateableId.class, + Document.class).subscribe(); + + verify(aggregatePublisher).hint(hint); + } + + @Test // GH-4238 + void aggregateShouldHonorOptionsHintString() { + + AggregationOptions options = AggregationOptions.builder().hint("index-1").build(); + + template.aggregate(newAggregation(Sith.class, project("id")).withOptions(options), AutogenerateableId.class, + Document.class).subscribe(); + + verify(aggregatePublisher).hintString("index-1"); + } + + @Test // DATAMONGO-2390 + void aggregateShouldNoApplyZeroOrNegativeMaxTime() { + + template + .aggregate(newAggregation(MongoTemplateUnitTests.Sith.class, project("id")).withOptions( + newAggregationOptions().maxTime(Duration.ZERO).build()), AutogenerateableId.class, Document.class) + .subscribe(); + template + .aggregate( + newAggregation(MongoTemplateUnitTests.Sith.class, project("id")) + .withOptions(newAggregationOptions().maxTime(Duration.ofSeconds(-1)).build()), + 
AutogenerateableId.class, Document.class) + .subscribe(); + + verify(aggregatePublisher, never()).maxTime(anyLong(), any()); + } + + @Test // DATAMONGO-2390 + void aggregateShouldApplyMaxTimeIfSet() { + + template + .aggregate( + newAggregation(MongoTemplateUnitTests.Sith.class, project("id")) + .withOptions(newAggregationOptions().maxTime(Duration.ofSeconds(10)).build()), + AutogenerateableId.class, Document.class) + .subscribe(); + + verify(aggregatePublisher).maxTime(eq(10000L), eq(TimeUnit.MILLISECONDS)); + } + + @Test // DATAMONGO-1854 + void findAndReplaceShouldUseCollationWhenPresent() { + + template.findAndReplace(new BasicQuery("{}").collation(Collation.of("fr")), new Jedi()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndReplaceOptions.class); + verify(collection).findOneAndReplace(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-1854 + void findAndReplaceShouldUseDefaultCollationWhenPresent() { + + template.findAndReplace(new BasicQuery("{}"), new Sith()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndReplaceOptions.class); + verify(collection).findOneAndReplace(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("de_AT"); + } + + @Test // DATAMONGO-1854 + void findAndReplaceShouldUseCollationEvenIfDefaultCollationIsPresent() { + + template.findAndReplace(new BasicQuery("{}").collation(Collation.of("fr")), new MongoTemplateUnitTests.Sith()) + .subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(FindOneAndReplaceOptions.class); + verify(collection).findOneAndReplace(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // DATAMONGO-1854 + void findDistinctShouldUseDefaultCollationWhenPresent() { + + template.findDistinct(new 
BasicQuery("{}"), "name", Sith.class, String.class).subscribe(); + + verify(distinctPublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build())); + } + + @Test // DATAMONGO-1854 + void findDistinctPreferCollationFromQueryOverDefaultCollation() { + + template.findDistinct(new BasicQuery("{}").collation(Collation.of("fr")), "name", Sith.class, String.class) + .subscribe(); + + verify(distinctPublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); + } + + @Test // DATAMONGO-1854 + void updateFirstShouldUseDefaultCollationWhenPresent() { + + template.updateFirst(new BasicQuery("{}"), Update.update("foo", "bar"), Sith.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void updateFirstShouldPreferExplicitCollationOverDefaultCollation() { + + template.updateFirst(new BasicQuery("{}").collation(Collation.of("fr")), Update.update("foo", "bar"), Sith.class) + .subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateOne(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build()); + } + + @Test // DATAMONGO-1854 + void updateMultiShouldUseDefaultCollationWhenPresent() { + + template.updateMulti(new BasicQuery("{}"), Update.update("foo", "bar"), Sith.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateMany(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + 
@Test // DATAMONGO-1854 + void updateMultiShouldPreferExplicitCollationOverDefaultCollation() { + + template.updateMulti(new BasicQuery("{}").collation(Collation.of("fr")), Update.update("foo", "bar"), Sith.class) + .subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(UpdateOptions.class); + verify(collection).updateMany(any(), any(Bson.class), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build()); + } + + @Test // DATAMONGO-1854 + void removeShouldUseDefaultCollationWhenPresent() { + + template.remove(new BasicQuery("{}"), Sith.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(DeleteOptions.class); + verify(collection).deleteMany(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de_AT").build()); + } + + @Test // DATAMONGO-1854 + void removeShouldPreferExplicitCollationOverDefaultCollation() { + + template.remove(new BasicQuery("{}").collation(Collation.of("fr")), Sith.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(DeleteOptions.class); + verify(collection).deleteMany(any(), options.capture()); + + assertThat(options.getValue().getCollation()) + .isEqualTo(com.mongodb.client.model.Collation.builder().locale("fr").build()); + } + + @Test // DATAMONGO-2261 + void saveShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + template.save(entity).subscribe(); + + verify(beforeConvertCallback).onBeforeConvert(eq(entity), anyString()); + 
verify(beforeSaveCallback).onBeforeSave(eq(entity), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void insertShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + template.insert(entity).subscribe(); + + verify(beforeConvertCallback).onBeforeConvert(eq(entity), anyString()); + verify(beforeSaveCallback).onBeforeSave(eq(entity), any(), anyString()); } + + @Test // DATAMONGO-2261 + void insertAllShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity1 = new Person(); + entity1.id = "1"; + entity1.firstname = "luke"; + + Person entity2 = new Person(); + entity1.id = "2"; + entity1.firstname = "luke"; + + template.insertAll(Arrays.asList(entity1, entity2)).subscribe(); + + verify(beforeConvertCallback, times(2)).onBeforeConvert(any(), anyString()); + verify(beforeSaveCallback, times(2)).onBeforeSave(any(), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void findAndReplaceShouldInvokeCallbacks() { + + ValueCapturingBeforeConvertCallback beforeConvertCallback = spy(new ValueCapturingBeforeConvertCallback()); + ValueCapturingBeforeSaveCallback beforeSaveCallback = spy(new ValueCapturingBeforeSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(beforeConvertCallback, beforeSaveCallback)); + + Person entity = new Person(); + entity.id = "init"; + 
entity.firstname = "luke"; + + template.findAndReplace(new Query(), entity).subscribe(); + + verify(beforeConvertCallback).onBeforeConvert(eq(entity), anyString()); + verify(beforeSaveCallback).onBeforeSave(eq(entity), any(), anyString()); + } + + @Test // DATAMONGO-2261 + void entityCallbacksAreNotSetByDefault() { + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isNull(); + } + + @Test // DATAMONGO-2261 + void entityCallbacksShouldBeInitiatedOnSettingApplicationContext() { + + ApplicationContext ctx = new StaticApplicationContext(); + template.setApplicationContext(ctx); + + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isNotNull(); + } + + @Test // DATAMONGO-2261 + void setterForEntityCallbackOverridesContextInitializedOnes() { + + ApplicationContext ctx = new StaticApplicationContext(); + template.setApplicationContext(ctx); + + ReactiveEntityCallbacks callbacks = ReactiveEntityCallbacks.create(); + template.setEntityCallbacks(callbacks); + + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isSameAs(callbacks); + } + + @Test // DATAMONGO-2261 + void setterForApplicationContextShouldNotOverrideAlreadySetEntityCallbacks() { + + ReactiveEntityCallbacks callbacks = ReactiveEntityCallbacks.create(); + ApplicationContext ctx = new StaticApplicationContext(); + + template.setEntityCallbacks(callbacks); + template.setApplicationContext(ctx); + + Assertions.assertThat(ReflectionTestUtils.getField(template, "entityCallbacks")).isSameAs(callbacks); + } + + @Test // DATAMONGO-2344, DATAMONGO-2572 + void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFind() { + + template.find(new Query().allowSecondaryReads(), AutogenerateableId.class).subscribe(); + + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2344, DATAMONGO-2572 + void 
allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFindOne() { + + template.findOne(new Query().allowSecondaryReads(), AutogenerateableId.class).subscribe(); + + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2344, DATAMONGO-2572 + void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFindDistinct() { + + template.findDistinct(new Query().allowSecondaryReads(), "name", AutogenerateableId.class, String.class) + .subscribe(); + + verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); + } + + @Test // DATAMONGO-2331 + void updateShouldAllowAggregationExpressions() { + + AggregationUpdate update = AggregationUpdate.update().set("total") + .toValue(ArithmeticOperators.valueOf("val1").sum().and("val2")); + + template.updateFirst(new BasicQuery("{}"), update, Wrapper.class).subscribe(); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo( + Collections.singletonList(Document.parse("{ $set : { total : { $sum : [ \"$val1\",\"$val2\" ] } } }"))); + } + + @Test // DATAMONGO-2331 + void updateShouldAllowMultipleAggregationExpressions() { + + AggregationUpdate update = AggregationUpdate.update() // + .set("average").toValue(ArithmeticOperators.valueOf("tests").avg()) // + .set("grade").toValue(ConditionalOperators.switchCases( // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(90)).then("A"), // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(80)).then("B"), // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(70)).then("C"), // + CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(60)).then("D") // + ) // + .defaultTo("F"));// + + template.updateFirst(new BasicQuery("{}"), update, Wrapper.class).subscribe(); + 
+ ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).containsExactly(Document.parse("{ $set: { average : { $avg: \"$tests\" } } }"), + Document.parse("{ $set: { grade: { $switch: {\n" + " branches: [\n" + + " { case: { $gte: [ \"$average\", 90 ] }, then: \"A\" },\n" + + " { case: { $gte: [ \"$average\", 80 ] }, then: \"B\" },\n" + + " { case: { $gte: [ \"$average\", 70 ] }, then: \"C\" },\n" + + " { case: { $gte: [ \"$average\", 60 ] }, then: \"D\" }\n" + + " ],\n" + " default: \"F\"\n" + " } } } }")); + } + + @Test // DATAMONGO-2331 + void updateShouldMapAggregationExpressionToDomainType() { + + AggregationUpdate update = AggregationUpdate.update().set("name") + .toValue(ArithmeticOperators.valueOf("val1").sum().and("val2")); + + template.updateFirst(new BasicQuery("{}"), update, Jedi.class).subscribe(); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo( + Collections.singletonList(Document.parse("{ $set : { firstname : { $sum:[ \"$val1\",\"$val2\" ] } } }"))); + } + + @Test // DATAMONGO-2331 + void updateShouldPassOnUnsetCorrectly() { + + SetOperation setOperation = SetOperation.builder().set("status").toValue("Modified").and().set("comments") + .toValue(Fields.fields("misc1").and("misc2").asList()); + AggregationUpdate update = AggregationUpdate.update(); + update.set(setOperation); + update.unset("misc1", "misc2"); + + template.updateFirst(new BasicQuery("{}"), update, Wrapper.class).subscribe(); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo( + 
Arrays.asList(Document.parse("{ $set: { status: \"Modified\", comments: [ \"$misc1\", \"$misc2\" ] } }"), + Document.parse("{ $unset: [ \"misc1\", \"misc2\" ] }"))); + } + + @Test // DATAMONGO-2331 + void updateShouldMapAggregationUnsetToDomainType() { + + AggregationUpdate update = AggregationUpdate.update(); + update.unset("name"); + + template.updateFirst(new BasicQuery("{}"), update, Jedi.class).subscribe(); + + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + + verify(collection, times(1)).updateOne(any(org.bson.Document.class), captor.capture(), any(UpdateOptions.class)); + + assertThat(captor.getValue()).isEqualTo(Collections.singletonList(Document.parse("{ $unset : \"firstname\" }"))); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyIfNotPresentInFilter() { + + when(findPublisher.first()).thenReturn(Mono.empty()); + + template.save(new ShardedEntityWithNonDefaultShardKey("id-1", "AT", 4230)).subscribe(); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("country", "AT").append("userid", 4230)); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyFromGivenDocumentIfShardKeyIsImmutable() { + + template.save(new ShardedEntityWithNonDefaultImmutableShardKey("id-1", "AT", 4230)).subscribe(); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + ArgumentCaptor replacement = ArgumentCaptor.forClass(Document.class); + + verify(collection).replaceOne(filter.capture(), replacement.capture(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("country", "AT").append("userid", 4230)); + assertThat(replacement.getValue()).containsEntry("country", "AT").containsEntry("userid", 4230); + + verifyNoInteractions(findPublisher); + } + + @Test // DATAMONGO-2341 + void 
saveShouldAppendNonDefaultShardKeyToVersionedEntityIfNotPresentInFilter() { + + when(collection.replaceOne(any(Bson.class), any(Document.class), any(ReplaceOptions.class))) + .thenReturn(Mono.just(UpdateResult.acknowledged(1, 1L, null))); + when(findPublisher.first()).thenReturn(Mono.empty()); + + template.save(new ShardedVersionedEntityWithNonDefaultShardKey("id-1", 1L, "AT", 4230)).subscribe(); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()) + .isEqualTo(new Document("_id", "id-1").append("version", 1L).append("country", "AT").append("userid", 4230)); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendNonDefaultShardKeyFromExistingDocumentIfNotPresentInFilter() { + + when(findPublisher.first()) + .thenReturn(Mono.just(new Document("_id", "id-1").append("country", "US").append("userid", 4230))); + + template.save(new ShardedEntityWithNonDefaultShardKey("id-1", "AT", 4230)).subscribe(); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + ArgumentCaptor replacement = ArgumentCaptor.forClass(Document.class); + + verify(collection).replaceOne(filter.capture(), replacement.capture(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("country", "US").append("userid", 4230)); + assertThat(replacement.getValue()).containsEntry("country", "AT").containsEntry("userid", 4230); + } + + @Test // DATAMONGO-2341 + void saveShouldAppendDefaultShardKeyIfNotPresentInFilter() { + + template.save(new ShardedEntityWithDefaultShardKey("id-1", "AT", 4230)).subscribe(); + + ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); + verify(collection).replaceOne(filter.capture(), any(), any()); + + assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1")); + } + + @Test // DATAMONGO-2341 + void saveShouldProjectOnShardKeyWhenLoadingExistingDocument() { + + when(findPublisher.first()) + 
.thenReturn(Mono.just(new Document("_id", "id-1").append("country", "US").append("userid", 4230))); + + template.save(new ShardedEntityWithNonDefaultShardKey("id-1", "AT", 4230)).subscribe(); + + verify(findPublisher).projection(new Document("country", 1).append("userid", 1)); + } + + @Test // DATAMONGO-2341 + void saveVersionedShouldProjectOnShardKeyWhenLoadingExistingDocument() { + + when(collection.replaceOne(any(Bson.class), any(Document.class), any(ReplaceOptions.class))) + .thenReturn(Mono.just(UpdateResult.acknowledged(1, 1L, null))); + when(findPublisher.first()).thenReturn(Mono.empty()); + + template.save(new ShardedVersionedEntityWithNonDefaultShardKey("id-1", 1L, "AT", 4230)).subscribe(); + + verify(findPublisher).projection(new Document("country", 1).append("userid", 1)); + } + + @Test // GH-3648 + void shouldThrowExceptionIfEntityReaderReturnsNull() { + + MappingMongoConverter converter = mock(MappingMongoConverter.class); + when(converter.getMappingContext()).thenReturn((MappingContext) mappingContext); + when(converter.getProjectionFactory()).thenReturn(new SpelAwareProxyProjectionFactory()); + template = new ReactiveMongoTemplate(factory, converter); + + when(collection.find(Document.class)).thenReturn(findPublisher); + stubFindSubscribe(new Document()); + + template.find(new Query(), Person.class).as(StepVerifier::create).verifyError(MappingException.class); + } + + @Test // DATAMONGO-2479 + void findShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.find(Document.class)).thenReturn(findPublisher); + stubFindSubscribe(document); + + List results = template.find(new Query(), Person.class).timeout(Duration.ofSeconds(1)).toStream() + .collect(Collectors.toList()); + + 
verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(results.get(0).id).isEqualTo("after-convert"); + } + + @Test // GH-4543 + void findShouldNotLimitBackpressure() { + + AtomicLong request = new AtomicLong(); + stubFindSubscribe(new Document(), request); + + template.find(new Query(), Person.class).subscribe(); + + assertThat(request).hasValueGreaterThan(128); + } + + @Test // DATAMONGO-2479 + void findByIdShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.find(any(Bson.class), eq(Document.class))).thenReturn(findPublisher); + stubFindSubscribe(document); + + Person result = template.findById("init", Person.class).block(Duration.ofSeconds(1)); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(result.id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void findOneShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.find(any(Bson.class), eq(Document.class))).thenReturn(findPublisher); + stubFindSubscribe(document); + + Person result = template.findOne(new Query(), Person.class).block(Duration.ofSeconds(1)); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(result.id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void findAllShouldInvokeAfterConvertCallbacks() { + + 
ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.find(Document.class)).thenReturn(findPublisher); + stubFindSubscribe(document); + + List results = template.findAll(Person.class).timeout(Duration.ofSeconds(1)).toStream() + .collect(Collectors.toList()); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(results.get(0).id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void findAndModifyShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndUpdate(any(Bson.class), any(Bson.class), any())).thenReturn(findPublisher); + stubFindSubscribe(document); + + Person result = template.findAndModify(new Query(), new Update(), Person.class).block(Duration.ofSeconds(1)); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(result.id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void findAndRemoveShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndDelete(any(Bson.class), any())).thenReturn(findPublisher); + stubFindSubscribe(document); + + Person result = template.findAndRemove(new Query(), 
Person.class).block(Duration.ofSeconds(1)); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(result.id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void findAllAndRemoveShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.find(Document.class)).thenReturn(findPublisher); + stubFindSubscribe(document); + when(collection.deleteMany(any(Bson.class), any(DeleteOptions.class))) + .thenReturn(Mono.just(spy(DeleteResult.class))); + + List results = template.findAllAndRemove(new Query(), Person.class).timeout(Duration.ofSeconds(1)) + .toStream().collect(Collectors.toList()); + + verify(afterConvertCallback).onAfterConvert(eq(new Person("init", "luke")), eq(document), anyString()); + assertThat(results.get(0).id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void findAndReplaceShouldInvokeAfterConvertCallbacks() { + + ValueCapturingAfterConvertCallback afterConvertCallback = spy(new ValueCapturingAfterConvertCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterConvertCallback)); + + when(collection.findOneAndReplace(any(Bson.class), any(Document.class), any())).thenReturn(findPublisher); + stubFindSubscribe(new Document("_id", "init").append("firstname", "luke")); + + Person entity = new Person(); + entity.id = "init"; + entity.firstname = "luke"; + + Person saved = template.findAndReplace(new Query(), entity).block(Duration.ofSeconds(1)); + + verify(afterConvertCallback).onAfterConvert(eq(entity), any(), anyString()); + assertThat(saved.id).isEqualTo("after-convert"); + } + + @Test // DATAMONGO-2479 + void saveShouldInvokeAfterSaveCallbacks() { + + 
ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterSaveCallback)); + + when(collection.replaceOne(any(Bson.class), any(Document.class), any(ReplaceOptions.class))) + .thenReturn(Mono.just(mock(UpdateResult.class))); + + Person entity = new Person("init", "luke"); + + Person saved = template.save(entity).block(Duration.ofSeconds(1)); + + verify(afterSaveCallback).onAfterSave(eq(entity), any(), anyString()); + assertThat(saved.id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void insertShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterSaveCallback)); + + when(collection.insertOne(any())).thenReturn(Mono.just(mock(InsertOneResult.class))); + + Person entity = new Person("init", "luke"); + + Person saved = template.insert(entity).block(Duration.ofSeconds(1)); + + verify(afterSaveCallback).onAfterSave(eq(entity), any(), anyString()); + assertThat(saved.id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void insertAllShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterSaveCallback)); + + Person entity1 = new Person(); + entity1.id = "1"; + entity1.firstname = "luke"; + + Person entity2 = new Person(); + entity2.id = "2"; + entity2.firstname = "luke"; + + when(collection.insertMany(anyList())).then(invocation -> { + List list = invocation.getArgument(0); + return Flux.fromIterable(list).map(i -> mock(InsertManyResult.class)); + }); + + List saved = template.insertAll(Arrays.asList(entity1, entity2)).timeout(Duration.ofSeconds(1)).toStream() + .collect(Collectors.toList()); + + verify(afterSaveCallback, times(2)).onAfterSave(any(), any(), 
anyString()); + assertThat(saved.get(0).id).isEqualTo("after-save"); + assertThat(saved.get(1).id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void findAndReplaceShouldInvokeAfterSaveCallbacks() { + + ValueCapturingAfterSaveCallback afterSaveCallback = spy(new ValueCapturingAfterSaveCallback()); + + template.setEntityCallbacks(ReactiveEntityCallbacks.create(afterSaveCallback)); + + when(collection.findOneAndReplace(any(Bson.class), any(Document.class), any())).thenReturn(findPublisher); + stubFindSubscribe(new Document("_id", "init").append("firstname", "luke")); + + Person entity = new Person("init", "luke"); + + Person saved = template.findAndReplace(new Query(), entity).block(Duration.ofSeconds(1)); + + verify(afterSaveCallback).onAfterSave(eq(entity), any(), anyString()); + assertThat(saved.id).isEqualTo("after-save"); + } + + @Test // DATAMONGO-2479 + void findAndReplaceShouldEmitAfterSaveEvent() { + + AbstractMongoEventListener eventListener = new AbstractMongoEventListener() { + + @Override + public void onAfterSave(AfterSaveEvent event) { + + assertThat(event.getSource().id).isEqualTo("init"); + event.getSource().id = "after-save-event"; + } + }; + + StaticApplicationContext ctx = new StaticApplicationContext(); + ctx.registerBean(ApplicationListener.class, () -> eventListener); + ctx.refresh(); + + template.setApplicationContext(ctx); + + Person entity = new Person("init", "luke"); + + Document document = new Document("_id", "init").append("firstname", "luke"); + when(collection.findOneAndReplace(any(Bson.class), any(Document.class), any())).thenReturn(Mono.just(document)); + + Person saved = template.findAndReplace(new Query(), entity).block(Duration.ofSeconds(1)); + + assertThat(saved.id).isEqualTo("after-save-event"); + } + + @Test // DATAMONGO-2556 + void estimatedCountShouldBeDelegatedCorrectly() { + + template.estimatedCount(Person.class).subscribe(); + + verify(db).getCollection("star-wars", Document.class); + 
verify(collection).estimatedDocumentCount(any()); + } + + @Test // GH-3522 + void usedCountDocumentsForEmptyQueryByDefault() { + + template.count(new Query(), Person.class).subscribe(); + + verify(collection).countDocuments(any(Document.class), any()); + } + + @Test // GH-3522 + void delegatesToEstimatedCountForEmptyQueryIfEnabled() { + + template.useEstimatedCount(true); + + template.count(new Query(), Person.class).subscribe(); + + verify(collection).estimatedDocumentCount(any()); + } + + @Test // GH-3522 + void stillUsesCountDocumentsForNonEmptyQueryEvenIfEstimationEnabled() { + + template.useEstimatedCount(true); + + template.count(new BasicQuery("{ 'spring' : 'data-mongodb' }"), Person.class).subscribe(); + + verify(collection).countDocuments(any(Document.class), any()); + } + + @Test // GH-4374 + void countConsidersMaxTimeMs() { + + template.count(new BasicQuery("{ 'spring' : 'data-mongodb' }").maxTimeMsec(5000), Person.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(Document.class), options.capture()); + assertThat(options.getValue().getMaxTime(TimeUnit.MILLISECONDS)).isEqualTo(5000); + } + + @Test // GH-4374 + void countPassesOnComment() { + + template.count(new BasicQuery("{ 'spring' : 'data-mongodb' }").comment("rocks!"), Person.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CountOptions.class); + verify(collection).countDocuments(any(Document.class), options.capture()); + assertThat(options.getValue().getComment()).isEqualTo(BsonUtils.simpleToBsonValue("rocks!")); + } + + @Test // GH-2911 + void insertErrorsOnPublisher() { + + Publisher publisher = Mono.just("data"); + + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> template.insert(publisher)); + } + + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeriesWithDefaults() { + + template.createCollection(TimeSeriesTypeWithDefaults.class).subscribe(); + + 
ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("timestamp").toString()); + } + + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeries() { + + template.createCollection(TimeSeriesType.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta") + .granularity(TimeSeriesGranularity.HOURS).toString()); + } + + @Test // GH-4167 + void changeStreamOptionStartAfterShouldBeApplied() { + + when(factory.getMongoDatabase(anyString())).thenReturn(Mono.just(db)); + + when(collection.watch(any(Class.class))).thenReturn(changeStreamPublisher); + when(changeStreamPublisher.batchSize(anyInt())).thenReturn(changeStreamPublisher); + when(changeStreamPublisher.startAfter(any())).thenReturn(changeStreamPublisher); + when(changeStreamPublisher.fullDocument(any())).thenReturn(changeStreamPublisher); + + BsonDocument token = new BsonDocument("token", new BsonString("id")); + template + .changeStream("database", "collection", ChangeStreamOptions.builder().startAfter(token).build(), Object.class) + .subscribe(); + + verify(changeStreamPublisher).startAfter(eq(token)); + } + + @Test // GH-4495 + void changeStreamOptionFullDocumentBeforeChangeShouldBeApplied() { + + when(factory.getMongoDatabase(anyString())).thenReturn(Mono.just(db)); + + when(collection.watch(any(Class.class))).thenReturn(changeStreamPublisher); + when(changeStreamPublisher.batchSize(anyInt())).thenReturn(changeStreamPublisher); + when(changeStreamPublisher.startAfter(any())).thenReturn(changeStreamPublisher); + 
when(changeStreamPublisher.fullDocument(any())).thenReturn(changeStreamPublisher); + when(changeStreamPublisher.fullDocumentBeforeChange(any())).thenReturn(changeStreamPublisher); + + ChangeStreamOptions options = ChangeStreamOptions.builder() + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.REQUIRED).build(); + template.changeStream("database", "collection", options, Object.class).subscribe(); + + verify(changeStreamPublisher).fullDocumentBeforeChange(FullDocumentBeforeChange.REQUIRED); + + } + + @Test // GH-4462 + void replaceShouldUseCollationWhenPresent() { + + template.replace(new BasicQuery("{}").collation(Collation.of("fr")), new Jedi()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().isUpsert()).isFalse(); + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("fr"); + } + + @Test // GH-4462 + void replaceShouldNotUpsertByDefault() { + + template.replace(new BasicQuery("{}"), new MongoTemplateUnitTests.Sith()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().isUpsert()).isFalse(); + } + + @Test // GH-4462 + void replaceShouldUpsert() { + + template.replace(new BasicQuery("{}"), new MongoTemplateUnitTests.Sith(), + org.springframework.data.mongodb.core.ReplaceOptions.replaceOptions().upsert()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().isUpsert()).isTrue(); + } + + @Test // GH-4462 + void replaceShouldUseDefaultCollationWhenPresent() { + + template.replace(new BasicQuery("{}"), new 
MongoTemplateUnitTests.Sith(), + org.springframework.data.mongodb.core.ReplaceOptions.replaceOptions()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().getCollation().getLocale()).isEqualTo("de_AT"); + } + + @Test // GH-4462 + void replaceShouldUseHintIfPresent() { + + template.replace(new BasicQuery("{}").withHint("index-to-use"), new MongoTemplateUnitTests.Sith(), + org.springframework.data.mongodb.core.ReplaceOptions.replaceOptions().upsert()).subscribe(); + + ArgumentCaptor options = ArgumentCaptor + .forClass(com.mongodb.client.model.ReplaceOptions.class); + verify(collection).replaceOne(any(Bson.class), any(), options.capture()); + + assertThat(options.getValue().getHintString()).isEqualTo("index-to-use"); + } + + @Test // GH-4462 + void replaceShouldApplyWriteConcern() { + + template.setWriteConcernResolver(new WriteConcernResolver() { + public WriteConcern resolve(MongoAction action) { + + assertThat(action.getMongoActionOperation()).isEqualTo(MongoActionOperation.REPLACE); + return WriteConcern.UNACKNOWLEDGED; + } + }); + + template.replace(new BasicQuery("{}").withHint("index-to-use"), new Sith(), + org.springframework.data.mongodb.core.ReplaceOptions.replaceOptions().upsert()).subscribe(); + + verify(collection).withWriteConcern(eq(WriteConcern.UNACKNOWLEDGED)); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromString() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsPlainString.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.MINUTES)) + .isEqualTo(10); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromIso8601String() { + 
+ template.createCollection(TimeSeriesTypeWithExpireAfterAsIso8601Style.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.DAYS)) + .isEqualTo(1); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromExpression() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsExpression.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.SECONDS)) + .isEqualTo(11); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithExpirationFromExpressionReturningDuration() { + + template.createCollection(TimeSeriesTypeWithExpireAfterAsExpressionResultingInDuration.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getExpireAfter(TimeUnit.SECONDS)) + .isEqualTo(100); + } + + @Test // GH-4099 + void createCollectionShouldSetUpTimeSeriesWithInvalidTimeoutExpiration() { + + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> + template.createCollection(TimeSeriesTypeWithInvalidExpireAfter.class).subscribe() + ); + } + + private void stubFindSubscribe(Document document) { + stubFindSubscribe(document, new AtomicLong()); + } + + private void stubFindSubscribe(Document document, AtomicLong request) { + + Publisher realPublisher = Flux.just(document).doOnRequest(request::addAndGet); + + doAnswer(invocation -> { + Subscriber subscriber = invocation.getArgument(0); + realPublisher.subscribe(subscriber); + return null; + }).when(findPublisher).subscribe(any()); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = 
"star-wars") + static class Person { + + @Id String id; + String firstname; + + public Person() {} + + public Person(String id, String firstname) { + this.id = id; + this.firstname = firstname; + } + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname); + } + + public String toString() { + return "ReactiveMongoTemplateUnitTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + ")"; + } + } + + class Wrapper { + + AutogenerateableId foo; + } + + static class PersonExtended extends Person { + + String lastname; + } + + interface PersonProjection { + String getFirstname(); + } + + public interface PersonSpELProjection { + + @Value("#{target.firstname}") + String getName(); + } + + static class Jedi { + + @Field("firstname") String name; + + public Jedi() {} + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Jedi jedi = (Jedi) o; + return Objects.equals(name, jedi.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + public String toString() { + return "ReactiveMongoTemplateUnitTests.Jedi(name=" + this.getName() + ")"; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(collation = "de_AT") + static class Sith { + + @Field("firstname") 
String name; + } + + static class EntityWithListOfSimple { + List grades; + } + + @TimeSeries(timeField = "timestamp") + static class TimeSeriesTypeWithDefaults { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularity.HOURS) + static class TimeSeriesType { + + String id; + + @Field("time_stamp") Instant timestamp; + Object meta; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "10m") + static class TimeSeriesTypeWithExpireAfterAsPlainString { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "P1D") + static class TimeSeriesTypeWithExpireAfterAsIso8601Style { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "#{10 + 1 + 's'}") + static class TimeSeriesTypeWithExpireAfterAsExpression { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "#{T(java.time.Duration).ofSeconds(100)}") + static class TimeSeriesTypeWithExpireAfterAsExpressionResultingInDuration { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", expireAfter = "123ops") + static class TimeSeriesTypeWithInvalidExpireAfter { + + String id; + Instant timestamp; + } + + static class ValueCapturingEntityCallback { + + private final List values = new ArrayList<>(1); + + protected void capture(T value) { + values.add(value); + } + + public List getValues() { + return values; + } + + @Nullable + public T getValue() { + return CollectionUtils.lastElement(values); + } + } + + static class ValueCapturingBeforeConvertCallback extends ValueCapturingEntityCallback + implements ReactiveBeforeConvertCallback { + + @Override + public Mono onBeforeConvert(Person entity, String collection) { + + capture(entity); + return Mono.just(entity); + } + } + + static class ValueCapturingBeforeSaveCallback extends ValueCapturingEntityCallback + implements ReactiveBeforeSaveCallback { + + 
@Override + public Mono onBeforeSave(Person entity, Document document, String collection) { + + capture(entity); + return Mono.just(entity); + } + } + + static class ValueCapturingAfterConvertCallback extends ValueCapturingEntityCallback + implements ReactiveAfterConvertCallback { + + @Override + public Mono onAfterConvert(Person entity, Document document, String collection) { + + capture(entity); + return Mono.just(new Person() { + { + id = "after-convert"; + firstname = entity.firstname; + } + }); + } + } + + static class ValueCapturingAfterSaveCallback extends ValueCapturingEntityCallback + implements ReactiveAfterSaveCallback { + + @Override + public Mono onAfterSave(Person entity, Document document, String collection) { + + capture(entity); + return Mono.just(new Person() { + { + id = "after-save"; + firstname = entity.firstname; + } + }); + } + } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUpdateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUpdateTests.java new file mode 100644 index 0000000000..35c27815ff --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUpdateTests.java @@ -0,0 +1,416 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Objects; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Version; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators; +import org.springframework.data.mongodb.core.aggregation.ReplaceWithOperation; +import org.springframework.data.mongodb.core.aggregation.SetOperation; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; + +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * @author Christoph Strobl 
+ */ +@ExtendWith(MongoClientExtension.class) +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") +public class ReactiveMongoTemplateUpdateTests { + + static final String DB_NAME = "reactive-update-test"; + + static @Client MongoClient client; + ReactiveMongoTemplate template; + + @BeforeEach + void beforeEach() { + + template = new ReactiveMongoTemplate(new SimpleReactiveMongoDatabaseFactory(client, DB_NAME)); + + MongoTestUtils.createOrReplaceCollection(DB_NAME, template.getCollectionName(Score.class), client).then() + .as(StepVerifier::create).verifyComplete(); + MongoTestUtils.createOrReplaceCollection(DB_NAME, template.getCollectionName(Versioned.class), client).then() + .as(StepVerifier::create).verifyComplete(); + MongoTestUtils.createOrReplaceCollection(DB_NAME, template.getCollectionName(Book.class), client).then() + .as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void aggregateUpdateWithSet() { + + Score score1 = new Score(1, "Maya", Arrays.asList(10, 5, 10), Arrays.asList(10, 8), 0); + Score score2 = new Score(2, "Ryan", Arrays.asList(5, 6, 5), Arrays.asList(8, 8), 8); + + template.insertAll(Arrays.asList(score1, score2)).then().as(StepVerifier::create).verifyComplete(); + + AggregationUpdate update = AggregationUpdate.update().set(SetOperation.builder() // + .set("totalHomework").toValueOf(ArithmeticOperators.valueOf("homework").sum()).and() // + .set("totalQuiz").toValueOf(ArithmeticOperators.valueOf("quiz").sum())) // + .set(SetOperation.builder() // + .set("totalScore") + .toValueOf(ArithmeticOperators.valueOf("totalHomework").add("totalQuiz").add("extraCredit"))); + + template.update(Score.class).apply(update).all().then().as(StepVerifier::create).verifyComplete(); + + Flux.from(collection(Score.class).find(new org.bson.Document())).collectList().as(StepVerifier::create) + .consumeNextWith(it -> { + + assertThat(it).containsExactlyInAnyOrder( // + org.bson.Document.parse( + "{\"_id\" : 1, \"student\" : 
\"Maya\", \"homework\" : [ 10, 5, 10 ], \"quiz\" : [ 10, 8 ], \"extraCredit\" : 0, \"totalHomework\" : 25, \"totalQuiz\" : 18, \"totalScore\" : 43, \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Score\"}"), + org.bson.Document.parse( + "{ \"_id\" : 2, \"student\" : \"Ryan\", \"homework\" : [ 5, 6, 5 ], \"quiz\" : [ 8, 8 ], \"extraCredit\" : 8, \"totalHomework\" : 16, \"totalQuiz\" : 16, \"totalScore\" : 40, \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Score\"}")); + }).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void versionedAggregateUpdateWithSet() { + + Versioned source = new Versioned("id-1", "value-0"); + template.insert(Versioned.class).one(source).then().as(StepVerifier::create).verifyComplete(); + + AggregationUpdate update = AggregationUpdate.update().set("value").toValue("changed"); + template.update(Versioned.class).matching(Query.query(Criteria.where("id").is(source.id))).apply(update).first() + .then().as(StepVerifier::create).verifyComplete(); + + Flux.from(collection(Versioned.class).find(new org.bson.Document("_id", source.id)).limit(1)).collectList() + .as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it).containsExactly( + new org.bson.Document("_id", source.id).append("version", 1L).append("value", "changed").append("_class", + "org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Versioned")); + }).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void versionedAggregateUpdateTouchingVersionProperty() { + + Versioned source = new Versioned("id-1", "value-0"); + template.insert(Versioned.class).one(source).then().as(StepVerifier::create).verifyComplete(); + + AggregationUpdate update = AggregationUpdate.update() + .set(SetOperation.builder().set("value").toValue("changed").and().set("version").toValue(10L)); + + 
template.update(Versioned.class).matching(Query.query(Criteria.where("id").is(source.id))).apply(update).first() + .then().as(StepVerifier::create).verifyComplete(); + + Flux.from(collection(Versioned.class).find(new org.bson.Document("_id", source.id)).limit(1)).collectList() + .as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it).containsExactly( + new org.bson.Document("_id", source.id).append("version", 10L).append("value", "changed").append("_class", + "org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Versioned")); + }).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void aggregateUpdateWithUnset() { + + Book antelopeAntics = new Book(); + antelopeAntics.id = 1; + antelopeAntics.title = "Antelope Antics"; + antelopeAntics.isbn = "0001122223334"; + antelopeAntics.author = new Author("Auntie", "An"); + antelopeAntics.stock = new ArrayList<>(); + antelopeAntics.stock.add(new Warehouse("A", 5)); + antelopeAntics.stock.add(new Warehouse("B", 15)); + + Book beesBabble = new Book(); + beesBabble.id = 2; + beesBabble.title = "Bees Babble"; + beesBabble.isbn = "999999999333"; + beesBabble.author = new Author("Bee", "Bumble"); + beesBabble.stock = new ArrayList<>(); + beesBabble.stock.add(new Warehouse("A", 2)); + beesBabble.stock.add(new Warehouse("B", 5)); + + template.insertAll(Arrays.asList(antelopeAntics, beesBabble)).then().as(StepVerifier::create).verifyComplete(); + + AggregationUpdate update = AggregationUpdate.update().unset("isbn", "stock"); + template.update(Book.class).apply(update).all().then().as(StepVerifier::create).verifyComplete(); + + all(Book.class).collectList().as(StepVerifier::create).consumeNextWith(it -> { + + assertThat(it).containsExactlyInAnyOrder( // + org.bson.Document.parse( + "{ \"_id\" : 1, \"title\" : \"Antelope Antics\", \"author\" : { \"last\" : \"An\", \"first\" : \"Auntie\" }, \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Book\" }"), + 
org.bson.Document.parse( + "{ \"_id\" : 2, \"title\" : \"Bees Babble\", \"author\" : { \"last\" : \"Bumble\", \"first\" : \"Bee\" }, \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Book\" }")); + }).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void aggregateUpdateWithReplaceWith() { + + Book one = new Book(); + one.id = 1; + one.author = new Author("John", "Backus"); + + Book two = new Book(); + two.id = 2; + two.author = new Author("Grace", "Hopper"); + + template.insertAll(Arrays.asList(one, two)).then().as(StepVerifier::create).verifyComplete(); + + AggregationUpdate update = AggregationUpdate.update() + .replaceWith(ReplaceWithOperation.replaceWithValueOf("author")); + + template.update(Book.class).apply(update).all().then().as(StepVerifier::create).verifyComplete(); + + all(Book.class).collectList().as(StepVerifier::create).consumeNextWith(it -> { + + assertThat(it).containsExactlyInAnyOrder( + org.bson.Document.parse("{\"_id\" : 1, \"first\" : \"John\", \"last\" : \"Backus\"}"), + org.bson.Document.parse("{\"_id\" : 2, \"first\" : \"Grace\", \"last\" : \"Hopper\"}")); + }).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void aggregationUpdateUpsertsCorrectly() { + + AggregationUpdate update = AggregationUpdate.update().set("title").toValue("The Burning White"); + + template.update(Book.class).matching(Query.query(Criteria.where("id").is(1))).apply(update).upsert().then() + .as(StepVerifier::create).verifyComplete(); + + all(Book.class).collectList().as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it).containsExactly(org.bson.Document.parse("{\"_id\" : 1, \"title\" : \"The Burning White\" }")); + }).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void aggregateUpdateFirstMatch() { + + Book one = new Book(); + one.id = 1; + one.title = "The Blood Mirror"; + + Book two = new Book(); + two.id = 2; + two.title = "The Broken Eye"; + + template.insertAll(Arrays.asList(one, 
two)).then().as(StepVerifier::create).verifyComplete(); + + template.update(Book.class).apply(AggregationUpdate.update().set("title").toValue("The Blinding Knife")).first() + .then().as(StepVerifier::create).verifyComplete(); + + all(Book.class).collectList().as(StepVerifier::create).consumeNextWith(it -> { + + assertThat(it).containsExactly(org.bson.Document.parse( + "{\"_id\" : 1, \"title\" : \"The Blinding Knife\", \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Book\"}"), + org.bson.Document.parse( + "{\"_id\" : 2, \"title\" : \"The Broken Eye\", \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Book\"}")); + }).verifyComplete(); + } + + @Test // DATAMONGO-2331 + public void findAndModifyAppliesAggregationUpdateCorrectly() { + + Book one = new Book(); + one.id = 1; + one.title = "The Blood Mirror"; + + Book two = new Book(); + two.id = 2; + two.title = "The Broken Eye"; + + template.insertAll(Arrays.asList(one, two)).then().as(StepVerifier::create).verifyComplete(); + + template.update(Book.class) // + .matching(Query.query(Criteria.where("id").is(one.id))) // + .apply(AggregationUpdate.update().set("title").toValue("The Blinding Knife")) // + .findAndModify() // + .as(StepVerifier::create) // + .expectNext(one) // + .verifyComplete(); + + all(Book.class).collectList().as(StepVerifier::create).consumeNextWith(it -> { + + assertThat(it).containsExactly(org.bson.Document.parse( + "{\"_id\" : 1, \"title\" : \"The Blinding Knife\", \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Book\"}"), + org.bson.Document.parse( + "{\"_id\" : 2, \"title\" : \"The Broken Eye\", \"_class\" : \"org.springframework.data.mongodb.core.ReactiveMongoTemplateUpdateTests$Book\"}")); + }).verifyComplete(); + + } + + @ParameterizedTest // GH-4797 + @EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") + @MethodSource("sortedUpdateBookArgs") + void updateFirstWithSort(Class 
domainType, Sort sort, UpdateDefinition update) { + + Book one = new Book(); + one.id = 1; + one.isbn = "001 001 300"; + one.title = "News isn't fake"; + one.author = new Author("John", "Backus"); + + Book two = new Book(); + two.id = 2; + two.title = "love is love"; + two.isbn = "001 001 100"; + two.author = new Author("Grace", "Hopper"); + + template.insertAll(Arrays.asList(one, two)).then().as(StepVerifier::create).verifyComplete(); + + template.update(domainType) // + .inCollection(template.getCollectionName(Book.class))// + .matching(new Query().with(sort)).apply(update) // + .first().as(StepVerifier::create) // + .assertNext(result -> assertThat(result.getModifiedCount()).isOne()) // + .verifyComplete(); + + Mono.from(collection(Book.class).find(new org.bson.Document("_id", two.id)).first()) // + .as(StepVerifier::create) // + .assertNext(document -> assertThat(document).containsEntry("title", "Science is real!")) // + .verifyComplete(); + } + + + private Flux all(Class type) { + return Flux.from(collection(type).find(new org.bson.Document())); + } + + private MongoCollection collection(Class type) { + return client.getDatabase(DB_NAME).getCollection(template.getCollectionName(type)); + } + + private static Stream sortedUpdateBookArgs() { + + Update update = new Update().set("title", "Science is real!"); + AggregationUpdate aggUpdate = AggregationUpdate.update().set("title").toValue("Science is real!"); + + return Stream.of( // + Arguments.of(Book.class, Sort.by(Direction.ASC, "isbn"), update), // typed, no field mapping + Arguments.of(Book.class, Sort.by(Direction.DESC, "author.lastname"), update), // typed, map `lastname` + Arguments.of(Book.class, Sort.by(Direction.DESC, "author.last"), update), // typed, raw field name + Arguments.of(Object.class, Sort.by(Direction.ASC, "isbn"), update), // untyped, requires raw field name + Arguments.of(Book.class, Sort.by(Direction.ASC, "isbn"), aggUpdate), // aggregation, no field mapping + Arguments.of(Book.class, 
Sort.by(Direction.DESC, "author.last"), aggUpdate) // aggregation, raw field name + ); + } + + @Document("scores") + static class Score { + + Integer id; + String student; + List homework; + List quiz; + Integer extraCredit; + + public Score(Integer id, String student, List homework, List quiz, Integer extraCredit) { + + this.id = id; + this.student = student; + this.homework = homework; + this.quiz = quiz; + this.extraCredit = extraCredit; + } + } + + static class Versioned { + + String id; + @Version Long version; + String value; + + public Versioned(String id, String value) { + this.id = id; + this.value = value; + } + } + + static class Book { + + @Id Integer id; + String title; + String isbn; + Author author; + @Field("copies") Collection stock; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Book book = (Book) o; + return Objects.equals(id, book.id) && Objects.equals(title, book.title) && Objects.equals(isbn, book.isbn) + && Objects.equals(author, book.author) && Objects.equals(stock, book.stock); + } + + @Override + public int hashCode() { + return Objects.hash(id, title, isbn, author, stock); + } + } + + static class Author { + + @Field("first") String firstname; + @Field("last") String lastname; + + public Author(String firstname, String lastname) { + this.firstname = firstname; + this.lastname = lastname; + } + } + + static class Warehouse { + + public Warehouse(String location, Integer qty) { + this.location = location; + this.qty = qty; + } + + @Field("warehouse") String location; + Integer qty; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateViewTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateViewTests.java new file mode 100644 index 0000000000..0841ddc37f --- /dev/null +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateViewTests.java @@ -0,0 +1,216 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import reactor.test.StepVerifier; + +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.CollectionInfo; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +public class ReactiveMongoTemplateViewTests { + + static 
@Client com.mongodb.client.MongoClient client; + static @Client MongoClient reactiveClient; + static final String DB_NAME = "reactive-mongo-template-view-tests"; + + private ReactiveMongoTemplate template; + + Student alex = new Student(22001L, "Alex", 1, 4.0D); + Student bernie = new Student(21001L, "bernie", 2, 3.7D); + Student chris = new Student(20010L, "Chris", 3, 2.5D); + Student drew = new Student(22021L, "Drew", 1, 3.2D); + Student harley1 = new Student(17301L, "harley", 6, 3.1D); + Student farmer = new Student(21022L, "Farmer", 1, 2.2D); + Student george = new Student(20020L, "george", 3, 2.8D); + Student harley2 = new Student(18020, "Harley", 5, 2.8D); + + List students = Arrays.asList(alex, bernie, chris, drew, harley1, farmer, george, harley2); + + @BeforeEach + void beforeEach() { + template = new ReactiveMongoTemplate(reactiveClient, DB_NAME); + } + + @AfterEach + void afterEach() { + client.getDatabase(DB_NAME).drop(); + } + + @Test // GH-2594 + void createsViewFromPipeline() { + + template.insertAll(students).then().as(StepVerifier::create).verifyComplete(); + + template.createView("firstYears", Student.class, match(where("year").is(1))).then().as(StepVerifier::create) + .verifyComplete(); + + CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "firstYears"); + assertThat(collectionInfo.isView()).isTrue(); + assertThat(collectionInfo.getViewTarget()).isEqualTo("student"); + assertThat(collectionInfo.getViewPipeline()).containsExactly(new Document("$match", new Document("year", 1))); + } + + @Test // GH-2594 + void mapsPipelineAgainstDomainObject() { + + template.insertAll(students).then().as(StepVerifier::create).verifyComplete(); + + template.createView("fakeStudents", Student.class, match(where("studentID").gte("22"))).then() + .as(StepVerifier::create).verifyComplete(); + + CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "fakeStudents"); + 
assertThat(collectionInfo.isView()).isTrue(); + assertThat(collectionInfo.getViewPipeline()) + .containsExactly(new Document("$match", new Document("sID", new Document("$gte", "22")))); + } + + @Test // GH-2594 + void takesPipelineAsIsIfNoTypeDefined() { + + template.insertAll(students).then().as(StepVerifier::create).verifyComplete(); + + template.createView("fakeStudents", "student", AggregationPipeline.of(match(where("studentID").gte("22"))), + ViewOptions.none()).then().as(StepVerifier::create).verifyComplete(); + + CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "fakeStudents"); + assertThat(collectionInfo.isView()).isTrue(); + assertThat(collectionInfo.getViewPipeline()) + .containsExactly(new Document("$match", new Document("studentID", new Document("$gte", "22")))); + } + + @Test // GH-2594 + void readsFromView() { + + template.insertAll(students).then().as(StepVerifier::create).verifyComplete(); + client.getDatabase(DB_NAME).createView("firstYears", "student", + Arrays.asList(new Document("$match", new Document("year", 1)))); + + template.query(Student.class).inCollection("firstYears").all().collectList().as(StepVerifier::create) + .consumeNextWith(it -> assertThat(it).containsExactlyInAnyOrder(alex, drew, farmer)); + } + + @Test // GH-2594 + void appliesCollationToView() { + + template.insertAll(students).then().as(StepVerifier::create).verifyComplete(); + + template.createView("firstYears", Student.class, AggregationPipeline.of(match(where("year").is(1))), + new ViewOptions().collation(Collation.of("en_US"))).then().as(StepVerifier::create).verifyComplete(); + + CollectionInfo collectionInfo = MongoTestUtils.readCollectionInfo(client.getDatabase(DB_NAME), "firstYears"); + assertThat(collectionInfo.isView()).isTrue(); + assertThat(collectionInfo.getCollation().getLocale()).isEqualTo("en_US"); + } + + private static class Student { + + @Field("sID") Long studentID; + + int year; + + double score; + + String 
name; + + public Student() {} + + public Student(long studentID, String name, int year, double score) { + this.studentID = studentID; + this.name = name; + this.year = year; + this.score = score; + } + + public Long getStudentID() { + return this.studentID; + } + + public int getYear() { + return this.year; + } + + public double getScore() { + return this.score; + } + + public String getName() { + return this.name; + } + + public void setStudentID(Long studentID) { + this.studentID = studentID; + } + + public void setYear(int year) { + this.year = year; + } + + public void setScore(double score) { + this.score = score; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Student student = (Student) o; + return year == student.year && Double.compare(student.score, score) == 0 + && Objects.equals(studentID, student.studentID) && Objects.equals(name, student.name); + } + + @Override + public int hashCode() { + return Objects.hash(studentID, year, score, name); + } + + public String toString() { + return "ReactiveMongoTemplateViewTests.Student(studentID=" + this.getStudentID() + ", year=" + this.getYear() + + ", score=" + this.getScore() + ", name=" + this.getName() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupportTests.java index b1d7b114f1..5659869705 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupportTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupportTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. 
+ * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,35 +19,42 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import lombok.Data; import reactor.test.StepVerifier; -import org.junit.Before; -import org.junit.Test; +import java.util.Objects; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; -import com.mongodb.MongoClient; -import com.mongodb.reactivestreams.client.MongoClients; +import com.mongodb.client.MongoClient; /** * Integration tests for {@link ReactiveRemoveOperationSupport}. 
* * @author Mark Paluch */ -public class ReactiveRemoveOperationSupportTests { +@ExtendWith(MongoClientExtension.class) +class ReactiveRemoveOperationSupportTests { private static final String STAR_WARS = "star-wars"; - MongoTemplate blocking; - ReactiveMongoTemplate template; + private static @Client MongoClient client; + private static @Client com.mongodb.reactivestreams.client.MongoClient reactiveClient; + + private MongoTemplate blocking; + private ReactiveMongoTemplate template; - Person han; - Person luke; + private Person han; + private Person luke; - @Before - public void setUp() { + @BeforeEach + void setUp() { - blocking = new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "ExecutableRemoveOperationSupportTests")); + blocking = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, "ExecutableRemoveOperationSupportTests")); blocking.dropCollection(STAR_WARS); han = new Person(); @@ -61,49 +68,122 @@ public void setUp() { blocking.save(han); blocking.save(luke); - template = new ReactiveMongoTemplate(MongoClients.create(), "ExecutableRemoveOperationSupportTests"); + template = new ReactiveMongoTemplate(reactiveClient, "ExecutableRemoveOperationSupportTests"); } @Test // DATAMONGO-1719 - public void removeAll() { + void removeAll() { - StepVerifier.create(template.remove(Person.class).all()).consumeNextWith(actual -> { + template.remove(Person.class).all().as(StepVerifier::create).consumeNextWith(actual -> { assertThat(actual.getDeletedCount()).isEqualTo(2L); }).verifyComplete(); } @Test // DATAMONGO-1719 - public void removeAllMatching() { + void removeAllMatching() { - StepVerifier.create(template.remove(Person.class).matching(query(where("firstname").is("han"))).all()) + template.remove(Person.class).matching(query(where("firstname").is("han"))).all().as(StepVerifier::create) .consumeNextWith(actual -> assertThat(actual.getDeletedCount()).isEqualTo(1L)).verifyComplete(); } @Test // DATAMONGO-1719 - public void 
removeAllMatchingWithAlternateDomainTypeAndCollection() { + void removeAllMatchingCriteria() { - StepVerifier - .create(template.remove(Jedi.class).inCollection(STAR_WARS).matching(query(where("name").is("luke"))).all()) + template.remove(Person.class).matching(where("firstname").is("han")).all().as(StepVerifier::create) .consumeNextWith(actual -> assertThat(actual.getDeletedCount()).isEqualTo(1L)).verifyComplete(); } @Test // DATAMONGO-1719 - public void removeAndReturnAllMatching() { + void removeAllMatchingWithAlternateDomainTypeAndCollection() { - StepVerifier.create(template.remove(Person.class).matching(query(where("firstname").is("han"))).findAndRemove()) + template.remove(Jedi.class).inCollection(STAR_WARS).matching(query(where("name").is("luke"))).all() + .as(StepVerifier::create).consumeNextWith(actual -> assertThat(actual.getDeletedCount()).isEqualTo(1L)) + .verifyComplete(); + } + + @Test // DATAMONGO-1719 + void removeAndReturnAllMatching() { + + template.remove(Person.class).matching(query(where("firstname").is("han"))).findAndRemove().as(StepVerifier::create) .expectNext(han).verifyComplete(); } - @Data @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) static class Person { + @Id String id; String firstname; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname); + } + + public String toString() { + return "ReactiveRemoveOperationSupportTests.Person(id=" + this.getId() + ", 
firstname=" + this.getFirstname() + + ")"; + } } - @Data static class Jedi { @Field("firstname") String name; + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Jedi jedi = (Jedi) o; + return Objects.equals(name, jedi.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + public String toString() { + return "ReactiveRemoveOperationSupportTests.Jedi(name=" + this.getName() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveSessionBoundMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveSessionBoundMongoTemplateUnitTests.java new file mode 100644 index 0000000000..73970d2ad3 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveSessionBoundMongoTemplateUnitTests.java @@ -0,0 +1,329 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.Proxy; + +import org.bson.Document; +import org.bson.codecs.BsonValueCodec; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.conversions.Bson; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.reactivestreams.Publisher; + +import org.springframework.data.geo.Metrics; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate.ReactiveSessionBoundMongoTemplate; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; + +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.reactivestreams.client.AggregatePublisher; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.DistinctPublisher; +import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MapReducePublisher; +import 
com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; + +/** + * Unit tests for {@link ReactiveSessionBoundMongoTemplate}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @author Mathieu Ouellet + */ +@SuppressWarnings("unchecked") +@RunWith(MockitoJUnitRunner.Silent.class) +public class ReactiveSessionBoundMongoTemplateUnitTests { + + private static final String COLLECTION_NAME = "collection-1"; + + ReactiveSessionBoundMongoTemplate template; + MongoMappingContext mappingContext; + MappingMongoConverter converter; + + ReactiveMongoDatabaseFactory factory; + + @Mock MongoCollection collection; + @Mock MongoDatabase database; + @Mock ClientSession clientSession; + @Mock FindPublisher findPublisher; + Publisher collectionNamesPublisher; + @Mock AggregatePublisher aggregatePublisher; + @Mock DistinctPublisher distinctPublisher; + @Mock Publisher resultPublisher; + @Mock MapReducePublisher mapReducePublisher; + @Mock MongoClient client; + @Mock CodecRegistry codecRegistry; + + @Before + public void setUp() { + + mock(MongoCompatibilityAdapter.reactiveMongoDatabaseAdapter().forDb(database).collectionNamePublisherType()); + when(client.getDatabase(anyString())).thenReturn(database); + when(codecRegistry.get(any(Class.class))).thenReturn(new BsonValueCodec()); + when(database.getCodecRegistry()).thenReturn(codecRegistry); + when(database.getCollection(anyString())).thenReturn(collection); + when(database.getCollection(anyString(), any())).thenReturn(collection); + doReturn(collectionNamesPublisher).when(database).listCollectionNames(any(ClientSession.class)); + when(database.createCollection(any(ClientSession.class), any(), any())).thenReturn(resultPublisher); + when(database.runCommand(any(ClientSession.class), any(), any(Class.class))).thenReturn(resultPublisher); + 
when(collection.find(any(ClientSession.class))).thenReturn(findPublisher); + when(collection.find(any(ClientSession.class), any(Document.class))).thenReturn(findPublisher); + when(collection.find(any(ClientSession.class), any(Class.class))).thenReturn(findPublisher); + when(collection.find(any(ClientSession.class), any(), any())).thenReturn(findPublisher); + when(collection.deleteMany(any(ClientSession.class), any(), any())).thenReturn(resultPublisher); + when(collection.insertOne(any(ClientSession.class), any(Document.class))).thenReturn(resultPublisher); + when(collection.aggregate(any(ClientSession.class), anyList(), any(Class.class))).thenReturn(aggregatePublisher); + when(collection.countDocuments(any(ClientSession.class), any(), any(CountOptions.class))) + .thenReturn(resultPublisher); + when(collection.drop(any(ClientSession.class))).thenReturn(resultPublisher); + when(collection.findOneAndUpdate(any(ClientSession.class), any(), any(Bson.class), any())) + .thenReturn(resultPublisher); + when(collection.distinct(any(ClientSession.class), any(), any(Bson.class), any())).thenReturn(distinctPublisher); + when(collection.updateOne(any(ClientSession.class), any(), any(Bson.class), any(UpdateOptions.class))) + .thenReturn(resultPublisher); + when(collection.updateMany(any(ClientSession.class), any(), any(Bson.class), any(UpdateOptions.class))) + .thenReturn(resultPublisher); + when(collection.dropIndex(any(ClientSession.class), anyString())).thenReturn(resultPublisher); + when(collection.mapReduce(any(ClientSession.class), any(), any(), any())).thenReturn(mapReducePublisher); + when(findPublisher.projection(any())).thenReturn(findPublisher); + when(findPublisher.limit(anyInt())).thenReturn(findPublisher); + when(findPublisher.collation(any())).thenReturn(findPublisher); + when(findPublisher.first()).thenReturn(resultPublisher); + when(aggregatePublisher.allowDiskUse(anyBoolean())).thenReturn(aggregatePublisher); + + factory = new 
SimpleReactiveMongoDatabaseFactory(client, "foo"); + + this.mappingContext = new MongoMappingContext(); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + this.template = new ReactiveSessionBoundMongoTemplate(clientSession, new ReactiveMongoTemplate(factory, converter)); + } + + @Test // DATAMONGO-1880 + public void executeUsesProxiedCollectionInCallback() { + + template.execute("collection", MongoCollection::find).subscribe(); + + verify(collection, never()).find(); + verify(collection).find(eq(clientSession)); + } + + @Test // DATAMONGO-1880 + public void executeUsesProxiedDatabaseInCallback() { + + template.execute(MongoDatabase::listCollectionNames).subscribe(); + + verify(database, never()).listCollectionNames(); + verify(database).listCollectionNames(eq(clientSession)); + } + + @Test // DATAMONGO-1880 + public void findOneUsesProxiedCollection() { + + template.findOne(new Query(), Person.class).subscribe(); + + verify(collection).find(eq(clientSession), any(), any()); + } + + @Test // DATAMONGO-1880 + public void findShouldUseProxiedCollection() { + + template.find(new Query(), Person.class).subscribe(); + + verify(collection).find(eq(clientSession), any(Class.class)); + } + + @Test // DATAMONGO-1880 + public void findAllShouldUseProxiedCollection() { + + template.findAll(Person.class).subscribe(); + + verify(collection).find(eq(clientSession), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void executeCommandShouldUseProxiedDatabase() { + + template.executeCommand("{}").subscribe(); + + verify(database).runCommand(eq(clientSession), any(), any(Class.class)); + } + + @Test // DATAMONGO-1880 + public void removeShouldUseProxiedCollection() { + + template.remove(new Query(), Person.class).subscribe(); + + verify(collection).deleteMany(eq(clientSession), any(), any(DeleteOptions.class)); + } + + @Test // DATAMONGO-1880 + public void insertShouldUseProxiedCollection() { + + template.insert(new 
Person()).subscribe(); + + verify(collection).insertOne(eq(clientSession), any(Document.class)); + } + + @Test // DATAMONGO-1880 + public void aggregateShouldUseProxiedCollection() { + + template.aggregate(Aggregation.newAggregation(Aggregation.project("foo")), COLLECTION_NAME, Person.class) + .subscribe(); + + verify(collection).aggregate(eq(clientSession), anyList(), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void collectionExistsShouldUseProxiedDatabase() { + + template.collectionExists(Person.class).subscribe(); + + verify(database).listCollectionNames(eq(clientSession)); + } + + @Test // DATAMONGO-1880, GH-3522 + public void countShouldUseProxiedCollection() { + + template.count(new Query(), Person.class).subscribe(); + + verify(collection).countDocuments(eq(clientSession), any(), any(CountOptions.class)); + } + + @Test // GH-3522 + public void countShouldDelegateToExactCountNoMatterWhat() { + + template.useEstimatedCount(true); + template.count(new Query(), Person.class).subscribe(); + + verify(collection).countDocuments(eq(clientSession), any(), any(CountOptions.class)); + } + + @Test // DATAMONGO-1880 + public void createCollectionShouldUseProxiedDatabase() { + + template.createCollection(Person.class).subscribe(); + + verify(database).createCollection(eq(clientSession), anyString(), any()); + } + + @Test // DATAMONGO-1880 + public void dropShouldUseProxiedCollection() { + + template.dropCollection(Person.class).subscribe(); + + verify(collection).drop(eq(clientSession)); + } + + @Test // DATAMONGO-1880 + public void findAndModifyShouldUseProxiedCollection() { + + template.findAndModify(new Query(), new Update().set("foo", "bar"), Person.class).subscribe(); + + verify(collection).findOneAndUpdate(eq(clientSession), any(), any(Bson.class), any(FindOneAndUpdateOptions.class)); + } + + @Test // DATAMONGO-1880 + public void findDistinctShouldUseProxiedCollection() { + + template.findDistinct(new Query(), "firstName", Person.class, 
String.class).subscribe(); + + verify(collection).distinct(eq(clientSession), anyString(), any(), any()); + } + + @Test // DATAMONGO-1880, DATAMONGO-2264 + public void geoNearShouldUseProxiedDatabase() { + + template.geoNear(NearQuery.near(new Point(0, 0), Metrics.NEUTRAL), Person.class).subscribe(); + + verify(collection).aggregate(eq(clientSession), anyList(), eq(Document.class)); + } + + @Test // DATAMONGO-1880, DATAMONGO-1890, DATAMONGO-257 + public void mapReduceShouldUseProxiedCollection() { + + template.mapReduce(new BasicQuery("{}"), Person.class, COLLECTION_NAME, Person.class, "foo", "bar", + MapReduceOptions.options()).subscribe(); + + verify(collection).mapReduce(eq(clientSession), anyString(), anyString(), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void updateFirstShouldUseProxiedCollection() { + + template.updateFirst(new Query(), Update.update("foo", "bar"), Person.class).subscribe(); + + verify(collection).updateOne(eq(clientSession), any(), any(Bson.class), any(UpdateOptions.class)); + } + + @Test // DATAMONGO-1880 + public void updateMultiShouldUseProxiedCollection() { + + template.updateMulti(new Query(), Update.update("foo", "bar"), Person.class).subscribe(); + + verify(collection).updateMany(eq(clientSession), any(), any(Bson.class), any(UpdateOptions.class)); + } + + @Test // DATAMONGO-1880 + public void upsertShouldUseProxiedCollection() { + + template.upsert(new Query(), Update.update("foo", "bar"), Person.class).subscribe(); + + verify(collection).updateOne(eq(clientSession), any(), any(Bson.class), any(UpdateOptions.class)); + } + + @Test // DATAMONGO-1880 + public void getCollectionShouldShouldJustReturnTheCollection/*No ClientSession binding*/() { + assertThat(template.getCollection(COLLECTION_NAME).block()).isNotInstanceOf(Proxy.class) + .isInstanceOf(MongoCollection.class); + } + + @Test // DATAMONGO-1880 + public void getDbShouldJustReturnTheDatabase/*No ClientSession binding*/() { + 
assertThat(template.getMongoDatabase().block()).isNotInstanceOf(Proxy.class).isInstanceOf(MongoDatabase.class); + } + + @Test // DATAMONGO-1880 + public void indexOpsShouldUseProxiedCollection() { + + template.indexOps(COLLECTION_NAME).dropIndex("index-name").subscribe(); + + verify(collection).dropIndex(eq(clientSession), eq("index-name")); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupportTests.java index 3f4b9f83a3..3ac99c2b6d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupportTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupportTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,38 +19,45 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import lombok.Data; import reactor.test.StepVerifier; +import java.util.Objects; + import org.bson.BsonString; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; -import com.mongodb.MongoClient; -import com.mongodb.reactivestreams.client.MongoClients; +import com.mongodb.client.MongoClient; /** * Integration tests for {@link ReactiveUpdateOperationSupport}. 
* * @author Mark Paluch */ -public class ReactiveUpdateOperationSupportTests { +@ExtendWith(MongoClientExtension.class) +class ReactiveUpdateOperationSupportTests { private static final String STAR_WARS = "star-wars"; - MongoTemplate blocking; - ReactiveMongoTemplate template; + private static @Client MongoClient client; + private static @Client com.mongodb.reactivestreams.client.MongoClient reactiveClient; + + private MongoTemplate blocking; + private ReactiveMongoTemplate template; - Person han; - Person luke; + private Person han; + private Person luke; - @Before - public void setUp() { + @BeforeEach + void setUp() { - blocking = new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "ExecutableUpdateOperationSupportTests")); + blocking = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, "ExecutableUpdateOperationSupportTests")); blocking.dropCollection(STAR_WARS); han = new Person(); @@ -64,33 +71,34 @@ public void setUp() { blocking.save(han); blocking.save(luke); - template = new ReactiveMongoTemplate(MongoClients.create(), "ExecutableUpdateOperationSupportTests"); + template = new ReactiveMongoTemplate(reactiveClient, "ExecutableUpdateOperationSupportTests"); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1719 - public void domainTypeIsRequired() { - template.update(null); + @Test // DATAMONGO-1719 + void domainTypeIsRequired() { + assertThatIllegalArgumentException().isThrownBy(() -> template.update(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1719 - public void updateIsRequired() { - template.update(Person.class).apply(null); + @Test // DATAMONGO-1719 + void updateIsRequired() { + assertThatIllegalArgumentException().isThrownBy(() -> template.update(Person.class).apply(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1719 - public void collectionIsRequiredOnSet() { - template.update(Person.class).inCollection(null); + @Test // DATAMONGO-1719 + void 
collectionIsRequiredOnSet() { + assertThatIllegalArgumentException().isThrownBy(() -> template.update(Person.class).inCollection(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1719 - public void findAndModifyOptionsAreRequiredOnSet() { - template.update(Person.class).apply(new Update()).withOptions(null); + @Test // DATAMONGO-1719 + void findAndModifyOptionsAreRequiredOnSet() { + assertThatIllegalArgumentException() + .isThrownBy(() -> template.update(Person.class).apply(new Update()).withOptions(null)); } @Test // DATAMONGO-1719 - public void updateFirst() { + void updateFirst() { - StepVerifier.create(template.update(Person.class).apply(new Update().set("firstname", "Han")).first()) + template.update(Person.class).apply(new Update().set("firstname", "Han")).first().as(StepVerifier::create) .consumeNextWith(actual -> { assertThat(actual.getModifiedCount()).isEqualTo(1L); @@ -100,9 +108,9 @@ public void updateFirst() { } @Test // DATAMONGO-1719 - public void updateAll() { + void updateAll() { - StepVerifier.create(template.update(Person.class).apply(new Update().set("firstname", "Han")).all()) + template.update(Person.class).apply(new Update().set("firstname", "Han")).all().as(StepVerifier::create) .consumeNextWith(actual -> { assertThat(actual.getModifiedCount()).isEqualTo(2L); @@ -111,11 +119,21 @@ public void updateAll() { } @Test // DATAMONGO-1719 - public void updateAllMatching() { + void updateAllMatching() { - StepVerifier - .create(template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han")).all()) - .consumeNextWith(actual -> { + template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han")).all() + .as(StepVerifier::create).consumeNextWith(actual -> { + + assertThat(actual.getModifiedCount()).isEqualTo(1L); + assertThat(actual.getUpsertedId()).isNull(); + }).verifyComplete(); + } + + @Test // DATAMONGO-2416 + void updateAllMatchingCriteria() { + + 
template.update(Person.class).matching(where("id").is(han.getId())).apply(new Update().set("firstname", "Han")) + .all().as(StepVerifier::create).consumeNextWith(actual -> { assertThat(actual.getModifiedCount()).isEqualTo(1L); assertThat(actual.getUpsertedId()).isNull(); @@ -123,11 +141,10 @@ public void updateAllMatching() { } @Test // DATAMONGO-1719 - public void updateWithDifferentDomainClassAndCollection() { + void updateWithDifferentDomainClassAndCollection() { - StepVerifier.create(template.update(Jedi.class).inCollection(STAR_WARS) - .matching(query(where("_id").is(han.getId()))).apply(new Update().set("name", "Han")).all()) - .consumeNextWith(actual -> { + template.update(Jedi.class).inCollection(STAR_WARS).matching(query(where("_id").is(han.getId()))) + .apply(new Update().set("name", "Han")).all().as(StepVerifier::create).consumeNextWith(actual -> { assertThat(actual.getModifiedCount()).isEqualTo(1L); assertThat(actual.getUpsertedId()).isNull(); @@ -138,22 +155,20 @@ public void updateWithDifferentDomainClassAndCollection() { } @Test // DATAMONGO-1719 - public void findAndModify() { + void findAndModify() { - StepVerifier.create( - template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han")).findAndModify()) - .expectNext(han).verifyComplete(); + template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han")).findAndModify() + .as(StepVerifier::create).expectNext(han).verifyComplete(); assertThat(blocking.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", "Han"); } @Test // DATAMONGO-1719 - public void findAndModifyWithDifferentDomainTypeAndCollection() { + void findAndModifyWithDifferentDomainTypeAndCollection() { - StepVerifier - .create(template.update(Jedi.class).inCollection(STAR_WARS).matching(query(where("_id").is(han.getId()))) - .apply(new Update().set("name", "Han")).findAndModify()) + 
template.update(Jedi.class).inCollection(STAR_WARS).matching(query(where("_id").is(han.getId()))) + .apply(new Update().set("name", "Han")).findAndModify().as(StepVerifier::create) .consumeNextWith(actual -> assertThat(actual.getName()).isEqualTo("han")).verifyComplete(); assertThat(blocking.findOne(queryHan(), Person.class)).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", @@ -161,45 +176,182 @@ public void findAndModifyWithDifferentDomainTypeAndCollection() { } @Test // DATAMONGO-1719 - public void findAndModifyWithOptions() { + void findAndModifyWithOptions() { - StepVerifier.create(template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han")) - .withOptions(FindAndModifyOptions.options().returnNew(true)).findAndModify()).consumeNextWith(actual -> { + template.update(Person.class).matching(queryHan()).apply(new Update().set("firstname", "Han")) + .withOptions(FindAndModifyOptions.options().returnNew(true)).findAndModify().as(StepVerifier::create) + .consumeNextWith(actual -> { assertThat(actual).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", "Han"); }).verifyComplete(); } @Test // DATAMONGO-1719 - public void upsert() { + void upsert() { - StepVerifier.create(template.update(Person.class).matching(query(where("id").is("id-3"))) - .apply(new Update().set("firstname", "Chewbacca")).upsert()).consumeNextWith(actual -> { + template.update(Person.class).matching(query(where("id").is("id-3"))) + .apply(new Update().set("firstname", "Chewbacca")).upsert().as(StepVerifier::create).consumeNextWith(actual -> { assertThat(actual.getModifiedCount()).isEqualTo(0L); assertThat(actual.getUpsertedId()).isEqualTo(new BsonString("id-3")); }).verifyComplete(); } + @Test // DATAMONGO-1827 + void findAndReplace() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + template.update(Person.class).matching(queryHan()).replaceWith(luke).findAndReplace() // + .as(StepVerifier::create).expectNext(han).verifyComplete(); + + 
template.findOne(queryHan(), Person.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", "Luke"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceWithProjection() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + template.update(Person.class).matching(queryHan()).replaceWith(luke).as(Jedi.class).findAndReplace() // + .as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getName()).isEqualTo(han.firstname); + }).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceWithCollection() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + template.update(Person.class).inCollection(STAR_WARS).matching(queryHan()).replaceWith(luke).findAndReplace() // + .as(StepVerifier::create).expectNext(han).verifyComplete(); + + template.findOne(queryHan(), Person.class) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", "Luke"); + }).verifyComplete(); + } + + @Test // DATAMONGO-1827 + void findAndReplaceWithOptions() { + + Person luke = new Person(); + luke.firstname = "Luke"; + + template.update(Person.class).matching(queryHan()).replaceWith(luke) + .withOptions(FindAndReplaceOptions.options().returnNew()).findAndReplace() // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual).isNotEqualTo(han).hasFieldOrPropertyWithValue("firstname", "Luke"); + }).verifyComplete(); + } + private Query queryHan() { return query(where("id").is(han.getId())); } - @Data @org.springframework.data.mongodb.core.mapping.Document(collection = STAR_WARS) static class Person { + @Id String id; String firstname; + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public void setId(String id) { + this.id = id; + } + + public void 
setFirstname(String firstname) { + this.firstname = firstname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname); + } + + public String toString() { + return "ReactiveUpdateOperationSupportTests.Person(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ")"; + } } - @Data static class Human { + @Id String id; + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public String toString() { + return "ReactiveUpdateOperationSupportTests.Human(id=" + this.getId() + ")"; + } } - @Data static class Jedi { @Field("firstname") String name; + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Jedi jedi = (Jedi) o; + return Objects.equals(name, jedi.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + public String toString() { + return "ReactiveUpdateOperationSupportTests.Jedi(name=" + this.getName() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ScrollUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ScrollUtilsUnitTests.java new file mode 100644 index 0000000000..ed0010242c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ScrollUtilsUnitTests.java @@ -0,0 +1,56 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.AssertionsForClassTypes.*; +import static org.mockito.Mockito.*; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.EntityOperations.Entity; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Unit tests for {@link ScrollUtils}. 
+ * + * @author Mark Paluch + */ +class ScrollUtilsUnitTests { + + @Test // GH-4413 + void positionShouldRetainScrollDirection() { + + Query query = new Query(); + query.with(ScrollPosition.keyset().backward()); + EntityOperations entityOperationsMock = mock(EntityOperations.class); + Entity entityMock = mock(Entity.class); + + when(entityOperationsMock.forEntity(any())).thenReturn(entityMock); + when(entityMock.extractKeys(any(), any())).thenReturn(Map.of("k", "v")); + + Window window = ScrollUtils.createWindow(query, new ArrayList<>(List.of(1, 2, 3)), Integer.class, + entityOperationsMock); + + assertThat(window.positionAt(0)).isInstanceOf(KeysetScrollPosition.class); + assertThat(((KeysetScrollPosition) window.positionAt(0)).scrollsBackward()).isTrue(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SerializationUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SerializationUtilsUnitTests.java index e9572127cc..1bf9114967 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SerializationUtilsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SerializationUtilsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,14 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.SerializationUtils.*; import java.util.Arrays; import java.util.Map; import org.bson.Document; -import org.hamcrest.Matcher; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.query.SerializationUtils; import com.mongodb.BasicDBList; @@ -41,26 +39,24 @@ public class SerializationUtilsUnitTests { public void writesSimpleDocument() { Document document = new Document("foo", "bar"); - assertThat(serializeToJsonSafely(document), is("{ \"foo\" : \"bar\" }")); + assertThat(serializeToJsonSafely(document)).isEqualTo("{ \"foo\" : \"bar\"}"); } @Test public void writesComplexObjectAsPlainToString() { Document document = new Document("foo", new Complex()); - assertThat(serializeToJsonSafely(document), - startsWith("{ \"foo\" : { \"$java\" : org.springframework.data.mongodb.core.SerializationUtilsUnitTests$Complex")); + assertThat(serializeToJsonSafely(document).startsWith( + "{ \"foo\" : { \"$java\" : org.springframework.data.mongodb.core.SerializationUtilsUnitTests$Complex")); } @Test public void writesCollection() { Document document = new Document("foo", Arrays.asList("bar", new Complex())); - Matcher expectedOutput = allOf( - startsWith( - "{ \"foo\" : [ \"bar\", { \"$java\" : org.springframework.data.mongodb.core.SerializationUtilsUnitTests$Complex"), - endsWith(" } ] }")); - assertThat(serializeToJsonSafely(document), is(expectedOutput)); + assertThat(serializeToJsonSafely(document)).startsWith( 
+ "{ \"foo\" : [ \"bar\", { \"$java\" : org.springframework.data.mongodb.core.SerializationUtilsUnitTests$Complex") + .endsWith(" } ] }"); } @Test // DATAMONGO-1245 @@ -70,8 +66,7 @@ public void flattenMapShouldFlatOutNestedStructureCorrectly() { document.put("_id", 1); document.put("nested", new Document("value", "conflux")); - assertThat(flattenMap(document), hasEntry("_id", (Object) 1)); - assertThat(flattenMap(document), hasEntry("nested.value", (Object) "conflux")); + assertThat(flattenMap(document)).containsEntry("_id", 1).containsEntry("nested.value", "conflux"); } @Test // DATAMONGO-1245 @@ -84,8 +79,7 @@ public void flattenMapShouldFlatOutNestedStructureWithListCorrectly() { document.put("_id", 1); document.put("nested", new Document("value", dbl)); - assertThat(flattenMap(document), hasEntry("_id", (Object) 1)); - assertThat(flattenMap(document), hasEntry("nested.value", (Object) dbl)); + assertThat(flattenMap(document)).containsEntry("_id", 1).containsEntry("nested.value", dbl); } @Test // DATAMONGO-1245 @@ -97,9 +91,8 @@ public void flattenMapShouldLeaveKeywordsUntouched() { Map map = flattenMap(document); - assertThat(map, hasEntry("_id", (Object) 1)); - assertThat(map.get("nested"), notNullValue()); - assertThat(((Map) map.get("nested")).get("$regex"), is((Object) "^conflux$")); + assertThat(map).containsEntry("_id", 1).containsKey("nested"); + assertThat(((Map) map.get("nested")).get("$regex")).isEqualTo("^conflux$"); } @Test // DATAMONGO-1245 @@ -114,15 +107,14 @@ public void flattenMapShouldAppendCommandsCorrectly() { Map map = flattenMap(document); - assertThat(map, hasEntry("_id", (Object) 1)); - assertThat(map.get("nested"), notNullValue()); - assertThat(((Map) map.get("nested")).get("$regex"), is((Object) "^conflux$")); - assertThat(((Map) map.get("nested")).get("$options"), is((Object) "i")); + assertThat(map).containsEntry("_id", 1).containsKey("nested"); + assertThat(((Map) map.get("nested")).get("$regex")).isEqualTo("^conflux$"); + 
assertThat(((Map) map.get("nested")).get("$options")).isEqualTo("i"); } @Test // DATAMONGO-1245 public void flattenMapShouldReturnEmptyMapWhenSourceIsNull() { - assertThat(flattenMap(null).isEmpty(), is(true)); + assertThat(flattenMap(null)).isEmpty(); } static class Complex { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SessionBoundMongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SessionBoundMongoTemplateTests.java new file mode 100644 index 0000000000..8769656537 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SessionBoundMongoTemplateTests.java @@ -0,0 +1,444 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.Proxy; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import org.aopalliance.aop.Advice; +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mockito; +import org.springframework.aop.Advisor; +import org.springframework.aop.framework.Advised; +import org.springframework.dao.DataAccessException; +import org.springframework.data.annotation.Id; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.ClientSessionException; +import org.springframework.data.mongodb.LazyLoadingException; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.SessionAwareMethodInterceptor; +import org.springframework.data.mongodb.core.MongoTemplate.SessionBoundMongoTemplate; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; 
+import org.springframework.data.mongodb.core.index.GeospatialIndex; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoVersion; +import org.springframework.data.mongodb.test.util.ReplSetClient; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; + +/** + * Integration tests for {@link SessionBoundMongoTemplate} operating up an active {@link ClientSession}. + * + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +@EnableIfReplicaSetAvailable +public class SessionBoundMongoTemplateTests { + + static @ReplSetClient MongoClient client; + + MongoTemplate template; + SessionBoundMongoTemplate sessionBoundTemplate; + ClientSession session; + volatile List> spiedCollections = new ArrayList<>(); + volatile List spiedDatabases = new ArrayList<>(); + + @BeforeEach + public void setUp() { + + MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(client, "session-bound-mongo-template-tests") { + + @Override + public MongoDatabase getMongoDatabase() throws DataAccessException { + + MongoDatabase spiedDatabase = Mockito.spy(super.getMongoDatabase()); + spiedDatabases.add(spiedDatabase); + return spiedDatabase; + } + }; + + session = client.startSession(ClientSessionOptions.builder().build()); + + this.template = new MongoTemplate(factory); + + this.sessionBoundTemplate = new SessionBoundMongoTemplate(session, + new MongoTemplate(factory, getDefaultMongoConverter(factory))) { + + 
@Override + protected MongoCollection prepareCollection(MongoCollection collection) { + + injectCollectionSpy(collection); + + return super.prepareCollection(collection); + } + + @SuppressWarnings({ "ConstantConditions", "unchecked" }) + private void injectCollectionSpy(MongoCollection collection) { + + InvocationHandler handler = Proxy.getInvocationHandler(collection); + + Advised advised = (Advised) ReflectionTestUtils.getField(handler, "advised"); + + for (Advisor advisor : advised.getAdvisors()) { + Advice advice = advisor.getAdvice(); + if (advice instanceof SessionAwareMethodInterceptor) { + + MongoCollection spiedCollection = Mockito + .spy((MongoCollection) ReflectionTestUtils.getField(advice, "target")); + spiedCollections.add(spiedCollection); + + ReflectionTestUtils.setField(advice, "target", spiedCollection); + } + } + } + }; + } + + @AfterEach + public void tearDown() { + + session.close(); + } + + @Test // DATAMONGO-1880 + public void findDelegatesToMethodWithSession() { + + sessionBoundTemplate.find(new Query(), Person.class); + + verify(operation(0)).find(eq(session), any(), any()); + } + + @Test // DATAMONGO-1880 + public void fluentFindDelegatesToMethodWithSession() { + + sessionBoundTemplate.query(Person.class).all(); + + verify(operation(0)).find(eq(session), any(), any()); + } + + @Test // DATAMONGO-1880 + public void aggregateDelegatesToMethoddWithSession() { + + sessionBoundTemplate.aggregate(Aggregation.newAggregation(Aggregation.project("firstName")), Person.class, + Person.class); + + verify(operation(0)).aggregate(eq(session), any(), any()); + } + + @Test // DATAMONGO-1880 + public void collectionExistsDelegatesToMethodWithSession() { + + sessionBoundTemplate.collectionExists(Person.class); + + verify(command(0)).listCollectionNames(eq(session)); + } + + @Test // DATAMONGO-1880 + public void shouldLoadDbRefWhenSessionIsActive() { + + Person person = new Person("Kylar Stern"); + + template.save(person); + + WithDbRef wdr = new WithDbRef(); 
+ wdr.id = "id-1"; + wdr.personRef = person; + + template.save(wdr); + + WithDbRef result = sessionBoundTemplate.findById(wdr.id, WithDbRef.class); + + assertThat(result.personRef).isEqualTo(person); + } + + @Test // DATAMONGO-1880 + public void shouldErrorOnLoadDbRefWhenSessionIsClosed() { + + Person person = new Person("Kylar Stern"); + + template.save(person); + + WithDbRef wdr = new WithDbRef(); + wdr.id = "id-1"; + wdr.personRef = person; + + template.save(wdr); + + session.close(); + + assertThatExceptionOfType(ClientSessionException.class) + .isThrownBy(() -> sessionBoundTemplate.findById(wdr.id, WithDbRef.class)); + } + + @Test // DATAMONGO-1880 + public void shouldLoadLazyDbRefWhenSessionIsActive() { + + Person person = new Person("Kylar Stern"); + + template.save(person); + + WithLazyDbRef wdr = new WithLazyDbRef(); + wdr.id = "id-1"; + wdr.personRef = person; + + template.save(wdr); + + WithLazyDbRef result = sessionBoundTemplate.findById(wdr.id, WithLazyDbRef.class); + + assertThat(result.getPersonRef()).isEqualTo(person); + } + + @Test // DATAMONGO-1880 + public void shouldErrorOnLoadLazyDbRefWhenSessionIsClosed() { + + Person person = new Person("Kylar Stern"); + + template.save(person); + + WithLazyDbRef wdr = new WithLazyDbRef(); + wdr.id = "id-1"; + wdr.personRef = person; + + template.save(wdr); + + WithLazyDbRef result = sessionBoundTemplate.findById(wdr.id, WithLazyDbRef.class); + + session.close(); // now close the session + + assertThatExceptionOfType(LazyLoadingException.class).isThrownBy(() -> result.getPersonRef().toString()); + } + + @Test // DATAMONGO-2001 + @MongoVersion(asOf = "4.0") + public void countShouldWorkInTransactions() { + + if (!template.collectionExists(Person.class)) { + template.createCollection(Person.class); + } else { + template.remove(Person.class).all(); + } + + ClientSession session = client.startSession(); + session.startTransaction(); + + MongoTemplate sessionBound = template.withSession(session); + + 
sessionBound.save(new Person("Kylar Stern")); + + assertThat(sessionBound.query(Person.class).matching(query(where("firstName").is("foobar"))).count()).isZero(); + assertThat(sessionBound.query(Person.class).matching(query(where("firstName").is("Kylar Stern"))).count()).isOne(); + assertThat(sessionBound.query(Person.class).count()).isOne(); + + session.commitTransaction(); + session.close(); + } + + @Test // DATAMONGO-2012 + @MongoVersion(asOf = "4.0") + public void countWithGeoInTransaction() { + + if (!template.collectionExists(Person.class)) { + template.createCollection(Person.class); + template.indexOps(Person.class).ensureIndex(new GeospatialIndex("location")); + } else { + template.remove(Person.class).all(); + } + + ClientSession session = client.startSession(); + session.startTransaction(); + + MongoTemplate sessionBound = template.withSession(session); + + sessionBound.save(new Person("Kylar Stern")); + + assertThat(sessionBound.query(Person.class).matching(query(where("location").near(new Point(1, 0)))).count()) + .isZero(); + + session.commitTransaction(); + session.close(); + } + + @Test // DATAMONGO-2001 + @MongoVersion(asOf = "4.0") + public void countShouldReturnIsolatedCount() throws InterruptedException { + + if (!template.collectionExists(Person.class)) { + template.createCollection(Person.class); + } else { + template.remove(Person.class).all(); + } + + int nrThreads = 2; + CountDownLatch savedInTransaction = new CountDownLatch(nrThreads); + CountDownLatch beforeCommit = new CountDownLatch(nrThreads); + List resultList = new CopyOnWriteArrayList<>(); + + Runnable runnable = () -> { + + ClientSession session = client.startSession(); + session.startTransaction(); + + try { + MongoTemplate sessionBound = template.withSession(session); + + try { + sessionBound.save(new Person("Kylar Stern")); + } finally { + savedInTransaction.countDown(); + } + + savedInTransaction.await(1, TimeUnit.SECONDS); + + try { + 
resultList.add(sessionBound.query(Person.class).count()); + } finally { + beforeCommit.countDown(); + } + + beforeCommit.await(1, TimeUnit.SECONDS); + } catch (Exception e) { + resultList.add(e); + } + + session.commitTransaction(); + session.close(); + }; + + List threads = IntStream.range(0, nrThreads) // + .mapToObj(i -> new Thread(runnable)) // + .peek(Thread::start) // + .collect(Collectors.toList()); + + for (Thread thread : threads) { + thread.join(); + } + + assertThat(template.query(Person.class).count()).isEqualTo(2L); + assertThat(resultList).hasSize(nrThreads).allMatch(it -> it.equals(1L)); + } + + static class WithDbRef { + + @Id String id; + @DBRef Person personRef; + + public String getId() { + return this.id; + } + + public Person getPersonRef() { + return this.personRef; + } + + public void setId(String id) { + this.id = id; + } + + public void setPersonRef(Person personRef) { + this.personRef = personRef; + } + + public String toString() { + return "SessionBoundMongoTemplateTests.WithDbRef(id=" + this.getId() + ", personRef=" + this.getPersonRef() + ")"; + } + } + + static class WithLazyDbRef { + + @Id String id; + @DBRef(lazy = true) Person personRef; + + public Person getPersonRef() { + return personRef; + } + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public void setPersonRef(Person personRef) { + this.personRef = personRef; + } + + public String toString() { + return "SessionBoundMongoTemplateTests.WithLazyDbRef(id=" + this.getId() + ", personRef=" + this.getPersonRef() + + ")"; + } + } + + // --> Just some helpers for testing + + MongoCollection operation(int index) { + return spiedCollections.get(index); + } + + MongoDatabase command(int index) { + return spiedDatabases.get(index); + } + + private MongoConverter getDefaultMongoConverter(MongoDatabaseFactory factory) { + + DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory); + MongoCustomConversions conversions = new 
MongoCustomConversions(Collections.emptyList()); + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); + mappingContext.afterPropertiesSet(); + + MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext); + converter.setCustomConversions(conversions); + converter.setCodecRegistryProvider(factory); + converter.afterPropertiesSet(); + + return converter; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SessionBoundMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SessionBoundMongoTemplateUnitTests.java new file mode 100644 index 0000000000..dfa4b00515 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SessionBoundMongoTemplateUnitTests.java @@ -0,0 +1,353 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.lang.reflect.Proxy; +import java.util.Collections; + +import com.mongodb.client.*; +import org.bson.Document; +import org.bson.codecs.BsonValueCodec; +import org.bson.codecs.configuration.CodecRegistry; +import org.bson.conversions.Bson; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Answers; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.springframework.data.geo.Metrics; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.MongoTemplate.SessionBoundMongoTemplate; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; + +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.UpdateOptions; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; + +/** + * Unit test for {@link SessionBoundMongoTemplate} making sure a proxied {@link MongoCollection} and + * {@link MongoDatabase} is used for executing high level commands like {@link MongoOperations#find(Query, Class)} + * provided by Spring Data. 
Those commands simply handing over MongoDB base types for interaction like when obtaining a + * {@link MongoCollection} via {@link MongoOperations#getCollection(String)} shall not be proxied as the user can + * control the behavior by using the methods dedicated for {@link ClientSession} directly. + * + * @author Christoph Strobl + * @author Jens Schauder + */ +@SuppressWarnings("unchecked") +@RunWith(MockitoJUnitRunner.Silent.class) +public class SessionBoundMongoTemplateUnitTests { + + private static final String COLLECTION_NAME = "collection-1"; + + SessionBoundMongoTemplate template; + + MongoDatabaseFactory factory; + + @Mock(answer = Answers.RETURNS_DEEP_STUBS) MongoCollection collection; + @Mock MongoDatabase database; + @Mock MongoClient client; + @Mock ClientSession clientSession; + @Mock FindIterable findIterable; + MongoIterable collectionNamesIterable; + @Mock MongoIterable mongoIterable; + @Mock DistinctIterable distinctIterable; + @Mock AggregateIterable aggregateIterable; + @Mock MapReduceIterable mapReduceIterable; + @Mock MongoCursor cursor; + @Mock CodecRegistry codecRegistry; + + MappingMongoConverter converter; + MongoMappingContext mappingContext; + + @Before + public void setUp() { + + collectionNamesIterable = mock(MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(database).collectionNameIterableType()); + when(client.getDatabase(anyString())).thenReturn(database); + when(codecRegistry.get(any(Class.class))).thenReturn(new BsonValueCodec()); + when(database.getCodecRegistry()).thenReturn(codecRegistry); + when(database.getCollection(anyString(), any())).thenReturn(collection); + doReturn(collectionNamesIterable).when(database).listCollectionNames(any(ClientSession.class)); + when(collection.find(any(ClientSession.class), any(), any())).thenReturn(findIterable); + when(collection.aggregate(any(ClientSession.class), anyList(), any())).thenReturn(aggregateIterable); + when(collection.distinct(any(ClientSession.class), any(), any(), 
any())).thenReturn(distinctIterable); + when(collection.mapReduce(any(ClientSession.class), any(), any(), any())).thenReturn(mapReduceIterable); + when(findIterable.iterator()).thenReturn(cursor); + when(aggregateIterable.collation(any())).thenReturn(aggregateIterable); + when(aggregateIterable.allowDiskUse(anyBoolean())).thenReturn(aggregateIterable); + when(aggregateIterable.batchSize(anyInt())).thenReturn(aggregateIterable); + when(aggregateIterable.map(any())).thenReturn(aggregateIterable); + when(aggregateIterable.into(any())).thenReturn(Collections.emptyList()); + when(mongoIterable.iterator()).thenReturn(cursor); + when(collectionNamesIterable.iterator()).thenReturn(cursor); + when(distinctIterable.map(any())).thenReturn(distinctIterable); + when(distinctIterable.into(any())).thenReturn(Collections.emptyList()); + when(mapReduceIterable.sort(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.filter(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.map(any())).thenReturn(mapReduceIterable); + when(mapReduceIterable.iterator()).thenReturn(cursor); + when(cursor.hasNext()).thenReturn(false); + when(findIterable.projection(any())).thenReturn(findIterable); + + factory = new SimpleMongoClientDatabaseFactory(client, "foo"); + + this.mappingContext = new MongoMappingContext(); + this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext); + this.template = new SessionBoundMongoTemplate(clientSession, new MongoTemplate(factory, converter)); + } + + @Test // DATAMONGO-1880 + public void executeUsesProxiedCollectionInCallback() { + + template.execute("collection", MongoCollection::find); + + verify(collection, never()).find(); + verify(collection).find(eq(clientSession)); + } + + @Test // DATAMONGO-1880 + public void executeUsesProxiedDatabaseInCallback() { + + template.execute(MongoDatabase::listCollectionNames); + + verify(database, never()).listCollectionNames(); + 
verify(database).listCollectionNames(eq(clientSession)); + } + + @Test // DATAMONGO-1880 + public void findOneUsesProxiedCollection() { + + template.findOne(new Query(), Person.class); + + verify(collection).find(eq(clientSession), any(), any()); + } + + @Test // DATAMONGO-1880 + public void findShouldUseProxiedCollection() { + + template.find(new Query(), Person.class); + + verify(collection).find(eq(clientSession), any(), any()); + } + + @Test // DATAMONGO-1880 + public void findAllShouldUseProxiedCollection() { + + template.findAll(Person.class); + + verify(collection).find(eq(clientSession), any(), any()); + } + + @Test // DATAMONGO-1880 + public void executeCommandShouldUseProxiedDatabase() { + + template.executeCommand("{}"); + + verify(database).runCommand(eq(clientSession), any(), any(Class.class)); + } + + @Test // DATAMONGO-1880 + public void removeShouldUseProxiedCollection() { + + template.remove(new Query(), Person.class); + + verify(collection).deleteMany(eq(clientSession), any(), any(DeleteOptions.class)); + } + + @Test // DATAMONGO-1880 + public void insertShouldUseProxiedCollection() { + + template.insert(new Person()); + + verify(collection).insertOne(eq(clientSession), any(Document.class)); + } + + @Test // DATAMONGO-1880 + public void aggregateShouldUseProxiedCollection() { + + template.aggregate(Aggregation.newAggregation(Aggregation.project("foo")), COLLECTION_NAME, Person.class); + + verify(collection).aggregate(eq(clientSession), anyList(), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void aggregateStreamShouldUseProxiedCollection() { + + template.aggregateStream(Aggregation.newAggregation(Aggregation.project("foo")), COLLECTION_NAME, Person.class); + + verify(collection).aggregate(eq(clientSession), anyList(), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void collectionExistsShouldUseProxiedDatabase() { + + template.collectionExists(Person.class); + + verify(database).listCollectionNames(eq(clientSession)); 
+ } + + @Test // DATAMONGO-1880, GH-3522 + public void countShouldUseProxiedCollection() { + + template.count(new Query(), Person.class); + + verify(collection).countDocuments(eq(clientSession), any(), any(CountOptions.class)); + } + + @Test // DATAMONGO-1880, GH-3522 + public void countShouldDelegateToExactCountNoMatterWhat() { + + template.useEstimatedCount(true); + template.count(new Query(), Person.class); + + verify(collection).countDocuments(eq(clientSession), any(), any(CountOptions.class)); + } + + @Test // DATAMONGO-1880 + public void createCollectionShouldUseProxiedDatabase() { + + template.createCollection(Person.class); + + verify(database).createCollection(eq(clientSession), anyString(), any()); + } + + @Test // DATAMONGO-1880 + public void dropShouldUseProxiedCollection() { + + template.dropCollection(Person.class); + + verify(collection).drop(eq(clientSession)); + } + + @Test // DATAMONGO-1880 + public void findAndModifyShouldUseProxiedCollection() { + + template.findAndModify(new Query(), new Update().set("foo", "bar"), Person.class); + + verify(collection).findOneAndUpdate(eq(clientSession), any(), any(Bson.class), any(FindOneAndUpdateOptions.class)); + } + + @Test // DATAMONGO-1880 + public void findDistinctShouldUseProxiedCollection() { + + template.findDistinct(new Query(), "firstName", Person.class, String.class); + + verify(collection).distinct(eq(clientSession), anyString(), any(), any()); + } + + @Test // DATAMONGO-1880, DATAMONGO-2264 + public void geoNearShouldUseProxiedDatabase() { + + when(database.runCommand(any(ClientSession.class), any(), eq(Document.class))) + .thenReturn(new Document("results", Collections.emptyList())); + template.geoNear(NearQuery.near(new Point(0, 0), Metrics.NEUTRAL), Person.class); + + verify(collection).aggregate(eq(clientSession), anyList(), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void mapReduceShouldUseProxiedCollection() { + + template.mapReduce(COLLECTION_NAME, "foo", "bar", 
Person.class); + + verify(collection).mapReduce(eq(clientSession), anyString(), anyString(), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void streamShouldUseProxiedCollection() { + + template.stream(new Query(), Person.class); + + verify(collection).find(eq(clientSession), any(), eq(Document.class)); + } + + @Test // DATAMONGO-1880 + public void updateFirstShouldUseProxiedCollection() { + + template.updateFirst(new Query(), Update.update("foo", "bar"), Person.class); + + verify(collection).updateOne(eq(clientSession), any(), any(Bson.class), any(UpdateOptions.class)); + } + + @Test // DATAMONGO-1880 + public void updateMultiShouldUseProxiedCollection() { + + template.updateMulti(new Query(), Update.update("foo", "bar"), Person.class); + + verify(collection).updateMany(eq(clientSession), any(), any(Bson.class), any(UpdateOptions.class)); + } + + @Test // DATAMONGO-1880 + public void upsertShouldUseProxiedCollection() { + + template.upsert(new Query(), Update.update("foo", "bar"), Person.class); + + verify(collection).updateOne(eq(clientSession), any(), any(Bson.class), any(UpdateOptions.class)); + } + + @Test // DATAMONGO-1880 + public void getCollectionShouldShouldJustReturnTheCollection/*No ClientSession binding*/() { + assertThat(template.getCollection(COLLECTION_NAME)).isNotInstanceOf(Proxy.class); + } + + @Test // DATAMONGO-1880 + public void getDbShouldJustReturnTheDatabase/*No ClientSession binding*/() { + assertThat(template.getDb()).isNotInstanceOf(Proxy.class); + } + + @Test // DATAMONGO-1880 + public void indexOpsShouldUseProxiedCollection() { + + template.indexOps(COLLECTION_NAME).dropIndex("index-name"); + + verify(collection).dropIndex(eq(clientSession), eq("index-name")); + } + + @Test // DATAMONGO-1880 + public void bulkOpsShouldUseProxiedCollection() { + + BulkOperations bulkOps = template.bulkOps(BulkMode.ORDERED, COLLECTION_NAME); + bulkOps.insert(new Document()); + + bulkOps.execute(); + + 
verify(collection).bulkWrite(eq(clientSession), anyList(), any()); + } + + @Test // DATAMONGO-1880 + public void scriptOpsShouldUseProxiedDatabase() { + + when(database.runCommand(eq(clientSession), any())).thenReturn(new Document("retval", new Object())); + template.scriptOps().call("W-O-P-R"); + + verify(database).runCommand(eq(clientSession), any()); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithDefaultShardKey.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithDefaultShardKey.java new file mode 100644 index 0000000000..20debee659 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithDefaultShardKey.java @@ -0,0 +1,89 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.Objects; + +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.Sharded; + +/** + * @author Christoph Strobl + */ +@Sharded +public class ShardedEntityWithDefaultShardKey { + + private @Id String id; + + private String country; + + @Field("userid") // + private Integer userId; + + public ShardedEntityWithDefaultShardKey(String id, String country, Integer userId) { + + this.id = id; + this.country = country; + this.userId = userId; + } + + public String getId() { + return this.id; + } + + public String getCountry() { + return this.country; + } + + public Integer getUserId() { + return this.userId; + } + + public void setId(String id) { + this.id = id; + } + + public void setCountry(String country) { + this.country = country; + } + + public void setUserId(Integer userId) { + this.userId = userId; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ShardedEntityWithDefaultShardKey that = (ShardedEntityWithDefaultShardKey) o; + return Objects.equals(id, that.id) && Objects.equals(country, that.country) && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash(id, country, userId); + } + + public String toString() { + return "ShardedEntityWithDefaultShardKey(id=" + this.getId() + ", country=" + this.getCountry() + ", userId=" + + this.getUserId() + ")"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithNonDefaultImmutableShardKey.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedEntityWithNonDefaultImmutableShardKey.java new file mode 100644 index 0000000000..a69ad39359 --- /dev/null +++ 
/*
 * Copyright 2020-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import java.util.Objects;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.Sharded;

/**
 * Test fixture sharded on a compound, immutable shard key ({@code country}, {@code userId}).
 *
 * @author Christoph Strobl
 */
@Sharded(shardKey = { "country", "userId" }, immutableKey = true)
public class ShardedEntityWithNonDefaultImmutableShardKey {

	private @Id String id;

	private String country;

	@Field("userid") //
	private Integer userId;

	public ShardedEntityWithNonDefaultImmutableShardKey(String id, String country, Integer userId) {

		this.id = id;
		this.country = country;
		this.userId = userId;
	}

	public String getId() {
		return this.id;
	}

	public String getCountry() {
		return this.country;
	}

	public Integer getUserId() {
		return this.userId;
	}

	public void setId(String id) {
		this.id = id;
	}

	public void setCountry(String country) {
		this.country = country;
	}

	public void setUserId(Integer userId) {
		this.userId = userId;
	}

	@Override
	public boolean equals(Object o) {

		if (o == this) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}
		ShardedEntityWithNonDefaultImmutableShardKey that = (ShardedEntityWithNonDefaultImmutableShardKey) o;
		return Objects.equals(id, that.id) && Objects.equals(country, that.country) && Objects.equals(userId, that.userId);
	}

	@Override
	public int hashCode() {
		return Objects.hash(id, country, userId);
	}

	@Override // added for consistency with equals/hashCode
	public String toString() {
		return "ShardedEntityWithNonDefaultImmutableShardKey(id=" + this.getId() + ", country=" + this.getCountry()
				+ ", userId=" + this.getUserId() + ")";
	}
}
+ */ +package org.springframework.data.mongodb.core; + +import java.util.Objects; + +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.Sharded; + +/** + * @author Christoph Strobl + */ +@Sharded(shardKey = { "country", "userId" }) +public class ShardedEntityWithNonDefaultShardKey { + + private @Id String id; + + private String country; + + @Field("userid") // + private Integer userId; + + public ShardedEntityWithNonDefaultShardKey(String id, String country, Integer userId) { + + this.id = id; + this.country = country; + this.userId = userId; + } + + public String getId() { + return this.id; + } + + public String getCountry() { + return this.country; + } + + public Integer getUserId() { + return this.userId; + } + + public void setId(String id) { + this.id = id; + } + + public void setCountry(String country) { + this.country = country; + } + + public void setUserId(Integer userId) { + this.userId = userId; + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ShardedEntityWithNonDefaultShardKey that = (ShardedEntityWithNonDefaultShardKey) o; + return Objects.equals(id, that.id) && Objects.equals(country, that.country) && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash(id, country, userId); + } + + public String toString() { + return "ShardedEntityWithNonDefaultShardKey(id=" + this.getId() + ", country=" + this.getCountry() + ", userId=" + + this.getUserId() + ")"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedVersionedEntityWithNonDefaultShardKey.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ShardedVersionedEntityWithNonDefaultShardKey.java new file mode 100644 index 0000000000..fc22a40103 --- /dev/null +++ 
/*
 * Copyright 2020-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import java.util.Objects;

import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.Sharded;

/**
 * Test fixture combining optimistic locking ({@link Version}) with a compound shard key
 * ({@code country}, {@code userId}).
 *
 * @author Christoph Strobl
 */
@Sharded(shardKey = { "country", "userId" })
public class ShardedVersionedEntityWithNonDefaultShardKey {

	private @Id String id;

	private @Version Long version;

	private String country;

	@Field("userid") //
	private Integer userId;

	public ShardedVersionedEntityWithNonDefaultShardKey(String id, Long version, String country, Integer userId) {

		this.id = id;
		this.version = version;
		this.country = country;
		this.userId = userId;
	}

	public String getId() {
		return this.id;
	}

	public Long getVersion() {
		return this.version;
	}

	public String getCountry() {
		return this.country;
	}

	public Integer getUserId() {
		return this.userId;
	}

	public void setId(String id) {
		this.id = id;
	}

	public void setVersion(Long version) {
		this.version = version;
	}

	public void setCountry(String country) {
		this.country = country;
	}

	public void setUserId(Integer userId) {
		this.userId = userId;
	}

	@Override
	public boolean equals(Object o) {

		if (o == this) {
			return true;
		}
		if (o == null || getClass() != o.getClass()) {
			return false;
		}
		ShardedVersionedEntityWithNonDefaultShardKey that = (ShardedVersionedEntityWithNonDefaultShardKey) o;
		return Objects.equals(id, that.id) && Objects.equals(version, that.version) && Objects.equals(country, that.country)
				&& Objects.equals(userId, that.userId);
	}

	@Override
	public int hashCode() {
		return Objects.hash(id, version, country, userId);
	}

	@Override // added for consistency with equals/hashCode
	public String toString() {
		return "ShardedVersionedEntityWithNonDefaultShardKey(id=" + this.getId() + ", version=" + this.getVersion()
				+ ", country=" + this.getCountry() + ", userId=" + this.getUserId() + ")";
	}
}
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.Proxy; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.aop.framework.AopProxyUtils; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.ConnectionString; +import com.mongodb.client.ClientSession; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoDatabase; + +/** + * Unit tests for {@link SimpleMongoClientDatabaseFactory}. + * + * @author Oliver Gierke + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +class SimpleMongoClientDatabaseFactoryUnitTests { + + @Mock MongoClient mongo; + @Mock ClientSession clientSession; + @Mock MongoDatabase database; + + @Test // DATADOC-254, DATAMONGO-1903 + void rejectsIllegalDatabaseNames() { + + rejectsDatabaseName("foo.bar"); + rejectsDatabaseName("foo$bar"); + rejectsDatabaseName("foo\\bar"); + rejectsDatabaseName("foo//bar"); + rejectsDatabaseName("foo bar"); + rejectsDatabaseName("foo\"bar"); + } + + @Test // DATADOC-254 + void allowsDatabaseNames() { + new SimpleMongoClientDatabaseFactory(mongo, "foo-bar"); + new SimpleMongoClientDatabaseFactory(mongo, "foo_bar"); + new SimpleMongoClientDatabaseFactory(mongo, "foo01231bar"); + } + + @Test // DATADOC-295 + void mongoUriConstructor() { + + ConnectionString mongoURI = new ConnectionString( + "mongodb://myUsername:myPassword@localhost/myDatabase.myCollection"); + MongoDatabaseFactory mongoDbFactory = new SimpleMongoClientDatabaseFactory(mongoURI); + + assertThat(mongoDbFactory).hasFieldOrPropertyWithValue("databaseName", "myDatabase"); + } + + @Test // 
DATAMONGO-1158 + void constructsMongoClientAccordingToMongoUri() { + + ConnectionString uri = new ConnectionString( + "mongodb://myUserName:myPassWord@127.0.0.1:27017/myDataBase.myCollection"); + SimpleMongoClientDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(uri); + + assertThat(factory).hasFieldOrPropertyWithValue("databaseName", "myDataBase"); + } + + @Test // DATAMONGO-1880 + void cascadedWithSessionUsesRootFactory() { + + when(mongo.getDatabase("foo")).thenReturn(database); + + MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(mongo, "foo"); + MongoDatabaseFactory wrapped = factory.withSession(clientSession).withSession(clientSession); + + InvocationHandler invocationHandler = Proxy.getInvocationHandler(wrapped.getMongoDatabase()); + + Object singletonTarget = AopProxyUtils + .getSingletonTarget(ReflectionTestUtils.getField(invocationHandler, "advised")); + + assertThat(singletonTarget).isSameAs(database); + } + + private void rejectsDatabaseName(String databaseName) { + assertThatThrownBy(() -> new SimpleMongoClientDatabaseFactory(mongo, databaseName)) + .isInstanceOf(IllegalArgumentException.class); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleMongoDbFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleMongoDbFactoryUnitTests.java deleted file mode 100644 index a882e8f8b0..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleMongoDbFactoryUnitTests.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; -import static org.springframework.test.util.ReflectionTestUtils.*; - -import java.net.UnknownHostException; - -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.dao.InvalidDataAccessApiUsageException; -import org.springframework.data.authentication.UserCredentials; -import org.springframework.data.mongodb.MongoDbFactory; - -import com.mongodb.Mongo; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientURI; -import com.mongodb.MongoURI; - -/** - * Unit tests for {@link SimpleMongoDbFactory}. 
- * - * @author Oliver Gierke - * @author Christoph Strobl - */ -@RunWith(MockitoJUnitRunner.class) -public class SimpleMongoDbFactoryUnitTests { - - public @Rule ExpectedException expectedException = ExpectedException.none(); - @Mock MongoClient mongo; - - @Test // DATADOC-254 - public void rejectsIllegalDatabaseNames() { - rejectsDatabaseName("foo.bar"); - rejectsDatabaseName("foo!bar"); - } - - @Test // DATADOC-254 - @SuppressWarnings("deprecation") - public void allowsDatabaseNames() { - new SimpleMongoDbFactory(mongo, "foo-bar"); - new SimpleMongoDbFactory(mongo, "foo_bar"); - new SimpleMongoDbFactory(mongo, "foo01231bar"); - } - - @Test // DATADOC-295 - @SuppressWarnings("deprecation") - public void mongoUriConstructor() throws UnknownHostException { - - MongoClientURI mongoURI = new MongoClientURI("mongodb://myUsername:myPassword@localhost/myDatabase.myCollection"); - MongoDbFactory mongoDbFactory = new SimpleMongoDbFactory(mongoURI); - - assertThat(getField(mongoDbFactory, "databaseName").toString(), is("myDatabase")); - } - - @Test // DATAMONGO-1158 - public void constructsMongoClientAccordingToMongoUri() throws UnknownHostException { - - MongoClientURI uri = new MongoClientURI("mongodb://myUserName:myPassWord@127.0.0.1:27017/myDataBase.myCollection"); - SimpleMongoDbFactory factory = new SimpleMongoDbFactory(uri); - - assertThat(getField(factory, "databaseName").toString(), is("myDataBase")); - } - - @SuppressWarnings("deprecation") - private void rejectsDatabaseName(String databaseName) { - - try { - new SimpleMongoDbFactory(mongo, databaseName); - fail("Expected database name " + databaseName + " to be rejected!"); - } catch (IllegalArgumentException ex) { - - } - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactoryUnitTests.java new file mode 100644 index 
/*
 * Copyright 2018-2025 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import static org.assertj.core.api.Assertions.*;
import static org.mockito.Mockito.*;

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import org.springframework.aop.framework.AopProxyUtils;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.test.util.ReflectionTestUtils;

import com.mongodb.reactivestreams.client.ClientSession;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoDatabase;

/**
 * Unit tests for {@link SimpleReactiveMongoDatabaseFactory}.
 *
 * @author Mark Paluch
 * @author Mathieu Ouellet
 */
@ExtendWith(MockitoExtension.class)
class SimpleReactiveMongoDatabaseFactoryUnitTests {

	@Mock MongoClient mongoClient;
	@Mock ClientSession clientSession;
	@Mock MongoDatabase database;

	@Test // DATAMONGO-1880
	void cascadedWithSessionUsesRootFactory() {

		when(mongoClient.getDatabase("foo")).thenReturn(database);

		ReactiveMongoDatabaseFactory factory = new SimpleReactiveMongoDatabaseFactory(mongoClient, "foo");
		ReactiveMongoDatabaseFactory wrapped = factory.withSession(clientSession).withSession(clientSession);

		// unwrap the session-bound proxy and verify it still targets the original database instance
		InvocationHandler invocationHandler = Proxy.getInvocationHandler(wrapped.getMongoDatabase().block());
		Object advised = ReflectionTestUtils.getField(invocationHandler, "advised");
		Object singletonTarget = AopProxyUtils.getSingletonTarget(advised);

		assertThat(singletonTarget).isSameAs(database);
	}

	@Test // DATAMONGO-1903
	void rejectsIllegalDatabaseNames() {

		// names containing characters MongoDB does not allow in database names
		for (String invalidName : new String[] { "foo.bar", "foo$bar", "foo\\bar", "foo//bar", "foo bar", "foo\"bar" }) {
			rejectsDatabaseName(invalidName);
		}
	}

	private void rejectsDatabaseName(String databaseName) {
		assertThatThrownBy(() -> new SimpleReactiveMongoDatabaseFactory(mongoClient, databaseName))
				.isInstanceOf(IllegalArgumentException.class);
	}
}
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.List; + +/** + * A simple collection of grouped test entities used throughout the test suite. + * + * @author Christoph Strobl + */ +public class TestEntities { + + private static final GeoEntities GEO = new GeoEntities(); + + public static GeoEntities geolocation() { + return GEO; + } + + public static class GeoEntities { + + /** + *
          +		 * X: -73.99408
          +		 * Y: 40.75057
          +		 * 
          + * + * @return new {@link Venue} + */ + public Venue pennStation() { + return new Venue("Penn Station", -73.99408, 40.75057); + } + + /** + *
          +		 * X: -73.99171
          +		 * Y: 40.738868
          +		 * 
          + * + * @return new {@link Venue} + */ + + public Venue tenGenOffice() { + return new Venue("10gen Office", -73.99171, 40.738868); + } + + /** + *
          +		 * X: -73.988135
          +		 * Y: 40.741404
          +		 * 
          + * + * @return new {@link Venue} + */ + public Venue flatironBuilding() { + return new Venue("Flatiron Building", -73.988135, 40.741404); + } + + /** + *
          +		 * X: -74.2713
          +		 * Y: 40.73137
          +		 * 
          + * + * @return new {@link Venue} + */ + public Venue maplewoodNJ() { + return new Venue("Maplewood, NJ", -74.2713, 40.73137); + } + + public List newYork() { + + List venues = new ArrayList<>(); + + venues.add(pennStation()); + venues.add(tenGenOffice()); + venues.add(flatironBuilding()); + venues.add(new Venue("Players Club", -73.997812, 40.739128)); + venues.add(new Venue("City Bakery ", -73.992491, 40.738673)); + venues.add(new Venue("Splash Bar", -73.992491, 40.738673)); + venues.add(new Venue("Momofuku Milk Bar", -73.985839, 40.731698)); + venues.add(new Venue("Shake Shack", -73.98820, 40.74164)); + venues.add(new Venue("Penn Station", -73.99408, 40.75057)); + venues.add(new Venue("Empire State Building", -73.98602, 40.74894)); + venues.add(new Venue("Ulaanbaatar, Mongolia", 106.9154, 47.9245)); + venues.add(maplewoodNJ()); + + return venues; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TestMongoConfiguration.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TestMongoConfiguration.java index 319d895b6d..216d4447d0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TestMongoConfiguration.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TestMongoConfiguration.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,41 +16,60 @@ package org.springframework.data.mongodb.core; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.List; +import java.util.Set; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Primary; import org.springframework.core.convert.converter.Converter; -import org.springframework.data.convert.CustomConversions; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.test.util.MongoTestUtils; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; -public class TestMongoConfiguration extends AbstractMongoConfiguration { +public class TestMongoConfiguration extends AbstractMongoClientConfiguration { @Override public String getDatabaseName() { return "database"; } + @Primary + @Bean + @Override + public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory, + MongoCustomConversions customConversions, MongoMappingContext mappingContext) { + return super.mappingMongoConverter(databaseFactory, customConversions, mappingContext); + } + @Override @Bean public MongoClient mongoClient() { - return new MongoClient("127.0.0.1", 27017); + return MongoTestUtils.client(); } @Override - public String 
getMappingBasePackage() { - return MongoMappingContext.class.getPackage().getName(); + protected Collection getMappingBasePackages() { + return Collections.singleton(MongoMappingContext.class.getPackage().getName()); } @Override - public CustomConversions customConversions() { + public MongoCustomConversions customConversions() { List> converters = new ArrayList<>(2); converters.add(new org.springframework.data.mongodb.core.PersonReadConverter()); converters.add(new org.springframework.data.mongodb.core.PersonWriteConverter()); return new MongoCustomConversions(converters); } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Trade.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Trade.java index 24f79738d2..d673c8ad95 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Trade.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Trade.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TransactionOptionsTestService.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TransactionOptionsTestService.java new file mode 100644 index 0000000000..8968f53a74 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/TransactionOptionsTestService.java @@ -0,0 +1,107 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.function.Function; +import java.util.function.UnaryOperator; + +import org.springframework.lang.Nullable; +import org.springframework.transaction.annotation.Transactional; + +/** + * Helper class for integration tests of {@link Transactional#label()} MongoDb options in non-reactive context. 
+ * + * @param root document type + * @author Yan Kardziyaka + * @see org.springframework.data.mongodb.ReactiveTransactionOptionsTestService + */ +public class TransactionOptionsTestService { + + private final Function findByIdFunction; + private final UnaryOperator saveFunction; + + public TransactionOptionsTestService(MongoOperations operations, Class entityClass) { + this.findByIdFunction = id -> operations.findById(id, entityClass); + this.saveFunction = operations::save; + } + + @Transactional(transactionManager = "txManager", label = { "mongo:maxCommitTime=-PT6H3M" }) + public T saveWithInvalidMaxCommitTime(T entity) { + return saveFunction.apply(entity); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:maxCommitTime=PT1M" }) + public T saveWithinMaxCommitTime(T entity) { + return saveFunction.apply(entity); + } + + @Nullable + @Transactional(transactionManager = "txManager", label = { "mongo:readConcern=available" }) + public T availableReadConcernFind(Object id) { + return findByIdFunction.apply(id); + } + + @Nullable + @Transactional(transactionManager = "txManager", label = { "mongo:readConcern=invalid" }) + public T invalidReadConcernFind(Object id) { + return findByIdFunction.apply(id); + } + + @Nullable + @Transactional(transactionManager = "txManager", label = { "mongo:readConcern=${tx.read.concern}" }) + public T environmentReadConcernFind(Object id) { + return findByIdFunction.apply(id); + } + + @Nullable + @Transactional(transactionManager = "txManager", label = { "mongo:readConcern=majority" }) + public T majorityReadConcernFind(Object id) { + return findByIdFunction.apply(id); + } + + @Nullable + @Transactional(transactionManager = "txManager", label = { "mongo:readPreference=primaryPreferred" }) + public T findFromPrimaryPreferredReplica(Object id) { + return findByIdFunction.apply(id); + } + + @Nullable + @Transactional(transactionManager = "txManager", label = { "mongo:readPreference=invalid" }) + public T 
findFromInvalidReplica(Object id) { + return findByIdFunction.apply(id); + } + + @Nullable + @Transactional(transactionManager = "txManager", label = { "mongo:readPreference=primary" }) + public T findFromPrimaryReplica(Object id) { + return findByIdFunction.apply(id); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:writeConcern=unacknowledged" }) + public T unacknowledgedWriteConcernSave(T entity) { + return saveFunction.apply(entity); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:writeConcern=invalid" }) + public T invalidWriteConcernSave(T entity) { + return saveFunction.apply(entity); + } + + @Transactional(transactionManager = "txManager", label = { "mongo:writeConcern=acknowledged" }) + public T acknowledgedWriteConcernSave(T entity) { + return saveFunction.apply(entity); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UnwrapAndReadDocumentCallbackUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UnwrapAndReadDocumentCallbackUnitTests.java deleted file mode 100644 index b70514e436..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UnwrapAndReadDocumentCallbackUnitTests.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2013-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.mockito.Mockito.when; - -import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.MongoTemplate.UnwrapAndReadDocumentCallback; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; -import org.springframework.data.mongodb.core.convert.MappingMongoConverter; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; - -/** - * Unit tests for {@link UnwrapAndReadDocumentCallback}. - * - * @author Oliver Gierke - * @author Mark Paluch - */ -@RunWith(MockitoJUnitRunner.class) -public class UnwrapAndReadDocumentCallbackUnitTests { - - @Mock MongoDbFactory factory; - @Mock MongoExceptionTranslator exceptionTranslatorMock; - - UnwrapAndReadDocumentCallback callback; - - @Before - public void setUp() { - - when(factory.getExceptionTranslator()).thenReturn(exceptionTranslatorMock); - - MongoTemplate template = new MongoTemplate(factory); - MappingMongoConverter converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), - new MongoMappingContext()); - - this.callback = template.new UnwrapAndReadDocumentCallback(converter, Target.class, "collection-1"); - } - - @Test - public void usesFirstLevelValues() { - - Target target = callback.doWith(new Document("foo", "bar")); - - assertThat(target.id, is(nullValue())); - assertThat(target.foo, is("bar")); - } - - @Test - public void unwrapsUnderscoreIdIfDocument() { - - Target target = callback.doWith(new Document("_id", new Document("foo", "bar"))); - - assertThat(target.id, is(nullValue())); - assertThat(target.foo, is("bar")); - } - - @Test - public void firstLevelPropertiesTrumpNestedOnes() { - - Target 
target = callback.doWith(new Document("_id", new Document("foo", "bar")).append("foo", "foobar")); - - assertThat(target.id, is(nullValue())); - assertThat(target.foo, is("foobar")); - } - - @Test - public void keepsUnderscoreIdIfScalarValue() { - - Target target = callback.doWith(new Document("_id", "bar").append("foo", "foo")); - - assertThat(target.id, is("bar")); - assertThat(target.foo, is("foo")); - } - - static class Target { - - String id; - String foo; - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UpdateOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UpdateOperationsUnitTests.java new file mode 100644 index 0000000000..d4c2f37f63 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/UpdateOperationsUnitTests.java @@ -0,0 +1,152 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.lang.NonNull; +import org.springframework.lang.Nullable; + +import com.mongodb.MongoClientSettings; + +/** + * Unit test for {@link com.mongodb.internal.operation.UpdateOperation}. + * + * @author Christoph Strobl + */ +class UpdateOperationsUnitTests { + + static final Document SHARD_KEY = new Document("country", "AT").append("userid", "4230"); + static final Document SOURCE_DOC = appendShardKey(new Document("_id", "id-1")); + + MongoMappingContext mappingContext = new MongoMappingContext(); + MongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + QueryMapper queryMapper = new QueryMapper(mongoConverter); + UpdateMapper updateMapper = new UpdateMapper(mongoConverter); + EntityOperations entityOperations = new EntityOperations(mongoConverter, this.queryMapper); + PropertyOperations propertyOperations = new PropertyOperations(mongoConverter.getMappingContext()); + + ExtendedQueryOperations queryOperations = new ExtendedQueryOperations(queryMapper, updateMapper, entityOperations, propertyOperations, + MongoClientSettings::getDefaultCodecRegistry); + + @Test // DATAMONGO-2341 + void appliesShardKeyToFilter() { + + Document sourceFilter = 
new Document("name", "kaladin"); + assertThat(shardedFilter(sourceFilter, ShardedEntityWithNonDefaultShardKey.class, null)) + .isEqualTo(appendShardKey(sourceFilter)); + } + + @Test + void applyShardKeyDoesNotAlterSourceFilter() { + + Document sourceFilter = new Document("name", "kaladin"); + shardedFilter(sourceFilter, ShardedEntityWithNonDefaultShardKey.class, null); + assertThat(sourceFilter).isEqualTo(new Document("name", "kaladin")); + } + + @Test // DATAMONGO-2341 + void appliesExistingShardKeyToFilter() { + + Document sourceFilter = new Document("name", "kaladin"); + Document existing = new Document("country", "GB").append("userid", "007"); + + assertThat(shardedFilter(sourceFilter, ShardedEntityWithNonDefaultShardKey.class, existing)) + .isEqualTo(new Document(existing).append("name", "kaladin")); + } + + @Test // DATAMONGO-2341 + void recognizesExistingShardKeyInFilter() { + + Document sourceFilter = appendShardKey(new Document("name", "kaladin")); + + assertThat(queryOperations.replaceSingleContextFor(SOURCE_DOC).requiresShardKey(sourceFilter, + entityOf(ShardedEntityWithNonDefaultShardKey.class))).isFalse(); + } + + @Test // DATAMONGO-2341 + void recognizesIdPropertyAsShardKey() { + + Document sourceFilter = new Document("_id", "id-1"); + + assertThat(queryOperations.replaceSingleContextFor(SOURCE_DOC).requiresShardKey(sourceFilter, + entityOf(ShardedEntityWithDefaultShardKey.class))).isFalse(); + } + + @Test // DATAMONGO-2341 + void returnsMappedShardKey() { + + queryOperations.replaceSingleContextFor(SOURCE_DOC) + .getMappedShardKeyFields(entityOf(ShardedEntityWithDefaultShardKey.class)) + .containsAll(Arrays.asList("country", "userid")); + } + + @NonNull + private Document shardedFilter(Document sourceFilter, Class entity, Document existing) { + return queryOperations.replaceSingleContextFor(SOURCE_DOC).applyShardKey(entity, sourceFilter, existing); + } + + private static Document appendShardKey(Document source) { + + Document target = new 
Document(source); + target.putAll(SHARD_KEY); + return target; + } + + MongoPersistentEntity entityOf(Class type) { + return mappingContext.getPersistentEntity(type); + } + + class ExtendedQueryOperations extends QueryOperations { + + ExtendedQueryOperations(QueryMapper queryMapper, UpdateMapper updateMapper, EntityOperations entityOperations, PropertyOperations propertyOperations, + CodecRegistryProvider codecRegistryProvider) { + super(queryMapper, updateMapper, entityOperations, propertyOperations, codecRegistryProvider); + } + + @NonNull + private ExtendedUpdateContext replaceSingleContextFor(Document source) { + return new ExtendedUpdateContext(MappedDocument.of(source), true); + } + + MongoPersistentEntity entityOf(Class type) { + return mappingContext.getPersistentEntity(type); + } + + class ExtendedUpdateContext extends UpdateContext { + + ExtendedUpdateContext(MappedDocument update, boolean upsert) { + super(update, upsert); + } + + Document applyShardKey(Class domainType, Document filter, @Nullable Document existing) { + return applyShardKey(entityOf(domainType), filter, existing); + } + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/User.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/User.java index 5b17fc4738..25fbbbcb83 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/User.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/User.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,8 @@ */ package org.springframework.data.mongodb.core; +import org.springframework.lang.Nullable; + public class User { @Override @@ -27,7 +29,7 @@ public int hashCode() { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) return true; if (obj == null) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Venue.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Venue.java index c433bf3013..09a0605ed7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Venue.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Venue.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,8 +16,8 @@ package org.springframework.data.mongodb.core; import java.util.Arrays; +import java.util.Date; -import org.joda.time.LocalDate; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.PersistenceConstructor; import org.springframework.data.mongodb.core.mapping.Document; @@ -28,7 +28,7 @@ public class Venue { @Id private String id; private String name; private double[] location; - private LocalDate openingDate; + private Date openingDate; @PersistenceConstructor Venue(String name, double[] location) { @@ -51,11 +51,11 @@ public double[] getLocation() { return location; } - public LocalDate getOpeningDate() { + public Date getOpeningDate() { return openingDate; } - public void setOpeningDate(LocalDate openingDate) { + public void setOpeningDate(Date openingDate) { this.openingDate = openingDate; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java new file mode 100644 index 0000000000..5b24d85c3b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java @@ -0,0 +1,173 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.*; + +import java.util.Arrays; +import java.util.Date; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Year; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.util.aggregation.TestAggregationContext; + +/** + * Unit tests for {@link AccumulatorOperators}. 
+ * + * @author Christoph Strobl + * @author Julia Lee + */ +class AccumulatorOperatorsUnitTests { + + @Test // GH-3712 + void rendersCovariancePopWithFieldReference() { + + assertThat(AccumulatorOperators.valueOf("balance").covariancePop("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covariancePop", Arrays.asList("$balance", "$force"))); + } + + @Test // GH-3712 + void rendersCovariancePopWithExpression() { + + assertThat(AccumulatorOperators.valueOf(Year.yearOf("birthdate")).covariancePop("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covariancePop", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + } + + @Test // GH-3712 + void rendersCovarianceSampWithFieldReference() { + + assertThat(AccumulatorOperators.valueOf("balance").covarianceSamp("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covarianceSamp", Arrays.asList("$balance", "$force"))); + } + + @Test // GH-3712 + void rendersCovarianceSampWithExpression() { + + assertThat(AccumulatorOperators.valueOf(Year.yearOf("birthdate")).covarianceSamp("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covarianceSamp", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + } + + @Test // GH-3718 + void rendersExpMovingAvgWithNumberOfHistoricDocuments() { + + assertThat(valueOf("price").expMovingAvg().historicalDocuments(2).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $expMovingAvg: { input: \"$price\", N: 2 } }")); + } + + @Test // GH-3718 + void rendersExpMovingAvgWithAlpha() { + + assertThat(valueOf("price").expMovingAvg().alpha(0.75).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $expMovingAvg: { input: \"$price\", alpha: 0.75 } }")); + } + + @Test // GH-4139 + void rendersMax() { + + 
assertThat(valueOf("price").max().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $max: \"$price\" }")); + } + + @Test // GH-4139 + void rendersMaxN() { + + assertThat(valueOf("price").max(3).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $maxN: { n: 3, input : \"$price\" } }")); + } + + @Test // GH-4139 + void rendersMin() { + + assertThat(valueOf("price").min().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $min: \"$price\" }")); + } + + @Test // GH-4139 + void rendersMinN() { + + assertThat(valueOf("price").min(3).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $minN: { n: 3, input : \"$price\" } }")); + } + + @Test // GH-4473 + void rendersPercentileWithFieldReference() { + + assertThat(valueOf("score").percentile(0.2).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $percentile: { input: \"$score\", method: \"approximate\", p: [0.2] } }")); + + assertThat(valueOf("score").percentile(0.3, 0.9).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $percentile: { input: \"$score\", method: \"approximate\", p: [0.3, 0.9] } }")); + + assertThat(valueOf("score").percentile(0.3, 0.9).and("scoreTwo").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $percentile: { input: [\"$score\", \"$scoreTwo\"], method: \"approximate\", p: [0.3, 0.9] } }")); + } + + @Test // GH-4473 + void rendersPercentileWithExpression() { + + assertThat(valueOf(Sum.sumOf("score")).percentile(0.1).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $percentile: { input: {\"$sum\": \"$score\"}, method: \"approximate\", p: [0.1] } }")); + + assertThat(valueOf("scoreOne").percentile(0.1, 0.2).and(Sum.sumOf("scoreTwo")).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $percentile: { input: [\"$scoreOne\", {\"$sum\": \"$scoreTwo\"}], method: \"approximate\", p: [0.1, 0.2] } }")); + } + + @Test // 
GH-4472 + void rendersMedianWithFieldReference() { + + assertThat(valueOf("score").median().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $median: { input: \"$score\", method: \"approximate\" } }")); + + assertThat(valueOf("score").median().and("scoreTwo").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $median: { input: [\"$score\", \"$scoreTwo\"], method: \"approximate\" } }")); + } + + @Test // GH-4472 + void rendersMedianWithExpression() { + + assertThat(valueOf(Sum.sumOf("score")).median().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $median: { input: {\"$sum\": \"$score\"}, method: \"approximate\" } }")); + + assertThat(valueOf("scoreOne").median().and(Sum.sumOf("scoreTwo")).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $median: { input: [\"$scoreOne\", {\"$sum\": \"$scoreTwo\"}], method: \"approximate\" } }")); + } + + @Test // GH-4472 + void rendersMedianCorrectlyWithTypedAggregationContext() { + + assertThat(valueOf("midichlorianCount").median() + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(Document.parse("{ $median: { input: \"$force\", method: \"approximate\" } }")); + } + + static class Jedi { + + String name; + + Date birthdate; + + @Field("force") Integer midichlorianCount; + + Integer balance; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperationUnitTests.java new file mode 100644 index 0000000000..8dcf96231c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperationUnitTests.java @@ -0,0 +1,177 @@ +/* + * Copyright 2020-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link AddFieldsOperation}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Kim Sumin + */ +class AddFieldsOperationUnitTests { + + @Test // DATAMONGO-2363 + void raisesErrorOnNullField() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new AddFieldsOperation(null, "value")); + } + + @Test // DATAMONGO-2363 + void rendersFieldReferenceCorrectly() { + + assertThat(new AddFieldsOperation("name", "value").toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"name\":\"value\"}}")); + } + + @Test // DATAMONGO-2363 + void rendersMultipleEntriesCorrectly() { + + assertThat(new AddFieldsOperation("name", "value").addField("field-2", "value2") + .toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"name\":\"value\", \"field-2\":\"value2\"}}")); + } + + @Test // DATAMONGO-2363 + void rendersMappedFieldReferenceCorrectly() { + + assertThat(new AddFieldsOperation("student", "value").toPipelineStages(contextFor(ScoresWithMappedField.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"student_name\":\"value\"}}")); + } + + @Test // DATAMONGO-2363 + void rendersNestedMappedFieldReferenceCorrectly() { + + assertThat(new AddFieldsOperation("scoresWithMappedField.student", "value") + .toPipelineStages(contextFor(ScoresWrapper.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"scoresWithMappedField.student_name\":\"value\"}}")); + } + + @Test // DATAMONGO-2363 + void rendersTargetValueFieldReferenceCorrectly() { + + assertThat(new AddFieldsOperation("name", Fields.field("value")).toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"name\":\"$value\"}}")); + } + + @Test // DATAMONGO-2363 + void rendersMappedTargetValueFieldReferenceCorrectly() { + + assertThat(new AddFieldsOperation("student", Fields.field("homework")) + .toPipelineStages(contextFor(ScoresWithMappedField.class))) + 
.containsExactly(Document.parse("{\"$addFields\" : {\"student_name\":\"$home_work\"}}")); + } + + @Test // DATAMONGO-2363 + void rendersNestedMappedTargetValueFieldReferenceCorrectly() { + + assertThat(new AddFieldsOperation("scoresWithMappedField.student", Fields.field("scoresWithMappedField.homework")) + .toPipelineStages(contextFor(ScoresWrapper.class))) + .containsExactly(Document.parse( + "{\"$addFields\" : {\"scoresWithMappedField.student_name\":\"$scoresWithMappedField.home_work\"}}")); + } + + @Test // DATAMONGO-2363 + void appliesSpelExpressionCorrectly() { + + AddFieldsOperation operation = AddFieldsOperation.builder().addField("totalHomework") + .withValueOfExpression("sum(homework) * [0]", 2) // + .build(); + + assertThat(operation.toPipelineStages(contextFor(ScoresWrapper.class))).contains( + Document.parse("{\"$addFields\" : {\"totalHomework\": { $multiply : [{ \"$sum\" : [\"$homework\"] }, 2] }}}")); + } + + @Test // DATAMONGO-2363 + void rendersTargetValueExpressionCorrectly() { + + assertThat(AddFieldsOperation.builder().addField("totalHomework") + .withValueOf(ArithmeticOperators.valueOf("homework").sum()).build().toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"totalHomework\": { \"$sum\" : \"$homework\" }}}")); + } + + @Test // DATAMONGO-2363 + void exposesFieldsCorrectly() { + + ExposedFields fields = AddFieldsOperation.builder().addField("totalHomework").withValue("A+") // + .addField("totalQuiz").withValue("B-") // + .addField("computed").withValueOfExpression("totalHomework").build().getFields(); + + assertThat(fields.getField("totalHomework")).isNotNull(); + assertThat(fields.getField("totalQuiz")).isNotNull(); + assertThat(fields.getField("computed")).isNotNull(); + assertThat(fields.getField("does-not-exist")).isNull(); + } + + @Test // GH-4933 + void rendersStringValueAsFieldReferenceCorrectly() { + + AddFieldsOperation operation = 
AddFieldsOperation.builder().addField("name").withValueOf("value").build(); + + assertThat(operation.toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"name\":\"$value\"}}")); + + AddFieldsOperation mappedOperation = AddFieldsOperation.builder().addField("totalHomework").withValueOf("homework") + .build(); + + assertThat(mappedOperation.toPipelineStages(contextFor(ScoresWithMappedField.class))) + .containsExactly(Document.parse("{\"$addFields\" : {\"totalHomework\":\"$home_work\"}}")); + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)).continueOnMissingFieldReference(); + } + + static class Scores { + + String student; + List homework; + } + + static class ScoresWithMappedField { + + @Field("student_name") String student; + @Field("home_work") List homework; + } + + static class ScoresWrapper { + + Scores scores; + ScoresWithMappedField scoresWithMappedField; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRendererUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRendererUnitTests.java new file mode 100644 index 0000000000..5cc93ee5b9 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRendererUnitTests.java @@ -0,0 +1,160 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.mockito.Mockito.*; +import static org.springframework.data.domain.Sort.Direction.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +import java.time.ZonedDateTime; +import java.util.List; +import java.util.Set; +import java.util.stream.Stream; + +import org.assertj.core.api.Assertions; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.data.annotation.Id; +import org.springframework.data.convert.ConverterBuilder; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.convert.CustomConversions.StoreConversions; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.test.util.MongoTestMappingContext; + +/** + * @author Christoph Strobl + */ +public class 
AggregationOperationRendererUnitTests { + + @Test // GH-4443 + void nonFieldsExposingAggregationOperationContinuesWithSameContextForNextStage() { + + AggregationOperationContext rootContext = mock(AggregationOperationContext.class); + AggregationOperation stage1 = mock(AggregationOperation.class); + AggregationOperation stage2 = mock(AggregationOperation.class); + + AggregationOperationRenderer.toDocument(List.of(stage1, stage2), rootContext); + + verify(stage1).toPipelineStages(eq(rootContext)); + verify(stage2).toPipelineStages(eq(rootContext)); + } + + @Test // GH-4722 + void contextShouldCarryOnRelaxedFieldMapping() { + + MongoTestMappingContext ctx = new MongoTestMappingContext(cfg -> { + cfg.initialEntitySet(TestRecord.class); + }); + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, ctx); + + Aggregation agg = Aggregation.newAggregation(Aggregation.unwind("layerOne.layerTwo"), + project().and("layerOne.layerTwo.layerThree").as("layerOne.layerThree"), + sort(DESC, "layerOne.layerThree.fieldA")); + + AggregationOperationRenderer.toDocument(agg.getPipeline().getOperations(), + new RelaxedTypeBasedAggregationOperationContext(TestRecord.class, ctx, new QueryMapper(mongoConverter))); + } + + @Test // GH-4722 + void appliesConversionToValuesUsedInAggregation() { + + MongoTestMappingContext ctx = new MongoTestMappingContext(cfg -> { + cfg.initialEntitySet(TestRecord.class); + }); + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, ctx); + mongoConverter.setCustomConversions(new CustomConversions(StoreConversions.NONE, + Set.copyOf(ConverterBuilder.writing(ZonedDateTime.class, String.class, ZonedDateTime::toString) + .andReading(it -> ZonedDateTime.parse(it)).getConverters()))); + mongoConverter.afterPropertiesSet(); + + var agg = Aggregation.newAggregation(Aggregation.sort(Direction.DESC, "version"), + Aggregation.group("entityId").first(Aggregation.ROOT).as("value"), 
Aggregation.replaceRoot("value"), + Aggregation.match(Criteria.where("createdDate").lt(ZonedDateTime.now())) // here is the problem + ); + + List document = AggregationOperationRenderer.toDocument(agg.getPipeline().getOperations(), + new RelaxedTypeBasedAggregationOperationContext(TestRecord.class, ctx, new QueryMapper(mongoConverter))); + Assertions.assertThat(document).last() + .extracting(it -> it.getEmbedded(List.of("$match", "createdDate", "$lt"), Object.class)) + .isInstanceOf(String.class); + } + + @ParameterizedTest // GH-4722 + @MethodSource("studentAggregationContexts") + void mapsOperationThatDoesNotExposeDedicatedFieldsCorrectly(AggregationOperationContext aggregationContext) { + + var agg = newAggregation(Student.class, Aggregation.unwind("grades"), Aggregation.replaceRoot("grades"), + Aggregation.project("grades")); + + List mappedPipeline = AggregationOperationRenderer.toDocument(agg.getPipeline().getOperations(), + aggregationContext); + + Assertions.assertThat(mappedPipeline).last().isEqualTo(Document.parse("{\"$project\": {\"grades\": 1}}")); + } + + private static Stream studentAggregationContexts() { + + MongoTestMappingContext ctx = new MongoTestMappingContext(cfg -> { + cfg.initialEntitySet(Student.class); + }); + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, ctx); + mongoConverter.afterPropertiesSet(); + + QueryMapper queryMapper = new QueryMapper(mongoConverter); + + return Stream.of( + Arguments + .of(new TypeBasedAggregationOperationContext(Student.class, ctx, queryMapper, FieldLookupPolicy.strict())), + Arguments.of( + new TypeBasedAggregationOperationContext(Student.class, ctx, queryMapper, FieldLookupPolicy.relaxed()))); + } + + record TestRecord(@Id String field1, String field2, LayerOne layerOne) { + record LayerOne(List layerTwo) { + } + + record LayerTwo(LayerThree layerThree) { + } + + record LayerThree(int fieldA, int fieldB) { + } + } + + static class Student { + + @Field("mark") 
List grades; + + } + + static class Grade { + + int points; + String grades; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOptionsTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOptionsTests.java index 1a37a77464..ab65236f7c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOptionsTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationOptionsTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,12 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link AggregationOptions}. 
@@ -29,47 +28,93 @@ * @author Thomas Darimont * @author Mark Paluch * @author Christoph Strobl + * @author Yadhukrishna S Pai * @since 1.6 */ -public class AggregationOptionsTests { +class AggregationOptionsTests { + private final Document dummyHint = new Document("dummyField", 1); AggregationOptions aggregationOptions; - @Before - public void setup() { + @BeforeEach + void setup() { aggregationOptions = newAggregationOptions().explain(true) // .cursorBatchSize(1) // .allowDiskUse(true) // + .comment("hola") // + .hint(dummyHint) // .build(); } - @Test // DATAMONGO-960 - public void aggregationOptionsBuilderShouldSetOptionsAccordingly() { + @Test // DATAMONGO-960, DATAMONGO-1836 + void aggregationOptionsBuilderShouldSetOptionsAccordingly() { - assertThat(aggregationOptions.isAllowDiskUse(), is(true)); - assertThat(aggregationOptions.isExplain(), is(true)); - assertThat(aggregationOptions.getCursor().get(), is(new Document("batchSize", 1))); + assertThat(aggregationOptions.isAllowDiskUse()).isTrue(); + assertThat(aggregationOptions.isExplain()).isTrue(); + assertThat(aggregationOptions.getCursor()).contains(new Document("batchSize", 1)); + assertThat(aggregationOptions.getHint()).contains(dummyHint); + assertThat(aggregationOptions.getHintObject()).contains(dummyHint); } - @Test // DATAMONGO-1637 - public void shouldInitializeFromDocument() { + @Test // DATAMONGO-1637, DATAMONGO-2153, DATAMONGO-1836 + void shouldInitializeFromDocument() { Document document = new Document(); document.put("cursor", new Document("batchSize", 1)); document.put("explain", true); document.put("allowDiskUse", true); + document.put("comment", "hola"); + document.put("hint", dummyHint); aggregationOptions = AggregationOptions.fromDocument(document); - assertThat(aggregationOptions.isAllowDiskUse(), is(true)); - assertThat(aggregationOptions.isExplain(), is(true)); - assertThat(aggregationOptions.getCursor().get(), is(new Document("batchSize", 1))); - 
assertThat(aggregationOptions.getCursorBatchSize(), is(1)); + assertThat(aggregationOptions.isAllowDiskUse()).isTrue(); + assertThat(aggregationOptions.isExplain()).isTrue(); + assertThat(aggregationOptions.getCursor()).contains(new Document("batchSize", 1)); + assertThat(aggregationOptions.getCursorBatchSize()).isEqualTo(1); + assertThat(aggregationOptions.getComment()).contains("hola"); + assertThat(aggregationOptions.getHint()).contains(dummyHint); + assertThat(aggregationOptions.getHintObject()).contains(dummyHint); } - @Test // DATAMONGO-960 - public void aggregationOptionsToString() { - assertThat(aggregationOptions.toDocument(), - is(Document.parse("{ \"allowDiskUse\" : true , \"explain\" : true , \"cursor\" : { \"batchSize\" : 1}}"))); + @Test // GH-4664 + void omitsAllowDiskUseByDefault() { + + aggregationOptions = AggregationOptions.fromDocument(new Document()); + + assertThat(aggregationOptions.isAllowDiskUse()).isFalse(); + assertThat(aggregationOptions.isAllowDiskUseSet()).isFalse(); + + assertThat(aggregationOptions.toDocument()).doesNotContainKey("allowDiskUse"); + } + + @Test // GH-4664 + void applyOptionsDoesNotChangeAllowDiskUseDefault() { + + aggregationOptions = AggregationOptions.fromDocument(new Document()); + + Document empty = new Document(); + aggregationOptions.applyAndReturnPotentiallyChangedCommand(empty); + + assertThat(empty).doesNotContainKey("allowDiskUse"); + } + + @Test // GH-4664 + void applyOptionsDoesNotChangeExistingAllowDiskUse() { + + aggregationOptions = AggregationOptions.fromDocument(new Document()); + + Document existing = new Document("allowDiskUse", true); + aggregationOptions.applyAndReturnPotentiallyChangedCommand(existing); + + assertThat(existing).containsEntry("allowDiskUse", true); + } + + @Test // DATAMONGO-960, DATAMONGO-2153, DATAMONGO-1836 + void aggregationOptionsToString() { + + assertThat(aggregationOptions.toDocument()).isEqualTo(Document + .parse("{ " + "\"allowDiskUse\" : true , " + "\"explain\" : true , 
" + "\"cursor\" : { \"batchSize\" : 1}, " + + "\"comment\": \"hola\", " + "\"hint\" : { \"dummyField\" : 1}" + "}")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java index 5167eb2dff..99579b34a7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,63 +15,67 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; import static org.springframework.data.domain.Sort.Direction.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; -import static org.springframework.data.mongodb.core.aggregation.Fields.*; import static org.springframework.data.mongodb.core.query.Criteria.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; - -import lombok.Builder; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.io.BufferedInputStream; import java.text.ParseException; import java.text.SimpleDateFormat; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; 
+import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoField; +import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.List; +import java.util.Objects; import java.util.Scanner; +import java.util.stream.Stream; +import org.assertj.core.data.Offset; import org.bson.Document; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.LocalDateTime; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.core.io.ClassPathResource; -import org.springframework.dao.DataAccessException; import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.geo.Box; import org.springframework.data.geo.Metrics; -import org.springframework.data.mapping.MappingException; -import org.springframework.data.mongodb.core.CollectionCallback; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.BulkOperations; import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.TestEntities; import org.springframework.data.mongodb.core.Venue; import org.springframework.data.mongodb.core.aggregation.AggregationTests.CarDescriptor.Entry; import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.Granularities; +import 
org.springframework.data.mongodb.core.aggregation.VariableOperators.Let; import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; +import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import org.springframework.data.mongodb.core.index.GeospatialIndex; +import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; import org.springframework.data.mongodb.test.util.MongoVersion; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.CloseableIterator; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.test.util.Template; +import org.springframework.util.ObjectUtils; -import com.mongodb.MongoException; import com.mongodb.client.MongoCollection; /** @@ -85,52 +89,40 @@ * @author Nikolay Bogdanov * @author Maninder Singh * @author Sergey Shcherbakov + * @author Minsu Kim + * @author Sangyong Choi + * @author Julia Lee */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") +@ExtendWith(MongoTemplateExtension.class) public class AggregationTests { private static final String INPUT_COLLECTION = "aggregation_test_collection"; - private static final Logger LOGGER = LoggerFactory.getLogger(AggregationTests.class); private static boolean initialized = false; + private static List 
documents = parseDocuments(); - @Autowired MongoTemplate mongoTemplate; - - @Rule public ExpectedException exception = ExpectedException.none(); - @Rule public MongoVersionRule mongoVersion = MongoVersionRule.any(); + @Template // + private static MongoTestTemplate mongoTemplate; - @Before - public void setUp() { + @BeforeEach + void setUp() { cleanDb(); initSampleDataIfNecessary(); } - @After - public void cleanUp() { + @AfterEach + void cleanUp() { cleanDb(); } private void cleanDb() { + mongoTemplate.flush(Product.class, UserWithLikes.class, DATAMONGO753.class, Data.class, DATAMONGO788.class, + User.class, Person.class, Reservation.class, Venue.class, MeterData.class, LineItem.class, InventoryItem.class, + Sales.class, Sales2.class, Employee.class, Art.class, Venue.class, Item.class); + mongoTemplate.dropCollection(INPUT_COLLECTION); - mongoTemplate.dropCollection(Product.class); - mongoTemplate.dropCollection(UserWithLikes.class); - mongoTemplate.dropCollection(DATAMONGO753.class); - mongoTemplate.dropCollection(Data.class); - mongoTemplate.dropCollection(DATAMONGO788.class); - mongoTemplate.dropCollection(User.class); - mongoTemplate.dropCollection(Person.class); - mongoTemplate.dropCollection(Reservation.class); - mongoTemplate.dropCollection(Venue.class); - mongoTemplate.dropCollection(MeterData.class); - mongoTemplate.dropCollection(LineItem.class); - mongoTemplate.dropCollection(InventoryItem.class); - mongoTemplate.dropCollection(Sales.class); - mongoTemplate.dropCollection(Sales2.class); - mongoTemplate.dropCollection(Employee.class); - mongoTemplate.dropCollection(Art.class); mongoTemplate.dropCollection("personQueryTemp"); } @@ -144,56 +136,58 @@ private void initSampleDataIfNecessary() { if (!initialized) { - LOGGER.debug("Server uses MongoDB Version: {}", mongoVersion); - mongoTemplate.dropCollection(ZipInfo.class); - mongoTemplate.execute(ZipInfo.class, new CollectionCallback() { - - @Override - public Void doInCollection(MongoCollection collection) 
throws MongoException, DataAccessException { - - Scanner scanner = null; - try { - scanner = new Scanner(new BufferedInputStream(new ClassPathResource("zips.json").getInputStream())); - while (scanner.hasNextLine()) { - String zipInfoRecord = scanner.nextLine(); - collection.insertOne(Document.parse(zipInfoRecord)); - } - } catch (Exception e) { - if (scanner != null) { - scanner.close(); - } - throw new RuntimeException("Could not load mongodb sample dataset!", e); - } - - return null; - } - }); + + mongoTemplate.bulkOps(BulkOperations.BulkMode.UNORDERED, ZipInfo.class).insert(documents).execute(); long count = mongoTemplate.count(new Query(), ZipInfo.class); - assertThat(count, is(29467L)); + assertThat(count).isEqualTo(29467L); initialized = true; } } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-586 - public void shouldHandleMissingInputCollection() { - mongoTemplate.aggregate(newAggregation(), (String) null, TagCount.class); + static List parseDocuments() { + + Scanner scanner = null; + List documents = new ArrayList<>(30000); + + try { + scanner = new Scanner(new BufferedInputStream(new ClassPathResource("zips.json").getInputStream())); + while (scanner.hasNextLine()) { + String zipInfoRecord = scanner.nextLine(); + documents.add(Document.parse(zipInfoRecord)); + } + } catch (Exception e) { + if (scanner != null) { + scanner.close(); + } + throw new RuntimeException("Could not load mongodb sample dataset", e); + } + + return documents; + } + + @Test // DATAMONGO-586 + void shouldHandleMissingInputCollection() { + assertThatIllegalArgumentException() + .isThrownBy(() -> mongoTemplate.aggregate(newAggregation(), (String) null, TagCount.class)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-586 - public void shouldHandleMissingAggregationPipeline() { - mongoTemplate.aggregate(null, INPUT_COLLECTION, TagCount.class); + @Test // DATAMONGO-586 + void shouldHandleMissingAggregationPipeline() { + 
assertThatIllegalArgumentException() + .isThrownBy(() -> mongoTemplate.aggregate(null, INPUT_COLLECTION, TagCount.class)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-586 - public void shouldHandleMissingEntityClass() { - mongoTemplate.aggregate(newAggregation(), INPUT_COLLECTION, null); + @Test // DATAMONGO-586 + void shouldHandleMissingEntityClass() { + assertThatIllegalArgumentException() + .isThrownBy(() -> mongoTemplate.aggregate(newAggregation(), INPUT_COLLECTION, null)); } @Test // DATAMONGO-586 - public void shouldAggregate() { + void shouldAggregate() { createTagDocuments(); @@ -209,12 +203,12 @@ public void shouldAggregate() { AggregationResults results = mongoTemplate.aggregate(agg, INPUT_COLLECTION, TagCount.class); - assertThat(results, is(notNullValue())); + assertThat(results).isNotNull(); List tagCount = results.getMappedResults(); - assertThat(tagCount, is(notNullValue())); - assertThat(tagCount.size(), is(3)); + assertThat(tagCount).isNotNull(); + assertThat(tagCount.size()).isEqualTo(3); assertTagCount("spring", 3, tagCount.get(0)); assertTagCount("mongodb", 2, tagCount.get(1)); @@ -222,7 +216,7 @@ public void shouldAggregate() { } @Test // DATAMONGO-1637 - public void shouldAggregateAndStream() { + void shouldAggregateAndStream() { createTagDocuments(); @@ -236,22 +230,21 @@ public void shouldAggregateAndStream() { sort(DESC, "n") // ).withOptions(new AggregationOptions(true, false, 1)); - CloseableIterator iterator = mongoTemplate.aggregateStream(agg, INPUT_COLLECTION, TagCount.class); + try (Stream stream = mongoTemplate.aggregateStream(agg, INPUT_COLLECTION, TagCount.class)) { - assertThat(iterator, is(notNullValue())); - List tagCount = toList(iterator); - iterator.close(); + List tagCount = stream.toList(); - assertThat(tagCount, is(notNullValue())); - assertThat(tagCount.size(), is(3)); + assertThat(tagCount).isNotNull(); + assertThat(tagCount.size()).isEqualTo(3); - assertTagCount("spring", 3, tagCount.get(0)); - 
assertTagCount("mongodb", 2, tagCount.get(1)); - assertTagCount("nosql", 1, tagCount.get(2)); + assertTagCount("spring", 3, tagCount.get(0)); + assertTagCount("mongodb", 2, tagCount.get(1)); + assertTagCount("nosql", 1, tagCount.get(2)); + } } @Test // DATAMONGO-586 - public void shouldAggregateEmptyCollection() { + void shouldAggregateEmptyCollection() { Aggregation aggregation = newAggregation(// project("tags"), // @@ -265,16 +258,16 @@ public void shouldAggregateEmptyCollection() { AggregationResults results = mongoTemplate.aggregate(aggregation, INPUT_COLLECTION, TagCount.class); - assertThat(results, is(notNullValue())); + assertThat(results).isNotNull(); List tagCount = results.getMappedResults(); - assertThat(tagCount, is(notNullValue())); - assertThat(tagCount.size(), is(0)); + assertThat(tagCount).isNotNull(); + assertThat(tagCount.size()).isEqualTo(0); } @Test // DATAMONGO-1637 - public void shouldAggregateEmptyCollectionAndStream() { + void shouldAggregateEmptyCollectionAndStream() { Aggregation aggregation = newAggregation(// project("tags"), // @@ -286,19 +279,16 @@ public void shouldAggregateEmptyCollectionAndStream() { sort(DESC, "n") // ); - CloseableIterator results = mongoTemplate.aggregateStream(aggregation, INPUT_COLLECTION, TagCount.class); - - assertThat(results, is(notNullValue())); + try (Stream stream = mongoTemplate.aggregateStream(aggregation, INPUT_COLLECTION, TagCount.class)) { - List tagCount = toList(results); - results.close(); + List tagCount = stream.toList(); - assertThat(tagCount.size(), is(0)); + assertThat(tagCount.size()).isEqualTo(0); + } } @Test // DATAMONGO-1391 - @MongoVersion(asOf = "3.2") - public void shouldUnwindWithIndex() { + void shouldUnwindWithIndex() { MongoCollection coll = mongoTemplate.getCollection(INPUT_COLLECTION); @@ -315,17 +305,17 @@ public void shouldUnwindWithIndex() { AggregationResults results = mongoTemplate.aggregate(agg, INPUT_COLLECTION, TagCount.class); - assertThat(results, 
is(notNullValue())); + assertThat(results).isNotNull(); List tagCount = results.getMappedResults(); - assertThat(tagCount, is(notNullValue())); - assertThat(tagCount.size(), is(3)); + assertThat(tagCount).isNotNull(); + assertThat(tagCount.size()).isEqualTo(3); } @Test // DATAMONGO-1391 - @MongoVersion(asOf = "3.2") - public void shouldUnwindPreserveEmpty() { + @EnableIfMongoServerVersion(isLessThan = "6.0") // $sort does not seem to have an effect on $unwind + void shouldUnwindPreserveEmpty() { MongoCollection coll = mongoTemplate.getCollection(INPUT_COLLECTION); @@ -340,18 +330,18 @@ public void shouldUnwindPreserveEmpty() { AggregationResults results = mongoTemplate.aggregate(agg, INPUT_COLLECTION, Document.class); - assertThat(results, is(notNullValue())); + assertThat(results).isNotNull(); List tagCount = results.getMappedResults(); - assertThat(tagCount, is(notNullValue())); - assertThat(tagCount.size(), is(4)); - assertThat(tagCount.get(0), isBsonObject().containing("n", 2L)); - assertThat(tagCount.get(3), isBsonObject().notContaining("n")); + assertThat(tagCount).isNotNull(); + assertThat(tagCount.size()).isEqualTo(4); + assertThat(tagCount.get(0)).containsEntry("n", 2L); + assertThat(tagCount.get(3)).containsEntry("n", null); } @Test // DATAMONGO-586 - public void shouldDetectResultMismatch() { + void shouldDetectResultMismatch() { createTagDocuments(); @@ -365,18 +355,18 @@ public void shouldDetectResultMismatch() { AggregationResults results = mongoTemplate.aggregate(aggregation, INPUT_COLLECTION, TagCount.class); - assertThat(results, is(notNullValue())); + assertThat(results).isNotNull(); List tagCount = results.getMappedResults(); - assertThat(tagCount, is(notNullValue())); - assertThat(tagCount.size(), is(2)); + assertThat(tagCount).isNotNull(); + assertThat(tagCount.size()).isEqualTo(2); assertTagCount(null, 0, tagCount.get(0)); assertTagCount(null, 0, tagCount.get(1)); } @Test // DATAMONGO-1637 - public void 
shouldDetectResultMismatchWhileStreaming() { + void shouldDetectResultMismatchWhileStreaming() { createTagDocuments(); @@ -388,20 +378,18 @@ public void shouldDetectResultMismatchWhileStreaming() { limit(2) // ); - CloseableIterator results = mongoTemplate.aggregateStream(aggregation, INPUT_COLLECTION, TagCount.class); - - assertThat(results, is(notNullValue())); + try (Stream stream = mongoTemplate.aggregateStream(aggregation, INPUT_COLLECTION, TagCount.class)) { - List tagCount = toList(results); - results.close(); + List tagCount = stream.toList(); - assertThat(tagCount.size(), is(2)); - assertTagCount(null, 0, tagCount.get(0)); - assertTagCount(null, 0, tagCount.get(1)); + assertThat(tagCount.size()).isEqualTo(2); + assertTagCount(null, 0, tagCount.get(0)); + assertTagCount(null, 0, tagCount.get(1)); + } } @Test // DATAMONGO-586 - public void complexAggregationFrameworkUsageLargestAndSmallestCitiesByState() { + void complexAggregationFrameworkUsageLargestAndSmallestCitiesByState() { /* //complex mongodb aggregation framework example from https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state db.zipInfo.aggregate( @@ -477,43 +465,43 @@ public void complexAggregationFrameworkUsageLargestAndSmallestCitiesByState() { sort(ASC, "state") // ); - assertThat(aggregation, is(notNullValue())); - assertThat(aggregation.toString(), is(notNullValue())); + assertThat(aggregation).isNotNull(); + assertThat(aggregation.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class); - assertThat(result, is(notNullValue())); - assertThat(result.getMappedResults(), is(notNullValue())); - assertThat(result.getMappedResults().size(), is(51)); + assertThat(result).isNotNull(); + assertThat(result.getMappedResults()).isNotNull(); + assertThat(result.getMappedResults().size()).isEqualTo(51); ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0); - assertThat(firstZipInfoStats, 
is(notNullValue())); - assertThat(firstZipInfoStats.id, is(nullValue())); - assertThat(firstZipInfoStats.state, is("AK")); - assertThat(firstZipInfoStats.smallestCity, is(notNullValue())); - assertThat(firstZipInfoStats.smallestCity.name, is("CHEVAK")); - assertThat(firstZipInfoStats.smallestCity.population, is(0)); - assertThat(firstZipInfoStats.biggestCity, is(notNullValue())); - assertThat(firstZipInfoStats.biggestCity.name, is("ANCHORAGE")); - assertThat(firstZipInfoStats.biggestCity.population, is(183987)); + assertThat(firstZipInfoStats).isNotNull(); + assertThat(firstZipInfoStats.id).isNull(); + assertThat(firstZipInfoStats.state).isEqualTo("AK"); + assertThat(firstZipInfoStats.smallestCity).isNotNull(); + assertThat(firstZipInfoStats.smallestCity.name).isEqualTo("CHEVAK"); + assertThat(firstZipInfoStats.smallestCity.population).isEqualTo(0); + assertThat(firstZipInfoStats.biggestCity).isNotNull(); + assertThat(firstZipInfoStats.biggestCity.name).isEqualTo("ANCHORAGE"); + assertThat(firstZipInfoStats.biggestCity.population).isEqualTo(183987); ZipInfoStats lastZipInfoStats = result.getMappedResults().get(50); - assertThat(lastZipInfoStats, is(notNullValue())); - assertThat(lastZipInfoStats.id, is(nullValue())); - assertThat(lastZipInfoStats.state, is("WY")); - assertThat(lastZipInfoStats.smallestCity, is(notNullValue())); - assertThat(lastZipInfoStats.smallestCity.name, is("LOST SPRINGS")); - assertThat(lastZipInfoStats.smallestCity.population, is(6)); - assertThat(lastZipInfoStats.biggestCity, is(notNullValue())); - assertThat(lastZipInfoStats.biggestCity.name, is("CHEYENNE")); - assertThat(lastZipInfoStats.biggestCity.population, is(70185)); + assertThat(lastZipInfoStats).isNotNull(); + assertThat(lastZipInfoStats.id).isNull(); + assertThat(lastZipInfoStats.state).isEqualTo("WY"); + assertThat(lastZipInfoStats.smallestCity).isNotNull(); + assertThat(lastZipInfoStats.smallestCity.name).isEqualTo("LOST SPRINGS"); + 
assertThat(lastZipInfoStats.smallestCity.population).isEqualTo(6); + assertThat(lastZipInfoStats.biggestCity).isNotNull(); + assertThat(lastZipInfoStats.biggestCity.name).isEqualTo("CHEYENNE"); + assertThat(lastZipInfoStats.biggestCity.population).isEqualTo(70185); } @Test // DATAMONGO-586 - public void findStatesWithPopulationOver10MillionAggregationExample() { + void findStatesWithPopulationOver10MillionAggregationExample() { /* //complex mongodb aggregation framework example from https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state - + db.zipcodes.aggregate( { $group: { @@ -539,19 +527,19 @@ public void findStatesWithPopulationOver10MillionAggregationExample() { match(where("totalPop").gte(10 * 1000 * 1000)) // ); - assertThat(agg, is(notNullValue())); - assertThat(agg.toString(), is(notNullValue())); + assertThat(agg).isNotNull(); + assertThat(agg.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(agg, StateStats.class); - assertThat(result, is(notNullValue())); - assertThat(result.getMappedResults(), is(notNullValue())); - assertThat(result.getMappedResults().size(), is(7)); + assertThat(result).isNotNull(); + assertThat(result.getMappedResults()).isNotNull(); + assertThat(result.getMappedResults().size()).isEqualTo(7); StateStats stateStats = result.getMappedResults().get(0); - assertThat(stateStats, is(notNullValue())); - assertThat(stateStats.id, is("CA")); - assertThat(stateStats.state, is(nullValue())); - assertThat(stateStats.totalPopulation, is(29760021)); + assertThat(stateStats).isNotNull(); + assertThat(stateStats.id).isEqualTo("CA"); + assertThat(stateStats.state).isNull(); + assertThat(stateStats.totalPopulation).isEqualTo(29760021); } /** @@ -559,7 +547,7 @@ public void findStatesWithPopulationOver10MillionAggregationExample() { * Framework: $cond */ @Test // DATAMONGO-861 - public void aggregationUsingConditionalProjectionToCalculateDiscount() { + void 
aggregationUsingConditionalProjectionToCalculateDiscount() { /* db.inventory.aggregate( @@ -589,22 +577,22 @@ public void aggregationUsingConditionalProjectionToCalculateDiscount() { .then(30) // .otherwise(20))); - assertThat(aggregation.toString(), is(notNullValue())); + assertThat(aggregation.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); - assertThat(result.getMappedResults().size(), is(3)); + assertThat(result.getMappedResults().size()).isEqualTo(3); Document first = result.getMappedResults().get(0); - assertThat(first.get("_id"), is((Object) 1)); - assertThat(first.get("discount"), is((Object) 30)); + assertThat(first.get("_id")).isEqualTo((Object) 1); + assertThat(first.get("discount")).isEqualTo((Object) 30); Document second = result.getMappedResults().get(1); - assertThat(second.get("_id"), is((Object) 2)); - assertThat(second.get("discount"), is((Object) 20)); + assertThat(second.get("_id")).isEqualTo((Object) 2); + assertThat(second.get("discount")).isEqualTo((Object) 20); Document third = result.getMappedResults().get(2); - assertThat(third.get("_id"), is((Object) 3)); - assertThat(third.get("discount"), is((Object) 30)); + assertThat(third.get("_id")).isEqualTo((Object) 3); + assertThat(third.get("discount")).isEqualTo((Object) 30); } /** @@ -612,7 +600,7 @@ public void aggregationUsingConditionalProjectionToCalculateDiscount() { * Framework: $ifNull */ @Test // DATAMONGO-861 - public void aggregationUsingIfNullToProjectSaneDefaults() { + void aggregationUsingIfNullToProjectSaneDefaults() { /* db.inventory.aggregate( @@ -637,22 +625,22 @@ public void aggregationUsingIfNullToProjectSaneDefaults() { .as("description")// ); - assertThat(aggregation.toString(), is(notNullValue())); + assertThat(aggregation.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); - assertThat(result.getMappedResults().size(), is(3)); + 
assertThat(result.getMappedResults().size()).isEqualTo(3); Document first = result.getMappedResults().get(0); - assertThat(first.get("_id"), is((Object) 1)); - assertThat(first.get("description"), is((Object) "product 1")); + assertThat(first.get("_id")).isEqualTo((Object) 1); + assertThat(first.get("description")).isEqualTo((Object) "product 1"); Document second = result.getMappedResults().get(1); - assertThat(second.get("_id"), is((Object) 2)); - assertThat(second.get("description"), is((Object) "Unspecified")); + assertThat(second.get("_id")).isEqualTo((Object) 2); + assertThat(second.get("description")).isEqualTo((Object) "Unspecified"); } @Test // DATAMONGO-861 - public void aggregationUsingConditionalProjection() { + void aggregationUsingConditionalProjection() { TypedAggregation aggregation = newAggregation(ZipInfo.class, // project() // @@ -662,19 +650,19 @@ public void aggregationUsingConditionalProjection() { .otherwise(false)) // .and("population").as("population")); - assertThat(aggregation, is(notNullValue())); - assertThat(aggregation.toString(), is(notNullValue())); + assertThat(aggregation).isNotNull(); + assertThat(aggregation.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); - assertThat(result.getMappedResults().size(), is(29467)); + assertThat(result.getMappedResults().size()).isEqualTo(29467); Document firstZipInfoStats = result.getMappedResults().get(0); - assertThat(firstZipInfoStats.get("largePopulation"), is((Object) false)); - assertThat(firstZipInfoStats.get("population"), is((Object) 6055)); + assertThat(firstZipInfoStats.get("largePopulation")).isEqualTo((Object) false); + assertThat(firstZipInfoStats.get("population")).isEqualTo((Object) 6055); } @Test // DATAMONGO-861 - public void aggregationUsingNestedConditionalProjection() { + void aggregationUsingNestedConditionalProjection() { TypedAggregation aggregation = newAggregation(ZipInfo.class, // project() // @@ -685,19 +673,19 
@@ public void aggregationUsingNestedConditionalProjection() { .otherwise("small")) // .and("population").as("population")); - assertThat(aggregation, is(notNullValue())); - assertThat(aggregation.toString(), is(notNullValue())); + assertThat(aggregation).isNotNull(); + assertThat(aggregation.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); - assertThat(result.getMappedResults().size(), is(29467)); + assertThat(result.getMappedResults().size()).isEqualTo(29467); Document firstZipInfoStats = result.getMappedResults().get(0); - assertThat(firstZipInfoStats.get("size"), is((Object) "small")); - assertThat(firstZipInfoStats.get("population"), is((Object) 6055)); + assertThat(firstZipInfoStats.get("size")).isEqualTo((Object) "small"); + assertThat(firstZipInfoStats.get("population")).isEqualTo((Object) 6055); } @Test // DATAMONGO-861 - public void aggregationUsingIfNullProjection() { + void aggregationUsingIfNullProjection() { mongoTemplate.insert(new LineItem("id", "caption", 0)); mongoTemplate.insert(new LineItem("idonly", null, 0)); @@ -708,20 +696,20 @@ public void aggregationUsingIfNullProjection() { .applyCondition(ConditionalOperators.ifNull("caption").then("unknown")), sort(ASC, "id")); - assertThat(aggregation.toString(), is(notNullValue())); + assertThat(aggregation.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); - assertThat(result.getMappedResults().size(), is(2)); + assertThat(result.getMappedResults().size()).isEqualTo(2); Document id = result.getMappedResults().get(0); - assertThat((String) id.get("caption"), is(equalTo("caption"))); + assertThat((String) id.get("caption")).isEqualTo("caption"); Document idonly = result.getMappedResults().get(1); - assertThat((String) idonly.get("caption"), is(equalTo("unknown"))); + assertThat((String) idonly.get("caption")).isEqualTo("unknown"); } @Test // DATAMONGO-861 - public void 
aggregationUsingIfNullReplaceWithFieldReferenceProjection() { + void aggregationUsingIfNullReplaceWithFieldReferenceProjection() { mongoTemplate.insert(new LineItem("id", "caption", 0)); mongoTemplate.insert(new LineItem("idonly", null, 0)); @@ -732,20 +720,20 @@ public void aggregationUsingIfNullReplaceWithFieldReferenceProjection() { .applyCondition(ConditionalOperators.ifNull("caption").thenValueOf("id")), sort(ASC, "id")); - assertThat(aggregation.toString(), is(notNullValue())); + assertThat(aggregation.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); - assertThat(result.getMappedResults().size(), is(2)); + assertThat(result.getMappedResults().size()).isEqualTo(2); Document id = result.getMappedResults().get(0); - assertThat((String) id.get("caption"), is(equalTo("caption"))); + assertThat((String) id.get("caption")).isEqualTo("caption"); Document idonly = result.getMappedResults().get(1); - assertThat((String) idonly.get("caption"), is(equalTo("idonly"))); + assertThat((String) idonly.get("caption")).isEqualTo("idonly"); } @Test // DATAMONGO-861 - public void shouldAllowGroupingUsingConditionalExpressions() { + void shouldAllowGroupingUsingConditionalExpressions() { mongoTemplate.dropCollection(CarPerson.class); @@ -771,24 +759,25 @@ public void shouldAllowGroupingUsingConditionalExpressions() { group("make").avg(ConditionalOperators // .when(Criteria.where("year").gte(2012)) // .then(1) // - .otherwise(9000)).as("score"), - sort(ASC, "make")); + .otherwise(9000)) // + .as("score"), + sort(ASC, "score")); AggregationResults result = mongoTemplate.aggregate(agg, Document.class); - assertThat(result.getMappedResults(), hasSize(2)); + assertThat(result.getMappedResults()).hasSize(2); Document meh = result.getMappedResults().get(0); - assertThat((String) meh.get("_id"), is(equalTo("meh"))); - assertThat(((Number) meh.get("score")).longValue(), is(equalTo(1L))); + assertThat((String) 
meh.get("_id")).isEqualTo("meh"); + assertThat(((Number) meh.get("score")).longValue()).isEqualTo(1L); Document good = result.getMappedResults().get(1); - assertThat((String) good.get("_id"), is(equalTo("good"))); - assertThat(((Number) good.get("score")).longValue(), is(equalTo(9000L))); + assertThat((String) good.get("_id")).isEqualTo("good"); + assertThat(((Number) good.get("score")).longValue()).isEqualTo(9000L); } - @Test // DATAMONGO-1784 - public void shouldAllowSumUsingConditionalExpressions() { + @Test // DATAMONGO-1784, DATAMONGO-2264 + void shouldAllowSumUsingConditionalExpressions() { mongoTemplate.dropCollection(CarPerson.class); @@ -815,19 +804,19 @@ public void shouldAllowSumUsingConditionalExpressions() { .when(Criteria.where("year").gte(2012)) // .then(1) // .otherwise(9000)).as("score"), - sort(ASC, "make")); + sort(ASC, "score")); AggregationResults result = mongoTemplate.aggregate(agg, Document.class); - assertThat(result.getMappedResults(), hasSize(2)); + assertThat(result.getMappedResults()).hasSize(2); Document meh = result.getMappedResults().get(0); - assertThat(meh.get("_id"), is(equalTo("meh"))); - assertThat(((Number) meh.get("score")).longValue(), is(equalTo(2L))); + assertThat(meh.get("_id")).isEqualTo("meh"); + assertThat(((Number) meh.get("score")).longValue()).isEqualTo(2L); Document good = result.getMappedResults().get(1); - assertThat(good.get("_id"), is(equalTo("good"))); - assertThat(((Number) good.get("score")).longValue(), is(equalTo(18000L))); + assertThat(good.get("_id")).isEqualTo("good"); + assertThat(((Number) good.get("score")).longValue()).isEqualTo(18000L); } /** @@ -836,19 +825,19 @@ public void shouldAllowSumUsingConditionalExpressions() { * the Five Most Common “Likes” */ @Test // DATAMONGO-586 - public void returnFiveMostCommonLikesAggregationFrameworkExample() { + void returnFiveMostCommonLikesAggregationFrameworkExample() { createUserWithLikesDocuments(); TypedAggregation agg = 
createUsersWithCommonLikesAggregation(); - assertThat(agg, is(notNullValue())); - assertThat(agg.toString(), is(notNullValue())); + assertThat(agg).isNotNull(); + assertThat(agg.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(agg, LikeStats.class); - assertThat(result, is(notNullValue())); - assertThat(result.getMappedResults(), is(notNullValue())); - assertThat(result.getMappedResults().size(), is(5)); + assertThat(result).isNotNull(); + assertThat(result.getMappedResults()).isNotNull(); + assertThat(result.getMappedResults().size()).isEqualTo(5); assertLikeStats(result.getMappedResults().get(0), "a", 4); assertLikeStats(result.getMappedResults().get(1), "b", 2); @@ -857,7 +846,7 @@ public void returnFiveMostCommonLikesAggregationFrameworkExample() { assertLikeStats(result.getMappedResults().get(4), "e", 3); } - protected TypedAggregation createUsersWithCommonLikesAggregation() { + TypedAggregation createUsersWithCommonLikesAggregation() { return newAggregation(UserWithLikes.class, // unwind("likes"), // group("likes").count().as("number"), // @@ -868,7 +857,7 @@ protected TypedAggregation createUsersWithCommonLikesAggregation( } @Test // DATAMONGO-586 - public void arithmenticOperatorsInProjectionExample() { + void arithmenticOperatorsInProjectionExample() { Product product = new Product("P1", "A", 1.99, 3, 0.05, 0.19); mongoTemplate.insert(product); @@ -890,27 +879,28 @@ public void arithmenticOperatorsInProjectionExample() { AggregationResults result = mongoTemplate.aggregate(agg, Document.class); List resultList = result.getMappedResults(); - assertThat(resultList, is(notNullValue())); - assertThat((String) resultList.get(0).get("_id"), is(product.id)); - assertThat((String) resultList.get(0).get("name"), is(product.name)); - assertThat((Double) resultList.get(0).get("netPricePlus1"), is(product.netPrice + 1)); - assertThat((Double) resultList.get(0).get("netPriceMinus1"), is(product.netPrice - 1)); - assertThat((Double) 
resultList.get(0).get("netPriceMul2"), is(product.netPrice * 2)); - assertThat((Double) resultList.get(0).get("netPriceDiv119"), is(product.netPrice / 1.19)); - assertThat((Integer) resultList.get(0).get("spaceUnitsMod2"), is(product.spaceUnits % 2)); - assertThat((Integer) resultList.get(0).get("spaceUnitsPlusSpaceUnits"), - is(product.spaceUnits + product.spaceUnits)); - assertThat((Integer) resultList.get(0).get("spaceUnitsMinusSpaceUnits"), - is(product.spaceUnits - product.spaceUnits)); - assertThat((Integer) resultList.get(0).get("spaceUnitsMultiplySpaceUnits"), - is(product.spaceUnits * product.spaceUnits)); - assertThat((Double) resultList.get(0).get("spaceUnitsDivideSpaceUnits"), - is((double) (product.spaceUnits / product.spaceUnits))); - assertThat((Integer) resultList.get(0).get("spaceUnitsModSpaceUnits"), is(product.spaceUnits % product.spaceUnits)); + assertThat(resultList).isNotNull(); + assertThat((String) resultList.get(0).get("_id")).isEqualTo(product.id); + assertThat((String) resultList.get(0).get("name")).isEqualTo(product.name); + assertThat((Double) resultList.get(0).get("netPricePlus1")).isEqualTo(product.netPrice + 1); + assertThat((Double) resultList.get(0).get("netPriceMinus1")).isEqualTo(product.netPrice - 1); + assertThat((Double) resultList.get(0).get("netPriceMul2")).isEqualTo(product.netPrice * 2); + assertThat((Double) resultList.get(0).get("netPriceDiv119")).isEqualTo(product.netPrice / 1.19); + assertThat((Integer) resultList.get(0).get("spaceUnitsMod2")).isEqualTo(product.spaceUnits % 2); + assertThat((Integer) resultList.get(0).get("spaceUnitsPlusSpaceUnits")) + .isEqualTo(product.spaceUnits + product.spaceUnits); + assertThat((Integer) resultList.get(0).get("spaceUnitsMinusSpaceUnits")) + .isEqualTo(product.spaceUnits - product.spaceUnits); + assertThat((Integer) resultList.get(0).get("spaceUnitsMultiplySpaceUnits")) + .isEqualTo(product.spaceUnits * product.spaceUnits); + assertThat((Double) 
resultList.get(0).get("spaceUnitsDivideSpaceUnits")) + .isEqualTo((double) (product.spaceUnits / product.spaceUnits)); + assertThat((Integer) resultList.get(0).get("spaceUnitsModSpaceUnits")) + .isEqualTo(product.spaceUnits % product.spaceUnits); } @Test // DATAMONGO-774 - public void expressionsInProjectionExample() { + void expressionsInProjectionExample() { Product product = new Product("P1", "A", 1.99, 3, 0.05, 0.19); mongoTemplate.insert(product); @@ -929,21 +919,20 @@ public void expressionsInProjectionExample() { AggregationResults result = mongoTemplate.aggregate(agg, Document.class); List resultList = result.getMappedResults(); - assertThat(resultList, is(notNullValue())); - assertThat((String) resultList.get(0).get("_id"), is(product.id)); - assertThat((String) resultList.get(0).get("name"), is(product.name)); - assertThat((Double) resultList.get(0).get("netPricePlus1"), is(product.netPrice + 1)); - assertThat((Double) resultList.get(0).get("netPriceMinus1"), is(product.netPrice - 1)); - assertThat((Double) resultList.get(0).get("netPriceDiv2"), is(product.netPrice / 2)); - assertThat((Double) resultList.get(0).get("grossPrice"), is(product.netPrice * 1.19)); - assertThat((Integer) resultList.get(0).get("spaceUnitsMod2"), is(product.spaceUnits % 2)); - assertThat((Double) resultList.get(0).get("grossPriceIncludingDiscountAndCharge"), - is((product.netPrice * 0.8 + 1.2) * 1.19)); + assertThat(resultList).isNotNull(); + assertThat((String) resultList.get(0).get("_id")).isEqualTo(product.id); + assertThat((String) resultList.get(0).get("name")).isEqualTo(product.name); + assertThat((Double) resultList.get(0).get("netPricePlus1")).isEqualTo(product.netPrice + 1); + assertThat((Double) resultList.get(0).get("netPriceMinus1")).isEqualTo(product.netPrice - 1); + assertThat((Double) resultList.get(0).get("netPriceDiv2")).isEqualTo(product.netPrice / 2); + assertThat((Double) resultList.get(0).get("grossPrice")).isEqualTo(product.netPrice * 1.19); + 
assertThat((Integer) resultList.get(0).get("spaceUnitsMod2")).isEqualTo(product.spaceUnits % 2); + assertThat((Double) resultList.get(0).get("grossPriceIncludingDiscountAndCharge")) + .isEqualTo((product.netPrice * 0.8 + 1.2) * 1.19); } @Test // DATAMONGO-774 - @MongoVersion(asOf = "2.4") - public void stringExpressionsInProjectionExample() { + void stringExpressionsInProjectionExample() { Product product = new Product("P1", "A", 1.99, 3, 0.05, 0.19); mongoTemplate.insert(product); @@ -956,14 +945,14 @@ public void stringExpressionsInProjectionExample() { AggregationResults result = mongoTemplate.aggregate(agg, Document.class); List resultList = result.getMappedResults(); - assertThat(resultList, is(notNullValue())); - assertThat((String) resultList.get(0).get("_id"), is(product.id)); - assertThat((String) resultList.get(0).get("name"), is(product.name)); - assertThat((String) resultList.get(0).get("name_bubu"), is(product.name + "_bubu")); + assertThat(resultList).isNotNull(); + assertThat((String) resultList.get(0).get("_id")).isEqualTo(product.id); + assertThat((String) resultList.get(0).get("name")).isEqualTo(product.name); + assertThat((String) resultList.get(0).get("name_bubu")).isEqualTo(product.name + "_bubu"); } @Test // DATAMONGO-774 - public void expressionsInProjectionExampleShowcase() { + void expressionsInProjectionExampleShowcase() { Product product = new Product("P1", "A", 1.99, 3, 0.05, 0.19); mongoTemplate.insert(product); @@ -978,38 +967,21 @@ public void expressionsInProjectionExampleShowcase() { AggregationResults result = mongoTemplate.aggregate(agg, Document.class); List resultList = result.getMappedResults(); - assertThat(resultList, is(notNullValue())); + assertThat(resultList).isNotNull(); Document firstItem = resultList.get(0); - assertThat((String) firstItem.get("_id"), is(product.id)); - assertThat((String) firstItem.get("name"), is(product.name)); - assertThat((Double) firstItem.get("salesPrice"), - is((product.netPrice * (1 - 
product.discountRate) + shippingCosts) * (1 + product.taxRate))); - } - - @Test - public void shouldThrowExceptionIfUnknownFieldIsReferencedInArithmenticExpressionsInProjection() { - - exception.expect(MappingException.class); - exception.expectMessage("unknown"); - - Product product = new Product("P1", "A", 1.99, 3, 0.05, 0.19); - mongoTemplate.insert(product); - - TypedAggregation agg = newAggregation(Product.class, // - project("name", "netPrice") // - .andExpression("unknown + 1").as("netPricePlus1") // - ); - - mongoTemplate.aggregate(agg, Document.class); + assertThat((String) firstItem.get("_id")).isEqualTo(product.id); + assertThat((String) firstItem.get("name")).isEqualTo(product.name); + assertThat((Double) firstItem.get("salesPrice")) + .isEqualTo((product.netPrice * (1 - product.discountRate) + shippingCosts) * (1 + product.taxRate)); } /** * @see Spring + * "https://stackoverflow.com/questions/18653574/spring-data-mongodb-aggregation-framework-invalid-reference-in-group-operati">Spring * Data MongoDB - Aggregation Framework - invalid reference in group Operation */ @Test // DATAMONGO-753 - public void allowsNestedFieldReferencesAsGroupIdsInGroupExpressions() { + void allowsNestedFieldReferencesAsGroupIdsInGroupExpressions() { mongoTemplate.insert(new DATAMONGO753().withPDs(new PD("A", 1), new PD("B", 1), new PD("C", 1))); mongoTemplate.insert(new DATAMONGO753().withPDs(new PD("B", 1), new PD("B", 1), new PD("C", 1))); @@ -1018,27 +990,28 @@ public void allowsNestedFieldReferencesAsGroupIdsInGroupExpressions() { unwind("pd"), // group("pd.pDch") // the nested field expression .sum("pd.up").as("uplift"), // - project("_id", "uplift")); + project("_id", "uplift"), // + sort(Sort.by("uplift"))); AggregationResults result = mongoTemplate.aggregate(agg, Document.class); List stats = result.getMappedResults(); - assertThat(stats.size(), is(3)); - assertThat(stats.get(0).get("_id").toString(), is("C")); - assertThat((Integer) stats.get(0).get("uplift"), 
is(2)); - assertThat(stats.get(1).get("_id").toString(), is("B")); - assertThat((Integer) stats.get(1).get("uplift"), is(3)); - assertThat(stats.get(2).get("_id").toString(), is("A")); - assertThat((Integer) stats.get(2).get("uplift"), is(1)); + assertThat(stats.size()).isEqualTo(3); + assertThat(stats.get(0).get("_id").toString()).isEqualTo("A"); + assertThat((Integer) stats.get(0).get("uplift")).isEqualTo(1); + assertThat(stats.get(1).get("_id").toString()).isEqualTo("C"); + assertThat((Integer) stats.get(1).get("uplift")).isEqualTo(2); + assertThat(stats.get(2).get("_id").toString()).isEqualTo("B"); + assertThat((Integer) stats.get(2).get("uplift")).isEqualTo(3); } /** * @see Spring + * "https://stackoverflow.com/questions/18653574/spring-data-mongodb-aggregation-framework-invalid-reference-in-group-operati">Spring * Data MongoDB - Aggregation Framework - invalid reference in group Operation */ @Test // DATAMONGO-753 - public void aliasesNestedFieldInProjectionImmediately() { + void aliasesNestedFieldInProjectionImmediately() { mongoTemplate.insert(new DATAMONGO753().withPDs(new PD("A", 1), new PD("B", 1), new PD("C", 1))); mongoTemplate.insert(new DATAMONGO753().withPDs(new PD("B", 1), new PD("B", 1), new PD("C", 1))); @@ -1050,81 +1023,76 @@ public void aliasesNestedFieldInProjectionImmediately() { AggregationResults results = mongoTemplate.aggregate(agg, Document.class); List mappedResults = results.getMappedResults(); - assertThat(mappedResults, hasSize(6)); + assertThat(mappedResults).hasSize(6); for (Document element : mappedResults) { - assertThat(element.get("up"), is((Object) 1)); + assertThat(element.get("up")).isEqualTo((Object) 1); } } @Test // DATAMONGO-774 - @MongoVersion(asOf = "2.4") - public void shouldPerformDateProjectionOperatorsCorrectly() throws ParseException { + void shouldPerformDateProjectionOperatorsCorrectly() throws ParseException { Data data = new Data(); data.stringValue = "ABC"; mongoTemplate.insert(data); - TypedAggregation agg = 
newAggregation(Data.class, - project() // - .andExpression("concat(stringValue, 'DE')").as("concat") // - .andExpression("strcasecmp(stringValue,'XYZ')").as("strcasecmp") // - .andExpression("substr(stringValue,1,1)").as("substr") // - .andExpression("toLower(stringValue)").as("toLower") // - .andExpression("toUpper(toLower(stringValue))").as("toUpper") // + TypedAggregation agg = newAggregation(Data.class, project() // + .andExpression("concat(stringValue, 'DE')").as("concat") // + .andExpression("strcasecmp(stringValue,'XYZ')").as("strcasecmp") // + .andExpression("substr(stringValue,1,1)").as("substr") // + .andExpression("toLower(stringValue)").as("toLower") // + .andExpression("toUpper(toLower(stringValue))").as("toUpper") // ); AggregationResults results = mongoTemplate.aggregate(agg, Document.class); Document document = results.getUniqueMappedResult(); - assertThat(document, is(notNullValue())); - assertThat((String) document.get("concat"), is("ABCDE")); - assertThat((Integer) document.get("strcasecmp"), is(-1)); - assertThat((String) document.get("substr"), is("B")); - assertThat((String) document.get("toLower"), is("abc")); - assertThat((String) document.get("toUpper"), is("ABC")); + assertThat(document).isNotNull(); + assertThat((String) document.get("concat")).isEqualTo("ABCDE"); + assertThat((Integer) document.get("strcasecmp")).isEqualTo(-1); + assertThat((String) document.get("substr")).isEqualTo("B"); + assertThat((String) document.get("toLower")).isEqualTo("abc"); + assertThat((String) document.get("toUpper")).isEqualTo("ABC"); } @Test // DATAMONGO-774 - @MongoVersion(asOf = "2.4") - public void shouldPerformStringProjectionOperatorsCorrectly() throws ParseException { + void shouldPerformStringProjectionOperatorsCorrectly() throws ParseException { Data data = new Data(); data.dateValue = new SimpleDateFormat("dd.MM.yyyy HH:mm:ss.SSSZ").parse("29.08.1983 12:34:56.789+0000"); mongoTemplate.insert(data); - TypedAggregation agg = 
newAggregation(Data.class, - project() // - .andExpression("dayOfYear(dateValue)").as("dayOfYear") // - .andExpression("dayOfMonth(dateValue)").as("dayOfMonth") // - .andExpression("dayOfWeek(dateValue)").as("dayOfWeek") // - .andExpression("year(dateValue)").as("year") // - .andExpression("month(dateValue)").as("month") // - .andExpression("week(dateValue)").as("week") // - .andExpression("hour(dateValue)").as("hour") // - .andExpression("minute(dateValue)").as("minute") // - .andExpression("second(dateValue)").as("second") // - .andExpression("millisecond(dateValue)").as("millisecond") // + TypedAggregation agg = newAggregation(Data.class, project() // + .andExpression("dayOfYear(dateValue)").as("dayOfYear") // + .andExpression("dayOfMonth(dateValue)").as("dayOfMonth") // + .andExpression("dayOfWeek(dateValue)").as("dayOfWeek") // + .andExpression("year(dateValue)").as("year") // + .andExpression("month(dateValue)").as("month") // + .andExpression("week(dateValue)").as("week") // + .andExpression("hour(dateValue)").as("hour") // + .andExpression("minute(dateValue)").as("minute") // + .andExpression("second(dateValue)").as("second") // + .andExpression("millisecond(dateValue)").as("millisecond") // ); AggregationResults results = mongoTemplate.aggregate(agg, Document.class); Document document = results.getUniqueMappedResult(); - assertThat(document, is(notNullValue())); - assertThat((Integer) document.get("dayOfYear"), is(241)); - assertThat((Integer) document.get("dayOfMonth"), is(29)); - assertThat((Integer) document.get("dayOfWeek"), is(2)); - assertThat((Integer) document.get("year"), is(1983)); - assertThat((Integer) document.get("month"), is(8)); - assertThat((Integer) document.get("week"), is(35)); - assertThat((Integer) document.get("hour"), is(12)); - assertThat((Integer) document.get("minute"), is(34)); - assertThat((Integer) document.get("second"), is(56)); - assertThat((Integer) document.get("millisecond"), is(789)); + assertThat(document).isNotNull(); 
+ assertThat((Integer) document.get("dayOfYear")).isEqualTo(241); + assertThat((Integer) document.get("dayOfMonth")).isEqualTo(29); + assertThat((Integer) document.get("dayOfWeek")).isEqualTo(2); + assertThat((Integer) document.get("year")).isEqualTo(1983); + assertThat((Integer) document.get("month")).isEqualTo(8); + assertThat((Integer) document.get("week")).isEqualTo(35); + assertThat((Integer) document.get("hour")).isEqualTo(12); + assertThat((Integer) document.get("minute")).isEqualTo(34); + assertThat((Integer) document.get("second")).isEqualTo(56); + assertThat((Integer) document.get("millisecond")).isEqualTo(789); } @Test // DATAMONGO-1550 - @MongoVersion(asOf = "3.4") - public void shouldPerformReplaceRootOperatorCorrectly() throws ParseException { + void shouldPerformReplaceRootOperatorCorrectly() throws ParseException { Data data = new Data(); DataItem dataItem = new DataItem(); @@ -1139,13 +1107,13 @@ public void shouldPerformReplaceRootOperatorCorrectly() throws ParseException { AggregationResults results = mongoTemplate.aggregate(agg, Document.class); Document resultDocument = results.getUniqueMappedResult(); - assertThat(resultDocument, is(notNullValue())); - assertThat((Integer) resultDocument.get("my_primitiveIntValue"), is(42)); - assertThat((Integer) resultDocument.keySet().size(), is(1)); + assertThat(resultDocument).isNotNull(); + assertThat((Integer) resultDocument.get("my_primitiveIntValue")).isEqualTo(42); + assertThat((Integer) resultDocument.keySet().size()).isEqualTo(1); } - @Test // DATAMONGO-788 - public void referencesToGroupIdsShouldBeRenderedProperly() { + @Test // DATAMONGO-788, DATAMONGO-2264 + void referencesToGroupIdsShouldBeRenderedProperly() { mongoTemplate.insert(new DATAMONGO788(1, 1)); mongoTemplate.insert(new DATAMONGO788(1, 1)); @@ -1158,29 +1126,29 @@ public void referencesToGroupIdsShouldBeRenderedProperly() { AggregationOperation project = Aggregation.project("xPerY", "x", "y").andExclude("_id"); TypedAggregation 
aggregation = Aggregation.newAggregation(DATAMONGO788.class, projectFirst, group, - project); + project, Aggregation.sort(Sort.by("xPerY"))); AggregationResults aggResults = mongoTemplate.aggregate(aggregation, Document.class); List items = aggResults.getMappedResults(); - assertThat(items.size(), is(2)); - assertThat((Integer) items.get(0).get("xPerY"), is(2)); - assertThat((Integer) items.get(0).get("x"), is(2)); - assertThat((Integer) items.get(0).get("y"), is(1)); - assertThat((Integer) items.get(1).get("xPerY"), is(3)); - assertThat((Integer) items.get(1).get("x"), is(1)); - assertThat((Integer) items.get(1).get("y"), is(1)); + assertThat(items.size()).isEqualTo(2); + assertThat((Integer) items.get(0).get("xPerY")).isEqualTo(2); + assertThat((Integer) items.get(0).get("x")).isEqualTo(2); + assertThat((Integer) items.get(0).get("y")).isEqualTo(1); + assertThat((Integer) items.get(1).get("xPerY")).isEqualTo(3); + assertThat((Integer) items.get(1).get("x")).isEqualTo(1); + assertThat((Integer) items.get(1).get("y")).isEqualTo(1); } @Test // DATAMONGO-806 - public void shouldAllowGroupByIdFields() { + void shouldAllowGroupByIdFields() { mongoTemplate.dropCollection(User.class); - LocalDateTime now = new LocalDateTime(); + Instant now = Instant.now(); - User user1 = new User("u1", new PushMessage("1", "aaa", now.toDate())); - User user2 = new User("u2", new PushMessage("2", "bbb", now.minusDays(2).toDate())); - User user3 = new User("u3", new PushMessage("3", "ccc", now.minusDays(1).toDate())); + User user1 = new User("u1", new PushMessage("1", "aaa", now)); + User user2 = new User("u2", new PushMessage("2", "bbb", now.minus(2, ChronoUnit.DAYS))); + User user3 = new User("u3", new PushMessage("3", "ccc", now.minus(1, ChronoUnit.DAYS))); mongoTemplate.save(user1); mongoTemplate.save(user2); @@ -1189,7 +1157,7 @@ public void shouldAllowGroupByIdFields() { Aggregation agg = newAggregation( // project("id", "msgs"), // unwind("msgs"), // - 
match(where("msgs.createDate").gt(now.minusDays(1).toDate())), // + match(where("msgs.createDate").gt(Date.from(now.minus(1, ChronoUnit.DAYS)))), // group("id").push("msgs").as("msgs") // ); @@ -1198,12 +1166,12 @@ public void shouldAllowGroupByIdFields() { List mappedResults = results.getMappedResults(); Document firstItem = mappedResults.get(0); - assertThat(firstItem.get("_id"), is(notNullValue())); - assertThat(String.valueOf(firstItem.get("_id")), is("u1")); + assertThat(firstItem.get("_id")).isNotNull(); + assertThat(String.valueOf(firstItem.get("_id"))).isEqualTo("u1"); } @Test // DATAMONGO-840 - public void shouldAggregateOrderDataToAnInvoice() { + void shouldAggregateOrderDataToAnInvoice() { mongoTemplate.dropCollection(Order.class); @@ -1232,15 +1200,15 @@ public void shouldAggregateOrderDataToAnInvoice() { Invoice invoice = results.getUniqueMappedResult(); - assertThat(invoice, is(notNullValue())); - assertThat(invoice.getOrderId(), is(order.getId())); - assertThat(invoice.getNetAmount(), is(closeTo(8.3, 000001))); - assertThat(invoice.getTaxAmount(), is(closeTo(1.577, 000001))); - assertThat(invoice.getTotalAmount(), is(closeTo(9.877, 000001))); + assertThat(invoice).isNotNull(); + assertThat(invoice.getOrderId()).isEqualTo(order.getId()); + assertThat(invoice.getNetAmount()).isCloseTo(8.3, Offset.offset(000001D)); + assertThat(invoice.getTaxAmount()).isCloseTo(1.577, Offset.offset(000001D)); + assertThat(invoice.getTotalAmount()).isCloseTo(9.877, Offset.offset(000001D)); } @Test // DATAMONGO-924 - public void shouldAllowGroupingByAliasedFieldDefinedInFormerAggregationStage() { + void shouldAllowGroupingByAliasedFieldDefinedInFormerAggregationStage() { mongoTemplate.dropCollection(CarPerson.class); @@ -1267,25 +1235,24 @@ public void shouldAllowGroupingByAliasedFieldDefinedInFormerAggregationStage() { AggregationResults result = mongoTemplate.aggregate(agg, Document.class); - assertThat(result.getMappedResults(), hasSize(3)); + 
assertThat(result.getMappedResults()).hasSize(3); } @Test // DATAMONGO-960 - @MongoVersion(asOf = "2.6") - public void returnFiveMostCommonLikesAggregationFrameworkExampleWithSortOnDiskOptionEnabled() { + void returnFiveMostCommonLikesAggregationFrameworkExampleWithSortOnDiskOptionEnabled() { createUserWithLikesDocuments(); TypedAggregation agg = createUsersWithCommonLikesAggregation() // .withOptions(newAggregationOptions().allowDiskUse(true).build()); - assertThat(agg, is(notNullValue())); - assertThat(agg.toString(), is(notNullValue())); + assertThat(agg).isNotNull(); + assertThat(agg.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(agg, LikeStats.class); - assertThat(result, is(notNullValue())); - assertThat(result.getMappedResults(), is(notNullValue())); - assertThat(result.getMappedResults().size(), is(5)); + assertThat(result).isNotNull(); + assertThat(result.getMappedResults()).isNotNull(); + assertThat(result.getMappedResults().size()).isEqualTo(5); assertLikeStats(result.getMappedResults().get(0), "a", 4); assertLikeStats(result.getMappedResults().get(1), "b", 2); @@ -1295,35 +1262,32 @@ public void returnFiveMostCommonLikesAggregationFrameworkExampleWithSortOnDiskOp } @Test // DATAMONGO-1637 - @MongoVersion(asOf = "2.6") - public void returnFiveMostCommonLikesAggregationFrameworkExampleWithSortOnDiskOptionEnabledWhileStreaming() { + void returnFiveMostCommonLikesAggregationFrameworkExampleWithSortOnDiskOptionEnabledWhileStreaming() { createUserWithLikesDocuments(); TypedAggregation agg = createUsersWithCommonLikesAggregation() // .withOptions(newAggregationOptions().allowDiskUse(true).build()); - assertThat(agg, is(notNullValue())); - assertThat(agg.toString(), is(notNullValue())); + assertThat(agg).isNotNull(); + assertThat(agg.toString()).isNotNull(); - CloseableIterator iterator = mongoTemplate.aggregateStream(agg, LikeStats.class); - List result = toList(iterator); - iterator.close(); + try (Stream stream = 
mongoTemplate.aggregateStream(agg, LikeStats.class)) { - assertThat(result, is(notNullValue())); - assertThat(result, is(notNullValue())); - assertThat(result.size(), is(5)); + List result = stream.toList(); - assertLikeStats(result.get(0), "a", 4); - assertLikeStats(result.get(1), "b", 2); - assertLikeStats(result.get(2), "c", 4); - assertLikeStats(result.get(3), "d", 2); - assertLikeStats(result.get(4), "e", 3); + assertThat(result.size()).isEqualTo(5); + + assertLikeStats(result.get(0), "a", 4); + assertLikeStats(result.get(1), "b", 2); + assertLikeStats(result.get(2), "c", 4); + assertLikeStats(result.get(3), "d", 2); + assertLikeStats(result.get(4), "e", 3); + } } @Test // DATAMONGO-960 - @MongoVersion(asOf = "2.6") - public void returnFiveMostCommonLikesShouldReturnStageExecutionInformationWithExplainOptionEnabled() { + void returnFiveMostCommonLikesShouldReturnStageExecutionInformationWithExplainOptionEnabled() { createUserWithLikesDocuments(); @@ -1332,17 +1296,16 @@ public void returnFiveMostCommonLikesShouldReturnStageExecutionInformationWithEx AggregationResults result = mongoTemplate.aggregate(agg, LikeStats.class); - assertThat(result.getMappedResults(), is(empty())); + assertThat(result.getMappedResults()).isEmpty(); Document rawResult = result.getRawResults(); - assertThat(rawResult, is(notNullValue())); - assertThat(rawResult.containsKey("stages"), is(true)); + assertThat(rawResult).isNotNull(); + assertThat(rawResult.containsKey("stages")).isEqualTo(true); } - @Test // DATAMONGO-954 - @MongoVersion(asOf = "2.6") - public void shouldSupportReturningCurrentAggregationRoot() { + @Test // DATAMONGO-954, DATAMONGO-2264 + void shouldSupportReturningCurrentAggregationRoot() { mongoTemplate.save(new Person("p1_first", "p1_last", 25)); mongoTemplate.save(new Person("p2_first", "p2_last", 32)); @@ -1352,23 +1315,22 @@ public void shouldSupportReturningCurrentAggregationRoot() { List personsWithAge25 = mongoTemplate.find(Query.query(where("age").is(25)), 
Document.class, mongoTemplate.getCollectionName(Person.class)); - Aggregation agg = newAggregation(group("age").push(Aggregation.ROOT).as("users")); + Aggregation agg = newAggregation(group("age").push(Aggregation.ROOT).as("users"), sort(Sort.by("_id"))); AggregationResults result = mongoTemplate.aggregate(agg, Person.class, Document.class); - assertThat(result.getMappedResults(), hasSize(3)); - Document o = result.getMappedResults().get(2); + assertThat(result.getMappedResults()).hasSize(3); + Document o = result.getMappedResults().get(1); - assertThat(o.get("_id"), is((Object) 25)); - assertThat((List) o.get("users"), hasSize(2)); - assertThat((List) o.get("users"), is(contains(personsWithAge25.toArray()))); + assertThat(o.get("_id")).isEqualTo((Object) 25); + assertThat((List) o.get("users")).hasSize(2); + assertThat((List) o.get("users")).contains(personsWithAge25.toArray()); } /** - * {@link http://stackoverflow.com/questions/24185987/using-root-inside-spring-data-mongodb-for-retrieving-whole-document} + * {@link https://stackoverflow.com/questions/24185987/using-root-inside-spring-data-mongodb-for-retrieving-whole-document} */ - @Test // DATAMONGO-954 - @MongoVersion(asOf = "2.6") - public void shouldSupportReturningCurrentAggregationRootInReference() { + @Test // DATAMONGO-954, DATAMONGO-2264 + void shouldSupportReturningCurrentAggregationRootInReference() { mongoTemplate.save(new Reservation("0123", "42", 100)); mongoTemplate.save(new Reservation("0360", "43", 200)); @@ -1383,12 +1345,11 @@ public void shouldSupportReturningCurrentAggregationRootInReference() { ); AggregationResults result = mongoTemplate.aggregate(agg, Reservation.class, Document.class); - assertThat(result.getMappedResults(), hasSize(2)); + assertThat(result.getMappedResults()).hasSize(2); } @Test // DATAMONGO-1549 - @MongoVersion(asOf = "3.4") - public void shouldApplyCountCorrectly() { + void shouldApplyCountCorrectly() { mongoTemplate.save(new Reservation("0123", "42", 100)); 
mongoTemplate.save(new Reservation("0360", "43", 200)); @@ -1400,24 +1361,21 @@ public void shouldApplyCountCorrectly() { .andExpression("documents * 2").as("twice")); AggregationResults result = mongoTemplate.aggregate(agg, Reservation.class, Document.class); - assertThat(result.getMappedResults(), hasSize(1)); + assertThat(result.getMappedResults()).hasSize(1); Document document = result.getMappedResults().get(0); - assertThat(document, isBsonObject().containing("documents", 3).containing("twice", 6)); + assertThat(document).containsEntry("documents", 3).containsEntry("twice", 6); } @Test // DATAMONGO-975 - public void shouldRetrieveDateTimeFragementsCorrectly() throws Exception { + void shouldRetrieveDateTimeFragementsCorrectly() throws Exception { mongoTemplate.dropCollection(ObjectWithDate.class); - DateTime dateTime = new DateTime() // - .withYear(2014) // - .withMonthOfYear(2) // - .withDayOfMonth(7) // - .withTime(3, 4, 5, 6).toDateTime(DateTimeZone.UTC).toDateTimeISO(); + ZonedDateTime dateTime = ZonedDateTime.of(LocalDateTime.of(LocalDate.of(2014, 2, 7), LocalTime.of(3, 4, 5, 6)), + ZoneId.of("UTC")); - ObjectWithDate owd = new ObjectWithDate(dateTime.toDate()); + ObjectWithDate owd = new ObjectWithDate(Date.from(dateTime.toInstant())); mongoTemplate.insert(owd); ProjectionOperation dateProjection = Aggregation.project() // @@ -1438,49 +1396,93 @@ public void shouldRetrieveDateTimeFragementsCorrectly() throws Exception { Aggregation agg = newAggregation(dateProjection); AggregationResults result = mongoTemplate.aggregate(agg, ObjectWithDate.class, Document.class); - assertThat(result.getMappedResults(), hasSize(1)); + assertThat(result.getMappedResults()).hasSize(1); Document document = result.getMappedResults().get(0); - assertThat(document.get("hour"), is((Object) dateTime.getHourOfDay())); - assertThat(document.get("min"), is((Object) dateTime.getMinuteOfHour())); - assertThat(document.get("second"), is((Object) dateTime.getSecondOfMinute())); - 
assertThat(document.get("millis"), is((Object) dateTime.getMillisOfSecond())); - assertThat(document.get("year"), is((Object) dateTime.getYear())); - assertThat(document.get("month"), is((Object) dateTime.getMonthOfYear())); + assertThat(document.get("hour")).isEqualTo((Object) dateTime.getHour()); + assertThat(document.get("min")).isEqualTo((Object) dateTime.getMinute()); + assertThat(document.get("second")).isEqualTo((Object) dateTime.getSecond()); + assertThat(document.get("millis")).isEqualTo((Object) dateTime.get(ChronoField.MILLI_OF_SECOND)); + assertThat(document.get("year")).isEqualTo((Object) dateTime.getYear()); + assertThat(document.get("month")).isEqualTo((Object) dateTime.getMonthValue()); // dateTime.getWeekOfWeekyear()) returns 6 since for MongoDB the week starts on sunday and not on monday. - assertThat(document.get("week"), is((Object) 5)); - assertThat(document.get("dayOfYear"), is((Object) dateTime.getDayOfYear())); - assertThat(document.get("dayOfMonth"), is((Object) dateTime.getDayOfMonth())); + assertThat(document.get("week")).isEqualTo((Object) 5); + assertThat(document.get("dayOfYear")).isEqualTo((Object) dateTime.getDayOfYear()); + assertThat(document.get("dayOfMonth")).isEqualTo((Object) dateTime.getDayOfMonth()); // dateTime.getDayOfWeek() - assertThat(document.get("dayOfWeek"), is((Object) 6)); - assertThat(document.get("dayOfYearPlus1Day"), is((Object) dateTime.plusDays(1).getDayOfYear())); - assertThat(document.get("dayOfYearPlus1DayManually"), is((Object) dateTime.plusDays(1).getDayOfYear())); + assertThat(document.get("dayOfWeek")).isEqualTo((Object) 6); + assertThat(document.get("dayOfYearPlus1Day")).isEqualTo((Object) dateTime.plusDays(1).getDayOfYear()); + assertThat(document.get("dayOfYearPlus1DayManually")).isEqualTo((Object) dateTime.plusDays(1).getDayOfYear()); } @Test // DATAMONGO-1127 - public void shouldSupportGeoNearQueriesForAggregationWithDistanceField() { + void 
shouldSupportGeoNearQueriesForAggregationWithDistanceField() { + + mongoTemplate.insertAll(Arrays.asList(TestEntities.geolocation().pennStation(), + TestEntities.geolocation().tenGenOffice(), TestEntities.geolocation().flatironBuilding())); + + mongoTemplate.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location")); + + NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).limit(10).maxDistance(150); + + Aggregation agg = newAggregation(Aggregation.geoNear(geoNear, "distance")); + AggregationResults result = mongoTemplate.aggregate(agg, Venue.class, Document.class); + + assertThat(result.getMappedResults()).hasSize(3); + + Document firstResult = result.getMappedResults().get(0); + assertThat(firstResult.containsKey("distance")).isEqualTo(true); + assertThat((Double) firstResult.get("distance")).isCloseTo(117.620092203928, Offset.offset(0.00001D)); + } + + @Test // DATAMONGO-1348 + void shouldSupportGeoJsonInGeoNearQueriesForAggregationWithDistanceField() { mongoTemplate.insert(new Venue("Penn Station", -73.99408, 40.75057)); mongoTemplate.insert(new Venue("10gen Office", -73.99171, 40.738868)); mongoTemplate.insert(new Venue("Flatiron Building", -73.988135, 40.741404)); - mongoTemplate.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location")); + mongoTemplate.indexOps(Venue.class) + .ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE)); + + NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 40), Metrics.KILOMETERS).limit(10).maxDistance(150); + + Aggregation agg = newAggregation(Aggregation.geoNear(geoNear, "distance")); + AggregationResults result = mongoTemplate.aggregate(agg, Venue.class, Document.class); + + assertThat(result.getMappedResults()).hasSize(3); + + Document firstResult = result.getMappedResults().get(0); + assertThat(firstResult.containsKey("distance")).isEqualTo(true); + assertThat((Double) firstResult.get("distance")).isCloseTo(117.61940988193759, Offset.offset(0.00001D)); + } + + 
@Test // DATAMONGO-1348 + void shouldSupportGeoJsonInGeoNearQueriesForAggregationWithDistanceFieldInMiles() { + + mongoTemplate.insert(new Venue("Penn Station", -73.99408, 40.75057)); + mongoTemplate.insert(new Venue("10gen Office", -73.99171, 40.738868)); + mongoTemplate.insert(new Venue("Flatiron Building", -73.988135, 40.741404)); + + mongoTemplate.indexOps(Venue.class) + .ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE)); - NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).num(10).maxDistance(150); + NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 40), Metrics.KILOMETERS).limit(10).maxDistance(150) + .inMiles(); Aggregation agg = newAggregation(Aggregation.geoNear(geoNear, "distance")); AggregationResults result = mongoTemplate.aggregate(agg, Venue.class, Document.class); - assertThat(result.getMappedResults(), hasSize(3)); + assertThat(result.getMappedResults()).hasSize(3); Document firstResult = result.getMappedResults().get(0); - assertThat(firstResult.containsKey("distance"), is(true)); - assertThat((Double) firstResult.get("distance"), closeTo(117.620092203928, 0.00001)); + assertThat(firstResult.containsKey("distance")).isEqualTo(true); + assertThat((Double) firstResult.get("distance")).isCloseTo(73.08517, Offset.offset(0.00001D)); } @Test // DATAMONGO-1133 - public void shouldHonorFieldAliasesForFieldReferences() { + void shouldHonorFieldAliasesForFieldReferences() { mongoTemplate.insert(new MeterData("m1", "counter1", 42)); mongoTemplate.insert(new MeterData("m1", "counter1", 13)); @@ -1492,16 +1494,15 @@ public void shouldHonorFieldAliasesForFieldReferences() { AggregationResults results = mongoTemplate.aggregate(agg, Document.class); - assertThat(results.getMappedResults(), hasSize(1)); + assertThat(results.getMappedResults()).hasSize(1); Document result = results.getMappedResults().get(0); - assertThat(result.get("_id"), is(equalTo((Object) "counter1"))); - assertThat(result.get("totalValue"), 
is(equalTo((Object) 100.0))); + assertThat(result.get("_id")).isEqualTo("counter1"); + assertThat(result.get("totalValue")).isEqualTo(100.0); } @Test // DATAMONGO-1326 - @MongoVersion(asOf = "3.2") - public void shouldLookupPeopleCorectly() { + void shouldLookupPeopleCorectly() { createUsersWithReferencedPersons(); @@ -1515,13 +1516,51 @@ public void shouldLookupPeopleCorectly() { Document firstItem = mappedResults.get(0); - assertThat(firstItem, isBsonObject().containing("_id", "u1")); - assertThat(firstItem, isBsonObject().containing("linkedPerson.[0].firstname", "u1")); + assertThat(firstItem).containsEntry("_id", "u1"); + assertThat(firstItem).containsEntry("linkedPerson.[0].firstname", "u1"); + } + + @Test // GH-3322 + @EnableIfMongoServerVersion(isGreaterThanEqual = "5.0") + void shouldLookupPeopleCorrectlyWithPipeline() { + createUsersWithReferencedPersons(); + + TypedAggregation agg = newAggregation(User.class, // + lookup().from("person").localField("_id").foreignField("firstname").pipeline(match(where("firstname").is("u1"))).as("linkedPerson"), // + sort(ASC, "id")); + + AggregationResults results = mongoTemplate.aggregate(agg, User.class, Document.class); + + List mappedResults = results.getMappedResults(); + + Document firstItem = mappedResults.get(0); + + assertThat(firstItem).containsEntry("_id", "u1"); + assertThat(firstItem).containsEntry("linkedPerson.[0].firstname", "u1"); + } + + @Test // GH-3322 + @EnableIfMongoServerVersion(isGreaterThanEqual = "5.0") + void shouldLookupPeopleCorrectlyWithPipelineAndLet() { + createUsersWithReferencedPersons(); + + TypedAggregation agg = newAggregation(User.class, // + lookup().from("person").localField("_id").foreignField("firstname").let(Let.ExpressionVariable.newVariable("the_id").forField("_id")).pipeline( + match(ctx -> new Document("$expr", new Document("$eq", List.of("$$the_id", "u1"))))).as("linkedPerson"), + sort(ASC, "id")); + + AggregationResults results = mongoTemplate.aggregate(agg, User.class, 
Document.class); + + List mappedResults = results.getMappedResults(); + + Document firstItem = mappedResults.get(0); + + assertThat(firstItem).containsEntry("_id", "u1"); + assertThat(firstItem).containsEntry("linkedPerson.[0].firstname", "u1"); } @Test // DATAMONGO-1326 - @MongoVersion(asOf = "3.2") - public void shouldGroupByAndLookupPeopleCorectly() { + void shouldGroupByAndLookupPeopleCorrectly() { createUsersWithReferencedPersons(); @@ -1536,13 +1575,13 @@ public void shouldGroupByAndLookupPeopleCorectly() { Document firstItem = mappedResults.get(0); - assertThat(firstItem, isBsonObject().containing("foreignKey", "u1")); - assertThat(firstItem, isBsonObject().containing("linkedPerson.[0].firstname", "u1")); + assertThat(firstItem).containsEntry("foreignKey", "u1"); + assertThat(firstItem).containsEntry("linkedPerson.[0].firstname", "u1"); } @Test // DATAMONGO-1418, DATAMONGO-1824 @MongoVersion(asOf = "2.6") - public void shouldCreateOutputCollection() { + void shouldCreateOutputCollection() { createPersonDocuments(); @@ -1554,20 +1593,19 @@ public void shouldCreateOutputCollection() { AggregationResults results = mongoTemplate.aggregate(agg, Document.class); - assertThat(results.getMappedResults(), hasSize(2)); + assertThat(results.getMappedResults()).hasSize(2); List list = mongoTemplate.findAll(Document.class, tempOutCollection); - assertThat(list, hasSize(2)); - assertThat(list.get(0), isBsonObject().containing("_id", "MALE").containing("count", 3)); - assertThat(list.get(1), isBsonObject().containing("_id", "FEMALE").containing("count", 2)); + assertThat(list).hasSize(2); + assertThat(list.get(0)).containsEntry("_id", "MALE").containsEntry("count", 3); + assertThat(list.get(1)).containsEntry("_id", "FEMALE").containsEntry("count", 2); mongoTemplate.dropCollection(tempOutCollection); } @Test // DATAMONGO-1637 - @MongoVersion(asOf = "2.6") - public void shouldCreateOutputCollectionWhileStreaming() { + void shouldCreateOutputCollectionWhileStreaming() { 
createPersonDocuments(); @@ -1581,16 +1619,15 @@ public void shouldCreateOutputCollectionWhileStreaming() { List list = mongoTemplate.findAll(Document.class, tempOutCollection); - assertThat(list, hasSize(2)); - assertThat(list.get(0), isBsonObject().containing("_id", "MALE").containing("count", 3)); - assertThat(list.get(1), isBsonObject().containing("_id", "FEMALE").containing("count", 2)); + assertThat(list).hasSize(2); + assertThat(list.get(0)).containsEntry("_id", "MALE").containsEntry("count", 3); + assertThat(list.get(1)).containsEntry("_id", "FEMALE").containsEntry("count", 2); mongoTemplate.dropCollection(tempOutCollection); } @Test // DATAMONGO-1637 - @MongoVersion(asOf = "2.6") - public void shouldReturnDocumentsWithOutputCollectionWhileStreaming() { + void shouldReturnDocumentsWithOutputCollectionWhileStreaming() { createPersonDocuments(); @@ -1600,13 +1637,14 @@ public void shouldReturnDocumentsWithOutputCollectionWhileStreaming() { sort(DESC, "count"), // out(tempOutCollection)); - CloseableIterator iterator = mongoTemplate.aggregateStream(agg, Document.class); + try (Stream stream = mongoTemplate.aggregateStream(agg, Document.class)) { - List result = toList(iterator); + List result = stream.toList(); - assertThat(result, hasSize(2)); - assertThat(result.get(0), isBsonObject().containing("_id", "MALE").containing("count", 3)); - assertThat(result.get(1), isBsonObject().containing("_id", "FEMALE").containing("count", 2)); + assertThat(result).hasSize(2); + assertThat(result.get(0)).containsEntry("_id", "MALE").containsEntry("count", 3); + assertThat(result.get(1)).containsEntry("_id", "FEMALE").containsEntry("count", 2); + } mongoTemplate.dropCollection(tempOutCollection); } @@ -1620,18 +1658,16 @@ private void createPersonDocuments() { mongoTemplate.save(new Person("Leoniv", "Yakubov", 55, Person.Sex.MALE)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1418 - public void outShouldOutBeTheLastOperation() { - - 
newAggregation(match(new Criteria()), // + @Test // DATAMONGO-1418, DATAMONGO-2536 + void outShouldOutBeTheLastOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> newAggregation(match(new Criteria()), // group("field1").count().as("totalCount"), // out("collection1"), // - skip(100L)); + skip(100L)).toPipeline(DEFAULT_CONTEXT)); } @Test // DATAMONGO-1325 - @MongoVersion(asOf = "3.2") - public void shouldApplySampleCorrectly() { + void shouldApplySampleCorrectly() { createUserWithLikesDocuments(); @@ -1640,15 +1676,15 @@ public void shouldApplySampleCorrectly() { sample(3) // ); - assertThat(agg.toString(), is(notNullValue())); + assertThat(agg.toString()).isNotNull(); AggregationResults result = mongoTemplate.aggregate(agg, LikeStats.class); - assertThat(result.getMappedResults().size(), is(3)); + assertThat(result.getMappedResults().size()).isEqualTo(3); } @Test // DATAMONGO-1457 @MongoVersion(asOf = "3.2") - public void sliceShouldBeAppliedCorrectly() { + void sliceShouldBeAppliedCorrectly() { createUserWithLikesDocuments(); @@ -1657,21 +1693,19 @@ public void sliceShouldBeAppliedCorrectly() { AggregationResults result = mongoTemplate.aggregate(agg, UserWithLikes.class); - assertThat(result.getMappedResults(), hasSize(9)); + assertThat(result.getMappedResults()).hasSize(9); for (UserWithLikes user : result) { - assertThat(user.likes.size() <= 2, is(true)); + assertThat(user.likes.size() <= 2).isEqualTo(true); } } @Test // DATAMONGO-1491 - @MongoVersion(asOf = "3.2") - public void filterShouldBeAppliedCorrectly() { + void filterShouldBeAppliedCorrectly() { Item item43 = Item.builder().itemId("43").quantity(2).price(2L).build(); Item item2 = Item.builder().itemId("2").quantity(1).price(240L).build(); - Sales sales1 = Sales.builder().id("0") - .items(Arrays.asList( // - item43, item2)) // + Sales sales1 = Sales.builder().id("0").items(Arrays.asList( // + item43, item2)) // .build(); Item item23 = 
Item.builder().itemId("23").quantity(3).price(110L).build(); @@ -1687,17 +1721,16 @@ public void filterShouldBeAppliedCorrectly() { mongoTemplate.insert(Arrays.asList(sales1, sales2, sales3), Sales.class); TypedAggregation agg = newAggregation(Sales.class, project().and("items") - .filter("item", AggregationFunctionExpressions.GTE.of(field("item.price"), 100)).as("items")); + .filter("item", ComparisonOperators.valueOf("item.price").greaterThanEqualToValue(100)).as("items")); - assertThat(mongoTemplate.aggregate(agg, Sales.class).getMappedResults(), - contains(Sales.builder().id("0").items(Collections.singletonList(item2)).build(), - Sales.builder().id("1").items(Arrays.asList(item23, item38)).build(), - Sales.builder().id("2").items(Collections. emptyList()).build())); + assertThat(mongoTemplate.aggregate(agg, Sales.class).getMappedResults()).contains( + Sales.builder().id("0").items(Collections.singletonList(item2)).build(), + Sales.builder().id("1").items(Arrays.asList(item23, item38)).build(), + Sales.builder().id("2").items(Collections. 
emptyList()).build()); } @Test // DATAMONGO-1538 - @MongoVersion(asOf = "3.2") - public void letShouldBeAppliedCorrectly() { + void letShouldBeAppliedCorrectly() { Sales2 sales1 = Sales2.builder().id("1").price(10).tax(0.5F).applyDiscount(true).build(); Sales2 sales2 = Sales2.builder().id("2").price(10).tax(0.25F).applyDiscount(false).build(); @@ -1705,24 +1738,21 @@ public void letShouldBeAppliedCorrectly() { mongoTemplate.insert(Arrays.asList(sales1, sales2), Sales2.class); ExpressionVariable total = ExpressionVariable.newVariable("total") - .forExpression(AggregationFunctionExpressions.ADD.of(Fields.field("price"), Fields.field("tax"))); + .forExpression(ArithmeticOperators.valueOf("price").sum().and("tax")); ExpressionVariable discounted = ExpressionVariable.newVariable("discounted") .forExpression(ConditionalOperators.Cond.when("applyDiscount").then(0.9D).otherwise(1.0D)); TypedAggregation agg = Aggregation.newAggregation(Sales2.class, - Aggregation.project() - .and(VariableOperators.Let.define(total, discounted).andApply( - AggregationFunctionExpressions.MULTIPLY.of(Fields.field("total"), Fields.field("discounted")))) - .as("finalTotal")); + Aggregation.project().and(VariableOperators.Let.define(total, discounted) + .andApply(ArithmeticOperators.valueOf("total").multiplyBy("discounted"))).as("finalTotal")); AggregationResults result = mongoTemplate.aggregate(agg, Document.class); - assertThat(result.getMappedResults(), contains(new Document("_id", "1").append("finalTotal", 9.450000000000001D), - new Document("_id", "2").append("finalTotal", 10.25D))); + assertThat(result.getMappedResults()).contains(new Document("_id", "1").append("finalTotal", 9.450000000000001D), + new Document("_id", "2").append("finalTotal", 10.25D)); } - @Test // DATAMONGO-1551 - @MongoVersion(asOf = "3.4") - public void graphLookupShouldBeAppliedCorrectly() { + @Test // DATAMONGO-1551, DATAMONGO-2264 + void graphLookupShouldBeAppliedCorrectly() { Employee em1 = 
Employee.builder().id(1).name("Dev").build(); Employee em2 = Employee.builder().id(2).name("Eliot").reportsTo("Dev").build(); @@ -1738,21 +1768,23 @@ public void graphLookupShouldBeAppliedCorrectly() { .connectTo("name") // .depthField("depth") // .maxDepth(5) // - .as("reportingHierarchy")); + .as("reportingHierarchy"), // + project("id", "depth", "name", "reportsTo", "reportingHierarchy")); AggregationResults result = mongoTemplate.aggregate(agg, Document.class); Document object = result.getUniqueMappedResult(); List list = (List) object.get("reportingHierarchy"); - assertThat(object, isBsonObject().containing("reportingHierarchy", List.class)); - assertThat((Document) list.get(0), isBsonObject().containing("name", "Dev").containing("depth", 1L)); - assertThat((Document) list.get(1), isBsonObject().containing("name", "Eliot").containing("depth", 0L)); + assertThat(object).containsEntry("name", "Andrew").containsEntry("reportsTo", "Eliot"); + assertThat(list).containsOnly( + new Document("_id", 2).append("name", "Eliot").append("reportsTo", "Dev").append("depth", 0L).append("_class", + Employee.class.getName()), + new Document("_id", 1).append("name", "Dev").append("depth", 1L).append("_class", Employee.class.getName())); } @Test // DATAMONGO-1552 - @MongoVersion(asOf = "3.4") - public void bucketShouldCollectDocumentsIntoABucket() { + void bucketShouldCollectDocumentsIntoABucket() { Art a1 = Art.builder().id(1).title("The Pillars of Society").artist("Grosz").year(1926).price(199.99).build(); Art a2 = Art.builder().id(2).title("Melancholy III").artist("Munch").year(1902).price(280.00).build(); @@ -1770,25 +1802,23 @@ public void bucketShouldCollectDocumentsIntoABucket() { .andOutputExpression("price * 10").sum().as("sum")); AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); - assertThat(result.getMappedResults().size(), is(3)); + assertThat(result.getMappedResults().size()).isEqualTo(3); // { "_id" : 0 , "count" : 1 , "titles" : [ 
"Dancer"] , "sum" : 760.4000000000001} Document bound0 = result.getMappedResults().get(0); - assertThat(bound0, isBsonObject().containing("count", 1).containing("titles.[0]", "Dancer")); - assertThat((Double) bound0.get("sum"), is(closeTo(760.40, 0.1))); + assertThat(bound0).containsEntry("count", 1).containsEntry("titles.[0]", "Dancer"); + assertThat((Double) bound0.get("sum")).isCloseTo(760.40, Offset.offset(0.1D)); // { "_id" : 100 , "count" : 2 , "titles" : [ "The Pillars of Society" , "The Great Wave off Kanagawa"] , "sum" : // 3672.9} Document bound100 = result.getMappedResults().get(1); - assertThat(bound100, isBsonObject().containing("count", 2).containing("_id", 100)); - assertThat((List) bound100.get("titles"), - hasItems("The Pillars of Society", "The Great Wave off Kanagawa")); - assertThat((Double) bound100.get("sum"), is(closeTo(3672.9, 0.1))); + assertThat(bound100).containsEntry("count", 2).containsEntry("_id", 100); + assertThat((List) bound100.get("titles")).contains("The Pillars of Society", "The Great Wave off Kanagawa"); + assertThat((Double) bound100.get("sum")).isCloseTo(3672.9, Offset.offset(0.1D)); } - @Test // DATAMONGO-1552 - @MongoVersion(asOf = "3.4") - public void bucketAutoShouldCollectDocumentsIntoABucket() { + @Test // DATAMONGO-1552, DATAMONGO-2437 + void bucketAutoShouldCollectDocumentsIntoABucket() { Art a1 = Art.builder().id(1).title("The Pillars of Society").artist("Grosz").year(1926).price(199.99).build(); Art a2 = Art.builder().id(2).title("Melancholy III").artist("Munch").year(1902).price(280.00).build(); @@ -1805,23 +1835,23 @@ public void bucketAutoShouldCollectDocumentsIntoABucket() { .andOutputExpression("price * 10").sum().as("sum")); AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); - assertThat(result.getMappedResults().size(), is(3)); + assertThat(result.getMappedResults().size()).isEqualTo(3); - // { "min" : 680.0 , "max" : 820.0 , "count" : 1 , "titles" : [ "Dancer"] , "sum" : 
760.4000000000001} + // { "_id" : { "min" : 680.0 , "max" : 820.0 }, "count" : 1 , "titles" : [ "Dancer"] , "sum" : 760.4000000000001} Document bound0 = result.getMappedResults().get(0); - assertThat(bound0, isBsonObject().containing("count", 1).containing("titles.[0]", "Dancer").containing("min", 680.0) - .containing("max")); + assertThat(bound0).containsEntry("count", 1).containsEntry("titles.[0]", "Dancer").containsEntry("_id.min", 680.0) + .containsKey("_id.max"); - // { "min" : 820.0 , "max" : 1800.0 , "count" : 1 , "titles" : [ "The Great Wave off Kanagawa"] , "sum" : 1673.0} + // { "_id" : { "min" : 820.0 , "max" : 1800.0 }, "count" : 1 , "titles" : [ "The Great Wave off Kanagawa"] , "sum" : + // 1673.0} Document bound1 = result.getMappedResults().get(1); - assertThat(bound1, isBsonObject().containing("count", 1).containing("min", 820.0)); - assertThat((List) bound1.get("titles"), hasItems("The Great Wave off Kanagawa")); - assertThat((Double) bound1.get("sum"), is(closeTo(1673.0, 0.1))); + assertThat(bound1).containsEntry("count", 1).containsEntry("_id.min", 820.0); + assertThat((List) bound1.get("titles")).contains("The Great Wave off Kanagawa"); + assertThat((Double) bound1.get("sum")).isCloseTo(1673.0, Offset.offset(0.1D)); } @Test // DATAMONGO-1552 - @MongoVersion(asOf = "3.4") - public void facetShouldCreateFacets() { + void facetShouldCreateFacets() { Art a1 = Art.builder().id(1).title("The Pillars of Society").artist("Grosz").year(1926).price(199.99).build(); Art a2 = Art.builder().id(2).title("Melancholy III").artist("Munch").year(1902).price(280.00).build(); @@ -1843,7 +1873,7 @@ public void facetShouldCreateFacets() { .and(bucketAuto("year", 3)).as("categorizeByYear")); AggregationResults result = mongoTemplate.aggregate(aggregation, Document.class); - assertThat(result.getMappedResults().size(), is(1)); + assertThat(result.getMappedResults().size()).isEqualTo(1); Document mappedResult = result.getUniqueMappedResult(); @@ -1854,13 +1884,193 @@ 
public void facetShouldCreateFacets() { // { "_id" : { "min" : 1800.0 , "max" : 3300.0} , "count" : 2 , "titles" : [ "The Pillars of Society" , "Melancholy // III"] , "sum" : 4799.9}] List categorizeByPrice = (List) mappedResult.get("categorizeByPrice"); - assertThat(categorizeByPrice, hasSize(3)); + assertThat(categorizeByPrice).hasSize(3); // [ { "_id" : { "min" : null , "max" : 1902} , "count" : 1} , // { "_id" : { "min" : 1902-2018 , "max" : 1925} , "count" : 1} , // { "_id" : { "min" : 1925-2018 , "max" : 1926} , "count" : 2}] List categorizeByYear = (List) mappedResult.get("categorizeByYear"); - assertThat(categorizeByYear, hasSize(3)); + assertThat(categorizeByYear).hasSize(3); + } + + @Test // GH-4473 + @EnableIfMongoServerVersion(isGreaterThanEqual = "7.0") + void percentileShouldBeAppliedCorrectly() { + + DATAMONGO788 objectToSave = new DATAMONGO788(62, 81, 80); + DATAMONGO788 objectToSave2 = new DATAMONGO788(60, 83, 79); + + mongoTemplate.insert(objectToSave); + mongoTemplate.insert(objectToSave2); + + Aggregation agg = Aggregation.newAggregation( + project().and(ArithmeticOperators.valueOf("x").percentile(0.9, 0.4).and("y").and("xField")) + .as("percentileValues")); + + AggregationResults result = mongoTemplate.aggregate(agg, DATAMONGO788.class, Document.class); + + // MongoDB server returns $percentile as an array of doubles + List rawResults = (List) result.getRawResults().get("results"); + assertThat((List) rawResults.get(0).get("percentileValues")).containsExactly(81.0, 80.0); + assertThat((List) rawResults.get(1).get("percentileValues")).containsExactly(83.0, 79.0); + } + + @Test // GH-4472 + @EnableIfMongoServerVersion(isGreaterThanEqual = "7.0") + void medianShouldBeAppliedCorrectly() { + + DATAMONGO788 objectToSave = new DATAMONGO788(62, 81, 80); + DATAMONGO788 objectToSave2 = new DATAMONGO788(60, 83, 79); + + mongoTemplate.insert(objectToSave); + mongoTemplate.insert(objectToSave2); + + Aggregation agg = Aggregation.newAggregation( + 
project().and(ArithmeticOperators.valueOf("x").median().and("y").and("xField")) + .as("medianValue")); + + AggregationResults result = mongoTemplate.aggregate(agg, DATAMONGO788.class, Document.class); + + // MongoDB server returns $median a Double + List rawResults = (List) result.getRawResults().get("results"); + assertThat(rawResults.get(0).get("medianValue")).isEqualTo(80.0); + assertThat(rawResults.get(1).get("medianValue")).isEqualTo(79.0); + } + + @Test // DATAMONGO-1986 + void runMatchOperationCriteriaThroughQueryMapperForTypedAggregation() { + + mongoTemplate.insertAll(TestEntities.geolocation().newYork()); + + Aggregation aggregation = newAggregation(Venue.class, + match(Criteria.where("location") + .within(new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404)))), + project("id", "location", "name")); + + AggregationResults groupResults = mongoTemplate.aggregate(aggregation, "newyork", Document.class); + + assertThat(groupResults.getMappedResults().size()).isEqualTo(4); + } + + @Test // DATAMONGO-1986 + void runMatchOperationCriteriaThroughQueryMapperForUntypedAggregation() { + + mongoTemplate.insertAll(TestEntities.geolocation().newYork()); + + Aggregation aggregation = newAggregation( + match(Criteria.where("location") + .within(new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404)))), + project("id", "location", "name")); + + AggregationResults groupResults = mongoTemplate.aggregate(aggregation, "newyork", Document.class); + + assertThat(groupResults.getMappedResults().size()).isEqualTo(4); + } + + @Test // DATAMONGO-2437 + void shouldReadComplexIdValueCorrectly() { + + WithComplexId source = new WithComplexId(); + source.id = new ComplexId(); + source.id.p1 = "v1"; + source.id.p2 = "v2"; + + mongoTemplate.save(source); + + AggregationResults result = mongoTemplate.aggregate(newAggregation(project("id")), + WithComplexId.class, WithComplexId.class); + assertThat(result.getMappedResults()).containsOnly(source); + } + 
+ @Test // DATAMONGO-2536 + void skipOutputDoesNotReadBackAggregationResults() { + + createTagDocuments(); + + Aggregation agg = newAggregation( // + project("tags"), // + unwind("tags"), // + group("tags") // + .count().as("n"), // + project("n") // + .and("tag").previousOperation(), // + sort(DESC, "n") // + ).withOptions(AggregationOptions.builder().skipOutput().build()); + + AggregationResults results = mongoTemplate.aggregate(agg, INPUT_COLLECTION, TagCount.class); + + assertThat(results.getMappedResults()).isEmpty(); + assertThat(results.getRawResults()).isEmpty(); + } + + @Test // DATAMONGO-2635 + void mapsEnumsInMatchClauseUsingInCriteriaCorrectly() { + + WithEnum source = new WithEnum(); + source.enumValue = MyEnum.TWO; + source.id = "id-1"; + + mongoTemplate.save(source); + + Aggregation agg = newAggregation(match(where("enumValue").in(Collections.singletonList(MyEnum.TWO)))); + + AggregationResults results = mongoTemplate.aggregate(agg, mongoTemplate.getCollectionName(WithEnum.class), + Document.class); + assertThat(results.getMappedResults()).hasSize(1); + } + + @Test // GH-4043 + void considersMongoIdWithinTypedCollections() { + + UserRef userRef = new UserRef(); + userRef.id = "4ee921aca44fd11b3254e001"; + userRef.name = "u-1"; + + Widget widget = new Widget(); + widget.id = "w-1"; + widget.users = List.of(userRef); + + mongoTemplate.save(widget); + + Criteria criteria = Criteria.where("users").elemMatch(Criteria.where("id").is("4ee921aca44fd11b3254e001")); + AggregationResults aggregate = mongoTemplate.aggregate(newAggregation(match(criteria)), Widget.class, Widget.class); + assertThat(aggregate.getMappedResults()).contains(widget); + } + + @Test // GH-4443 + void shouldHonorFieldAliasesForFieldReferencesUsingFieldExposingOperation() { + + Item item1 = Item.builder().itemId("1").tags(Arrays.asList("a", "b")).build(); + Item item2 = Item.builder().itemId("1").tags(Arrays.asList("a", "c")).build(); + mongoTemplate.insert(Arrays.asList(item1, item2), 
Item.class); + + TypedAggregation aggregation = newAggregation(Item.class, + match(where("itemId").is("1")), + unwind("tags"), + match(where("itemId").is("1").and("tags").is("c"))); + AggregationResults results = mongoTemplate.aggregate(aggregation, Document.class); + List mappedResults = results.getMappedResults(); + assertThat(mappedResults).hasSize(1); + assertThat(mappedResults.get(0)).containsEntry("item_id", "1"); + } + + @Test // GH-4443 + void projectShouldResetContextToAvoidMappingFieldsAgainstANoLongerExistingTarget() { + + Item item1 = Item.builder().itemId("1").tags(Arrays.asList("a", "b")).build(); + Item item2 = Item.builder().itemId("1").tags(Arrays.asList("a", "c")).build(); + mongoTemplate.insert(Arrays.asList(item1, item2), Item.class); + + TypedAggregation aggregation = newAggregation(Item.class, + match(where("itemId").is("1")), + unwind("tags"), + project().and("itemId").as("itemId").and("tags").as("tags"), + match(where("itemId").is("1").and("tags").is("c"))); + + AggregationResults results = mongoTemplate.aggregate(aggregation, Document.class); + List mappedResults = results.getMappedResults(); + assertThat(mappedResults).hasSize(1); + assertThat(mappedResults.get(0)).containsEntry("itemId", "1"); } private void createUsersWithReferencedPersons() { @@ -1886,9 +2096,9 @@ private void createUsersWithReferencedPersons() { private void assertLikeStats(LikeStats like, String id, long count) { - assertThat(like, is(notNullValue())); - assertThat(like.id, is(id)); - assertThat(like.count, is(count)); + assertThat(like).isNotNull(); + assertThat(like.id).isEqualTo(id); + assertThat(like.count).isEqualTo(count); } private void createUserWithLikesDocuments() { @@ -1927,18 +2137,8 @@ private static Document createDocument(String title, String... 
tags) { private static void assertTagCount(String tag, int n, TagCount tagCount) { - assertThat(tagCount.getTag(), is(tag)); - assertThat(tagCount.getN(), is(n)); - } - - private static List toList(CloseableIterator results) { - - List result = new ArrayList(); - while (results.hasNext()) { - result.add(results.next()); - } - - return result; + assertThat(tagCount.getTag()).isEqualTo(tag); + assertThat(tagCount.getN()).isEqualTo(n); } static class DATAMONGO753 { @@ -1956,7 +2156,7 @@ static class PD { String pDch; @org.springframework.data.mongodb.core.mapping.Field("alias") int up; - public PD(String pDch, int up) { + PD(String pDch, int up) { this.pDch = pDch; this.up = up; } @@ -1971,12 +2171,18 @@ static class DATAMONGO788 { public DATAMONGO788() {} - public DATAMONGO788(int x, int y) { + DATAMONGO788(int x, int y) { this.x = x; this.xField = x; this.y = y; this.yField = y; } + + public DATAMONGO788(int x, int y, int xField) { + this.x = x; + this.y = y; + this.xField = xField; + } } // DATAMONGO-806 @@ -1987,7 +2193,7 @@ static class User { public User() {} - public User(String id, PushMessage... msgs) { + User(String id, PushMessage... msgs) { this.id = id; this.msgs = Arrays.asList(msgs); } @@ -2002,7 +2208,11 @@ static class PushMessage { public PushMessage() {} - public PushMessage(String id, String content, Date createDate) { + PushMessage(String id, String content, Instant createDate) { + this(id, content, Date.from(createDate)); + } + + PushMessage(String id, String content, Date createDate) { this.id = id; this.content = content; this.createDate = createDate; @@ -2017,7 +2227,7 @@ static class CarPerson { private String lastName; private Descriptors descriptors; - public CarPerson(String firstname, String lastname, Entry... entries) { + CarPerson(String firstname, String lastname, Entry... 
entries) { this.firstName = firstname; this.lastName = lastname; @@ -2037,7 +2247,7 @@ static class CarDescriptor { private List entries = new ArrayList(); - public CarDescriptor(Entry... entries) { + CarDescriptor(Entry... entries) { for (Entry entry : entries) { this.entries.add(entry); @@ -2053,7 +2263,7 @@ static class Entry { public Entry() {} - public Entry(String make, String model, int year) { + Entry(String make, String model, int year) { this.make = make; this.model = model; this.year = year; @@ -2069,7 +2279,7 @@ static class Reservation { public Reservation() {} - public Reservation(String hotelCode, String confirmationNumber, int timestamp) { + Reservation(String hotelCode, String confirmationNumber, int timestamp) { this.hotelCode = hotelCode; this.confirmationNumber = confirmationNumber; this.timestamp = timestamp; @@ -2080,7 +2290,7 @@ static class ObjectWithDate { Date dateValue; - public ObjectWithDate(Date dateValue) { + ObjectWithDate(Date dateValue) { this.dateValue = dateValue; } } @@ -2096,14 +2306,14 @@ static class InventoryItem { public InventoryItem() {} - public InventoryItem(int id, String item, int qty) { + InventoryItem(int id, String item, int qty) { this.id = id; this.item = item; this.qty = qty; } - public InventoryItem(int id, String item, String description, int qty) { + InventoryItem(int id, String item, String description, int qty) { this.id = id; this.item = item; @@ -2113,49 +2323,402 @@ public InventoryItem(int id, String item, String description, int qty) { } // DATAMONGO-1491 - @lombok.Data - @Builder static class Sales { @Id String id; List items; + + Sales(String id, List items) { + this.id = id; + this.items = items; + } + + public static SalesBuilder builder() { + return new SalesBuilder(); + } + + public String getId() { + return this.id; + } + + public List getItems() { + return this.items; + } + + public void setId(String id) { + this.id = id; + } + + public void setItems(List items) { + this.items = items; + } + + 
@Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Sales sales = (Sales) o; + return Objects.equals(id, sales.id) && Objects.equals(items, sales.items); + } + + @Override + public int hashCode() { + return Objects.hash(id, items); + } + + public String toString() { + return "AggregationTests.Sales(id=" + this.getId() + ", items=" + this.getItems() + ")"; + } + + public static class SalesBuilder { + + private String id; + private List items; + + SalesBuilder() {} + + public SalesBuilder id(String id) { + this.id = id; + return this; + } + + public SalesBuilder items(List items) { + this.items = items; + return this; + } + + public Sales build() { + return new Sales(id, items); + } + + public String toString() { + return "AggregationTests.Sales.SalesBuilder(id=" + this.id + ", items=" + this.items + ")"; + } + } } - // DATAMONGO-1491 - @lombok.Data - @Builder + // DATAMONGO-1491, GH-4443 static class Item { @org.springframework.data.mongodb.core.mapping.Field("item_id") // String itemId; Integer quantity; Long price; + List tags = new ArrayList<>(); + + Item(String itemId, Integer quantity, Long price, List tags) { + + this.itemId = itemId; + this.quantity = quantity; + this.price = price; + this.tags = tags; + } + + public static ItemBuilder builder() { + return new ItemBuilder(); + } + + public String getItemId() { + return this.itemId; + } + + public Integer getQuantity() { + return this.quantity; + } + + public Long getPrice() { + return this.price; + } + + public void setItemId(String itemId) { + this.itemId = itemId; + } + + public void setQuantity(Integer quantity) { + this.quantity = quantity; + } + + public void setPrice(Long price) { + this.price = price; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Item item = (Item) o; + return 
Objects.equals(itemId, item.itemId) && Objects.equals(quantity, item.quantity) + && Objects.equals(price, item.price); + } + + @Override + public int hashCode() { + return Objects.hash(itemId, quantity, price); + } + + public String toString() { + return "AggregationTests.Item(itemId=" + this.getItemId() + ", quantity=" + this.getQuantity() + ", price=" + + this.getPrice() + ")"; + } + + public static class ItemBuilder { + + private String itemId; + private Integer quantity; + private Long price; + private List tags; + + ItemBuilder() {} + + public ItemBuilder itemId(String itemId) { + this.itemId = itemId; + return this; + } + + public ItemBuilder quantity(Integer quantity) { + this.quantity = quantity; + return this; + } + + public ItemBuilder price(Long price) { + this.price = price; + return this; + } + + public ItemBuilder tags(List tags) { + this.tags = tags; + return this; + } + + public Item build() { + return new Item(itemId, quantity, price, tags); + } + + public String toString() { + return "AggregationTests.Item.ItemBuilder(itemId=" + this.itemId + ", quantity=" + this.quantity + ", price=" + + this.price + ")"; + } + } } // DATAMONGO-1538 - @lombok.Data - @Builder static class Sales2 { String id; Integer price; Float tax; boolean applyDiscount; + + Sales2(String id, Integer price, Float tax, boolean applyDiscount) { + + this.id = id; + this.price = price; + this.tax = tax; + this.applyDiscount = applyDiscount; + } + + public static Sales2Builder builder() { + return new Sales2Builder(); + } + + public String getId() { + return this.id; + } + + public Integer getPrice() { + return this.price; + } + + public Float getTax() { + return this.tax; + } + + public boolean isApplyDiscount() { + return this.applyDiscount; + } + + public void setId(String id) { + this.id = id; + } + + public void setPrice(Integer price) { + this.price = price; + } + + public void setTax(Float tax) { + this.tax = tax; + } + + public void setApplyDiscount(boolean applyDiscount) { + 
this.applyDiscount = applyDiscount; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Sales2 sales2 = (Sales2) o; + return applyDiscount == sales2.applyDiscount && Objects.equals(id, sales2.id) + && Objects.equals(price, sales2.price) && Objects.equals(tax, sales2.tax); + } + + @Override + public int hashCode() { + return Objects.hash(id, price, tax, applyDiscount); + } + + public String toString() { + return "AggregationTests.Sales2(id=" + this.getId() + ", price=" + this.getPrice() + ", tax=" + this.getTax() + + ", applyDiscount=" + this.isApplyDiscount() + ")"; + } + + public static class Sales2Builder { + + private String id; + private Integer price; + private Float tax; + private boolean applyDiscount; + + public Sales2Builder id(String id) { + this.id = id; + return this; + } + + public Sales2Builder price(Integer price) { + this.price = price; + return this; + } + + public Sales2Builder tax(Float tax) { + this.tax = tax; + return this; + } + + public Sales2Builder applyDiscount(boolean applyDiscount) { + this.applyDiscount = applyDiscount; + return this; + } + + public Sales2 build() { + return new Sales2(id, price, tax, applyDiscount); + } + + public String toString() { + return "AggregationTests.Sales2.Sales2Builder(id=" + this.id + ", price=" + this.price + ", tax=" + this.tax + + ", applyDiscount=" + this.applyDiscount + ")"; + } + } } // DATAMONGO-1551 - @lombok.Data - @Builder static class Employee { int id; String name; String reportsTo; + + Employee(int id, String name, String reportsTo) { + + this.id = id; + this.name = name; + this.reportsTo = reportsTo; + } + + public static EmployeeBuilder builder() { + return new EmployeeBuilder(); + } + + public int getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public String getReportsTo() { + return this.reportsTo; + } + + public void setId(int id) { + 
this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setReportsTo(String reportsTo) { + this.reportsTo = reportsTo; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Employee employee = (Employee) o; + return id == employee.id && Objects.equals(name, employee.name) && Objects.equals(reportsTo, employee.reportsTo); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, reportsTo); + } + + public String toString() { + return "AggregationTests.Employee(id=" + this.getId() + ", name=" + this.getName() + ", reportsTo=" + + this.getReportsTo() + ")"; + } + + public static class EmployeeBuilder { + + private int id; + private String name; + private String reportsTo; + + public EmployeeBuilder id(int id) { + this.id = id; + return this; + } + + public EmployeeBuilder name(String name) { + this.name = name; + return this; + } + + public EmployeeBuilder reportsTo(String reportsTo) { + this.reportsTo = reportsTo; + return this; + } + + public Employee build() { + return new Employee(id, name, reportsTo); + } + + public String toString() { + return "AggregationTests.Employee.EmployeeBuilder(id=" + this.id + ", name=" + this.name + ", reportsTo=" + + this.reportsTo + ")"; + } + } } // DATAMONGO-1552 - @lombok.Data - @Builder static class Art { int id; @@ -2163,5 +2726,322 @@ static class Art { String artist; Integer year; double price; + + Art(int id, String title, String artist, Integer year, double price) { + + this.id = id; + this.title = title; + this.artist = artist; + this.year = year; + this.price = price; + } + + public static ArtBuilder builder() { + return new ArtBuilder(); + } + + public int getId() { + return this.id; + } + + public String getTitle() { + return this.title; + } + + public String getArtist() { + return this.artist; + } + + public Integer getYear() { + return this.year; + } + + 
public double getPrice() { + return this.price; + } + + public void setId(int id) { + this.id = id; + } + + public void setTitle(String title) { + this.title = title; + } + + public void setArtist(String artist) { + this.artist = artist; + } + + public void setYear(Integer year) { + this.year = year; + } + + public void setPrice(double price) { + this.price = price; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Art art = (Art) o; + return id == art.id && Double.compare(art.price, price) == 0 && Objects.equals(title, art.title) + && Objects.equals(artist, art.artist) && Objects.equals(year, art.year); + } + + @Override + public int hashCode() { + return Objects.hash(id, title, artist, year, price); + } + + public String toString() { + return "AggregationTests.Art(id=" + this.getId() + ", title=" + this.getTitle() + ", artist=" + this.getArtist() + + ", year=" + this.getYear() + ", price=" + this.getPrice() + ")"; + } + + public static class ArtBuilder { + + private int id; + private String title; + private String artist; + private Integer year; + private double price; + + public ArtBuilder id(int id) { + this.id = id; + return this; + } + + public ArtBuilder title(String title) { + this.title = title; + return this; + } + + public ArtBuilder artist(String artist) { + this.artist = artist; + return this; + } + + public ArtBuilder year(Integer year) { + this.year = year; + return this; + } + + public ArtBuilder price(double price) { + this.price = price; + return this; + } + + public Art build() { + return new Art(id, title, artist, year, price); + } + + public String toString() { + return "AggregationTests.Art.ArtBuilder(id=" + this.id + ", title=" + this.title + ", artist=" + this.artist + + ", year=" + this.year + ", price=" + this.price + ")"; + } + } + } + + static class WithComplexId { + @Id ComplexId id; + + public ComplexId getId() { + return 
this.id; + } + + public void setId(ComplexId id) { + this.id = id; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithComplexId that = (WithComplexId) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + public String toString() { + return "AggregationTests.WithComplexId(id=" + this.getId() + ")"; + } + } + + static class ComplexId { + + String p1; + String p2; + + public String getP1() { + return this.p1; + } + + public String getP2() { + return this.p2; + } + + public void setP1(String p1) { + this.p1 = p1; + } + + public void setP2(String p2) { + this.p2 = p2; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ComplexId complexId = (ComplexId) o; + return Objects.equals(p1, complexId.p1) && Objects.equals(p2, complexId.p2); + } + + @Override + public int hashCode() { + return Objects.hash(p1, p2); + } + + public String toString() { + return "AggregationTests.ComplexId(p1=" + this.getP1() + ", p2=" + this.getP2() + ")"; + } + } + + static enum MyEnum { + ONE, TWO + } + + static class WithEnum { + + @Id String id; + MyEnum enumValue; + + public WithEnum() {} + + public String getId() { + return this.id; + } + + public MyEnum getEnumValue() { + return this.enumValue; + } + + public void setId(String id) { + this.id = id; + } + + public void setEnumValue(MyEnum enumValue) { + this.enumValue = enumValue; + } + + public String toString() { + return "AggregationTests.WithEnum(id=" + this.getId() + ", enumValue=" + this.getEnumValue() + ")"; + } + } + + static class Widget { + + @Id String id; + List users; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public List getUsers() { + return users; + } + + public void 
setUsers(List users) { + this.users = users; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + Widget widget = (Widget) o; + + if (!ObjectUtils.nullSafeEquals(id, widget.id)) { + return false; + } + return ObjectUtils.nullSafeEquals(users, widget.users); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(id); + result = 31 * result + ObjectUtils.nullSafeHashCode(users); + return result; + } + } + + static class UserRef { + + @MongoId String id; + String name; + + public UserRef() {} + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + UserRef userRef = (UserRef) o; + return Objects.equals(id, userRef.id) && Objects.equals(name, userRef.name); + } + + @Override + public int hashCode() { + return Objects.hash(id, name); + } + + public String toString() { + return "AggregationTests.UserRef(id=" + this.getId() + ", name=" + this.getName() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUnitTests.java index 05258c1294..066a80f82c 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,16 +22,26 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.Map; import org.bson.Document; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond; +import org.springframework.data.mongodb.core.aggregation.ProjectionOperationUnitTests.BookWithFieldAnnotation; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.query.Criteria; +import com.mongodb.client.model.Aggregates; +import com.mongodb.client.model.Projections; + /** * Unit tests for {@link Aggregation}. 
* @@ -39,47 +49,44 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Julia Lee */ public class AggregationUnitTests { - public @Rule ExpectedException exception = ExpectedException.none(); - - @Test(expected = IllegalArgumentException.class) - public void rejectsNullAggregationOperation() { - newAggregation((AggregationOperation[]) null); + @Test + void rejectsNullAggregationOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> newAggregation((AggregationOperation[]) null)); } - @Test(expected = IllegalArgumentException.class) - public void rejectsNullTypedAggregationOperation() { - newAggregation(String.class, (AggregationOperation[]) null); + @Test + void rejectsNullTypedAggregationOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> newAggregation(String.class, (AggregationOperation[]) null)); } - @Test(expected = IllegalArgumentException.class) - public void rejectsNoAggregationOperation() { - newAggregation(new AggregationOperation[0]); + @Test + void rejectsNoAggregationOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> newAggregation(new AggregationOperation[0])); } - @Test(expected = IllegalArgumentException.class) - public void rejectsNoTypedAggregationOperation() { - newAggregation(String.class, new AggregationOperation[0]); + @Test + void rejectsNoTypedAggregationOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> newAggregation(String.class, new AggregationOperation[0])); } @Test // DATAMONGO-753 - public void checkForCorrectFieldScopeTransfer() { - - exception.expect(IllegalArgumentException.class); - exception.expectMessage("Invalid reference"); - exception.expectMessage("'b'"); + void checkForCorrectFieldScopeTransfer() { - newAggregation( // - project("a", "b"), // - group("a").count().as("cnt"), // a was introduced to the context by the project operation - project("cnt", "b") // b was removed from the context by the group operation - 
).toDocument("foo", Aggregation.DEFAULT_CONTEXT); // -> triggers IllegalArgumentException + assertThatIllegalArgumentException().isThrownBy(() -> { + newAggregation( // + project("a", "b"), // + group("a").count().as("cnt"), // a was introduced to the context by the project operation + project("cnt", "b") // b was removed from the context by the group operation + ).toDocument("foo", Aggregation.DEFAULT_CONTEXT); // -> triggers IllegalArgumentException + }); } @Test // DATAMONGO-753 - public void unwindOperationShouldNotChangeAvailableFields() { + void unwindOperationShouldNotChangeAvailableFields() { newAggregation( // project("a", "b"), // @@ -89,7 +96,7 @@ public void unwindOperationShouldNotChangeAvailableFields() { } @Test // DATAMONGO-1391 - public void unwindOperationWithIndexShouldPreserveFields() { + void unwindOperationWithIndexShouldPreserveFields() { newAggregation( // project("a", "b"), // @@ -99,7 +106,7 @@ public void unwindOperationWithIndexShouldPreserveFields() { } @Test // DATAMONGO-1391 - public void unwindOperationWithIndexShouldAddIndexField() { + void unwindOperationWithIndexShouldAddIndexField() { newAggregation( // project("a", "b"), // @@ -109,7 +116,7 @@ public void unwindOperationWithIndexShouldAddIndexField() { } @Test // DATAMONGO-1391 - public void fullUnwindOperationShouldBuildCorrectClause() { + void fullUnwindOperationShouldBuildCorrectClause() { Document agg = newAggregation( // unwind("a", "x", true)).toDocument("foo", Aggregation.DEFAULT_CONTEXT); @@ -122,7 +129,7 @@ public void fullUnwindOperationShouldBuildCorrectClause() { } @Test // DATAMONGO-1391 - public void unwindOperationWithPreserveNullShouldBuildCorrectClause() { + void unwindOperationWithPreserveNullShouldBuildCorrectClause() { Document agg = newAggregation( // unwind("a", true)).toDocument("foo", Aggregation.DEFAULT_CONTEXT); @@ -135,7 +142,7 @@ public void unwindOperationWithPreserveNullShouldBuildCorrectClause() { } @Test // DATAMONGO-1550 - public void 
replaceRootOperationShouldBuildCorrectClause() { + void replaceRootOperationShouldBuildCorrectClause() { Document agg = newAggregation( // replaceRoot().withDocument().andValue("value").as("field")) // @@ -147,7 +154,7 @@ public void replaceRootOperationShouldBuildCorrectClause() { } @Test // DATAMONGO-753 - public void matchOperationShouldNotChangeAvailableFields() { + void matchOperationShouldNotChangeAvailableFields() { newAggregation( // project("a", "b"), // @@ -157,7 +164,7 @@ public void matchOperationShouldNotChangeAvailableFields() { } @Test // DATAMONGO-788 - public void referencesToGroupIdsShouldBeRenderedAsReferences() { + void referencesToGroupIdsShouldBeRenderedAsReferences() { Document agg = newAggregation( // project("a"), // @@ -173,7 +180,7 @@ public void referencesToGroupIdsShouldBeRenderedAsReferences() { } @Test // DATAMONGO-791 - public void allowAggregationOperationsToBePassedAsIterable() { + void allowAggregationOperationsToBePassedAsIterable() { List ops = new ArrayList(); ops.add(project("a")); @@ -190,7 +197,7 @@ public void allowAggregationOperationsToBePassedAsIterable() { } @Test // DATAMONGO-791 - public void allowTypedAggregationOperationsToBePassedAsIterable() { + void allowTypedAggregationOperationsToBePassedAsIterable() { List ops = new ArrayList<>(); ops.add(project("a")); @@ -207,7 +214,7 @@ public void allowTypedAggregationOperationsToBePassedAsIterable() { } @Test // DATAMONGO-838 - public void expressionBasedFieldsShouldBeReferencableInFollowingOperations() { + void expressionBasedFieldsShouldBeReferencableInFollowingOperations() { Document agg = newAggregation( // project("a").andExpression("b+c").as("foo"), // @@ -221,7 +228,7 @@ public void expressionBasedFieldsShouldBeReferencableInFollowingOperations() { } @Test // DATAMONGO-908 - public void shouldSupportReferingToNestedPropertiesInGroupOperation() { + void shouldSupportReferingToNestedPropertiesInGroupOperation() { Document agg = newAggregation( // 
project("cmsParameterId", "rules"), // @@ -239,7 +246,7 @@ public void shouldSupportReferingToNestedPropertiesInGroupOperation() { } @Test // DATAMONGO-1585 - public void shouldSupportSortingBySyntheticAndExposedGroupFields() { + void shouldSupportSortingBySyntheticAndExposedGroupFields() { Document agg = newAggregation( // group("cmsParameterId").addToSet("title").as("titles"), // @@ -255,7 +262,7 @@ public void shouldSupportSortingBySyntheticAndExposedGroupFields() { } @Test // DATAMONGO-1585 - public void shouldSupportSortingByProjectedFields() { + void shouldSupportSortingByProjectedFields() { Document agg = newAggregation( // project("cmsParameterId") // @@ -274,7 +281,7 @@ public void shouldSupportSortingByProjectedFields() { } @Test // DATAMONGO-924 - public void referencingProjectionAliasesFromPreviousStepShouldReferToTheSameFieldTarget() { + void referencingProjectionAliasesFromPreviousStepShouldReferToTheSameFieldTarget() { Document agg = newAggregation( // project().and("foo.bar").as("ba") // @@ -289,7 +296,7 @@ public void referencingProjectionAliasesFromPreviousStepShouldReferToTheSameFiel } @Test // DATAMONGO-960 - public void shouldRenderAggregationWithDefaultOptionsCorrectly() { + void shouldRenderAggregationWithDefaultOptionsCorrectly() { Document agg = newAggregation( // project().and("a").as("aa") // @@ -300,7 +307,7 @@ public void shouldRenderAggregationWithDefaultOptionsCorrectly() { } @Test // DATAMONGO-960 - public void shouldRenderAggregationWithCustomOptionsCorrectly() { + void shouldRenderAggregationWithCustomOptionsCorrectly() { AggregationOptions aggregationOptions = newAggregationOptions().explain(true).cursor(new Document("foo", 1)) .allowDiskUse(true).build(); @@ -320,7 +327,7 @@ public void shouldRenderAggregationWithCustomOptionsCorrectly() { } @Test // DATAMONGO-954, DATAMONGO-1585 - public void shouldSupportReferencingSystemVariables() { + void shouldSupportReferencingSystemVariables() { Document agg = newAggregation( // 
project("someKey") // @@ -341,7 +348,7 @@ public void shouldSupportReferencingSystemVariables() { } @Test // DATAMONGO-1254 - public void shouldExposeAliasedFieldnameForProjectionsIncludingOperationsDownThePipeline() { + void shouldExposeAliasedFieldnameForProjectionsIncludingOperationsDownThePipeline() { Document agg = Aggregation.newAggregation(// project("date") // @@ -355,7 +362,7 @@ public void shouldExposeAliasedFieldnameForProjectionsIncludingOperationsDownThe } @Test // DATAMONGO-1254 - public void shouldUseAliasedFieldnameForProjectionsIncludingOperationsDownThePipelineWhenUsingSpEL() { + void shouldUseAliasedFieldnameForProjectionsIncludingOperationsDownThePipelineWhenUsingSpEL() { Document agg = Aggregation.newAggregation(// project("date") // @@ -369,7 +376,7 @@ public void shouldUseAliasedFieldnameForProjectionsIncludingOperationsDownThePip } @Test // DATAMONGO-861 - public void conditionExpressionBasedFieldsShouldBeReferencableInFollowingOperations() { + void conditionExpressionBasedFieldsShouldBeReferencableInFollowingOperations() { Document agg = newAggregation( // project("a", "answer"), // @@ -387,7 +394,7 @@ public void conditionExpressionBasedFieldsShouldBeReferencableInFollowingOperati } @Test // DATAMONGO-861 - public void shouldRenderProjectionConditionalExpressionCorrectly() { + void shouldRenderProjectionConditionalExpressionCorrectly() { Document agg = Aggregation.newAggregation(// project().and(ConditionalOperators.Cond.newBuilder() // @@ -406,7 +413,7 @@ public void shouldRenderProjectionConditionalExpressionCorrectly() { } @Test // DATAMONGO-861 - public void shouldRenderProjectionConditionalCorrectly() { + void shouldRenderProjectionConditionalCorrectly() { Document agg = Aggregation.newAggregation(// project().and("color").applyCondition(ConditionalOperators.Cond.newBuilder() // @@ -425,7 +432,7 @@ public void shouldRenderProjectionConditionalCorrectly() { } @Test // DATAMONGO-861 - public void 
shouldRenderProjectionConditionalWithCriteriaCorrectly() { + void shouldRenderProjectionConditionalWithCriteriaCorrectly() { Document agg = Aggregation.newAggregation(project()// .and("color")// @@ -442,14 +449,14 @@ public void shouldRenderProjectionConditionalWithCriteriaCorrectly() { assertThat(getAsDocument(project, "color")).containsEntry("$cond", expectedCondition); } - @Test // DATAMONGO-861 - public void referencingProjectionAliasesShouldRenderProjectionConditionalWithFieldReferenceCorrectly() { + @Test // DATAMONGO-861, DATAMONGO-2242 + void referencingProjectionAliasesShouldRenderProjectionConditionalWithFieldReferenceCorrectly() { Document agg = Aggregation.newAggregation(// project().and("color").as("chroma"), project().and("luminosity") // .applyCondition(ConditionalOperators // .when("chroma") // - .thenValueOf("bright") // + .then("bright") // .otherwise("dark"))) // .toDocument("foo", Aggregation.DEFAULT_CONTEXT); @@ -463,7 +470,7 @@ public void referencingProjectionAliasesShouldRenderProjectionConditionalWithFie } @Test // DATAMONGO-861 - public void referencingProjectionAliasesShouldRenderProjectionConditionalWithCriteriaReferenceCorrectly() { + void referencingProjectionAliasesShouldRenderProjectionConditionalWithCriteriaReferenceCorrectly() { Document agg = Aggregation.newAggregation(// project().and("color").as("chroma"), project().and("luminosity") // @@ -482,7 +489,7 @@ public void referencingProjectionAliasesShouldRenderProjectionConditionalWithCri } @Test // DATAMONGO-861 - public void shouldRenderProjectionIfNullWithFieldReferenceCorrectly() { + void shouldRenderProjectionIfNullWithFieldReferenceCorrectly() { Document agg = Aggregation.newAggregation(// project().and("color"), // @@ -498,7 +505,7 @@ public void shouldRenderProjectionIfNullWithFieldReferenceCorrectly() { } @Test // DATAMONGO-861 - public void shouldRenderProjectionIfNullWithFallbackFieldReferenceCorrectly() { + void 
shouldRenderProjectionIfNullWithFallbackFieldReferenceCorrectly() { Document agg = Aggregation.newAggregation(// project("fallback").and("color").as("chroma"), project().and("luminosity") // @@ -512,7 +519,7 @@ public void shouldRenderProjectionIfNullWithFallbackFieldReferenceCorrectly() { } @Test // DATAMONGO-1552 - public void shouldHonorDefaultCountField() { + void shouldHonorDefaultCountField() { Document agg = Aggregation.newAggregation(// bucket("year"), // @@ -525,7 +532,7 @@ public void shouldHonorDefaultCountField() { } @Test // DATAMONGO-1533 - public void groupOperationShouldAllowUsageOfDerivedSpELAggregationExpression() { + void groupOperationShouldAllowUsageOfDerivedSpELAggregationExpression() { Document agg = newAggregation( // project("a"), // @@ -543,7 +550,7 @@ public void groupOperationShouldAllowUsageOfDerivedSpELAggregationExpression() { } @Test // DATAMONGO-1756 - public void projectOperationShouldRenderNestedFieldNamesCorrectly() { + void projectOperationShouldRenderNestedFieldNamesCorrectly() { Document agg = newAggregation(project().and("value1.value").plus("value2.value").as("val")).toDocument("collection", Aggregation.DEFAULT_CONTEXT); @@ -553,7 +560,7 @@ public void projectOperationShouldRenderNestedFieldNamesCorrectly() { } @Test // DATAMONGO-1871 - public void providedAliasShouldAllowNestingExpressionWithAliasCorrectly() { + void providedAliasShouldAllowNestingExpressionWithAliasCorrectly() { Document condition = new Document("$and", Arrays.asList(new Document("$gte", Arrays.asList("$$est.dt", "2015-12-29")), // @@ -566,12 +573,123 @@ public void providedAliasShouldAllowNestingExpressionWithAliasCorrectly() { Document $project = extractPipelineElement(agg.toDocument("collection-1", Aggregation.DEFAULT_CONTEXT), 0, "$project"); - assertThat($project.containsKey("plts.ests")).isTrue(); + assertThat($project).containsKey("plts.ests"); + } + + @Test // DATAMONGO-2377 + void shouldAllowInternalThisAndValueReferences() { + + Document untyped 
= newAggregation( // + Arrays.asList( // + (group("uid", "data.sourceId") // + .push("data.attributeRecords").as("attributeRecordArrays")), // + (project() // + .and(ArrayOperators.arrayOf("attributeRecordArrays") // + .reduce(ArrayOperators.arrayOf("$$value").concat("$$this")) // + .startingWith(Collections.emptyList())) // + .as("attributeRecordArrays")) // + )).toDocument("collection-1", DEFAULT_CONTEXT); + + assertThat(extractPipelineElement(untyped, 1, "$project")).isEqualTo(Document.parse( + "{\"attributeRecordArrays\": {\"$reduce\": {\"input\": \"$attributeRecordArrays\", \"initialValue\": [], \"in\": {\"$concatArrays\": [\"$$value\", \"$$this\"]}}}}")); + } + + @Test // DATAMONGO-2644 + void projectOnIdIsAlwaysValid() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + Document target = new Aggregation(bucket("start"), project("_id")).toDocument("collection-1", + new RelaxedTypeBasedAggregationOperationContext(BookWithFieldAnnotation.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)))); + + assertThat(extractPipelineElement(target, 1, "$project")).isEqualTo(Document.parse(" { \"_id\" : 1 }")); + } + + @Test // GH-3898 + void shouldNotConvertIncludeExcludeValuesForProjectOperation() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + RelaxedTypeBasedAggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext( + WithRetypedIdField.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); + Document document = project(WithRetypedIdField.class).toDocument(context); + assertThat(document).isEqualTo(new Document("$project", new Document("_id", 1).append("renamed-field", 1).append("entries", 1))); + } + + @Test // GH-4038 + void createsBasicAggregationOperationFromJsonString() { + + AggregationOperation stage = stage("{ $project : { name : 1} }"); + Document target = 
newAggregation(stage).toDocument("col-1", DEFAULT_CONTEXT); + assertThat(extractPipelineElement(target, 0, "$project")).containsEntry("name", 1); + } + + @Test // GH-4038 + void createsBasicAggregationOperationFromBson() { + + AggregationOperation stage = stage(Aggregates.project(Projections.fields(Projections.include("name")))); + Document target = newAggregation(stage).toDocument("col-1", DEFAULT_CONTEXT); + assertThat(extractPipelineElement(target, 0, "$project")).containsKey("name"); + } + + @Test // GH-3917 + void inheritedFieldsExposingContextShouldNotFailOnUnknownFieldReferenceForRelaxedRootContext() { + + List aggregationOperations = new ArrayList<>(); + + GroupOperation groupOperation = Aggregation.group("_id", "label_name"); + aggregationOperations.add(groupOperation); + + ProjectionOperation projectionOperation = Aggregation.project("label_name").andExclude("_id"); + aggregationOperations.add(projectionOperation); + + Sort sort = Sort.by(Sort.Direction.DESC, "serial_number"); + SortOperation sortOperation = new SortOperation(sort).and(Sort.Direction.DESC, "label_name"); + aggregationOperations.add(sortOperation); + + MongoMappingContext mappingContext = new MongoMappingContext(); + QueryMapper queryMapper = new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)); + + List documents = newAggregation(City.class, aggregationOperations).toPipeline(new RelaxedTypeBasedAggregationOperationContext(City.class, mappingContext, queryMapper)); + assertThat(documents.get(2)).isEqualTo("{ $sort : { 'serial_number' : -1, 'label_name' : -1 } }"); + } + + @Test // GH-4443 + void fieldsExposingContextShouldUseCustomFieldNameFromRelaxedRootContext() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + RelaxedTypeBasedAggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext( + WithRetypedIdField.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, 
mappingContext))); + + TypedAggregation agg = newAggregation(WithRetypedIdField.class, + unwind("entries"), match(where("foo").is("value 2"))); + List pipeline = agg.toPipeline(context); + + Document fields = getAsDocument(pipeline.get(1), "$match"); + assertThat(fields.get("renamed-field")).isEqualTo("value 2"); } private Document extractPipelineElement(Document agg, int index, String operation) { List pipeline = (List) agg.get("pipeline"); - return (Document) pipeline.get(index).get(operation); + Object value = pipeline.get(index).get(operation); + if (value instanceof Document document) { + return document; + } + if (value instanceof Map map) { + return new Document(map); + } + throw new IllegalArgumentException(); + } + + public class WithRetypedIdField { + + @Id @org.springframework.data.mongodb.core.mapping.Field private String id; + + @org.springframework.data.mongodb.core.mapping.Field("renamed-field") private String foo; + + private List entries = new ArrayList<>(); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdateUnitTests.java new file mode 100644 index 0000000000..60a6437f91 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdateUnitTests.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link AggregationUpdate}. + * + * @author Christoph Strobl + */ +public class AggregationUpdateUnitTests { + + @Test // DATAMONGO-2331 + public void createPipelineWithMultipleStages() { + + assertThat(AggregationUpdate.update() // + .set("stage-1").toValue("value-1") // + .unset("stage-2") // + .set("stage-3").toValue("value-3") // + .toPipeline(Aggregation.DEFAULT_CONTEXT)) // + .containsExactly(new Document("$set", new Document("stage-1", "value-1")), + new Document("$unset", "stage-2"), new Document("$set", new Document("stage-3", "value-3"))); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationVariableUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationVariableUnitTests.java new file mode 100644 index 0000000000..7ebf7c2849 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationVariableUnitTests.java @@ -0,0 +1,97 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +/** + * Unit tests for {@link AggregationVariable}. + * + * @author Christoph Strobl + */ +class AggregationVariableUnitTests { + + @Test // GH-4070 + void variableErrorsOnNullValue() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> AggregationVariable.variable(null)); + } + + @Test // GH-4070 + void createsVariable() { + + var variable = AggregationVariable.variable("$$now"); + + assertThat(variable.getTarget()).isEqualTo("$$now"); + assertThat(variable.isInternal()).isFalse(); + } + + @Test // GH-4070 + void prefixesVariableIfNeeded() { + + var variable = AggregationVariable.variable("this"); + + assertThat(variable.getTarget()).isEqualTo("$$this"); + } + + @Test // GH-4070 + void localVariableErrorsOnNullValue() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> AggregationVariable.localVariable(null)); + } + + @Test // GH-4070 + void localVariable() { + + var variable = AggregationVariable.localVariable("$$this"); + + assertThat(variable.getTarget()).isEqualTo("$$this"); + assertThat(variable.isInternal()).isTrue(); + } + + @Test // GH-4070 + void prefixesLocalVariableIfNeeded() { + + var variable = AggregationVariable.localVariable("this"); + + assertThat(variable.getTarget()).isEqualTo("$$this"); + } + + @Test // GH-4070 + void isVariableReturnsTrueForAggregationVariableTypes() { + + var variable = Mockito.mock(AggregationVariable.class); + + assertThat(AggregationVariable.isVariable(variable)).isTrue(); + } + + @Test // GH-4070 + void isVariableReturnsTrueForFieldThatTargetsVariable() { + + var variable = Fields.field("value", "$$this"); + + assertThat(AggregationVariable.isVariable(variable)).isTrue(); + } + + @Test // GH-4070 + void isVariableReturnsFalseForFieldThatDontTargetsVariable() { + + var variable = 
Fields.field("value", "$this"); + + assertThat(AggregationVariable.isVariable(variable)).isFalse(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java new file mode 100644 index 0000000000..381ddb45a1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -0,0 +1,208 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.core.aggregation.ArithmeticOperators.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; +import java.util.Collections; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ArithmeticOperators}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Mushtaq Ahmed + * @author Divya Srivastava + */ +class ArithmeticOperatorsUnitTests { + + @Test // DATAMONGO-2370 + void roundShouldWithoutPlace() { + + assertThat(valueOf("field").round().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$round", Collections.singletonList("$field"))); + } + + @Test // DATAMONGO-2370 + void roundShouldWithPlace() { + + assertThat(valueOf("field").roundToPlace(3).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$round", Arrays.asList("$field", 3))); + } + + @Test // DATAMONGO-2370 + void roundShouldWithPlaceFromField() { + + assertThat(valueOf("field").round().placeOf("my-field").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$round", Arrays.asList("$field", "$my-field"))); + } + + @Test // DATAMONGO-2370 + void roundShouldWithPlaceFromExpression() { + + assertThat(valueOf("field").round().placeOf((ctx -> new Document("$first", "$source"))) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$round", Arrays.asList("$field", new Document("$first", "$source")))); + } + + @Test // GH-3716 + void rendersDerivativeCorrectly() { + + assertThat( + valueOf("miles").derivative(SetWindowFieldsOperation.WindowUnits.HOUR).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $derivative: { input: \"$miles\", unit: \"hour\" } }"); + } + + @Test // GH-3721 + void rendersIntegral() { + assertThat(valueOf("kilowatts").integral().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $integral : { input : \"$kilowatts\" } }"); + } + + @Test // GH-3721 + void rendersIntegralWithUnit() { + assertThat(valueOf("kilowatts").integral(SetWindowFieldsOperation.WindowUnits.HOUR) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $integral : { input : \"$kilowatts\", unit : \"hour\" } }"); + } + + @Test // GH-3728 + void rendersSin() { + + 
assertThat(valueOf("angle").sin().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $sin : \"$angle\" }"); + } + + @Test // GH-3728 + void rendersSinWithValueInDegrees() { + + assertThat(valueOf("angle").sin(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $sin : { $degreesToRadians : \"$angle\" } }"); + } + + @Test // GH-3728 + void rendersSinh() { + + assertThat(valueOf("angle").sinh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $sinh : \"$angle\" }"); + } + + @Test // GH-3728 + void rendersSinhWithValueInDegrees() { + + assertThat(valueOf("angle").sinh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $sinh : { $degreesToRadians : \"$angle\" } }"); + } + + @Test // GH-3708 + void rendersASin() { + assertThat(valueOf("field").asin().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $asin : \"$field\" }"); + } + + @Test // GH-3708 + void rendersASinh() { + assertThat(valueOf("field").asinh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $asinh : \"$field\" }"); + } + + @Test // GH-3710 + void rendersCos() { + + assertThat(valueOf("angle").cos().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $cos : \"$angle\" }"); + } + + @Test // GH-3710 + void rendersCosWithValueInDegrees() { + + assertThat(valueOf("angle").cos(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $cos : { $degreesToRadians : \"$angle\" } }"); + } + + @Test // GH-3710 + void rendersCosh() { + + assertThat(valueOf("angle").cosh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $cosh : \"$angle\" }"); + } + + @Test // GH-3707 + void rendersACos() { + assertThat(valueOf("field").acos().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $acos : \"$field\" }"); + } + + @Test // GH-3707 + void rendersACosh() { + assertThat(valueOf("field").acosh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $acosh : \"$field\" }"); + } + + @Test // GH-3710 + void 
rendersCoshWithValueInDegrees() { + + assertThat(valueOf("angle").cosh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $cosh : { $degreesToRadians : \"$angle\" } }"); + } + + @Test // GH-3730 + void rendersTan() { + + assertThat(valueOf("angle").tan().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $tan : \"$angle\" }"); + } + + @Test // GH-3730 + void rendersTanWithValueInDegrees() { + + assertThat(valueOf("angle").tan(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $tan : { $degreesToRadians : \"$angle\" } }"); + } + + @Test // GH-3730 + void rendersTanh() { + + assertThat(valueOf("angle").tanh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $tanh : \"$angle\" }"); + } + + @Test // GH-3730 + void rendersTanhWithValueInDegrees() { + + assertThat(valueOf("angle").tanh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $tanh : { $degreesToRadians : \"$angle\" } }"); + } + + @Test // GH-3709 + void rendersATan() { + + assertThat(valueOf("field").atan().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $atan : \"$field\" }"); + } + + @Test // GH-3709 + void rendersATan2() { + + assertThat(valueOf("field1").atan2("field2").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $atan2 : [ \"$field1\" , \"$field2\" ] }"); + } + + @Test // GH-3709 + void rendersATanh() { + + assertThat(valueOf("field").atanh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $atanh : \"$field\" }"); + } + + @Test // GH-3724 + void rendersRand() { + assertThat(rand().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$rand", new Document())); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArrayOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArrayOperatorsUnitTests.java new file mode 100644 index 0000000000..0ab5545f23 --- /dev/null +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArrayOperatorsUnitTests.java @@ -0,0 +1,205 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.ArrayOperators.ArrayToObject; + +/** + * Unit tests for {@link ArrayOperators} + * + * @author Christoph Strobl + * @author Shashank Sharma + * @author Divya Srivastava + * @currentRead Royal Assassin - Robin Hobb + */ +public class ArrayOperatorsUnitTests { + + static final List VALUE_LIST = Arrays.asList(1, "2", new Document("_id", 3)); + static final String VALUE_LIST_STRING = "[1, \"2\", { \"_id\" : 3 }]"; + static final String EXPRESSION_STRING = "{ \"$stablemaster\" : \"burrich\" }"; + static final Document EXPRESSION_DOC = Document.parse(EXPRESSION_STRING); + static final AggregationExpression EXPRESSION = context -> EXPRESSION_DOC; + + @Test // DATAMONGO-2052 + public void toArrayWithFieldReference() { + + 
assertThat(ArrayOperators.arrayOf("regal").toObject().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $arrayToObject: \"$regal\" } "); + } + + @Test // DATAMONGO-2052 + public void toArrayWithExpression() { + + assertThat(ArrayOperators.arrayOf(EXPRESSION).toObject().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $arrayToObject: " + EXPRESSION_STRING + "} "); + } + + @Test // DATAMONGO-2052 + public void toArrayWithArgumentList() { + + List> source = new ArrayList<>(); + source.add(Arrays.asList("king", "shrewd")); + source.add(Arrays.asList("prince", "verity")); + + assertThat(ArrayToObject.arrayToObject(source).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $arrayToObject: [ [ \"king\", \"shrewd\"], [ \"prince\", \"verity\" ] ] } "); + } + + @Test // DATAMONGO-2287 + public void arrayElementAtWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).elementAt(1).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $arrayElemAt: [ " + VALUE_LIST_STRING + ", 1] } "); + } + + @Test // DATAMONGO-2287 + public void concatWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).concat("field").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $concatArrays: [ " + VALUE_LIST_STRING + ", \"$field\"] } "); + } + + @Test // DATAMONGO-2287 + public void filterWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).filter().as("var").by(new Document()) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $filter: { \"input\" : " + VALUE_LIST_STRING + ", \"as\" : \"var\", \"cond\" : {} } } "); + } + + @Test // DATAMONGO-2287 + public void lengthWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).length().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $size: [ " + VALUE_LIST_STRING + "] } "); + } + + @Test // DATAMONGO-2287 + public void sliceWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).slice().itemCount(3).toDocument(Aggregation.DEFAULT_CONTEXT)) + 
.isEqualTo("{ $slice: [ " + VALUE_LIST_STRING + ", 3] } "); + } + + @Test // DATAMONGO-2287 + public void indexOfWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).indexOf("s1p").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $indexOfArray: [ " + VALUE_LIST_STRING + ", \"s1p\"] } "); + } + + @Test // DATAMONGO-2287 + public void reverseWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).reverse().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $reverseArray: [ " + VALUE_LIST_STRING + "] } "); + } + + @Test // DATAMONGO-2287 + public void zipWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).zipWith("field").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $zip: { \"inputs\": [" + VALUE_LIST_STRING + ", \"$field\"]} } "); + } + + @Test // DATAMONGO-2287 + public void inWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).containsValue("$userName").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ \"$in\" : [\"$userName\", " + VALUE_LIST_STRING + "] }"); + } + + @Test // GH-3694 + public void firstWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).first().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ \"$first\" : " + VALUE_LIST_STRING + "}"); + } + + @Test // GH-3694 + public void firstWithExpression() { + + assertThat(ArrayOperators.arrayOf(EXPRESSION).first().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ \"$first\" : " + EXPRESSION_STRING + "}"); + } + + @Test // GH-3694 + public void firstWithFieldReference() { + + assertThat(ArrayOperators.arrayOf("field").first().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $first : \"$field\" }"); + } + + @Test // GH-3694 + public void lastWithValueList() { + + assertThat(ArrayOperators.arrayOf(VALUE_LIST).last().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ \"$last\" : " + VALUE_LIST_STRING + "}"); + } + + @Test // GH-3694 + public void lastWithExpression() { + + 
assertThat(ArrayOperators.arrayOf(EXPRESSION).last().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ \"$last\" : " + EXPRESSION_STRING + "}"); + } + + @Test // GH-3694 + public void lastWithFieldReference() { + + assertThat(ArrayOperators.arrayOf("field").last().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $last : \"$field\" }"); + } + + @Test // GH-4139 + void sortByWithFieldRef() { + + assertThat(ArrayOperators.arrayOf("team").sort(Sort.by("name")).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $sortArray: { input: \"$team\", sortBy: { name: 1 } } }"); + } + + @Test // GH-4929 + public void sortArrayByValueAscending() { + + Document result = ArrayOperators.arrayOf("numbers").sort(Direction.ASC).toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(result).isEqualTo("{ $sortArray: { input: '$numbers', sortBy: 1 } }"); + } + + @Test // GH-4929 + public void sortArrayByValueDescending() { + + Document result = ArrayOperators.arrayOf("numbers").sort(Direction.DESC).toDocument(Aggregation.DEFAULT_CONTEXT); + assertThat(result).isEqualTo("{ $sortArray: { input: '$numbers', sortBy: -1 } }"); + } + + @Test // GH-4929 + void sortByWithDirection() { + + assertThat(ArrayOperators.arrayOf(List.of("a", "b", "d", "c")).sort(Direction.DESC) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $sortArray: { input: [\"a\", \"b\", \"d\", \"c\"], sortBy: -1 } }"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperationUnitTests.java new file mode 100644 index 0000000000..c5b73b6576 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperationUnitTests.java @@ -0,0 +1,91 @@ +/* + * Copyright 2022-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +import com.mongodb.MongoClientSettings; + +/** + * Unit tests for {@link BasicAggregationOperation}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class BasicAggregationOperationUnitTests { + + @Mock QueryMapper queryMapper; + @Mock MongoConverter converter; + + TypeBasedAggregationOperationContext ctx; + + @BeforeEach + void beforeEach() { + + // no field mapping though having a type based context + ctx = new TypeBasedAggregationOperationContext(Person.class, new MongoMappingContext(), queryMapper); + when(queryMapper.getConverter()).thenReturn(converter); + when(converter.getCodecRegistry()).thenReturn(MongoClientSettings.getDefaultCodecRegistry()); + } + + @Test // GH-4038 + void usesGivenDocumentAsIs() { + + Document source = new Document("value", 1); + assertThat(new BasicAggregationOperation(source).toDocument(ctx)).isSameAs(source); + } + + @Test // GH-4038 + void parsesJson() { + + Document source = new Document("value", 1); + assertThat(new BasicAggregationOperation(source.toJson()).toDocument(ctx)).isEqualTo(source); + } + + @Test // GH-4038 + void errorsOnInvalidValue() { + + BasicAggregationOperation agg = new BasicAggregationOperation(new Object()); + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> agg.toDocument(ctx)); + } + + @Test // GH-4038 + void errorsOnNonJsonSting() { + + BasicAggregationOperation agg = new BasicAggregationOperation("#005BBB #FFD500"); + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> agg.toDocument(ctx)); + } + + private static class Person { + + @Field("v-a-l-u-e") Object value; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperationUnitTests.java index a0fb7002cd..af05e7bee0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperationUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,14 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.core.Is.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.DocumentTestUtils.getAsDocument; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.DocumentTestUtils.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; -import org.junit.Test; -import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.Granularities; - import org.bson.Document; +import org.junit.jupiter.api.Test; + +import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.Granularities; /** * Unit tests for {@link BucketAutoOperation}. 
@@ -32,14 +31,14 @@ */ public class BucketAutoOperationUnitTests { - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1552 + @Test // DATAMONGO-1552 public void rejectsNullFields() { - new BucketAutoOperation((Field) null, 0); + assertThatIllegalArgumentException().isThrownBy(() -> new BucketAutoOperation((Field) null, 0)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1552 + @Test // DATAMONGO-1552 public void rejectsNonPositiveIntegerNullFields() { - new BucketAutoOperation(Fields.field("field"), 0); + assertThatIllegalArgumentException().isThrownBy(() -> new BucketAutoOperation(Fields.field("field"), 0)); } @Test // DATAMONGO-1552 @@ -50,13 +49,13 @@ public void shouldRenderBucketOutputExpressions() { .andOutput("title").push().as("titles"); Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), is(Document.parse( - "{ \"grossSalesPrice\" : { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\" , 2]} , \"titles\" : { $push: \"$title\" } }}"))); + assertThat(extractOutput(agg)).isEqualTo(Document.parse( + "{ \"grossSalesPrice\" : { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\" , 2]} , \"titles\" : { $push: \"$title\" } }}")); } - @Test(expected = IllegalStateException.class) // DATAMONGO-1552 + @Test // DATAMONGO-1552 public void shouldRenderEmptyAggregationExpression() { - bucket("groupby").andOutput("field").as("alias"); + assertThatIllegalStateException().isThrownBy(() -> bucket("groupby").andOutput("field").as("alias")); } @Test // DATAMONGO-1552 @@ -66,7 +65,7 @@ public void shouldRenderBucketOutputOperators() { .andOutputCount().as("titles"); Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), is(Document.parse("{ titles : { $sum: 1 } }"))); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ titles : { $sum: 1 } }")); } @Test // DATAMONGO-1552 @@ -74,7 +73,7 @@ 
public void shouldRenderCorrectly() { Document agg = bucketAuto("field", 1).withBuckets(5).toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(agg, is(Document.parse("{ $bucketAuto: { groupBy: \"$field\", buckets: 5 } }"))); + assertThat(agg).isEqualTo(Document.parse("{ $bucketAuto: { groupBy: \"$field\", buckets: 5 } }")); } @Test // DATAMONGO-1552 @@ -84,7 +83,8 @@ public void shouldRenderGranulariy() { .withGranularity(Granularities.E24) // .toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(agg, is(Document.parse("{ $bucketAuto: { buckets: 1, granularity: \"E24\", groupBy: \"$field\" } }"))); + assertThat(agg) + .isEqualTo(Document.parse("{ $bucketAuto: { buckets: 1, granularity: \"E24\", groupBy: \"$field\" } }")); } @Test // DATAMONGO-1552 @@ -94,7 +94,7 @@ public void shouldRenderSumOperator() { .andOutput("score").sum().as("cummulated_score"); Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), is(Document.parse("{ cummulated_score : { $sum: \"$score\" } }"))); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ cummulated_score : { $sum: \"$score\" } }")); } @Test // DATAMONGO-1552 @@ -104,8 +104,8 @@ public void shouldRenderSumWithOwnOutputExpression() { .andOutputExpression("netPrice + tax").apply("$multiply", 5).as("total"); Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), - is(Document.parse("{ total : { $multiply: [ {$add : [\"$netPrice\", \"$tax\"]}, 5] } }"))); + assertThat(extractOutput(agg)) + .isEqualTo(Document.parse("{ total : { $multiply: [ {$add : [\"$netPrice\", \"$tax\"]}, 5] } }")); } private static Document extractOutput(Document fromBucketClause) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java index 1b00e3aba8..36a943d1c1 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/BucketOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,11 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.Matchers.*; -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; -import org.junit.Test; - import org.bson.Document; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link BucketOperation}. 
@@ -31,9 +28,9 @@ */ public class BucketOperationUnitTests { - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1552 + @Test // DATAMONGO-1552 public void rejectsNullFields() { - new BucketOperation((Field) null); + assertThatIllegalArgumentException().isThrownBy(() -> new BucketOperation((Field) null)); } @Test // DATAMONGO-1552 @@ -44,13 +41,13 @@ public void shouldRenderBucketOutputExpressions() { .andOutput("title").push().as("titles"); Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(dbObject), is(Document.parse( - "{ \"grossSalesPrice\" : { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\" , 2]} , \"titles\" : { $push: \"$title\" } }}"))); + assertThat(extractOutput(dbObject)).isEqualTo(Document.parse( + "{ \"grossSalesPrice\" : { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\" , 2]} , \"titles\" : { $push: \"$title\" } }}")); } - @Test(expected = IllegalStateException.class) // DATAMONGO-1552 + @Test // DATAMONGO-1552 public void shouldRenderEmptyAggregationExpression() { - bucket("groupby").andOutput("field").as("alias"); + assertThatIllegalStateException().isThrownBy(() -> bucket("groupby").andOutput("field").as("alias")); } @Test // DATAMONGO-1552 @@ -60,7 +57,7 @@ public void shouldRenderBucketOutputOperators() { .andOutputCount().as("titles"); Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), is(Document.parse("{ titles : { $sum: 1 } }"))); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ titles : { $sum: 1 } }")); } @Test // DATAMONGO-1552 @@ -70,8 +67,8 @@ public void shouldRenderSumAggregationExpression() { .andOutput(ArithmeticOperators.valueOf("quizzes").sum()).as("quizTotal") // .toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(agg, is(Document.parse( - "{ $bucket: { groupBy: \"$field\", boundaries: [], output : { quizTotal: { $sum: \"$quizzes\"} } } }"))); + 
assertThat(agg).isEqualTo(Document + .parse("{ $bucket: { groupBy: \"$field\", boundaries: [], output : { quizTotal: { $sum: \"$quizzes\"} } } }")); } @Test // DATAMONGO-1552 @@ -79,8 +76,8 @@ public void shouldRenderDefault() { Document agg = bucket("field").withDefaultBucket("default bucket").toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(agg, - is(Document.parse("{ $bucket: { groupBy: \"$field\", boundaries: [], default: \"default bucket\" } }"))); + assertThat(agg).isEqualTo( + Document.parse("{ $bucket: { groupBy: \"$field\", boundaries: [], default: \"default bucket\" } }")); } @Test // DATAMONGO-1552 @@ -91,8 +88,8 @@ public void shouldRenderBoundaries() { .withBoundaries(0) // .withBoundaries(10, 20).toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(agg, - is(Document.parse("{ $bucket: { boundaries: [0, 10, 20], default: \"default bucket\", groupBy: \"$field\" } }"))); + assertThat(agg).isEqualTo( + Document.parse("{ $bucket: { boundaries: [0, 10, 20], default: \"default bucket\", groupBy: \"$field\" } }")); } @Test // DATAMONGO-1552 @@ -102,7 +99,7 @@ public void shouldRenderSumOperator() { .andOutput("score").sum().as("cummulated_score"); Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), is(Document.parse("{ cummulated_score : { $sum: \"$score\" } }"))); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ cummulated_score : { $sum: \"$score\" } }")); } @Test // DATAMONGO-1552 @@ -112,7 +109,7 @@ public void shouldRenderSumWithValueOperator() { .andOutput("score").sum(4).as("cummulated_score"); Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), is(Document.parse("{ cummulated_score : { $sum: 4 } }"))); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ cummulated_score : { $sum: 4 } }")); } @Test // DATAMONGO-1552 @@ -122,7 +119,7 @@ public void shouldRenderAvgOperator() { .andOutput("score").avg().as("average"); Document agg = 
operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), is(Document.parse("{ average : { $avg: \"$score\" } }"))); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ average : { $avg: \"$score\" } }")); } @Test // DATAMONGO-1552 @@ -132,7 +129,7 @@ public void shouldRenderFirstOperator() { .andOutput("title").first().as("first_title"); Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), is(Document.parse("{ first_title : { $first: \"$title\" } }"))); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ first_title : { $first: \"$title\" } }")); } @Test // DATAMONGO-1552 @@ -142,7 +139,7 @@ public void shouldRenderLastOperator() { .andOutput("title").last().as("last_title"); Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), is(Document.parse("{ last_title : { $last: \"$title\" } }"))); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ last_title : { $last: \"$title\" } }")); } @Test // DATAMONGO-1552 @@ -152,7 +149,7 @@ public void shouldRenderMinOperator() { .andOutput("score").min().as("min_score"); Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), is(Document.parse("{ min_score : { $min: \"$score\" } }"))); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ min_score : { $min: \"$score\" } }")); } @Test // DATAMONGO-1552 @@ -162,7 +159,7 @@ public void shouldRenderPushOperator() { .andOutput("title").push().as("titles"); Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), is(Document.parse("{ titles : { $push: \"$title\" } }"))); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ titles : { $push: \"$title\" } }")); } @Test // DATAMONGO-1552 @@ -172,7 +169,7 @@ public void shouldRenderAddToSetOperator() { .andOutput("title").addToSet().as("titles"); Document agg = 
operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), is(Document.parse("{ titles : { $addToSet: \"$title\" } }"))); + assertThat(extractOutput(agg)).isEqualTo(Document.parse("{ titles : { $addToSet: \"$title\" } }")); } @Test // DATAMONGO-1552 @@ -182,7 +179,8 @@ public void shouldRenderSumWithExpression() { .andOutputExpression("netPrice + tax").sum().as("total"); Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), is(Document.parse("{ total : { $sum: { $add : [\"$netPrice\", \"$tax\"]} } }"))); + assertThat(extractOutput(agg)) + .isEqualTo(Document.parse("{ total : { $sum: { $add : [\"$netPrice\", \"$tax\"]} } }")); } @Test // DATAMONGO-1552 @@ -192,8 +190,8 @@ public void shouldRenderSumWithOwnOutputExpression() { .andOutputExpression("netPrice + tax").apply("$multiply", 5).as("total"); Document agg = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(extractOutput(agg), - is(Document.parse("{ total : { $multiply: [ {$add : [\"$netPrice\", \"$tax\"]}, 5] } }"))); + assertThat(extractOutput(agg)) + .isEqualTo(Document.parse("{ total : { $multiply: [ {$add : [\"$netPrice\", \"$tax\"]}, 5] } }")); } @Test // DATAMONGO-1552 @@ -201,8 +199,8 @@ public void shouldExposeDefaultCountField() { BucketOperation operation = bucket("field"); - assertThat(operation.getFields().exposesSingleFieldOnly(), is(true)); - assertThat(operation.getFields().getField("count"), is(notNullValue())); + assertThat(operation.getFields().exposesSingleFieldOnly()).isTrue(); + assertThat(operation.getFields().getField("count")).isNotNull(); } private static Document extractOutput(Document fromBucketClause) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/City.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/City.java index 8fe534dd31..7940f9e354 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/City.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/City.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,22 +15,59 @@ */ package org.springframework.data.mongodb.core.aggregation; -import lombok.AllArgsConstructor; -import lombok.NoArgsConstructor; +import java.util.Objects; /** * @author Thomas Darimont * @author Mark Paluch */ -@lombok.Data -@AllArgsConstructor -@NoArgsConstructor class City { String name; int population; + public City() {} + + public City(String name, int population) { + + this.name = name; + this.population = population; + } + public String toString() { return "City [name=" + name + ", population=" + population + "]"; } + + public String getName() { + return this.name; + } + + public int getPopulation() { + return this.population; + } + + public void setName(String name) { + this.name = name; + } + + public void setPopulation(int population) { + this.population = population; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + City city = (City) o; + return population == city.population && Objects.equals(name, city.name); + } + + @Override + public int hashCode() { + return Objects.hash(name, population); + } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CondExpressionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CondExpressionUnitTests.java index afc8bb169c..c11313176f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CondExpressionUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CondExpressionUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,13 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.junit.Assert.*; import static org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond; import org.springframework.data.mongodb.core.query.Criteria; @@ -34,42 +33,43 @@ */ public class CondExpressionUnitTests { - @Test(expected = IllegalArgumentException.class) // DATAMONGO-861 - public void builderRejectsEmptyFieldName() { - newBuilder().when(""); + @Test // DATAMONGO-861 + void builderRejectsEmptyFieldName() { + assertThatIllegalArgumentException().isThrownBy(() -> 
newBuilder().when("")); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-861 - public void builderRejectsNullFieldName() { - newBuilder().when((Document) null); + @Test // DATAMONGO-861 + void builderRejectsNullFieldName() { + assertThatIllegalArgumentException().isThrownBy(() -> newBuilder().when((Document) null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-861 - public void builderRejectsNullCriteriaName() { - newBuilder().when((Criteria) null); + @Test // DATAMONGO-861 + void builderRejectsNullCriteriaName() { + assertThatIllegalArgumentException().isThrownBy(() -> newBuilder().when((Criteria) null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-861 - public void builderRejectsBuilderAsThenValue() { - newBuilder().when("isYellow").then(newBuilder().when("field").then("then-value")).otherwise("otherwise"); + @Test // DATAMONGO-861 + void builderRejectsBuilderAsThenValue() { + assertThatIllegalArgumentException().isThrownBy( + () -> newBuilder().when("isYellow").then(newBuilder().when("field").then("then-value")).otherwise("otherwise")); } - @Test // DATAMONGO-861, DATAMONGO-1542 - public void simpleBuilderShouldRenderCorrectly() { + @Test // DATAMONGO-861, DATAMONGO-1542, DATAMONGO-2242 + void simpleBuilderShouldRenderCorrectly() { Cond operator = ConditionalOperators.when("isYellow").thenValueOf("bright").otherwise("dark"); Document document = operator.toDocument(Aggregation.DEFAULT_CONTEXT); Document expectedCondition = new Document() // .append("if", "$isYellow") // - .append("then", "bright") // + .append("then", "$bright") // .append("else", "dark"); - assertThat(document, isBsonObject().containing("$cond", expectedCondition)); + assertThat(document).containsEntry("$cond", expectedCondition); } - @Test // DATAMONGO-861, DATAMONGO-1542 - public void simpleCriteriaShouldRenderCorrectly() { + @Test // DATAMONGO-861, DATAMONGO-1542, DATAMONGO-2242 + void simpleCriteriaShouldRenderCorrectly() { Cond 
operator = ConditionalOperators.when(Criteria.where("luminosity").gte(100)).thenValueOf("bright") .otherwise("dark"); @@ -77,14 +77,14 @@ public void simpleCriteriaShouldRenderCorrectly() { Document expectedCondition = new Document() // .append("if", new Document("$gte", Arrays. asList("$luminosity", 100))) // - .append("then", "bright") // + .append("then", "$bright") // .append("else", "dark"); - assertThat(document, isBsonObject().containing("$cond", expectedCondition)); + assertThat(document).containsEntry("$cond", expectedCondition); } - @Test // DATAMONGO-861 - public void andCriteriaShouldRenderCorrectly() { + @Test // DATAMONGO-861, DATAMONGO-2242 + void andCriteriaShouldRenderCorrectly() { Cond operator = ConditionalOperators.when(Criteria.where("luminosity").gte(100) // .andOperator(Criteria.where("hue").is(50), // @@ -99,14 +99,14 @@ public void andCriteriaShouldRenderCorrectly() { Document expectedCondition = new Document() // .append("if", Arrays. asList(luminosity, new Document("$and", Arrays.asList(hue, saturation)))) // - .append("then", "bright") // + .append("then", "$bright") // .append("else", "$dark-field"); - assertThat(document, isBsonObject().containing("$cond", expectedCondition)); + assertThat(document).containsEntry("$cond", expectedCondition); } - @Test // DATAMONGO-861, DATAMONGO-1542 - public void twoArgsCriteriaShouldRenderCorrectly() { + @Test // DATAMONGO-861, DATAMONGO-1542, DATAMONGO-2242 + void twoArgsCriteriaShouldRenderCorrectly() { Criteria criteria = Criteria.where("luminosity").gte(100) // .and("saturation").and("chroma").is(200); @@ -119,14 +119,14 @@ public void twoArgsCriteriaShouldRenderCorrectly() { Document expectedCondition = new Document() // .append("if", Arrays.asList(gte, is)) // - .append("then", "bright") // + .append("then", "$bright") // .append("else", "dark"); - assertThat(document, isBsonObject().containing("$cond", expectedCondition)); + assertThat(document).containsEntry("$cond", expectedCondition); } 
@Test // DATAMONGO-861, DATAMONGO-1542 - public void nestedCriteriaShouldRenderCorrectly() { + void nestedCriteriaShouldRenderCorrectly() { Cond operator = ConditionalOperators.when(Criteria.where("luminosity").gte(100)) // .thenValueOf(newBuilder() // @@ -150,7 +150,7 @@ public void nestedCriteriaShouldRenderCorrectly() { .append("then", "very-dark") // .append("else", "not-so-dark"); - assertThat(document, isBsonObject().containing("$cond.then.$cond", trueCondition)); - assertThat(document, isBsonObject().containing("$cond.else.$cond", falseCondition)); + assertThat(document).containsEntry("$cond.then.$cond", trueCondition); + assertThat(document).containsEntry("$cond.else.$cond", falseCondition); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java new file mode 100644 index 0000000000..00f44194f6 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java @@ -0,0 +1,37 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.ConditionalOperators.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ConditionalOperators}. + * + * @author Christoph Strobl + */ +class ConditionalOperatorsUnitTests { + + @Test // GH-3720 + void rendersIfNullWithMultipleConditionalValuesCorrectly() { + + assertThat(ifNull("description").orIfNull("quantity").then("Unspecified").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $ifNull: [ \"$description\", \"$quantity\", \"Unspecified\" ] }")); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java new file mode 100644 index 0000000000..3600818a16 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java @@ -0,0 +1,232 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ConvertOperators}. + * + * @author Christoph Strobl + * @currentRead Royal Assassin - Robin Hobb + */ +public class ConvertOperatorsUnitTests { + + static final String EXPRESSION_STRING = "{ \"$molly\" : \"chandler\" }"; + static final Document EXPRESSION_DOC = Document.parse(EXPRESSION_STRING); + static final AggregationExpression EXPRESSION = context -> EXPRESSION_DOC; + + @Test // DATAMONGO-2048 + public void convertToUsingStringIdentifier() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"double\" } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToUsingIntIdentifier() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo(1).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : 1 } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToTypeOf("fitz").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"$fitz\" } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToUsingExpression() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToTypeOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToWithOnErrorValue() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").onErrorReturn("foo") + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + Document.parse("{ $convert: { 
\"input\" : \"$shrewd\", \"to\" : \"double\", \"onError\" : \"foo\" } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToWithOnErrorValueOfField() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").onErrorReturnValueOf("verity") + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document + .parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"double\", \"onError\" : \"$verity\" } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToWithOnErrorValueOfExpression() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").onErrorReturnValueOf(EXPRESSION) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"double\", \"onError\" : " + + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToWithOnNullValue() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").onNullReturn("foo") + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + Document.parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"double\", \"onNull\" : \"foo\" } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToWithOnNullValueOfField() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").onNullReturnValueOf("verity") + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document + .parse("{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"double\", \"onNull\" : \"$verity\" } } ")); + } + + @Test // DATAMONGO-2048 + public void convertToWithOnNullValueOfExpression() { + + assertThat(ConvertOperators.valueOf("shrewd").convertTo("double").onNullReturnValueOf(EXPRESSION) + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( + "{ $convert: { \"input\" : \"$shrewd\", \"to\" : \"double\", \"onNull\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO-2048 + public void toBoolUsingFieldReference() { + + 
assertThat(ConvertOperators.valueOf("shrewd").convertToBoolean().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toBool: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toBoolUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToBoolean().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toBool: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void toDateUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToDate().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toDate: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toDateUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToDate().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toDate: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void toDecimalUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToDecimal().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toDecimal: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toDecimalUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToDecimal().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toDecimal: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void toDoubleUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToDouble().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toDouble: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toDoubleUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToDouble().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toDouble: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void 
toIntUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToInt().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toInt: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toIntUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToInt().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toInt: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void toLongUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToLong().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toLong: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toLongUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToLong().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toLong: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void toObjectIdUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToObjectId().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toObjectId: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toObjectIdUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToObjectId().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toObjectId: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2048 + public void toStringUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("shrewd").convertToString().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toString: \"$shrewd\" } ")); + } + + @Test // DATAMONGO-2048 + public void toStringUsingExpression() { + + assertThat(ConvertOperators.valueOf(EXPRESSION).convertToString().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $toString: " + EXPRESSION_STRING + " } ")); + } + + @Test // GH-3714 + void 
degreesToRadiansUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("angle_a").convertDegreesToRadians().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $degreesToRadians : \"$angle_a\"}")); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CountOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CountOperationUnitTests.java index 9ee88a1aaf..eea05fbfaa 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CountOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/CountOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,10 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link CountOperation}. 
@@ -28,16 +27,17 @@ */ public class CountOperationUnitTests { - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1549 + @Test // DATAMONGO-1549 public void rejectsEmptyFieldName() { - new CountOperation(""); + assertThatIllegalArgumentException().isThrownBy(() -> new CountOperation("")); } @Test // DATAMONGO-1549 public void shouldRenderCorrectly() { CountOperation countOperation = new CountOperation("field"); - assertThat(countOperation.toDocument(Aggregation.DEFAULT_CONTEXT), is(Document.parse("{$count : \"field\" }"))); + assertThat(countOperation.toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{$count : \"field\" }")); } @Test // DATAMONGO-1549 @@ -45,8 +45,8 @@ public void countExposesFields() { CountOperation countOperation = new CountOperation("field"); - assertThat(countOperation.getFields().exposesNoFields(), is(false)); - assertThat(countOperation.getFields().exposesSingleFieldOnly(), is(true)); - assertThat(countOperation.getFields().getField("field"), notNullValue()); + assertThat(countOperation.getFields().exposesNoFields()).isFalse(); + assertThat(countOperation.getFields().exposesSingleFieldOnly()).isTrue(); + assertThat(countOperation.getFields().getField("field")).isNotNull(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Data.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Data.java index fc70c8e08b..b1826fdb33 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Data.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Data.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DataItem.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DataItem.java index c232cd1d71..a7d294e138 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DataItem.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DataItem.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java new file mode 100644 index 0000000000..2dd6e3beea --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java @@ -0,0 +1,147 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.time.DayOfWeek; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoUnit; +import java.util.TimeZone; + +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Timezone; + +/** + * Unit tests for {@link DateOperators}. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +class DateOperatorsUnitTests { + + @Test // GH-3713 + void rendersDateAdd() { + + assertThat(DateOperators.dateOf("purchaseDate").add(3, "day").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }"); + } + + @Test // GH-3713 + void rendersDateAddWithTimezone() { + + assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).add(3, "day") + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }"); + } + + @Test // GH-4139 + void rendersDateSubtract() { + + assertThat(DateOperators.dateOf("purchaseDate").subtract(3, "day").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $dateSubtract: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }"); + } + + @Test // GH-4139 + void rendersDateSubtractWithTimezone() { + + assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).subtract(3, 
"day") + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $dateSubtract: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }"); + } + + @Test // GH-3713 + void rendersDateDiff() { + + assertThat( + DateOperators.dateOf("purchaseDate").diffValueOf("delivered", "day").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }"); + } + + @Test // GH-3713 + void rendersDateDiffWithTimezone() { + + assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")) + .diffValueOf("delivered", DateOperators.TemporalUnit.from(ChronoUnit.DAYS)) + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\", timezone : \"America/Chicago\" } }"); + } + + @Test // GH-3713 + void rendersTimezoneFromZoneOffset() { + assertThat(DateOperators.Timezone.fromOffset(ZoneOffset.ofHoursMinutes(3, 30)).getValue()).isEqualTo("+03:30"); + } + + @Test // GH-3713 + void rendersTimezoneFromTimeZoneOffset() { + assertThat(DateOperators.Timezone.fromOffset(TimeZone.getTimeZone("America/Chicago")).getValue()) + .isEqualTo("-06:00"); + } + + @Test // GH-3713 + void rendersTimezoneFromTimeZoneId() { + assertThat(DateOperators.Timezone.fromZone(TimeZone.getTimeZone("America/Chicago")).getValue()) + .isEqualTo("America/Chicago"); + } + + @Test // GH-3713 + void rendersTimezoneFromZoneId() { + assertThat(DateOperators.Timezone.fromZone(ZoneId.of("America/Chicago")).getValue()).isEqualTo("America/Chicago"); + } + + @Test // GH-4139 + void rendersDateTrunc() { + + assertThat(DateOperators.dateOf("purchaseDate").truncate("week").binSize(2).startOfWeek(DayOfWeek.MONDAY).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $dateTrunc: { date: \"$purchaseDate\", unit: \"week\", binSize: 2, startOfWeek : \"monday\" } }"); + } + + @Test // GH-4139 + void 
rendersDateTruncWithTimezone() { + + assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).truncate("week").binSize(2).startOfWeek(DayOfWeek.MONDAY).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $dateTrunc: { date: \"$purchaseDate\", unit: \"week\", binSize: 2, startOfWeek : \"monday\", timezone : \"America/Chicago\" } }"); + } + + @Test // GH-4139 + void rendersTsIncrement() { + + assertThat(DateOperators.dateOf("saleTimestamp").tsIncrement().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $tsIncrement: \"$saleTimestamp\" }"); + } + + @Test // GH-4139 + void tsIncrementErrorsOnTimezone() { + + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).tsIncrement()); + } + + @Test // GH-4139 + void rendersTsSecond() { + + assertThat(DateOperators.dateOf("saleTimestamp").tsSecond().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $tsSecond: \"$saleTimestamp\" }"); + } + + @Test // GH-4139 + void tsSecondErrorsOnTimezone() { + + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).tsSecond()); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DensifyOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DensifyOperationUnitTests.java new file mode 100644 index 0000000000..47176fd8ab --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DensifyOperationUnitTests.java @@ -0,0 +1,150 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Date; + +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.DensifyOperation.DensifyUnits; +import org.springframework.data.mongodb.core.aggregation.DensifyOperation.Range; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link DensifyOperation}. 
+ * + * @author Christoph Strobl + */ +class DensifyOperationUnitTests { + + @Test // GH-4139 + void rendersFieldNamesAsIsForUntypedContext() { + + DensifyOperation densify = DensifyOperation.builder().densify("ts") + .range(Range.bounded("2021-05-18T00:00:00", "2021-05-18T08:00:00").incrementBy(1).unit(DensifyUnits.HOUR)) + .build(); + + assertThat(densify.toDocument(contextFor(null))).isEqualTo(""" + { + $densify: { + field: "ts", + range: { + step: 1, + unit: "hour", + bounds:[ "2021-05-18T00:00:00", "2021-05-18T08:00:00" ] + } + } + } + """); + } + + @Test // GH-4139 + void rendersFieldNamesCorrectly() { + + DensifyOperation densify = DensifyOperation.builder().densify("ts") + .range(Range.bounded("2021-05-18T00:00:00", "2021-05-18T08:00:00").incrementBy(1).unit(DensifyUnits.HOUR)) + .build(); + + assertThat(densify.toDocument(contextFor(Weather.class))).isEqualTo(""" + { + $densify: { + field: "timestamp", + range: { + step: 1, + unit: "hour", + bounds:[ "2021-05-18T00:00:00", "2021-05-18T08:00:00" ] + } + } + } + """); + } + + @Test // GH-4139 + void rendersPartitonNamesCorrectly() { + + DensifyOperation densify = DensifyOperation.builder().densify("alt").partitionBy("var") + .fullRange(range -> range.incrementBy(200)).build(); + + assertThat(densify.toDocument(contextFor(Coffee.class))).isEqualTo(""" + { + $densify: { + field: "altitude", + partitionByFields : [ "variety" ], + range: { + step: 200, + bounds: "full" + } + } + } + """); + } + + @Test // GH-4139 + void rendersPartitionRangeCorrectly() { + + DensifyOperation densify = DensifyOperation.builder().densify("alt").partitionBy("var") + .partitionRange(range -> range.incrementBy(200)).build(); + + assertThat(densify.toDocument(contextFor(Coffee.class))).isEqualTo(""" + { + $densify: { + field: "altitude", + partitionByFields : [ "variety" ], + range: { + step: 200, + bounds: "partition" + } + } + } + """); + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if 
(type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)); + } + + class Weather { + + @Field("timestamp") Date ts; + + @Field("temp") Long temperature; + } + + class Coffee { + + @Field("altitude") Long alt; + + @Field("variety") String var; + + Float score; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java new file mode 100644 index 0000000000..c647e1c6c7 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java @@ -0,0 +1,60 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.core.aggregation.DocumentOperators.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link DocumentOperators}. 
+ * + * @author Christoph Strobl + */ +class DocumentOperatorsUnitTests { + + @Test // GH-3715 + void rendersRank() { + assertThat(rank().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $rank: { } }"); + } + + @Test // GH-3715 + void rendersDenseRank() { + assertThat(denseRank().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $denseRank: { } }"); + } + + @Test // GH-3717 + void rendersDocumentNumber() { + assertThat(documentNumber().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $documentNumber: { } }"); + } + + @Test // GH-3727 + void rendersShift() { + + assertThat(valueOf("quantity").shift(1).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1 } }"); + } + + @Test // GH-3727 + void rendersShiftWithDefault() { + + assertThat(valueOf("quantity").shift(1).defaultTo("Not available").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java new file mode 100644 index 0000000000..60e6541281 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java @@ -0,0 +1,43 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link EvaluationOperators}. + * + * @author Mark Paluch + * @author Christoph Strobl + */ +class EvaluationOperatorsUnitTests { + + @Test // GH-3790 + void shouldRenderExprCorrectly() { + + assertThat(EvaluationOperators.valueOf("foo").expr().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $expr: \"$foo\" }"); + } + + @Test // GH-4139 + void shouldRenderLocfCorrectly() { + + assertThat(EvaluationOperators.valueOf("foo").locf().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $locf: \"$foo\" }"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsUnitTests.java index bc4b49a55c..ee20b15291 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ExposedFieldsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,10 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; -import org.junit.Test; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; /** @@ -29,28 +29,28 @@ */ public class ExposedFieldsUnitTests { - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFields() { - ExposedFields.from((ExposedField) null); + assertThatIllegalArgumentException().isThrownBy(() -> ExposedFields.from((ExposedField) null)); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFieldsForSynthetics() { - ExposedFields.synthetic(null); + assertThatIllegalArgumentException().isThrownBy(() -> ExposedFields.synthetic(null)); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFieldsForNonSynthetics() { - ExposedFields.nonSynthetic(null); + assertThatIllegalArgumentException().isThrownBy(() -> ExposedFields.nonSynthetic(null)); } @Test public void exposesSingleField() { ExposedFields fields = ExposedFields.synthetic(Fields.fields("foo")); - assertThat(fields.exposesSingleFieldOnly(), is(true)); + assertThat(fields.exposesSingleFieldOnly()).isTrue(); fields = fields.and(new ExposedField("bar", true)); - assertThat(fields.exposesSingleFieldOnly(), is(false)); + assertThat(fields.exposesSingleFieldOnly()).isFalse(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FacetOperationUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FacetOperationUnitTests.java index 7b84210d0f..2cfc941bda 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FacetOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FacetOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,12 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; -import static org.springframework.data.mongodb.test.util.Assertions.assertThat; +import static org.springframework.data.mongodb.test.util.Assertions.*; import org.bson.Document; import org.junit.Test; + import org.springframework.data.mongodb.core.query.Criteria; /** @@ -61,19 +60,18 @@ public void shouldRenderEmpty() { Document agg = facetOperation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(agg, is(Document.parse("{ $facet: { } }"))); + assertThat(agg).isEqualTo(Document.parse("{ $facet: { } }")); } @Test(expected = IllegalArgumentException.class) // DATAMONGO-1552 public void shouldRejectNonExistingFields() { - FacetOperation facetOperation = new FacetOperation() - .and(project("price"), // - bucket("price") // - .withBoundaries(0, 150, 200, 300, 400) // - .withDefaultBucket("Other") 
// - .andOutputCount().as("count") // - .andOutput("title").push().as("titles")) // + FacetOperation facetOperation = new FacetOperation().and(project("price"), // + bucket("price") // + .withBoundaries(0, 150, 200, 300, 400) // + .withDefaultBucket("Other") // + .andOutputCount().as("count") // + .andOutput("title").push().as("titles")) // .as("categorizedByPrice"); Document agg = facetOperation.toDocument(Aggregation.DEFAULT_CONTEXT); @@ -88,18 +86,16 @@ public void shouldRejectNonExistingFields() { @Test // DATAMONGO-1552 public void shouldHonorProjectedFields() { - FacetOperation facetOperation = new FacetOperation() - .and(project("price").and("title").as("name"), // - bucketAuto("price", 5) // - .andOutput("name").push().as("titles")) // + FacetOperation facetOperation = new FacetOperation().and(project("price").and("title").as("name"), // + bucketAuto("price", 5) // + .andOutput("name").push().as("titles")) // .as("categorizedByPrice"); Document agg = facetOperation.toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo(Document.parse("{ $facet: { categorizedByPrice: [" - + "{ $project: { price: 1, name: \"$title\" } }, " - + "{ $bucketAuto: { buckets: 5, groupBy: \"$price\", " - + "output: { titles: { $push: \"$name\" } } } } ] } }")); + + "{ $project: { price: 1, name: \"$title\" } }, " + "{ $bucketAuto: { buckets: 5, groupBy: \"$price\", " + + "output: { titles: { $push: \"$name\" } } } } ] } }")); } @Test // DATAMONGO-1553 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FieldsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FieldsUnitTests.java index c441a7cecc..cfeb411387 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FieldsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FieldsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 
the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,12 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.aggregation.Fields.*; -import org.hamcrest.Matchers; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField; +import org.junit.jupiter.api.Test; + +import org.springframework.data.mongodb.core.aggregation.Fields.*; /** * Unit tests for {@link Fields}. 
@@ -33,16 +30,14 @@ */ public class FieldsUnitTests { - @Rule public ExpectedException exception = ExpectedException.none(); - - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFieldVarArgs() { - Fields.from((Field[]) null); + assertThatIllegalArgumentException().isThrownBy(() -> Fields.from((Field[]) null)); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFieldNameVarArgs() { - Fields.fields((String[]) null); + assertThatIllegalArgumentException().isThrownBy(() -> Fields.fields((String[]) null)); } @Test @@ -55,19 +50,19 @@ public void createsFieldFromNameAndTarget() { verify(Fields.field("foo", "bar"), "foo", "bar"); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFieldName() { - Fields.field(null); + assertThatIllegalArgumentException().isThrownBy(() -> Fields.field(null)); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullFieldNameIfTargetGiven() { - Fields.field(null, "foo"); + assertThatIllegalArgumentException().isThrownBy(() -> Fields.field(null, "foo")); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsEmptyFieldName() { - Fields.field(""); + assertThatIllegalArgumentException().isThrownBy(() -> Fields.field("")); } @Test @@ -76,8 +71,8 @@ public void createsFieldsFromFieldInstances() { AggregationField reference = new AggregationField("foo"); Fields fields = Fields.from(reference); - assertThat(fields, is(Matchers. iterableWithSize(1))); - assertThat(fields, hasItem(reference)); + assertThat(fields).hasSize(1); + assertThat(fields).contains(reference); } @Test @@ -90,7 +85,7 @@ public void fieldsFactoryMethod() { Fields fields = fields("a", "b").and("c").and("d", "e"); - assertThat(fields, is(Matchers. 
iterableWithSize(4))); + assertThat(fields).hasSize(4); verify(fields.getField("a"), "a", null); verify(fields.getField("b"), "b", null); @@ -100,35 +95,39 @@ public void fieldsFactoryMethod() { @Test public void rejectsAmbiguousFieldNames() { - - exception.expect(IllegalArgumentException.class); - - fields("b", "a.b"); + assertThatIllegalArgumentException().isThrownBy(() -> fields("b", "a.b")); } @Test // DATAMONGO-774 public void stripsLeadingDollarsFromName() { - assertThat(Fields.field("$name").getName(), is("name")); - assertThat(Fields.field("$$$$name").getName(), is("name")); + assertThat(Fields.field("$name").getName()).isEqualTo("name"); + assertThat(Fields.field("$$$$name").getName()).isEqualTo("name"); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-774 + @Test // DATAMONGO-774 public void rejectsNameConsistingOfDollarOnly() { - Fields.field("$"); + assertThatIllegalArgumentException().isThrownBy(() -> Fields.field("$")); } @Test // DATAMONGO-774 public void stripsLeadingDollarsFromTarget() { - assertThat(Fields.field("$target").getTarget(), is("target")); - assertThat(Fields.field("$$$$target").getTarget(), is("target")); + assertThat(Fields.field("$target").getTarget()).isEqualTo("target"); + assertThat(Fields.field("$$$$target").getTarget()).isEqualTo("target"); + } + + @Test // GH-4123 + public void keepsRawMappingToDbRefId() { + + assertThat(Fields.field("$id").getName()).isEqualTo("id"); + assertThat(Fields.field("person.$id").getTarget()).isEqualTo("person.$id"); } private static void verify(Field field, String name, String target) { - assertThat(field, is(notNullValue())); - assertThat(field.getName(), is(name)); - assertThat(field.getTarget(), is(target != null ? target : name)); + assertThat(field).isNotNull(); + assertThat(field.getName()).isEqualTo(name); + assertThat(field.getTarget()).isEqualTo(target != null ? 
target : name); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FilterExpressionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FilterExpressionUnitTests.java index 796c6c2028..a5df9647a1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FilterExpressionUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FilterExpressionUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,112 +15,131 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.core.Is.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.filter; import java.util.Arrays; import java.util.List; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.data.mongodb.MongoDbFactory; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; import 
org.springframework.data.mongodb.core.DocumentTestUtils; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; /** * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) -public class FilterExpressionUnitTests { - - @Mock MongoDbFactory mongoDbFactory; +@ExtendWith(MockitoExtension.class) +class FilterExpressionUnitTests { private AggregationOperationContext aggregationContext; private MongoMappingContext mappingContext; - @Before - public void setUp() { + @BeforeEach + void setUp() { mappingContext = new MongoMappingContext(); aggregationContext = new TypeBasedAggregationOperationContext(Sales.class, mappingContext, - new QueryMapper(new MappingMongoConverter(new DefaultDbRefResolver(mongoDbFactory), mappingContext))); + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); } @Test // DATAMONGO-1491 - public void shouldConstructFilterExpressionCorrectly() { + void shouldConstructFilterExpressionCorrectly() { TypedAggregation agg = Aggregation.newAggregation(Sales.class, Aggregation.project() - .and(filter("items").as("item").by(AggregationFunctionExpressions.GTE.of(Fields.field("item.price"), 100))) + .and(filter("items").as("item").by(ComparisonOperators.valueOf("item.price").greaterThanEqualToValue(100))) .as("items")); - Document dbo = agg.toDocument("sales", aggregationContext); - - List pipeline = DocumentTestUtils.getAsDBList(dbo, "pipeline"); - Document $project = DocumentTestUtils.getAsDocument((Document) pipeline.get(0), "$project"); - Document items = DocumentTestUtils.getAsDocument($project, "items"); - Document $filter = DocumentTestUtils.getAsDocument(items, "$filter"); - + Document $filter = 
extractFilterOperatorFromDocument(agg.toDocument("sales", aggregationContext)); Document expected = Document.parse("{" + // "input: \"$items\"," + // "as: \"item\"," + // "cond: { $gte: [ \"$$item.price\", 100 ] }" + // "}"); - assertThat($filter, is(new Document(expected))); + assertThat($filter).isEqualTo(new Document(expected)); } @Test // DATAMONGO-1491 - public void shouldConstructFilterExpressionCorrectlyWhenUsingFilterOnProjectionBuilder() { + void shouldConstructFilterExpressionCorrectlyWhenUsingFilterOnProjectionBuilder() { TypedAggregation agg = Aggregation.newAggregation(Sales.class, Aggregation.project().and("items") - .filter("item", AggregationFunctionExpressions.GTE.of(Fields.field("item.price"), 100)).as("items")); - - Document dbo = agg.toDocument("sales", aggregationContext); - - List pipeline = DocumentTestUtils.getAsDBList(dbo, "pipeline"); - Document $project = DocumentTestUtils.getAsDocument((Document) pipeline.get(0), "$project"); - Document items = DocumentTestUtils.getAsDocument($project, "items"); - Document $filter = DocumentTestUtils.getAsDocument(items, "$filter"); + .filter("item", ComparisonOperators.valueOf("item.price").greaterThanEqualToValue(100)).as("items")); + Document $filter = extractFilterOperatorFromDocument(agg.toDocument("sales", aggregationContext)); Document expected = Document.parse("{" + // "input: \"$items\"," + // "as: \"item\"," + // "cond: { $gte: [ \"$$item.price\", 100 ] }" + // "}"); - assertThat($filter, is(expected)); + assertThat($filter).isEqualTo(expected); } @Test // DATAMONGO-1491 - public void shouldConstructFilterExpressionCorrectlyWhenInputMapToArray() { + void shouldConstructFilterExpressionCorrectlyWhenInputMapToArray() { TypedAggregation agg = Aggregation.newAggregation(Sales.class, Aggregation.project().and(filter(Arrays. 
asList(1, "a", 2, null, 3.1D, 4, "5")).as("num") - .by(AggregationFunctionExpressions.GTE.of(Fields.field("num"), 3))).as("items")); - - Document dbo = agg.toDocument("sales", aggregationContext); - - List pipeline = DocumentTestUtils.getAsDBList(dbo, "pipeline"); - Document $project = DocumentTestUtils.getAsDocument((Document) pipeline.get(0), "$project"); - Document items = DocumentTestUtils.getAsDocument($project, "items"); - Document $filter = DocumentTestUtils.getAsDocument(items, "$filter"); + .by(ComparisonOperators.valueOf("num").greaterThanEqualToValue(3))).as("items")); + Document $filter = extractFilterOperatorFromDocument(agg.toDocument("sales", aggregationContext)); Document expected = Document.parse("{" + // "input: [ 1, \"a\", 2, null, 3.1, 4, \"5\" ]," + // "as: \"num\"," + // "cond: { $gte: [ \"$$num\", 3 ] }" + // "}"); - assertThat($filter, is(expected)); + assertThat($filter).isEqualTo(expected); + } + + @Test // DATAMONGO-2320 + void shouldConstructFilterExpressionCorrectlyWhenConditionContainsFieldReference() { + + Aggregation agg = Aggregation.newAggregation(Aggregation.project().and((ctx) -> new Document()).as("field-1") + .and(filter("items").as("item").by(ComparisonOperators.valueOf("item.price").greaterThan("field-1"))) + .as("items")); + + Document $filter = extractFilterOperatorFromDocument(agg.toDocument("sales", Aggregation.DEFAULT_CONTEXT)); + Document expected = Document.parse("{" + // + "input: \"$items\"," + // + "as: \"item\"," + // + "cond: { $gt: [ \"$$item.price\", \"$field-1\" ] }" + // + "}"); + + assertThat($filter).isEqualTo(new Document(expected)); + } + + @Test // GH-4394 + void filterShouldAcceptExpression() { + + Document $filter = ArrayOperators.arrayOf(ObjectOperators.valueOf("data.metadata").toArray()).filter().as("item") + .by(ComparisonOperators.valueOf("item.price").greaterThan("field-1")).toDocument(Aggregation.DEFAULT_CONTEXT); + + Document expected = Document.parse(""" + { $filter : { + input: { 
$objectToArray: "$data.metadata" }, + as: "item", + cond: { $gt: [ "$$item.price", "$field-1" ] } + }} + """); + + assertThat($filter).isEqualTo(expected); + } + + private Document extractFilterOperatorFromDocument(Document source) { + + List pipeline = DocumentTestUtils.getAsDBList(source, "pipeline"); + Document $project = DocumentTestUtils.getAsDocument((Document) pipeline.get(0), "$project"); + Document items = DocumentTestUtils.getAsDocument($project, "items"); + return DocumentTestUtils.getAsDocument(items, "$filter"); } static class Sales { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperationUnitTests.java index 5b123c03d7..9496a51c03 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GeoNearOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,32 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.geo.Distance; import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; /** * Unit tests for {@link GeoNearOperation}. 
* * @author Oliver Gierke * @author Thomas Darimont + * @author Christoph Strobl */ public class GeoNearOperationUnitTests { @@ -41,6 +54,214 @@ public void rendersNearQueryAsAggregationOperation() { Document nearClause = DocumentTestUtils.getAsDocument(document, "$geoNear"); Document expected = new Document(query.toDocument()).append("distanceField", "distance"); - assertThat(nearClause, is(expected)); + assertThat(nearClause).isEqualTo(expected); + } + + @Test // DATAMONGO-2050 + public void rendersNearQueryWithKeyCorrectly() { + + NearQuery query = NearQuery.near(10.0, 10.0); + GeoNearOperation operation = new GeoNearOperation(query, "distance").useIndex("geo-index-1"); + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(DocumentTestUtils.getAsDocument(document, "$geoNear")).containsEntry("key", "geo-index-1"); + } + + @Test // DATAMONGO-2264 + public void rendersMaxDistanceCorrectly() { + + NearQuery query = NearQuery.near(10.0, 20.0).maxDistance(new Distance(30.0)); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).maxDistance(30.0).doc()); + } + + @Test // DATAMONGO-2264 + public void rendersMinDistanceCorrectly() { + + NearQuery query = NearQuery.near(10.0, 20.0).minDistance(new Distance(30.0)); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).minDistance(30.0).doc()); + } + + @Test // DATAMONGO-2264 + public void rendersSphericalCorrectly() { + + NearQuery query = NearQuery.near(10.0, 20.0).spherical(true); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).spherical(true).doc()); + } + + @Test // DATAMONGO-2264 + public void rendersDistanceMultiplier() { + + NearQuery query = NearQuery.near(10.0, 20.0).inKilometers(); + + 
assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).spherical(true).distanceMultiplier(6378.137).doc()); + } + + @Test // DATAMONGO-2264 + public void rendersIndexKey() { + + NearQuery query = NearQuery.near(10.0, 20.0); + + assertThat( + new GeoNearOperation(query, "distance").useIndex("index-1").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).key("index-1").doc()); + } + + @Test // DATAMONGO-2264 + public void rendersQuery() { + + NearQuery query = NearQuery.near(10.0, 20.0).query(Query.query(Criteria.where("city").is("Austin"))); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).query(new Document("city", "Austin")).doc()); + } + + @Test // DATAMONGO-2264 + public void rendersMappedQuery() { + + NearQuery query = NearQuery.near(10.0, 20.0).query(Query.query(Criteria.where("city").is("Austin"))); + + assertThat( + new GeoNearOperation(query, "distance").toPipelineStages(typedAggregationOperationContext(GeoDocument.class))) + .containsExactly($geoNear().near(10.0, 20.0).query(new Document("ci-ty", "Austin")).doc()); + } + + @Test // DATAMONGO-2264 + public void appliesSkipFromNearQuery() { + + NearQuery query = NearQuery.near(10.0, 20.0).skip(10L); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).doc(), new Document("$skip", 10L)); } + + @Test // DATAMONGO-2264 + public void appliesLimitFromNearQuery() { + + NearQuery query = NearQuery.near(10.0, 20.0).limit(10L); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).doc(), new Document("$limit", 10L)); + } + + @Test // DATAMONGO-2264 + public void appliesSkipAndLimitInOrder() { + + 
NearQuery query = NearQuery.near(10.0, 20.0).limit(10L).skip(3L); + + assertThat(new GeoNearOperation(query, "distance").toPipelineStages(Aggregation.DEFAULT_CONTEXT)) + .containsExactly($geoNear().near(10.0, 20.0).doc(), new Document("$skip", 3L), new Document("$limit", 10L)); + } + + private TypeBasedAggregationOperationContext typedAggregationOperationContext(Class type) { + + MongoMappingContext mappingContext = new MongoMappingContext(); + MongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + return new TypeBasedAggregationOperationContext(type, mappingContext, new QueryMapper(converter)); + } + + GeoNearDocumentBuilder $geoNear() { + return new GeoNearDocumentBuilder(); + } + + static class GeoDocument { + + @Id String id; + @Field("ci-ty") String city; + } + + static class GeoNearDocumentBuilder { + + Document target = new Document("distanceField", "distance").append("distanceMultiplier", 1.0D).append("spherical", + false); + + GeoNearDocumentBuilder maxDistance(@Nullable Number value) { + + if (value != null) { + target.put("maxDistance", value); + } else { + target.remove("maxDistance"); + } + return this; + } + + GeoNearDocumentBuilder minDistance(@Nullable Number value) { + + if (value != null) { + target.put("minDistance", value); + } else { + target.remove("minDistance"); + } + return this; + } + + GeoNearDocumentBuilder near(Number... 
coordinates) { + + target.put("near", Arrays.asList(coordinates)); + return this; + } + + GeoNearDocumentBuilder spherical(@Nullable Boolean value) { + + if (value != null) { + target.put("spherical", value); + } else { + target.remove("spherical"); + } + return this; + } + + GeoNearDocumentBuilder distanceField(@Nullable String value) { + + if (value != null) { + target.put("distanceField", value); + } else { + target.remove("distanceField"); + } + return this; + } + + GeoNearDocumentBuilder distanceMultiplier(Number value) { + + if (value != null) { + target.put("distanceMultiplier", value); + } else { + target.remove("distanceMultiplier"); + } + return this; + } + + GeoNearDocumentBuilder key(String value) { + + if (value != null) { + target.put("key", value); + } else { + target.remove("key"); + } + return this; + } + + GeoNearDocumentBuilder query(Document value) { + + if (value != null) { + target.put("query", value); + } else { + target.remove("query"); + } + return this; + } + + Document doc() { + return new Document("$geoNear", new Document(target)); + } + + } + + // TODO: we need to test this to the full extent + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperationUnitTests.java index 70405a738e..b752fab793 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GraphLookupOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,15 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.core.Is.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.Person; import org.springframework.data.mongodb.core.query.Criteria; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; -import com.mongodb.util.JSON; - /** * Unit tests for {@link GraphLookupOperation}. * @@ -36,9 +32,9 @@ */ public class GraphLookupOperationUnitTests { - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1551 + @Test // DATAMONGO-1551 public void rejectsNullFromCollection() { - GraphLookupOperation.builder().from(null); + assertThatIllegalArgumentException().isThrownBy(() -> GraphLookupOperation.builder().from(null)); } @Test // DATAMONGO-1551 @@ -54,8 +50,7 @@ public void shouldRenderCorrectly() { .as("reportingHierarchy"); Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(document, - isBsonObject().containing("$graphLookup.depthField", "depth").containing("$graphLookup.maxDepth", 42L)); + assertThat(document).containsEntry("$graphLookup.depthField", "depth").containsEntry("$graphLookup.maxDepth", 42L); } @Test // DATAMONGO-1551 @@ -70,8 +65,7 @@ public void shouldRenderCriteriaCorrectly() { .as("reportingHierarchy"); Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(document, - 
isBsonObject().containing("$graphLookup.restrictSearchWithMatch", new Document("key", "value"))); + assertThat(document).containsEntry("$graphLookup.restrictSearchWithMatch", new Document("key", "value")); } @Test // DATAMONGO-1551 @@ -86,9 +80,9 @@ public void shouldRenderArrayOfStartsWithCorrectly() { Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(document, - is(Document.parse("{ $graphLookup : { from: \"employees\", startWith: [\"$reportsTo\", \"$boss\"], " - + "connectFromField: \"reportsTo\", connectToField: \"name\", as: \"reportingHierarchy\" } }"))); + assertThat(document) + .isEqualTo(Document.parse("{ $graphLookup : { from: \"employees\", startWith: [\"$reportsTo\", \"$boss\"], " + + "connectFromField: \"reportsTo\", connectToField: \"name\", as: \"reportingHierarchy\" } }")); } @Test // DATAMONGO-1551 @@ -103,20 +97,18 @@ public void shouldRenderMixedArrayOfStartsWithCorrectly() { Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(document, - is(Document.parse("{ $graphLookup : { from: \"employees\", startWith: [\"$reportsTo\", { $literal: \"$boss\"}], " - + "connectFromField: \"reportsTo\", connectToField: \"name\", as: \"reportingHierarchy\" } }"))); + assertThat(document).containsEntry("$graphLookup.startWith", + Arrays.asList("$reportsTo", new Document("$literal", "$boss"))); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1551 + @Test // DATAMONGO-1551 public void shouldRejectUnknownTypeInMixedArrayOfStartsWithCorrectly() { - - GraphLookupOperation graphLookupOperation = GraphLookupOperation.builder() // + assertThatIllegalArgumentException().isThrownBy(() -> GraphLookupOperation.builder() // .from("employees") // .startWith("reportsTo", new Person()) // .connectFrom("reportsTo") // .connectTo("name") // - .as("reportingHierarchy"); + .as("reportingHierarchy")); } @Test // DATAMONGO-1551 @@ -131,7 +123,39 @@ public void 
shouldRenderStartWithAggregationExpressions() { Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(document, is(Document.parse("{ $graphLookup : { from: \"employees\", startWith: { $literal: \"hello\"}, " - + "connectFromField: \"reportsTo\", connectToField: \"name\", as: \"reportingHierarchy\" } }"))); + assertThat(document).containsEntry("$graphLookup.startWith", new Document("$literal", "hello")); + } + + @Test // DATAMONGO-2096 + public void connectFromShouldUseTargetFieldInsteadOfAlias() { + + AggregationOperation graphLookupOperation = Aggregation.graphLookup("user").startWith("contacts.userId") + .connectFrom("contacts.userId").connectTo("_id").depthField("numConnections").as("connections"); + + Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).containsEntry("$graphLookup.startWith", "$contacts.userId"); + } + + @Test // DATAMONGO-2096 + public void connectToShouldUseTargetFieldInsteadOfAlias() { + + AggregationOperation graphLookupOperation = Aggregation.graphLookup("user").startWith("contacts.userId") + .connectFrom("userId").connectTo("connectto.field").depthField("numConnections").as("connections"); + + Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).containsEntry("$graphLookup.connectToField", "connectto.field"); + } + + @Test // DATAMONGO-2096 + public void depthFieldShouldUseTargetFieldInsteadOfAlias() { + + AggregationOperation graphLookupOperation = Aggregation.graphLookup("user").startWith("contacts.userId") + .connectFrom("contacts.userId").connectTo("_id").depthField("foo.bar").as("connections"); + + Document document = graphLookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).containsEntry("$graphLookup.depthField", "foo.bar"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GroupOperationUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GroupOperationUnitTests.java index 459518f5af..dc6219c7f1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GroupOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/GroupOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,18 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.aggregation.AggregationFunctionExpressions.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.aggregation.Fields.*; import java.util.Arrays; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Percentile; +import org.springframework.data.mongodb.core.aggregation.SelectionOperators.Bottom; import org.springframework.data.mongodb.core.query.Criteria; /** @@ -33,97 +35,97 @@ * @author Oliver Gierke * @author Thomas Darimont * @author Gustavo de Geus + * @author Julia Lee */ -public class GroupOperationUnitTests { 
+class GroupOperationUnitTests { - @Test(expected = IllegalArgumentException.class) - public void rejectsNullFields() { - new GroupOperation((Fields) null); + @Test + void rejectsNullFields() { + assertThatIllegalArgumentException().isThrownBy(() -> new GroupOperation((Fields) null)); } @Test // DATAMONGO-759 - public void groupOperationWithNoGroupIdFieldsShouldGenerateNullAsGroupId() { + void groupOperationWithNoGroupIdFieldsShouldGenerateNullAsGroupId() { GroupOperation operation = new GroupOperation(Fields.from()); ExposedFields fields = operation.getFields(); Document groupClause = extractDocumentFromGroupOperation(operation); - assertThat(fields.exposesSingleFieldOnly(), is(true)); - assertThat(fields.exposesNoFields(), is(false)); - assertThat(groupClause.get(UNDERSCORE_ID), is(nullValue())); + assertThat(fields.exposesSingleFieldOnly()).isTrue(); + assertThat(fields.exposesNoFields()).isFalse(); + assertThat(groupClause.get(UNDERSCORE_ID)).isNull(); } @Test // DATAMONGO-759 - public void groupOperationWithNoGroupIdFieldsButAdditionalFieldsShouldGenerateNullAsGroupId() { + void groupOperationWithNoGroupIdFieldsButAdditionalFieldsShouldGenerateNullAsGroupId() { GroupOperation operation = new GroupOperation(Fields.from()).count().as("cnt").last("foo").as("foo"); ExposedFields fields = operation.getFields(); Document groupClause = extractDocumentFromGroupOperation(operation); - assertThat(fields.exposesSingleFieldOnly(), is(false)); - assertThat(fields.exposesNoFields(), is(false)); - assertThat(groupClause.get(UNDERSCORE_ID), is(nullValue())); - assertThat((Document) groupClause.get("cnt"), is(new Document("$sum", 1))); - assertThat((Document) groupClause.get("foo"), is(new Document("$last", "$foo"))); + assertThat(fields.exposesSingleFieldOnly()).isFalse(); + assertThat(fields.exposesNoFields()).isFalse(); + assertThat(groupClause.get(UNDERSCORE_ID)).isNull(); + assertThat((Document) groupClause.get("cnt")).isEqualTo(new Document("$sum", 1)); + 
assertThat((Document) groupClause.get("foo")).isEqualTo(new Document("$last", "$foo")); } @Test - public void createsGroupOperationWithSingleField() { + void createsGroupOperationWithSingleField() { GroupOperation operation = new GroupOperation(fields("a")); Document groupClause = extractDocumentFromGroupOperation(operation); - assertThat(groupClause.get(UNDERSCORE_ID), is((Object) "$a")); + assertThat(groupClause).containsEntry(UNDERSCORE_ID, "$a"); } @Test - public void createsGroupOperationWithMultipleFields() { + void createsGroupOperationWithMultipleFields() { GroupOperation operation = new GroupOperation(fields("a").and("b", "c")); Document groupClause = extractDocumentFromGroupOperation(operation); Document idClause = DocumentTestUtils.getAsDocument(groupClause, UNDERSCORE_ID); - assertThat(idClause.get("a"), is((Object) "$a")); - assertThat(idClause.get("b"), is((Object) "$c")); + assertThat(idClause).containsEntry("a", "$a").containsEntry("b", "$c"); } @Test - public void groupFactoryMethodWithMultipleFieldsAndSumOperation() { + void groupFactoryMethodWithMultipleFieldsAndSumOperation() { GroupOperation groupOperation = Aggregation.group(fields("a", "b").and("c")) // .sum("e").as("e"); Document groupClause = extractDocumentFromGroupOperation(groupOperation); Document eOp = DocumentTestUtils.getAsDocument(groupClause, "e"); - assertThat(eOp, is((Document) new Document("$sum", "$e"))); + assertThat(eOp).isEqualTo(new Document("$sum", "$e")); } @Test - public void groupFactoryMethodWithMultipleFieldsAndSumOperationWithAlias() { + void groupFactoryMethodWithMultipleFieldsAndSumOperationWithAlias() { GroupOperation groupOperation = Aggregation.group(fields("a", "b").and("c")) // .sum("e").as("ee"); Document groupClause = extractDocumentFromGroupOperation(groupOperation); Document eOp = DocumentTestUtils.getAsDocument(groupClause, "ee"); - assertThat(eOp, is((Document) new Document("$sum", "$e"))); + assertThat(eOp).isEqualTo(new Document("$sum", "$e")); } @Test 
- public void groupFactoryMethodWithMultipleFieldsAndCountOperationWithout() { + void groupFactoryMethodWithMultipleFieldsAndCountOperationWithout() { GroupOperation groupOperation = Aggregation.group(fields("a", "b").and("c")) // .count().as("count"); Document groupClause = extractDocumentFromGroupOperation(groupOperation); Document eOp = DocumentTestUtils.getAsDocument(groupClause, "count"); - assertThat(eOp, is((Document) new Document("$sum", 1))); + assertThat(eOp).isEqualTo(new Document("$sum", 1)); } @Test - public void groupFactoryMethodWithMultipleFieldsAndMultipleAggregateOperationsWithAlias() { + void groupFactoryMethodWithMultipleFieldsAndMultipleAggregateOperationsWithAlias() { GroupOperation groupOperation = Aggregation.group(fields("a", "b").and("c")) // .sum("e").as("sum") // @@ -131,94 +133,94 @@ public void groupFactoryMethodWithMultipleFieldsAndMultipleAggregateOperationsWi Document groupClause = extractDocumentFromGroupOperation(groupOperation); Document sum = DocumentTestUtils.getAsDocument(groupClause, "sum"); - assertThat(sum, is((Document) new Document("$sum", "$e"))); + assertThat(sum).isEqualTo(new Document("$sum", "$e")); Document min = DocumentTestUtils.getAsDocument(groupClause, "min"); - assertThat(min, is((Document) new Document("$min", "$e"))); + assertThat(min).isEqualTo(new Document("$min", "$e")); } @Test - public void groupOperationPushWithValue() { + void groupOperationPushWithValue() { GroupOperation groupOperation = Aggregation.group("a", "b").push(1).as("x"); Document groupClause = extractDocumentFromGroupOperation(groupOperation); Document push = DocumentTestUtils.getAsDocument(groupClause, "x"); - assertThat(push, is((Document) new Document("$push", 1))); + assertThat(push).isEqualTo(new Document("$push", 1)); } @Test - public void groupOperationPushWithReference() { + void groupOperationPushWithReference() { GroupOperation groupOperation = Aggregation.group("a", "b").push("ref").as("x"); Document groupClause = 
extractDocumentFromGroupOperation(groupOperation); Document push = DocumentTestUtils.getAsDocument(groupClause, "x"); - assertThat(push, is((Document) new Document("$push", "$ref"))); + assertThat(push).isEqualTo(new Document("$push", "$ref")); } @Test - public void groupOperationAddToSetWithReference() { + void groupOperationAddToSetWithReference() { GroupOperation groupOperation = Aggregation.group("a", "b").addToSet("ref").as("x"); Document groupClause = extractDocumentFromGroupOperation(groupOperation); Document push = DocumentTestUtils.getAsDocument(groupClause, "x"); - assertThat(push, is((Document) new Document("$addToSet", "$ref"))); + assertThat(push).isEqualTo(new Document("$addToSet", "$ref")); } @Test - public void groupOperationAddToSetWithValue() { + void groupOperationAddToSetWithValue() { GroupOperation groupOperation = Aggregation.group("a", "b").addToSet(42).as("x"); Document groupClause = extractDocumentFromGroupOperation(groupOperation); Document push = DocumentTestUtils.getAsDocument(groupClause, "x"); - assertThat(push, is((Document) new Document("$addToSet", 42))); + assertThat(push).isEqualTo(new Document("$addToSet", 42)); } @Test // DATAMONGO-979 - public void shouldRenderSizeExpressionInGroup() { + void shouldRenderSizeExpressionInGroup() { GroupOperation groupOperation = Aggregation // .group("username") // - .first(SIZE.of(field("tags"))) // + .first(ArrayOperators.arrayOf("tags").length()) // .as("tags_count"); Document groupClause = extractDocumentFromGroupOperation(groupOperation); Document tagsCount = DocumentTestUtils.getAsDocument(groupClause, "tags_count"); - assertThat(tagsCount.get("$first"), is((Object) new Document("$size", Arrays.asList("$tags")))); + assertThat(tagsCount).containsEntry("$first", new Document("$size", "$tags")); } @Test // DATAMONGO-1327 - public void groupOperationStdDevSampWithValue() { + void groupOperationStdDevSampWithValue() { GroupOperation groupOperation = Aggregation.group("a", 
"b").stdDevSamp("field").as("fieldStdDevSamp"); Document groupClause = extractDocumentFromGroupOperation(groupOperation); Document push = DocumentTestUtils.getAsDocument(groupClause, "fieldStdDevSamp"); - assertThat(push, is(new Document("$stdDevSamp", "$field"))); + assertThat(push).isEqualTo(new Document("$stdDevSamp", "$field")); } @Test // DATAMONGO-1327 - public void groupOperationStdDevPopWithValue() { + void groupOperationStdDevPopWithValue() { GroupOperation groupOperation = Aggregation.group("a", "b").stdDevPop("field").as("fieldStdDevPop"); Document groupClause = extractDocumentFromGroupOperation(groupOperation); Document push = DocumentTestUtils.getAsDocument(groupClause, "fieldStdDevPop"); - assertThat(push, is(new Document("$stdDevPop", "$field"))); + assertThat(push).isEqualTo(new Document("$stdDevPop", "$field")); } @Test // DATAMONGO-1784 - public void shouldRenderSumWithExpressionInGroup() { + void shouldRenderSumWithExpressionInGroup() { GroupOperation groupOperation = Aggregation // .group("username") // @@ -231,13 +233,51 @@ public void shouldRenderSumWithExpressionInGroup() { Document groupClause = extractDocumentFromGroupOperation(groupOperation); Document foobar = DocumentTestUtils.getAsDocument(groupClause, "foobar"); - assertThat(foobar.get("$sum"), is(new Document("$cond", - new Document("if", new Document("$eq", Arrays.asList("$foo", "bar"))).append("then", 1).append("else", -1)))); + assertThat(foobar).containsEntry("$sum", new Document("$cond", + new Document("if", new Document("$eq", Arrays.asList("$foo", "bar"))).append("then", 1).append("else", -1))); + } + + @Test // DATAMONGO-1784 + void sumWithNullExpressionShouldThrowException() { + assertThatIllegalArgumentException() + .isThrownBy(() -> Aggregation.group("username").sum((AggregationExpression) null)); + } + + @Test // DATAMONGO-2651 + void accumulatorShouldBeAllowedOnGroupOperation() { + + GroupOperation groupOperation = Aggregation.group("id") + .accumulate( + 
ScriptOperators.accumulatorBuilder().init("inti").accumulate("acc").merge("merge").finalize("finalize")) + .as("accumulated-value"); + + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + Document accumulatedValue = DocumentTestUtils.getAsDocument(groupClause, "accumulated-value"); + + assertThat(accumulatedValue).containsKey("$accumulator"); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1784 - public void sumWithNullExpressionShouldThrowException() { - Aggregation.group("username").sum((AggregationExpression) null); + @Test // GH-4139 + void groupOperationAllowsToAddFieldsComputedViaExpression() { + + GroupOperation groupOperation = Aggregation.group("id").and("playerId", + Bottom.bottom().output("playerId", "score").sortBy(Sort.by(Direction.DESC, "score"))); + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + + assertThat(groupClause).containsEntry("playerId", + Document.parse("{ $bottom : { output: [ \"$playerId\", \"$score\" ], sortBy: { \"score\": -1 }}}")); + } + + @Test // GH-4473 + void groupOperationAllowsAddingFieldWithPercentileAggregationExpression() { + + GroupOperation groupOperation = Aggregation.group("id").and("scorePercentile", + Percentile.percentileOf("score").percentages(0.2)); + + Document groupClause = extractDocumentFromGroupOperation(groupOperation); + + assertThat(groupClause).containsEntry("scorePercentile", + Document.parse("{ $percentile : { input: \"$score\", method: \"approximate\", p: [0.2]}}")); } private Document extractDocumentFromGroupOperation(GroupOperation groupOperation) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Invoice.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Invoice.java index a23b70c154..fb70bab918 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Invoice.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Invoice.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LikeStats.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LikeStats.java index 024722e746..e668dc3ed5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LikeStats.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LikeStats.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LineItem.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LineItem.java index 1da640ddb7..32ac758a50 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LineItem.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LineItem.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,11 +20,11 @@ */ public class LineItem { - final String id; + String id; - final String caption; + String caption; - final double price; + double price; int quantity = 1; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LookupOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LookupOperationUnitTests.java index b1735218cc..58bae3f43d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LookupOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/LookupOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,17 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; + +import java.util.List; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.query.Criteria; /** * Unit tests for {@link LookupOperation}. 
@@ -32,24 +36,28 @@ */ public class LookupOperationUnitTests { - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1326 + @Test // DATAMONGO-1326 public void rejectsNullForFrom() { - new LookupOperation(null, Fields.field("localField"), Fields.field("foreignField"), Fields.field("as")); + assertThatIllegalArgumentException().isThrownBy( + () -> new LookupOperation(null, Fields.field("localField"), Fields.field("foreignField"), Fields.field("as"))); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1326 + @Test // DATAMONGO-1326 public void rejectsNullLocalFieldField() { - new LookupOperation(Fields.field("from"), null, Fields.field("foreignField"), Fields.field("as")); + assertThatIllegalArgumentException().isThrownBy( + () -> new LookupOperation(Fields.field("from"), null, Fields.field("foreignField"), Fields.field("as"))); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1326 + @Test // DATAMONGO-1326 public void rejectsNullForeignField() { - new LookupOperation(Fields.field("from"), Fields.field("localField"), null, Fields.field("as")); + assertThatIllegalArgumentException().isThrownBy( + () -> new LookupOperation(Fields.field("from"), Fields.field("localField"), null, Fields.field("as"))); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1326 + @Test // DATAMONGO-1326 public void rejectsNullForAs() { - new LookupOperation(Fields.field("from"), Fields.field("localField"), Fields.field("foreignField"), null); + assertThatIllegalArgumentException().isThrownBy(() -> new LookupOperation(Fields.field("from"), + Fields.field("localField"), Fields.field("foreignField"), null)); } @Test // DATAMONGO-1326 @@ -59,11 +67,10 @@ public void lookupOperationWithValues() { Document lookupClause = extractDocumentFromLookupOperation(lookupOperation); - assertThat(lookupClause, - isBsonObject().containing("from", "a") // - .containing("localField", "b") // - .containing("foreignField", "c") // - .containing("as", "d")); 
+ org.assertj.core.api.Assertions.assertThat(lookupClause).containsEntry("from", "a") // + .containsEntry("localField", "b") // + .containsEntry("foreignField", "c") // + .containsEntry("as", "d"); } @Test // DATAMONGO-1326 @@ -71,9 +78,9 @@ public void lookupOperationExposesAsField() { LookupOperation lookupOperation = Aggregation.lookup("a", "b", "c", "d"); - assertThat(lookupOperation.getFields().exposesNoFields(), is(false)); - assertThat(lookupOperation.getFields().exposesSingleFieldOnly(), is(true)); - assertThat(lookupOperation.getFields().getField("d"), notNullValue()); + assertThat(lookupOperation.getFields().exposesNoFields()).isFalse(); + assertThat(lookupOperation.getFields().exposesSingleFieldOnly()).isTrue(); + assertThat(lookupOperation.getFields().getField("d")).isNotNull(); } private Document extractDocumentFromLookupOperation(LookupOperation lookupOperation) { @@ -83,24 +90,26 @@ private Document extractDocumentFromLookupOperation(LookupOperation lookupOperat return lookupClause; } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1326 + @Test // DATAMONGO-1326 public void builderRejectsNullFromField() { - LookupOperation.newLookup().from(null); + assertThatIllegalArgumentException().isThrownBy(() -> LookupOperation.newLookup().from(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1326 + @Test // DATAMONGO-1326 public void builderRejectsNullLocalField() { - LookupOperation.newLookup().from("a").localField(null); + assertThatIllegalArgumentException().isThrownBy(() -> LookupOperation.newLookup().from("a").localField(null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1326 + @Test // DATAMONGO-1326 public void builderRejectsNullForeignField() { - LookupOperation.newLookup().from("a").localField("b").foreignField(null); + assertThatIllegalArgumentException() + .isThrownBy(() -> LookupOperation.newLookup().from("a").localField("b").foreignField(null)); } - @Test(expected = 
IllegalArgumentException.class) // DATAMONGO-1326 + @Test // DATAMONGO-1326 public void builderRejectsNullAsField() { - LookupOperation.newLookup().from("a").localField("b").foreignField("c").as(null); + assertThatIllegalArgumentException() + .isThrownBy(() -> LookupOperation.newLookup().from("a").localField("b").foreignField("c").as(null)); } @Test // DATAMONGO-1326 @@ -110,11 +119,10 @@ public void lookupBuilderBuildsCorrectClause() { Document lookupClause = extractDocumentFromLookupOperation(lookupOperation); - assertThat(lookupClause, - isBsonObject().containing("from", "a") // - .containing("localField", "b") // - .containing("foreignField", "c") // - .containing("as", "d")); + org.assertj.core.api.Assertions.assertThat(lookupClause).containsEntry("from", "a") // + .containsEntry("localField", "b") // + .containsEntry("foreignField", "c") // + .containsEntry("as", "d"); } @Test // DATAMONGO-1326 @@ -122,8 +130,90 @@ public void lookupBuilderExposesFields() { LookupOperation lookupOperation = LookupOperation.newLookup().from("a").localField("b").foreignField("c").as("d"); - assertThat(lookupOperation.getFields().exposesNoFields(), is(false)); - assertThat(lookupOperation.getFields().exposesSingleFieldOnly(), is(true)); - assertThat(lookupOperation.getFields().getField("d"), notNullValue()); + assertThat(lookupOperation.getFields().exposesNoFields()).isFalse(); + assertThat(lookupOperation.getFields().exposesSingleFieldOnly()).isTrue(); + assertThat(lookupOperation.getFields().getField("d")).isNotNull(); + } + + @Test // GH-3322 + void buildsLookupWithLetAndPipeline() { + + LookupOperation lookupOperation = LookupOperation.newLookup().from("warehouses") + .let(newVariable("order_item").forField("item"), newVariable("order_qty").forField("ordered")) + .pipeline(match(ctx -> new Document("$expr", + new Document("$and", List.of(Document.parse("{ $eq: [ \"$stock_item\", \"$$order_item\" ] }"), + Document.parse("{ $gte: [ \"$instock\", \"$$order_qty\" ] }")))))) + 
.as("stockdata"); + + assertThat(lookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(""" + { $lookup: { + from: "warehouses", + let: { order_item: "$item", order_qty: "$ordered" }, + pipeline: [ + { $match: + { $expr: + { $and: + [ + { $eq: [ "$stock_item", "$$order_item" ] }, + { $gte: [ "$instock", "$$order_qty" ] } + ] + } + } + } + ], + as: "stockdata" + }} + """); + } + + @Test // GH-3322 + void buildsLookupWithJustPipeline() { + + LookupOperation lookupOperation = LookupOperation.newLookup().from("holidays") // + .pipeline( // + match(Criteria.where("year").is(2018)), // + project().andExclude("_id").and(ctx -> new Document("name", "$name").append("date", "$date")).as("date"), // + Aggregation.replaceRoot("date") // + ).as("holidays"); + + assertThat(lookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(""" + { $lookup: + { + from: "holidays", + pipeline: [ + { $match: { year: 2018 } }, + { $project: { _id: 0, date: { name: "$name", date: "$date" } } }, + { $replaceRoot: { newRoot: "$date" } } + ], + as: "holidays" + } + }} + """); + } + + @Test // GH-3322 + void buildsLookupWithLocalAndForeignFieldAsWellAsLetAndPipeline() { + + LookupOperation lookupOperation = Aggregation.lookup().from("restaurants") // + .localField("restaurant_name") + .foreignField("name") + .let(newVariable("orders_drink").forField("drink")) // + .pipeline(match(ctx -> new Document("$expr", new Document("$in", List.of("$$orders_drink", "$beverages"))))) + .as("matches"); + + assertThat(lookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(""" + { $lookup: { + from: "restaurants", + localField: "restaurant_name", + foreignField: "name", + let: { orders_drink: "$drink" }, + pipeline: [{ + $match: { + $expr: { $in: [ "$$orders_drink", "$beverages" ] } + } + }], + as: "matches" + }} + """); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java new file mode 100644 index 0000000000..ec3decb7a8 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java @@ -0,0 +1,23 @@ +package org.springframework.data.mongodb.core.aggregation; + + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link MatchOperation}. + * + * @author Divya Srivastava + */ +class MatchOperationUnitTests { + + @Test // GH-3790 + void matchShouldRenderCorrectly() { + + MatchOperation operation = Aggregation.match(ArithmeticOperators.valueOf("quiz").stdDevPop()); + assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)). + isEqualTo("{ $match: { \"$stdDevPop\" : \"$quiz\" } } "); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MergeOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MergeOperationUnitTests.java new file mode 100644 index 0000000000..311496ba8d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MergeOperationUnitTests.java @@ -0,0 +1,128 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.core.aggregation.MergeOperation.*; +import static org.springframework.data.mongodb.core.aggregation.MergeOperation.WhenDocumentsMatch.*; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Sum; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link MergeOperation}. 
+ * + * @author Christoph Strobl + */ +class MergeOperationUnitTests { + + private static final String OUT_COLLECTION = "target-collection"; + private static final String OUT_DB = "target-db"; + + private static final Document OUT = new Document("db", OUT_DB).append("coll", OUT_COLLECTION); + + @Test // DATAMONGO-2363 + void justCollection() { + + assertThat(mergeInto(OUT_COLLECTION).toDocument(DEFAULT_CONTEXT)).isEqualTo(new Document("$merge", OUT_COLLECTION)); + } + + @Test // DATAMONGO-2363 + void collectionInDatabase() { + + assertThat(merge().intoCollection(OUT_COLLECTION).inDatabase("target-db").build().toDocument(DEFAULT_CONTEXT)) + .isEqualTo(new Document("$merge", new Document("into", OUT))); + } + + @Test // DATAMONGO-2363 + void singleOn() { + + assertThat(merge().intoCollection(OUT_COLLECTION).on("id-field").build().toDocument(DEFAULT_CONTEXT)) + .isEqualTo(new Document("$merge", new Document("into", OUT_COLLECTION).append("on", "id-field"))); + } + + @Test // DATAMONGO-2363 + void multipleOn() { + + assertThat(merge().intoCollection(OUT_COLLECTION).on("field-1", "field-2").build().toDocument(DEFAULT_CONTEXT)) + .isEqualTo(new Document("$merge", + new Document("into", OUT_COLLECTION).append("on", Arrays.asList("field-1", "field-2")))); + } + + @Test // DATAMONGO-2363 + void collectionAndSimpleArgs() { + + assertThat(merge().intoCollection(OUT_COLLECTION).on("_id").whenMatched(replaceDocument()) + .whenNotMatched(WhenDocumentsDontMatch.insertNewDocument()).build().toDocument(DEFAULT_CONTEXT)) + .isEqualTo(new Document("$merge", new Document("into", OUT_COLLECTION).append("on", "_id") + .append("whenMatched", "replace").append("whenNotMatched", "insert"))); + } + + @Test // DATAMONGO-2363 + void whenMatchedWithAggregation() { + + String expected = "{ \"$merge\" : {\"into\": \"" + OUT_COLLECTION + "\", \"whenMatched\": [" + + "{ \"$addFields\" : {" // + + "\"thumbsup\": { \"$sum\":[ \"$thumbsup\", \"$$new.thumbsup\" ] }," + + "\"thumbsdown\": { \"$sum\": [ 
\"$thumbsdown\", \"$$new.thumbsdown\" ] } } } ]" // + + "} }"; + + Aggregation update = Aggregation + .newAggregation(AddFieldsOperation.addField("thumbsup").withValueOf(Sum.sumOf("thumbsup").and("$$new.thumbsup")) + .addField("thumbsdown").withValueOf(Sum.sumOf("thumbsdown").and("$$new.thumbsdown")).build()); + + assertThat( + merge().intoCollection(OUT_COLLECTION).whenDocumentsMatchApply(update).build().toDocument(DEFAULT_CONTEXT)) + .isEqualTo(Document.parse(expected)); + } + + @Test // DATAMONGO-2363 + void mapsFieldNames() { + + assertThat(merge().intoCollection("newrestaurants").on("date", "postCode").build() + .toDocument(contextFor(Restaurant.class))).isEqualTo( + Document.parse("{ \"$merge\": { \"into\": \"newrestaurants\", \"on\": [ \"date\", \"post_code\" ] } }")); + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)).continueOnMissingFieldReference(); + } + + static class Restaurant { + + @Field("post_code") String postCode; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MeterData.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MeterData.java index f699b352ab..52b51ad251 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MeterData.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MeterData.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ObjectOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ObjectOperatorsUnitTests.java new file mode 100644 index 0000000000..05d5a2d758 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ObjectOperatorsUnitTests.java @@ -0,0 +1,157 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.ObjectOperators.MergeObjects; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +/** + * Unit tests for {@link ObjectOperators}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @currentRead Royal Assassin - Robin Hobb + */ +public class ObjectOperatorsUnitTests { + + static final String EXPRESSION_STRING = "{ \"$king-in-waiting\" : \"verity\" }"; + static final Document EXPRESSION_DOC = Document.parse(EXPRESSION_STRING); + static final AggregationExpression EXPRESSION = context -> EXPRESSION_DOC; + + @Test // DATAMONGO-2053 + public void mergeSingleFieldReference() { + + assertThat(ObjectOperators.valueOf("kettricken").merge().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $mergeObjects: \"$kettricken\" } ")); + } + + @Test // DATAMONGO-2053 + public void mergeSingleExpression() { + + assertThat(ObjectOperators.valueOf(EXPRESSION).merge().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $mergeObjects: " + EXPRESSION_STRING + " } ")); + } + + @Test // DATAMONGO-2053 + public void mergeEmpty() { + + assertThat(MergeObjects.merge().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $mergeObjects: [] } ")); + } + + @Test // DATAMONGO-2053 + public void mergeMuliFieldReference() { + + assertThat( + ObjectOperators.valueOf("kettricken").mergeWithValuesOf("verity").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $mergeObjects: [ \"$kettricken\", \"$verity\" ] } ")); + } + + @Test // 
DATAMONGO-2053 + public void mergeMixed() { + + assertThat(ObjectOperators.valueOf("kettricken").mergeWithValuesOf(EXPRESSION).mergeWithValuesOf("verity") + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + Document.parse("{ $mergeObjects: [ \"$kettricken\", " + EXPRESSION_STRING + ", \"$verity\" ] } ")); + } + + @Test // DATAMONGO-2053 + public void mergeWithSystemVariable() { + + assertThat( + ObjectOperators.valueOf(EXPRESSION).mergeWith(SystemVariable.ROOT).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $mergeObjects: [ " + EXPRESSION_STRING + ", \"$$ROOT\" ] } ")); + } + + @Test // DATAMONGO-2053 + public void mergeMany() { + + assertThat(ObjectOperators.valueOf("kettricken").mergeWithValuesOf(EXPRESSION) + .mergeWith(new Document("fitz", "chivalry")).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse( + "{ $mergeObjects: [ \"$kettricken\", " + EXPRESSION_STRING + ", { \"fitz\" : \"chivalry\" } ] } ")); + } + + @Test // DATAMONGO-2052 + public void toArrayWithFieldReference() { + + assertThat(ObjectOperators.valueOf("verity").toArray().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $objectToArray : \"$verity\" }")); + } + + @Test // DATAMONGO-2052 + public void toArrayWithExpression() { + + assertThat(ObjectOperators.valueOf(EXPRESSION).toArray().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $objectToArray : " + EXPRESSION_STRING + " }")); + } + + @Test // GH-4139 + public void getField() { + + assertThat(ObjectOperators.valueOf("batman").getField("robin").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $getField : { field : \"robin\", input : \"$batman\" }}")); + } + + @Test // GH-4464 + public void getFieldOfCurrent() { + + assertThat(ObjectOperators.valueOf(Aggregation.CURRENT).getField("robin").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $getField : { field : \"robin\", input : \"$$CURRENT\" }}")); + } + + 
@Test // GH-4464 + public void getFieldOfMappedKey() { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, new MongoMappingContext()); + converter.afterPropertiesSet(); + + assertThat(ObjectOperators.getValueOf("population").toDocument(new RelaxedTypeBasedAggregationOperationContext(ZipInfo.class, converter.getMappingContext(), new QueryMapper(converter)))) + .isEqualTo(Document.parse("{ $getField : { field : \"pop\", input : \"$$CURRENT\" } }")); + } + + @Test // GH-4139 + public void setField() { + + assertThat(ObjectOperators.valueOf("batman").setField("friend").toValue("robin").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $setField : { field : \"friend\", value : \"robin\", input : \"$batman\" }}")); + } + + @Test // GH-4464 + public void setFieldOfMappedKey() { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, new MongoMappingContext()); + converter.afterPropertiesSet(); + + assertThat(ObjectOperators.setValueTo("population", "robin").toDocument(new RelaxedTypeBasedAggregationOperationContext(ZipInfo.class, converter.getMappingContext(), new QueryMapper(converter)))) + .isEqualTo(Document.parse("{ $setField : { field : \"pop\", value : \"robin\", input : \"$$CURRENT\" }}")); + } + + @Test // GH-4139 + public void removeField() { + + assertThat(ObjectOperators.valueOf("batman").removeField("joker").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $setField : { field : \"joker\", value : \"$$REMOVE\", input : \"$batman\" }}")); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Order.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Order.java index 14421e7cd6..1174507e1c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Order.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Order.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/OutOperationUnitTest.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/OutOperationUnitTest.java index 632009c003..f8812448b3 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/OutOperationUnitTest.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/OutOperationUnitTest.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,80 @@ */ package org.springframework.data.mongodb.core.aggregation; -import org.junit.Test; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link OutOperation}. * * @author Nikolay Bogdanov + * @author Christoph Strobl + * @author Mark Paluch */ public class OutOperationUnitTest { - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1418 + @Test // DATAMONGO-1418 public void shouldCheckNPEInCreation() { - new OutOperation(null); + assertThatIllegalArgumentException().isThrownBy(() -> new OutOperation(null)); + } + + @Test // DATAMONGO-2259 + public void shouldUsePreMongoDB42FormatWhenOnlyCollectionIsPresent() { + assertThat(out("out-col").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$out", "out-col")); + } + + @Test // DATAMONGO-2259 + public void shouldUseMongoDB42ExtendedFormatWhenAdditionalParametersPresent() { + + assertThat(out("out-col").insertDocuments().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$out", new Document("to", "out-col").append("mode", "insertDocuments"))); + } + + @Test // DATAMONGO-2259 + public void shouldRenderExtendedFormatWithJsonStringKey() { + + assertThat(out("out-col").insertDocuments() // + .in("database-2") // + .uniqueKey("{ 'field-1' : 1, 'field-2' : 1}") // + .toDocument(Aggregation.DEFAULT_CONTEXT)) // + .containsEntry("$out.to", "out-col") // + .containsEntry("$out.mode", "insertDocuments") // + .containsEntry("$out.db", "database-2") // + .containsEntry("$out.uniqueKey", 
new Document("field-1", 1).append("field-2", 1)); } + + @Test // DATAMONGO-2259 + public void shouldRenderExtendedFormatWithSingleFieldKey() { + + assertThat(out("out-col").insertDocuments().in("database-2") // + .uniqueKey("field-1").toDocument(Aggregation.DEFAULT_CONTEXT)) // + .containsEntry("$out.to", "out-col") // + .containsEntry("$out.mode", "insertDocuments") // + .containsEntry("$out.db", "database-2") // + .containsEntry("$out.uniqueKey", new Document("field-1", 1)); + } + + @Test // DATAMONGO-2259 + public void shouldRenderExtendedFormatWithMultiFieldKey() { + + assertThat(out("out-col").insertDocuments().in("database-2") // + .uniqueKeyOf(Arrays.asList("field-1", "field-2")) // + .toDocument(Aggregation.DEFAULT_CONTEXT)).containsEntry("$out.to", "out-col") // + .containsEntry("$out.mode", "insertDocuments") // + .containsEntry("$out.db", "database-2") // + .containsEntry("$out.uniqueKey", new Document("field-1", 1).append("field-2", 1)); + } + + @Test // DATAMONGO-2259 + public void shouldErrorOnExtendedFormatWithoutMode() { + + assertThatThrownBy(() -> out("out-col").in("database-2").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isInstanceOf(IllegalStateException.class); + } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Product.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Product.java index 4d641c7316..59d374bd73 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Product.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/Product.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java index 3baa774951..b904807d65 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,27 +15,36 @@ */ package org.springframework.data.mongodb.core.aggregation; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; -import static org.springframework.data.mongodb.core.aggregation.AggregationFunctionExpressions.*; import static org.springframework.data.mongodb.core.aggregation.Fields.*; import static org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable.*; -import static org.springframework.data.mongodb.test.util.Assertions.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; import java.util.Arrays; import java.util.List; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators.Subtract; import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce; import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce.PropertyExpression; import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce.Variable; import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Slice; import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Switch.CaseOperator; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Timezone; import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder; import 
org.springframework.data.mongodb.core.aggregation.StringOperators.Concat; import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; /** * Unit tests for {@link ProjectionOperation}. @@ -43,58 +52,59 @@ * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Divya Srivastava * @author Mark Paluch */ public class ProjectionOperationUnitTests { - static final String MOD = "$mod"; - static final String ADD = "$add"; - static final String SUBTRACT = "$subtract"; - static final String MULTIPLY = "$multiply"; - static final String DIVIDE = "$divide"; - static final String PROJECT = "$project"; + private static final String MOD = "$mod"; + private static final String ADD = "$add"; + private static final String SUBTRACT = "$subtract"; + private static final String MULTIPLY = "$multiply"; + private static final String DIVIDE = "$divide"; + private static final String PROJECT = "$project"; - @Test(expected = IllegalArgumentException.class) // DATAMONGO-586 - public void rejectsNullFields() { - new ProjectionOperation(null); + @Test // DATAMONGO-586 + void rejectsNullFields() { + assertThatIllegalArgumentException().isThrownBy(() -> new ProjectionOperation((Fields) null)); } @Test // DATAMONGO-586 - public void declaresBackReferenceCorrectly() { + void declaresBackReferenceCorrectly() { ProjectionOperation operation = new ProjectionOperation(); operation = operation.and("prop").previousOperation(); Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); Document 
projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - assertThat(projectClause.get("prop")).isEqualTo((Object) Fields.UNDERSCORE_ID_REF); + assertThat(projectClause.get("prop")).isEqualTo(Fields.UNDERSCORE_ID_REF); } @Test // DATAMONGO-586 - public void alwaysUsesExplicitReference() { + void alwaysUsesExplicitReference() { ProjectionOperation operation = new ProjectionOperation(Fields.fields("foo").and("bar", "foobar")); Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - assertThat(projectClause.get("foo")).isEqualTo((Object) 1); - assertThat(projectClause.get("bar")).isEqualTo((Object) "$foobar"); + assertThat(projectClause.get("foo")).isEqualTo(1); + assertThat(projectClause.get("bar")).isEqualTo("$foobar"); } @Test // DATAMONGO-586 - public void aliasesSimpleFieldProjection() { + void aliasesSimpleFieldProjection() { ProjectionOperation operation = new ProjectionOperation(); Document document = operation.and("foo").as("bar").toDocument(Aggregation.DEFAULT_CONTEXT); Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - assertThat(projectClause.get("bar")).isEqualTo((Object) "$foo"); + assertThat(projectClause.get("bar")).isEqualTo("$foo"); } @Test // DATAMONGO-586 - public void aliasesArithmeticProjection() { + void aliasesArithmeticProjection() { ProjectionOperation operation = new ProjectionOperation(); @@ -104,25 +114,25 @@ public void aliasesArithmeticProjection() { List addClause = (List) barClause.get("$add"); assertThat(addClause).hasSize(2); - assertThat(addClause.get(0)).isEqualTo((Object) "$foo"); - assertThat(addClause.get(1)).isEqualTo((Object) 41); + assertThat(addClause.get(0)).isEqualTo("$foo"); + assertThat(addClause.get(1)).isEqualTo(41); } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationWithoutAlias() { + void arithmeticProjectionOperationWithoutAlias() { String fieldName = "a"; 
ProjectionOperationBuilder operation = new ProjectionOperation().and(fieldName).plus(1); Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - Document oper = exctractOperation(fieldName, projectClause); + Document oper = extractOperation(fieldName, projectClause); assertThat(oper.containsKey(ADD)).isTrue(); - assertThat(oper.get(ADD)).isEqualTo((Object) Arrays. asList("$a", 1)); + assertThat(oper.get(ADD)).isEqualTo(Arrays. asList("$a", 1)); } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationPlus() { + void arithmeticProjectionOperationPlus() { String fieldName = "a"; String fieldAlias = "b"; @@ -130,81 +140,93 @@ public void arithmeticProjectionOperationPlus() { Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - Document oper = exctractOperation(fieldAlias, projectClause); + Document oper = extractOperation(fieldAlias, projectClause); assertThat(oper.containsKey(ADD)).isTrue(); - assertThat(oper.get(ADD)).isEqualTo((Object) Arrays. asList("$a", 1)); + assertThat(oper.get(ADD)).isEqualTo(Arrays. asList("$a", 1)); } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationMinus() { + void arithmeticProjectionOperationMinus() { String fieldName = "a"; String fieldAlias = "b"; ProjectionOperation operation = new ProjectionOperation().and(fieldName).minus(1).as(fieldAlias); Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - Document oper = exctractOperation(fieldAlias, projectClause); + Document oper = extractOperation(fieldAlias, projectClause); assertThat(oper.containsKey(SUBTRACT)).isTrue(); - assertThat(oper.get(SUBTRACT)).isEqualTo((Object) Arrays. asList("$a", 1)); + assertThat(oper.get(SUBTRACT)).isEqualTo(Arrays. 
asList("$a", 1)); } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationMultiply() { + void arithmeticProjectionOperationMultiply() { String fieldName = "a"; String fieldAlias = "b"; ProjectionOperation operation = new ProjectionOperation().and(fieldName).multiply(1).as(fieldAlias); Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - Document oper = exctractOperation(fieldAlias, projectClause); + Document oper = extractOperation(fieldAlias, projectClause); assertThat(oper.containsKey(MULTIPLY)).isTrue(); - assertThat(oper.get(MULTIPLY)).isEqualTo((Object) Arrays. asList("$a", 1)); + assertThat(oper.get(MULTIPLY)).isEqualTo(Arrays. asList("$a", 1)); } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationDivide() { + void arithmeticProjectionOperationDivide() { String fieldName = "a"; String fieldAlias = "b"; ProjectionOperation operation = new ProjectionOperation().and(fieldName).divide(1).as(fieldAlias); Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - Document oper = exctractOperation(fieldAlias, projectClause); + Document oper = extractOperation(fieldAlias, projectClause); assertThat(oper.containsKey(DIVIDE)).isTrue(); - assertThat(oper.get(DIVIDE)).isEqualTo((Object) Arrays. asList("$a", 1)); + assertThat(oper.get(DIVIDE)).isEqualTo(Arrays. 
asList("$a", 1)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-586 - public void arithmeticProjectionOperationDivideByZeroException() { - - new ProjectionOperation().and("a").divide(0); + @Test // DATAMONGO-586 + void arithmeticProjectionOperationDivideByZeroException() { + assertThatIllegalArgumentException().isThrownBy(() -> new ProjectionOperation().and("a").divide(0)); } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationMod() { + void arithmeticProjectionOperationMod() { String fieldName = "a"; String fieldAlias = "b"; ProjectionOperation operation = new ProjectionOperation().and(fieldName).mod(3).as(fieldAlias); Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - Document oper = exctractOperation(fieldAlias, projectClause); + Document oper = extractOperation(fieldAlias, projectClause); assertThat(oper.containsKey(MOD)).isTrue(); - assertThat(oper.get(MOD)).isEqualTo((Object) Arrays. asList("$a", 3)); + assertThat(oper.get(MOD)).isEqualTo(Arrays. 
asList("$a", 3)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-758 - public void excludeShouldThrowExceptionForFieldsOtherThanUnderscoreId() { + @Test // DATAMONGO-758, DATAMONGO-1893 + void excludeShouldAllowExclusionOfFieldsOtherThanUnderscoreId/* since MongoDB 3.4 */() { - new ProjectionOperation().andExclude("foo"); + ProjectionOperation projectionOp = new ProjectionOperation().andExclude("foo"); + Document document = projectionOp.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + + assertThat(projectionOp.inheritsFields()).isTrue(); + assertThat((Integer) projectClause.get("foo")).isEqualTo(0); + } + + @Test // DATAMONGO-1893 + void includeShouldNotInheritFields() { + + ProjectionOperation projectionOp = new ProjectionOperation().andInclude("foo"); + + assertThat(projectionOp.inheritsFields()).isFalse(); } @Test // DATAMONGO-758 - public void excludeShouldAllowExclusionOfUnderscoreId() { + void excludeShouldAllowExclusionOfUnderscoreId() { ProjectionOperation projectionOp = new ProjectionOperation().andExclude(Fields.UNDERSCORE_ID); Document document = projectionOp.toDocument(Aggregation.DEFAULT_CONTEXT); @@ -212,8 +234,23 @@ public void excludeShouldAllowExclusionOfUnderscoreId() { assertThat((Integer) projectClause.get(Fields.UNDERSCORE_ID)).isEqualTo(0); } + @Test // DATAMONGO-1906 + void rendersConditionalProjectionCorrectly() { + + TypedAggregation aggregation = Aggregation.newAggregation(Book.class, + Aggregation.project("title") + .and(ConditionalOperators.when(ComparisonOperators.valueOf("author.middle").equalToValue("")) + .then("$$REMOVE").otherwiseValueOf("author.middle")) + .as("author.middle")); + + Document document = aggregation.toDocument("books", Aggregation.DEFAULT_CONTEXT); + + assertThat(document).isEqualTo(Document.parse( + "{\"aggregate\" : \"books\", \"pipeline\" : [{\"$project\" : {\"title\" : 1, \"author.middle\" : {\"$cond\" : {\"if\" : 
{\"$eq\" : [\"$author.middle\", \"\"]}, \"then\" : \"$$REMOVE\",\"else\" : \"$author.middle\"} }}}]}")); + } + @Test // DATAMONGO-757 - public void usesImplictAndExplicitFieldAliasAndIncludeExclude() { + void usesImplictAndExplicitFieldAliasAndIncludeExclude() { ProjectionOperation operation = Aggregation.project("foo").and("foobar").as("bar").andInclude("inc1", "inc2") .andExclude("_id"); @@ -221,21 +258,20 @@ public void usesImplictAndExplicitFieldAliasAndIncludeExclude() { Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); - assertThat(projectClause.get("foo")).isEqualTo((Object) 1); // implicit - assertThat(projectClause.get("bar")).isEqualTo((Object) "$foobar"); // explicit - assertThat(projectClause.get("inc1")).isEqualTo((Object) 1); // include shortcut - assertThat(projectClause.get("inc2")).isEqualTo((Object) 1); - assertThat(projectClause.get("_id")).isEqualTo((Object) 0); + assertThat(projectClause.get("foo")).isEqualTo(1); // implicit + assertThat(projectClause.get("bar")).isEqualTo("$foobar"); // explicit + assertThat(projectClause.get("inc1")).isEqualTo(1); // include shortcut + assertThat(projectClause.get("inc2")).isEqualTo(1); + assertThat(projectClause.get("_id")).isEqualTo(0); } - @Test(expected = IllegalArgumentException.class) - public void arithmeticProjectionOperationModByZeroException() { - - new ProjectionOperation().and("a").mod(0); + @Test + void arithmeticProjectionOperationModByZeroException() { + assertThatIllegalArgumentException().isThrownBy(() -> new ProjectionOperation().and("a").mod(0)); } @Test // DATAMONGO-769 - public void allowArithmeticOperationsWithFieldReferences() { + void allowArithmeticOperationsWithFieldReferences() { ProjectionOperation operation = Aggregation.project() // .and("foo").plus("bar").as("fooPlusBar") // @@ -260,7 +296,7 @@ public void allowArithmeticOperationsWithFieldReferences() { } @Test // DATAMONGO-774 - 
public void projectionExpressions() { + void projectionExpressions() { ProjectionOperation operation = Aggregation.project() // .andExpression("(netPrice + surCharge) * taxrate * [0]", 2).as("grossSalesPrice") // @@ -272,7 +308,7 @@ public void projectionExpressions() { } @Test // DATAMONGO-975 - public void shouldRenderDateTimeFragmentExtractionsForSimpleFieldProjectionsCorrectly() { + void shouldRenderDateTimeFragmentExtractionsForSimpleFieldProjectionsCorrectly() { ProjectionOperation operation = Aggregation.project() // .and("date").extractHour().as("hour") // @@ -290,22 +326,22 @@ public void shouldRenderDateTimeFragmentExtractionsForSimpleFieldProjectionsCorr Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(document).isNotNull(); - Document projected = exctractOperation("$project", document); + Document projected = extractOperation("$project", document); - assertThat(projected.get("hour")).isEqualTo((Object) new Document("$hour", Arrays.asList("$date"))); - assertThat(projected.get("min")).isEqualTo((Object) new Document("$minute", Arrays.asList("$date"))); - assertThat(projected.get("second")).isEqualTo((Object) new Document("$second", Arrays.asList("$date"))); - assertThat(projected.get("millis")).isEqualTo((Object) new Document("$millisecond", Arrays.asList("$date"))); - assertThat(projected.get("year")).isEqualTo((Object) new Document("$year", Arrays.asList("$date"))); - assertThat(projected.get("month")).isEqualTo((Object) new Document("$month", Arrays.asList("$date"))); - assertThat(projected.get("week")).isEqualTo((Object) new Document("$week", Arrays.asList("$date"))); - assertThat(projected.get("dayOfYear")).isEqualTo((Object) new Document("$dayOfYear", Arrays.asList("$date"))); - assertThat(projected.get("dayOfMonth")).isEqualTo((Object) new Document("$dayOfMonth", Arrays.asList("$date"))); - assertThat(projected.get("dayOfWeek")).isEqualTo((Object) new Document("$dayOfWeek", Arrays.asList("$date"))); + 
assertThat(projected.get("hour")).isEqualTo(new Document("$hour", Arrays.asList("$date"))); + assertThat(projected.get("min")).isEqualTo(new Document("$minute", Arrays.asList("$date"))); + assertThat(projected.get("second")).isEqualTo(new Document("$second", Arrays.asList("$date"))); + assertThat(projected.get("millis")).isEqualTo(new Document("$millisecond", Arrays.asList("$date"))); + assertThat(projected.get("year")).isEqualTo(new Document("$year", Arrays.asList("$date"))); + assertThat(projected.get("month")).isEqualTo(new Document("$month", Arrays.asList("$date"))); + assertThat(projected.get("week")).isEqualTo(new Document("$week", Arrays.asList("$date"))); + assertThat(projected.get("dayOfYear")).isEqualTo(new Document("$dayOfYear", Arrays.asList("$date"))); + assertThat(projected.get("dayOfMonth")).isEqualTo(new Document("$dayOfMonth", Arrays.asList("$date"))); + assertThat(projected.get("dayOfWeek")).isEqualTo(new Document("$dayOfWeek", Arrays.asList("$date"))); } @Test // DATAMONGO-975 - public void shouldRenderDateTimeFragmentExtractionsForExpressionProjectionsCorrectly() throws Exception { + void shouldRenderDateTimeFragmentExtractionsForExpressionProjectionsCorrectly() throws Exception { ProjectionOperation operation = Aggregation.project() // .andExpression("date + 86400000") // @@ -316,13 +352,13 @@ public void shouldRenderDateTimeFragmentExtractionsForExpressionProjectionsCorre Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(document).isNotNull(); - Document projected = exctractOperation("$project", document); + Document projected = extractOperation("$project", document); assertThat(projected.get("dayOfYearPlus1Day")).isEqualTo( new Document("$dayOfYear", Arrays.asList(new Document("$add", Arrays. 
asList("$date", 86400000))))); } @Test // DATAMONGO-979 - public void shouldRenderSizeExpressionInProjection() { + void shouldRenderSizeExpressionInProjection() { ProjectionOperation operation = Aggregation // .project() // @@ -332,50 +368,48 @@ public void shouldRenderSizeExpressionInProjection() { Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - Document projected = exctractOperation("$project", document); - assertThat(projected.get("tags_count")).isEqualTo((Object) new Document("$size", Arrays.asList("$tags"))); + Document projected = extractOperation("$project", document); + assertThat(projected.get("tags_count")).isEqualTo(new Document("$size", Arrays.asList("$tags"))); } @Test // DATAMONGO-979 - public void shouldRenderGenericSizeExpressionInProjection() { + void shouldRenderGenericSizeExpressionInProjection() { ProjectionOperation operation = Aggregation // .project() // - .and(SIZE.of(field("tags"))) // + .and(ArrayOperators.arrayOf("tags").length()) // .as("tags_count"); Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - Document projected = exctractOperation("$project", document); - assertThat(projected.get("tags_count")).isEqualTo((Object) new Document("$size", Arrays.asList("$tags"))); + Document projected = extractOperation("$project", document); + assertThat(projected.get("tags_count")).isEqualTo(new Document("$size", "$tags")); } @Test // DATAMONGO-1457 - public void shouldRenderSliceCorrectly() throws Exception { + void shouldRenderSliceCorrectly() throws Exception { ProjectionOperation operation = Aggregation.project().and("field").slice(10).as("renamed"); Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - Document projected = exctractOperation("$project", document); + Document projected = extractOperation("$project", document); - assertThat(projected.get("renamed")) - .isEqualTo((Object) new Document("$slice", Arrays. 
asList("$field", 10))); + assertThat(projected.get("renamed")).isEqualTo(new Document("$slice", Arrays. asList("$field", 10))); } @Test // DATAMONGO-1457 - public void shouldRenderSliceWithPositionCorrectly() throws Exception { + void shouldRenderSliceWithPositionCorrectly() throws Exception { ProjectionOperation operation = Aggregation.project().and("field").slice(10, 5).as("renamed"); Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - Document projected = exctractOperation("$project", document); + Document projected = extractOperation("$project", document); - assertThat(projected.get("renamed")) - .isEqualTo((Object) new Document("$slice", Arrays. asList("$field", 5, 10))); + assertThat(projected.get("renamed")).isEqualTo(new Document("$slice", Arrays. asList("$field", 5, 10))); } @Test // DATAMONGO-784 - public void shouldRenderCmpCorrectly() { + void shouldRenderCmpCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").cmp(10).as("cmp10"); @@ -384,7 +418,7 @@ public void shouldRenderCmpCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderEqCorrectly() { + void shouldRenderEqCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").eq(10).as("eq10"); @@ -393,7 +427,7 @@ public void shouldRenderEqCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderGtCorrectly() { + void shouldRenderGtCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").gt(10).as("gt10"); @@ -402,7 +436,7 @@ public void shouldRenderGtCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderGteCorrectly() { + void shouldRenderGteCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").gte(10).as("gte10"); @@ -411,7 +445,7 @@ public void shouldRenderGteCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderLtCorrectly() { + void shouldRenderLtCorrectly() { ProjectionOperation operation = 
Aggregation.project().and("field").lt(10).as("lt10"); @@ -420,7 +454,7 @@ public void shouldRenderLtCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderLteCorrectly() { + void shouldRenderLteCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").lte(10).as("lte10"); @@ -429,7 +463,7 @@ public void shouldRenderLteCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderNeCorrectly() { + void shouldRenderNeCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").ne(10).as("ne10"); @@ -438,7 +472,7 @@ public void shouldRenderNeCorrectly() { } @Test // DATAMONGO-1536 - public void shouldRenderSetEquals() { + void shouldRenderSetEquals() { Document agg = project("A", "B").and("A").equalsArrays("B").as("sameElements") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -448,7 +482,7 @@ public void shouldRenderSetEquals() { } @Test // DATAMONGO-1536 - public void shouldRenderSetEqualsAggregationExpresssion() { + void shouldRenderSetEqualsAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").isEqualTo("B")).as("sameElements") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -458,7 +492,7 @@ public void shouldRenderSetEqualsAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetIntersection() { + void shouldRenderSetIntersection() { Document agg = project("A", "B").and("A").intersectsArrays("B").as("commonToBoth") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -468,7 +502,7 @@ public void shouldRenderSetIntersection() { } @Test // DATAMONGO-1536 - public void shouldRenderSetIntersectionAggregationExpresssion() { + void shouldRenderSetIntersectionAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").intersects("B")).as("commonToBoth") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -478,7 +512,7 @@ public void shouldRenderSetIntersectionAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void 
shouldRenderSetUnion() { + void shouldRenderSetUnion() { Document agg = project("A", "B").and("A").unionArrays("B").as("allValues").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -487,7 +521,7 @@ public void shouldRenderSetUnion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetUnionAggregationExpresssion() { + void shouldRenderSetUnionAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").union("B")).as("allValues") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -497,7 +531,7 @@ public void shouldRenderSetUnionAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetDifference() { + void shouldRenderSetDifference() { Document agg = project("A", "B").and("B").differenceToArray("A").as("inBOnly") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -507,7 +541,7 @@ public void shouldRenderSetDifference() { } @Test // DATAMONGO-1536 - public void shouldRenderSetDifferenceAggregationExpresssion() { + void shouldRenderSetDifferenceAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("B").differenceTo("A")).as("inBOnly") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -517,7 +551,7 @@ public void shouldRenderSetDifferenceAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetIsSubset() { + void shouldRenderSetIsSubset() { Document agg = project("A", "B").and("A").subsetOfArray("B").as("aIsSubsetOfB") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -527,7 +561,7 @@ public void shouldRenderSetIsSubset() { } @Test // DATAMONGO-1536 - public void shouldRenderSetIsSubsetAggregationExpresssion() { + void shouldRenderSetIsSubsetAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").isSubsetOf("B")).as("aIsSubsetOfB") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -537,7 +571,7 @@ public void shouldRenderSetIsSubsetAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderAnyElementTrue() { + void 
shouldRenderAnyElementTrue() { Document agg = project("responses").and("responses").anyElementInArrayTrue().as("isAnyTrue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -547,7 +581,7 @@ public void shouldRenderAnyElementTrue() { } @Test // DATAMONGO-1536 - public void shouldRenderAnyElementTrueAggregationExpresssion() { + void shouldRenderAnyElementTrueAggregationExpresssion() { Document agg = project("responses").and(SetOperators.arrayAsSet("responses").anyElementTrue()).as("isAnyTrue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -557,7 +591,7 @@ public void shouldRenderAnyElementTrueAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderAllElementsTrue() { + void shouldRenderAllElementsTrue() { Document agg = project("responses").and("responses").allElementsInArrayTrue().as("isAllTrue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -567,7 +601,7 @@ public void shouldRenderAllElementsTrue() { } @Test // DATAMONGO-1536 - public void shouldRenderAllElementsTrueAggregationExpresssion() { + void shouldRenderAllElementsTrueAggregationExpresssion() { Document agg = project("responses").and(SetOperators.arrayAsSet("responses").allElementsTrue()).as("isAllTrue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -577,7 +611,7 @@ public void shouldRenderAllElementsTrueAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderAbs() { + void shouldRenderAbs() { Document agg = project().and("anyNumber").absoluteValue().as("absoluteValue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -586,19 +620,18 @@ public void shouldRenderAbs() { } @Test // DATAMONGO-1536 - public void shouldRenderAbsAggregationExpresssion() { + void shouldRenderAbsAggregationExpresssion() { Document agg = project() - .and( - ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).abs()) - .as("delta").toDocument(Aggregation.DEFAULT_CONTEXT); + 
.and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).abs()).as("delta") + .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg) .isEqualTo(Document.parse("{ $project: { delta: { $abs: { $subtract: [ \"$start\", \"$end\" ] } } }}")); } @Test // DATAMONGO-1536 - public void shouldRenderAddAggregationExpresssion() { + void shouldRenderAddAggregationExpresssion() { Document agg = project().and(ArithmeticOperators.valueOf("price").add("fee")).as("total") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -607,7 +640,7 @@ public void shouldRenderAddAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderCeil() { + void shouldRenderCeil() { Document agg = project().and("anyNumber").ceil().as("ceilValue").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -615,21 +648,20 @@ public void shouldRenderCeil() { } @Test // DATAMONGO-1536 - public void shouldRenderCeilAggregationExpresssion() { + void shouldRenderCeilAggregationExpresssion() { - Document agg = project().and( - ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).ceil()) - .as("delta").toDocument(Aggregation.DEFAULT_CONTEXT); + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).ceil()).as("delta") + .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg) .isEqualTo(Document.parse("{ $project: { delta: { $ceil: { $subtract: [ \"$start\", \"$end\" ] } } }}")); } @Test // DATAMONGO-1536 - public void shouldRenderDivide() { + void shouldRenderDivide() { - Document agg = project().and("value") - .divide(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).as("result") + Document agg = project().and("value").divide(ArithmeticOperators.valueOf("start").subtract("end")).as("result") .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo( @@ -637,11 +669,10 @@ public void shouldRenderDivide() { } @Test // DATAMONGO-1536 - public 
void shouldRenderDivideAggregationExpresssion() { + void shouldRenderDivideAggregationExpresssion() { Document agg = project() - .and(ArithmeticOperators.valueOf("anyNumber") - .divideBy(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end")))) + .and(ArithmeticOperators.valueOf("anyNumber").divideBy(ArithmeticOperators.valueOf("start").subtract("end"))) .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo(Document @@ -649,7 +680,7 @@ public void shouldRenderDivideAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderExp() { + void shouldRenderExp() { Document agg = project().and("value").exp().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -657,19 +688,18 @@ public void shouldRenderExp() { } @Test // DATAMONGO-1536 - public void shouldRenderExpAggregationExpresssion() { + void shouldRenderExpAggregationExpresssion() { Document agg = project() - .and( - ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).exp()) - .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).exp()).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg) .isEqualTo(Document.parse("{ $project: { result: { $exp: { $subtract: [ \"$start\", \"$end\" ] } } }}")); } @Test // DATAMONGO-1536 - public void shouldRenderFloor() { + void shouldRenderFloor() { Document agg = project().and("value").floor().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -677,18 +707,18 @@ public void shouldRenderFloor() { } @Test // DATAMONGO-1536 - public void shouldRenderFloorAggregationExpresssion() { + void shouldRenderFloorAggregationExpresssion() { - Document agg = project().and( - ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).floor()) - .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + Document agg = project() + 
.and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).floor()).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg) .isEqualTo(Document.parse("{ $project: { result: { $floor: { $subtract: [ \"$start\", \"$end\" ] } } }}")); } @Test // DATAMONGO-1536 - public void shouldRenderLn() { + void shouldRenderLn() { Document agg = project().and("value").ln().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -696,10 +726,9 @@ public void shouldRenderLn() { } @Test // DATAMONGO-1536 - public void shouldRenderLnAggregationExpresssion() { + void shouldRenderLnAggregationExpresssion() { - Document agg = project() - .and(ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).ln()) + Document agg = project().and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).ln()) .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg) @@ -707,7 +736,7 @@ public void shouldRenderLnAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderLog() { + void shouldRenderLog() { Document agg = project().and("value").log(2).as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -715,18 +744,18 @@ public void shouldRenderLog() { } @Test // DATAMONGO-1536 - public void shouldRenderLogAggregationExpresssion() { + void shouldRenderLogAggregationExpresssion() { - Document agg = project().and( - ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).log(2)) - .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).log(2)).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg) .isEqualTo(Document.parse("{ $project: { result: { $log: [ { $subtract: [ \"$start\", \"$end\" ] }, 2] } }}")); } @Test // DATAMONGO-1536 - public void shouldRenderLog10() { + void 
shouldRenderLog10() { Document agg = project().and("value").log10().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -734,42 +763,41 @@ public void shouldRenderLog10() { } @Test // DATAMONGO-1536 - public void shouldRenderLog10AggregationExpresssion() { + void shouldRenderLog10AggregationExpresssion() { - Document agg = project().and( - ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).log10()) - .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).log10()).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg) .isEqualTo(Document.parse("{ $project: { result: { $log10: { $subtract: [ \"$start\", \"$end\" ] } } }}")); } @Test // DATAMONGO-1536 - public void shouldRenderMod() { + void shouldRenderMod() { - Document agg = project().and("value").mod(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))) - .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + Document agg = project().and("value").mod(ArithmeticOperators.valueOf("start").subtract("end")).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo( Document.parse("{ $project: { result: { $mod: [\"$value\", { $subtract: [ \"$start\", \"$end\" ] }] } }}")); } @Test // DATAMONGO-1536 - public void shouldRenderModAggregationExpresssion() { + void shouldRenderModAggregationExpresssion() { - Document agg = project().and( - ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).mod(2)) - .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).mod(2)).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg) .isEqualTo(Document.parse("{ $project: { result: { $mod: [{ $subtract: [ \"$start\", \"$end\" ] 
}, 2] } }}")); } @Test // DATAMONGO-1536 - public void shouldRenderMultiply() { + void shouldRenderMultiply() { - Document agg = project().and("value") - .multiply(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).as("result") + Document agg = project().and("value").multiply(ArithmeticOperators.valueOf("start").subtract("end")).as("result") .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo(Document @@ -777,19 +805,17 @@ public void shouldRenderMultiply() { } @Test // DATAMONGO-1536 - public void shouldRenderMultiplyAggregationExpresssion() { + void shouldRenderMultiplyAggregationExpresssion() { - Document agg = project() - .and(ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))) - .multiplyBy(2).multiplyBy("refToAnotherNumber")) - .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + Document agg = project().and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")) + .multiplyBy(2).multiplyBy("refToAnotherNumber")).as("result").toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo(Document.parse( "{ $project: { result: { $multiply: [{ $subtract: [ \"$start\", \"$end\" ] }, 2, \"$refToAnotherNumber\"] } }}")); } @Test // DATAMONGO-1536 - public void shouldRenderPow() { + void shouldRenderPow() { Document agg = project().and("value").pow(2).as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -797,18 +823,18 @@ public void shouldRenderPow() { } @Test // DATAMONGO-1536 - public void shouldRenderPowAggregationExpresssion() { + void shouldRenderPowAggregationExpresssion() { - Document agg = project().and( - ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).pow(2)) - .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).pow(2)).as("result") + 
.toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg) .isEqualTo(Document.parse("{ $project: { result: { $pow: [{ $subtract: [ \"$start\", \"$end\" ] }, 2] } }}")); } @Test // DATAMONGO-1536 - public void shouldRenderSqrt() { + void shouldRenderSqrt() { Document agg = project().and("value").sqrt().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -816,40 +842,39 @@ public void shouldRenderSqrt() { } @Test // DATAMONGO-1536 - public void shouldRenderSqrtAggregationExpresssion() { + void shouldRenderSqrtAggregationExpresssion() { - Document agg = project().and( - ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).sqrt()) - .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).sqrt()).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg) .isEqualTo(Document.parse("{ $project: { result: { $sqrt: { $subtract: [ \"$start\", \"$end\" ] } } }}")); } @Test // DATAMONGO-1536 - public void shouldRenderSubtract() { + void shouldRenderSubtract() { - Document agg = project().and("numericField").minus(AggregationFunctionExpressions.SIZE.of(field("someArray"))) - .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + Document agg = project().and("numericField").minus(ArrayOperators.arrayOf("someArray").length()).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo( - Document.parse("{ $project: { result: { $subtract: [ \"$numericField\", { $size : [\"$someArray\"]}] } } }")); + Document.parse("{ $project: { result: { $subtract: [ \"$numericField\", { $size : \"$someArray\"}] } } }")); } @Test // DATAMONGO-1536 - public void shouldRenderSubtractAggregationExpresssion() { + void shouldRenderSubtractAggregationExpresssion() { Document agg = project() - .and(ArithmeticOperators.valueOf("numericField") - 
.subtract(AggregationFunctionExpressions.SIZE.of(field("someArray")))) + .and(ArithmeticOperators.valueOf("numericField").subtract(ArrayOperators.arrayOf("someArray").length())) .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo( - Document.parse("{ $project: { result: { $subtract: [ \"$numericField\", { $size : [\"$someArray\"]}] } } }")); + Document.parse("{ $project: { result: { $subtract: [ \"$numericField\", { $size : \"$someArray\"}] } } }")); } @Test // DATAMONGO-1536 - public void shouldRenderTrunc() { + void shouldRenderTrunc() { Document agg = project().and("value").trunc().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -857,18 +882,18 @@ public void shouldRenderTrunc() { } @Test // DATAMONGO-1536 - public void shouldRenderTruncAggregationExpresssion() { + void shouldRenderTruncAggregationExpresssion() { - Document agg = project().and( - ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).trunc()) - .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); + Document agg = project() + .and(ArithmeticOperators.valueOf(ArithmeticOperators.valueOf("start").subtract("end")).trunc()).as("result") + .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg) .isEqualTo(Document.parse("{ $project: { result: { $trunc: { $subtract: [ \"$start\", \"$end\" ] } } }}")); } @Test // DATAMONGO-1536 - public void shouldRenderConcat() { + void shouldRenderConcat() { Document agg = project().and("item").concat(" - ", field("description")).as("itemDescription") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -879,7 +904,7 @@ public void shouldRenderConcat() { } @Test // DATAMONGO-1536 - public void shouldRenderConcatAggregationExpression() { + void shouldRenderConcatAggregationExpression() { Document agg = project().and(StringOperators.valueOf("item").concat(" - ").concatValueOf("description")) .as("itemDescription").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -890,7 +915,7 @@ public void 
shouldRenderConcatAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSubstr() { + void shouldRenderSubstr() { Document agg = project().and("quarter").substring(0, 2).as("yearSubstring").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -898,7 +923,7 @@ public void shouldRenderSubstr() { } @Test // DATAMONGO-1536 - public void shouldRenderSubstrAggregationExpression() { + void shouldRenderSubstrAggregationExpression() { Document agg = project().and(StringOperators.valueOf("quarter").substring(0, 2)).as("yearSubstring") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -907,7 +932,7 @@ public void shouldRenderSubstrAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderToLower() { + void shouldRenderToLower() { Document agg = project().and("item").toLower().as("item").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -915,7 +940,7 @@ public void shouldRenderToLower() { } @Test // DATAMONGO-1536 - public void shouldRenderToLowerAggregationExpression() { + void shouldRenderToLowerAggregationExpression() { Document agg = project().and(StringOperators.valueOf("item").toLower()).as("item") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -924,7 +949,7 @@ public void shouldRenderToLowerAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderToUpper() { + void shouldRenderToUpper() { Document agg = project().and("item").toUpper().as("item").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -932,7 +957,7 @@ public void shouldRenderToUpper() { } @Test // DATAMONGO-1536 - public void shouldRenderToUpperAggregationExpression() { + void shouldRenderToUpperAggregationExpression() { Document agg = project().and(StringOperators.valueOf("item").toUpper()).as("item") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -941,7 +966,7 @@ public void shouldRenderToUpperAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderStrCaseCmp() { + void shouldRenderStrCaseCmp() { Document agg = 
project().and("quarter").strCaseCmp("13q4").as("comparisonResult") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -951,7 +976,7 @@ public void shouldRenderStrCaseCmp() { } @Test // DATAMONGO-1536 - public void shouldRenderStrCaseCmpAggregationExpression() { + void shouldRenderStrCaseCmpAggregationExpression() { Document agg = project().and(StringOperators.valueOf("quarter").strCaseCmp("13q4")).as("comparisonResult") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -961,7 +986,7 @@ public void shouldRenderStrCaseCmpAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderArrayElementAt() { + void shouldRenderArrayElementAt() { Document agg = project().and("favorites").arrayElementAt(0).as("first").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -969,7 +994,7 @@ public void shouldRenderArrayElementAt() { } @Test // DATAMONGO-1536 - public void shouldRenderArrayElementAtAggregationExpression() { + void shouldRenderArrayElementAtAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("favorites").elementAt(0)).as("first") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -978,7 +1003,7 @@ public void shouldRenderArrayElementAtAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderConcatArrays() { + void shouldRenderConcatArrays() { Document agg = project().and("instock").concatArrays("ordered").as("items").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -987,7 +1012,7 @@ public void shouldRenderConcatArrays() { } @Test // DATAMONGO-1536 - public void shouldRenderConcatArraysAggregationExpression() { + void shouldRenderConcatArraysAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("instock").concat("ordered")).as("items") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -997,7 +1022,7 @@ public void shouldRenderConcatArraysAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderIsArray() { + void shouldRenderIsArray() { Document agg = 
project().and("instock").isArray().as("isAnArray").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1005,7 +1030,7 @@ public void shouldRenderIsArray() { } @Test // DATAMONGO-1536 - public void shouldRenderIsArrayAggregationExpression() { + void shouldRenderIsArrayAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("instock").isArray()).as("isAnArray") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1014,7 +1039,7 @@ public void shouldRenderIsArrayAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSizeAggregationExpression() { + void shouldRenderSizeAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("instock").length()).as("arraySize") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1023,7 +1048,7 @@ public void shouldRenderSizeAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSliceAggregationExpression() { + void shouldRenderSliceAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("favorites").slice().itemCount(3)).as("threeFavorites") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1032,7 +1057,7 @@ public void shouldRenderSliceAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSliceWithPositionAggregationExpression() { + void shouldRenderSliceWithPositionAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("favorites").slice().offset(2).itemCount(3)) .as("threeFavorites").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1040,8 +1065,19 @@ public void shouldRenderSliceWithPositionAggregationExpression() { assertThat(agg).isEqualTo(Document.parse("{ $project: { threeFavorites: { $slice: [ \"$favorites\", 2, 3 ] } } }")); } + @Test // DATAMONGO-4857 + void shouldRenderSliceWithExpressions() { + + Document agg = project().and(ArrayOperators.arrayOf("favorites").slice() + .offset(Subtract.valueOf(ArrayOperators.Size.lengthOfArray("myArray")).subtract(1)) + 
.itemCount(ArithmeticOperators.rand())).as("threeFavorites").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { threeFavorites: { $slice: [ \"$favorites\", { \"$subtract\": [ {\"$size\": \"$myArray\"}, 1]}, { $rand : {} } ] } } }")); + } + @Test // DATAMONGO-1536 - public void shouldRenderLiteral() { + void shouldRenderLiteral() { Document agg = project().and("$1").asLiteral().as("literalOnly").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1049,7 +1085,7 @@ public void shouldRenderLiteral() { } @Test // DATAMONGO-1536 - public void shouldRenderLiteralAggregationExpression() { + void shouldRenderLiteralAggregationExpression() { Document agg = project().and(LiteralOperators.valueOf("$1").asLiteral()).as("literalOnly") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1058,7 +1094,7 @@ public void shouldRenderLiteralAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderDayOfYearAggregationExpression() { + void shouldRenderDayOfYearAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").dayOfYear()).as("dayOfYear") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1066,8 +1102,41 @@ public void shouldRenderDayOfYearAggregationExpression() { assertThat(agg).isEqualTo(Document.parse("{ $project: { dayOfYear: { $dayOfYear: \"$date\" } } }")); } + @Test // DATAMONGO-1834 + void shouldRenderDayOfYearAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).dayOfYear()).as("dayOfYear") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { dayOfYear: { $dayOfYear: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderTimeZoneFromField() { + + Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.ofField("tz")).dayOfYear()) + 
.as("dayOfYear").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { dayOfYear: { $dayOfYear: { \"date\" : \"$date\", \"timezone\" : \"$tz\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderTimeZoneFromExpression() { + + Document agg = project() + .and(DateOperators.dateOf("date") + .withTimezone(Timezone.ofExpression(LiteralOperators.valueOf("America/Chicago").asLiteral())).dayOfYear()) + .as("dayOfYear").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { dayOfYear: { $dayOfYear: { \"date\" : \"$date\", \"timezone\" : { $literal: \"America/Chicago\"} } } } }")); + } + @Test // DATAMONGO-1536 - public void shouldRenderDayOfMonthAggregationExpression() { + void shouldRenderDayOfMonthAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").dayOfMonth()).as("day") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1075,8 +1144,19 @@ public void shouldRenderDayOfMonthAggregationExpression() { assertThat(agg).isEqualTo(Document.parse("{ $project: { day: { $dayOfMonth: \"$date\" }} }")); } + @Test // DATAMONGO-1834 + void shouldRenderDayOfMonthAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).dayOfMonth()).as("day") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { day: { $dayOfMonth: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + @Test // DATAMONGO-1536 - public void shouldRenderDayOfWeekAggregationExpression() { + void shouldRenderDayOfWeekAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").dayOfWeek()).as("dayOfWeek") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1084,8 +1164,19 @@ public void shouldRenderDayOfWeekAggregationExpression() { assertThat(agg).isEqualTo(Document.parse("{ $project: { dayOfWeek: { 
$dayOfWeek: \"$date\" } } }")); } + @Test // DATAMONGO-1834 + void shouldRenderDayOfWeekAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).dayOfWeek()).as("dayOfWeek") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { dayOfWeek: { $dayOfWeek: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + @Test // DATAMONGO-1536 - public void shouldRenderYearAggregationExpression() { + void shouldRenderYearAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").year()).as("year") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1093,8 +1184,18 @@ public void shouldRenderYearAggregationExpression() { assertThat(agg).isEqualTo(Document.parse("{ $project: { year: { $year: \"$date\" } } }")); } + @Test // DATAMONGO-1834 + void shouldRenderYearAggregationExpressionWithTimezone() { + + Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).year()) + .as("year").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { year: { $year: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + @Test // DATAMONGO-1536 - public void shouldRenderMonthAggregationExpression() { + void shouldRenderMonthAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").month()).as("month") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1102,8 +1203,18 @@ public void shouldRenderMonthAggregationExpression() { assertThat(agg).isEqualTo(Document.parse("{ $project: { month: { $month: \"$date\" } } }")); } + @Test // DATAMONGO-1834 + void shouldRenderMonthAggregationExpressionWithTimezone() { + + Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).month()) + 
.as("month").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { month: { $month: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + @Test // DATAMONGO-1536 - public void shouldRenderWeekAggregationExpression() { + void shouldRenderWeekAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").week()).as("week") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1111,8 +1222,18 @@ public void shouldRenderWeekAggregationExpression() { assertThat(agg).isEqualTo(Document.parse("{ $project: { week: { $week: \"$date\" } } }")); } + @Test // DATAMONGO-1834 + void shouldRenderWeekAggregationExpressionWithTimezone() { + + Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).week()) + .as("week").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { week: { $week: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + @Test // DATAMONGO-1536 - public void shouldRenderHourAggregationExpression() { + void shouldRenderHourAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").hour()).as("hour") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1120,8 +1241,18 @@ public void shouldRenderHourAggregationExpression() { assertThat(agg).isEqualTo(Document.parse("{ $project: { hour: { $hour: \"$date\" } } }")); } + @Test // DATAMONGO-1834 + void shouldRenderHourAggregationExpressionWithTimezone() { + + Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).hour()) + .as("hour").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { hour: { $hour: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + @Test // DATAMONGO-1536 - public void shouldRenderMinuteAggregationExpression() { + void 
shouldRenderMinuteAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").minute()).as("minute") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1129,8 +1260,19 @@ public void shouldRenderMinuteAggregationExpression() { assertThat(agg).isEqualTo(Document.parse("{ $project: { minute: { $minute: \"$date\" } } }")); } + @Test // DATAMONGO-1834 + void shouldRenderMinuteAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).minute()).as("minute") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { minute: { $minute: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + @Test // DATAMONGO-1536 - public void shouldRenderSecondAggregationExpression() { + void shouldRenderSecondAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").second()).as("second") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1138,8 +1280,19 @@ public void shouldRenderSecondAggregationExpression() { assertThat(agg).isEqualTo(Document.parse("{ $project: { second: { $second: \"$date\" } } }")); } + @Test // DATAMONGO-1834 + void shouldRenderSecondAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).second()).as("second") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { second: { $second: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + @Test // DATAMONGO-1536 - public void shouldRenderMillisecondAggregationExpression() { + void shouldRenderMillisecondAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").millisecond()).as("msec") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1147,8 +1300,19 @@ public void shouldRenderMillisecondAggregationExpression() { 
assertThat(agg).isEqualTo(Document.parse("{ $project: { msec: { $millisecond: \"$date\" } } }")); } + @Test // DATAMONGO-1834 + void shouldRenderMillisecondAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).millisecond()).as("msec") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { msec: { $millisecond: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + @Test // DATAMONGO-1536 - public void shouldRenderDateToString() { + void shouldRenderDateToString() { Document agg = project().and("date").dateAsFormattedString("%H:%M:%S:%L").as("time") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1157,8 +1321,16 @@ public void shouldRenderDateToString() { Document.parse("{ $project: { time: { $dateToString: { format: \"%H:%M:%S:%L\", date: \"$date\" } } } }")); } + @Test // DATAMONGO-2047 + void shouldRenderDateToStringWithoutFormatOption() { + + Document agg = project().and("date").dateAsFormattedString().as("time").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { time: { $dateToString: { date: \"$date\" } } } }")); + } + @Test // DATAMONGO-1536 - public void shouldRenderDateToStringAggregationExpression() { + void shouldRenderDateToStringAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").toString("%H:%M:%S:%L")).as("time") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1167,8 +1339,60 @@ public void shouldRenderDateToStringAggregationExpression() { Document.parse("{ $project: { time: { $dateToString: { format: \"%H:%M:%S:%L\", date: \"$date\" } } } }")); } + @Test // DATAMONGO-1834, DATAMONGO-2047 + void shouldRenderDateToStringAggregationExpressionWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).toString("%H:%M:%S:%L")) + 
.as("time").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { time: { $dateToString: { format: \"%H:%M:%S:%L\", date: \"$date\", \"timezone\" : \"America/Chicago\" } } } } } }")); + + Document removedTimezone = project().and(DateOperators.dateOf("date") + .withTimezone(Timezone.valueOf("America/Chicago")).toString("%H:%M:%S:%L").withTimezone(Timezone.none())) + .as("time").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(removedTimezone).isEqualTo( + Document.parse("{ $project: { time: { $dateToString: { format: \"%H:%M:%S:%L\", date: \"$date\" } } } } } }")); + } + + @Test // DATAMONGO-2047 + void shouldRenderDateToStringWithOnNull() { + + Document agg = project() + .and(DateOperators.dateOf("date").toStringWithDefaultFormat().onNullReturnValueOf("fallback-field")).as("time") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project: { time: { $dateToString: { date: \"$date\", \"onNull\" : \"$fallback-field\" } } } }")); + } + + @Test // DATAMONGO-2047 + void shouldRenderDateToStringWithOnNullExpression() { + + Document agg = project() + .and(DateOperators.dateOf("date").toStringWithDefaultFormat() + .onNullReturnValueOf(LiteralOperators.valueOf("my-literal").asLiteral())) + .as("time").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { time: { $dateToString: { date: \"$date\", \"onNull\" : { \"$literal\": \"my-literal\"} } } } }")); + } + + @Test // DATAMONGO-2047 + void shouldRenderDateToStringWithOnNullAndTimezone() { + + Document agg = project().and(DateOperators.dateOf("date").toStringWithDefaultFormat() + .onNullReturnValueOf("fallback-field").withTimezone(Timezone.ofField("foo"))).as("time") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project: { time: { $dateToString: { date: \"$date\", \"onNull\" : \"$fallback-field\", \"timezone\": \"$foo\" } } } 
}")); + } + @Test // DATAMONGO-1536 - public void shouldRenderSumAggregationExpression() { + void shouldRenderSumAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("quizzes").sum()).as("quizTotal") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1177,7 +1401,7 @@ public void shouldRenderSumAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSumWithMultipleArgsAggregationExpression() { + void shouldRenderSumWithMultipleArgsAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("final").sum().and("midterm")).as("examTotal") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1186,7 +1410,7 @@ public void shouldRenderSumWithMultipleArgsAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderAvgAggregationExpression() { + void shouldRenderAvgAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("quizzes").avg()).as("quizAvg") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1195,7 +1419,7 @@ public void shouldRenderAvgAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderAvgWithMultipleArgsAggregationExpression() { + void shouldRenderAvgWithMultipleArgsAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("final").avg().and("midterm")).as("examAvg") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1204,7 +1428,7 @@ public void shouldRenderAvgWithMultipleArgsAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderMaxAggregationExpression() { + void shouldRenderMaxAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("quizzes").max()).as("quizMax") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1213,7 +1437,7 @@ public void shouldRenderMaxAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderMaxWithMultipleArgsAggregationExpression() { + void shouldRenderMaxWithMultipleArgsAggregationExpression() { Document agg = 
project().and(ArithmeticOperators.valueOf("final").max().and("midterm")).as("examMax") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1222,7 +1446,7 @@ public void shouldRenderMaxWithMultipleArgsAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderMinAggregationExpression() { + void shouldRenderMinAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("quizzes").min()).as("quizMin") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1231,7 +1455,7 @@ public void shouldRenderMinAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderMinWithMultipleArgsAggregationExpression() { + void shouldRenderMinWithMultipleArgsAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("final").min().and("midterm")).as("examMin") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1240,7 +1464,7 @@ public void shouldRenderMinWithMultipleArgsAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderStdDevPopAggregationExpression() { + void shouldRenderStdDevPopAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("scores").stdDevPop()).as("stdDev") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1249,7 +1473,7 @@ public void shouldRenderStdDevPopAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderStdDevSampAggregationExpression() { + void shouldRenderStdDevSampAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("scores").stdDevSamp()).as("stdDev") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1258,7 +1482,7 @@ public void shouldRenderStdDevSampAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderCmpAggregationExpression() { + void shouldRenderCmpAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").compareToValue(250)).as("cmp250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1267,7 +1491,7 @@ public void 
shouldRenderCmpAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderEqAggregationExpression() { + void shouldRenderEqAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").equalToValue(250)).as("eq250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1275,8 +1499,17 @@ public void shouldRenderEqAggregationExpression() { assertThat(agg).isEqualTo(Document.parse("{ $project: { eq250: { $eq: [\"$qty\", 250]} } }")); } + @Test // DATAMONGO-2513 + void shouldRenderEqAggregationExpressionWithListComparison() { + + Document agg = project().and(ComparisonOperators.valueOf("qty").equalToValue(Arrays.asList(250))).as("eq250") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { eq250: { $eq: [\"$qty\", [250]]} } }")); + } + @Test // DATAMONGO-1536 - public void shouldRenderGtAggregationExpression() { + void shouldRenderGtAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").greaterThanValue(250)).as("gt250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1285,7 +1518,7 @@ public void shouldRenderGtAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderGteAggregationExpression() { + void shouldRenderGteAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").greaterThanEqualToValue(250)).as("gte250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1294,7 +1527,7 @@ public void shouldRenderGteAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLtAggregationExpression() { + void shouldRenderLtAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").lessThanValue(250)).as("lt250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1303,7 +1536,7 @@ public void shouldRenderLtAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLteAggregationExpression() { + void shouldRenderLteAggregationExpression() { 
Document agg = project().and(ComparisonOperators.valueOf("qty").lessThanEqualToValue(250)).as("lte250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1312,7 +1545,7 @@ public void shouldRenderLteAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderNeAggregationExpression() { + void shouldRenderNeAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").notEqualToValue(250)).as("ne250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1321,7 +1554,7 @@ public void shouldRenderNeAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLogicAndAggregationExpression() { + void shouldRenderLogicAndAggregationExpression() { Document agg = project() .and(BooleanOperators.valueOf(ComparisonOperators.valueOf("qty").greaterThanValue(100)) @@ -1333,7 +1566,7 @@ public void shouldRenderLogicAndAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLogicOrAggregationExpression() { + void shouldRenderLogicOrAggregationExpression() { Document agg = project() .and(BooleanOperators.valueOf(ComparisonOperators.valueOf("qty").greaterThanValue(250)) @@ -1345,7 +1578,7 @@ public void shouldRenderLogicOrAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderNotAggregationExpression() { + void shouldRenderNotAggregationExpression() { Document agg = project().and(BooleanOperators.not(ComparisonOperators.valueOf("qty").greaterThanValue(250))) .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1354,11 +1587,10 @@ public void shouldRenderNotAggregationExpression() { } @Test // DATAMONGO-1540 - public void shouldRenderMapAggregationExpression() { + void shouldRenderMapAggregationExpression() { Document agg = Aggregation.project() - .and(VariableOperators.mapItemsOf("quizzes").as("grade") - .andApply(AggregationFunctionExpressions.ADD.of(field("grade"), 2))) + 
.and(VariableOperators.mapItemsOf("quizzes").as("grade").andApply(ArithmeticOperators.valueOf("grade").add(2))) .as("adjustedGrades").toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo(Document.parse( @@ -1366,19 +1598,19 @@ public void shouldRenderMapAggregationExpression() { } @Test // DATAMONGO-1540 - public void shouldRenderMapAggregationExpressionOnExpression() { + void shouldRenderMapAggregationExpressionOnExpression() { Document agg = Aggregation.project() - .and(VariableOperators.mapItemsOf(AggregationFunctionExpressions.SIZE.of("foo")).as("grade") - .andApply(AggregationFunctionExpressions.ADD.of(field("grade"), 2))) + .and(VariableOperators.mapItemsOf(ArrayOperators.arrayOf("foo").length()).as("grade") + .andApply(ArithmeticOperators.valueOf("grade").add(2))) .as("adjustedGrades").toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo(Document.parse( - "{ $project:{ adjustedGrades:{ $map: { input: { $size : [\"foo\"]}, as: \"grade\",in: { $add: [ \"$$grade\", 2 ] }}}}}")); + "{ $project:{ adjustedGrades:{ $map: { input: { $size : \"$foo\"}, as: \"grade\",in: { $add: [ \"$$grade\", 2 ] }}}}}")); } @Test // DATAMONGO-861, DATAMONGO-1542 - public void shouldRenderIfNullConditionAggregationExpression() { + void shouldRenderIfNullConditionAggregationExpression() { Document agg = project().and( ConditionalOperators.ifNull(ArrayOperators.arrayOf("array").elementAt(1)).then("a more sophisticated value")) @@ -1389,7 +1621,7 @@ public void shouldRenderIfNullConditionAggregationExpression() { } @Test // DATAMONGO-1542 - public void shouldRenderIfNullValueAggregationExpression() { + void shouldRenderIfNullValueAggregationExpression() { Document agg = project() .and(ConditionalOperators.ifNull("field").then(ArrayOperators.arrayOf("array").elementAt(1))).as("result") @@ -1400,7 +1632,7 @@ public void shouldRenderIfNullValueAggregationExpression() { } @Test // DATAMONGO-861, DATAMONGO-1542 - public void 
fieldReplacementIfNullShouldRenderCorrectly() { + void fieldReplacementIfNullShouldRenderCorrectly() { Document agg = project().and(ConditionalOperators.ifNull("optional").thenValueOf("$never-null")).as("result") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1410,52 +1642,48 @@ public void fieldReplacementIfNullShouldRenderCorrectly() { } @Test // DATAMONGO-1538 - public void shouldRenderLetExpressionCorrectly() { + void shouldRenderLetExpressionCorrectly() { Document agg = Aggregation.project() .and(VariableOperators - .define( - newVariable("total") - .forExpression(AggregationFunctionExpressions.ADD.of(Fields.field("price"), Fields.field("tax"))), + .define(newVariable("total").forExpression(ArithmeticOperators.valueOf("price").add("tax")), newVariable("discounted") .forExpression(ConditionalOperators.Cond.when("applyDiscount").then(0.9D).otherwise(1.0D))) - .andApply(AggregationFunctionExpressions.MULTIPLY.of(Fields.field("total"), Fields.field("discounted")))) // + .andApply(ArithmeticOperators.valueOf("total").multiplyBy("discounted"))) // .as("finalTotal").toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo(Document.parse("{ $project:{ \"finalTotal\" : { \"$let\": {" + // - "\"vars\": {" + // - "\"total\": { \"$add\": [ \"$price\", \"$tax\" ] }," + // - "\"discounted\": { \"$cond\": { \"if\": \"$applyDiscount\", \"then\": 0.9, \"else\": 1.0 } }" + // - "}," + // - "\"in\": { \"$multiply\": [ \"$$total\", \"$$discounted\" ] }" + // + "\"vars\": {" + // + "\"total\": { \"$add\": [ \"$price\", \"$tax\" ] }," + // + "\"discounted\": { \"$cond\": { \"if\": \"$applyDiscount\", \"then\": 0.9, \"else\": 1.0 } }" + // + "}," + // + "\"in\": { \"$multiply\": [ \"$$total\", \"$$discounted\" ] }" + // "}}}}")); } @Test // DATAMONGO-1538 - public void shouldRenderLetExpressionCorrectlyWhenUsingLetOnProjectionBuilder() { + void shouldRenderLetExpressionCorrectlyWhenUsingLetOnProjectionBuilder() { - ExpressionVariable var1 = newVariable("total") - 
.forExpression(AggregationFunctionExpressions.ADD.of(Fields.field("price"), Fields.field("tax"))); + ExpressionVariable var1 = newVariable("total").forExpression(ArithmeticOperators.valueOf("price").add("tax")); ExpressionVariable var2 = newVariable("discounted") .forExpression(ConditionalOperators.Cond.when("applyDiscount").then(0.9D).otherwise(1.0D)); Document agg = Aggregation.project().and("foo") - .let(Arrays.asList(var1, var2), - AggregationFunctionExpressions.MULTIPLY.of(Fields.field("total"), Fields.field("discounted"))) - .as("finalTotal").toDocument(Aggregation.DEFAULT_CONTEXT); + .let(Arrays.asList(var1, var2), ArithmeticOperators.valueOf("total").multiplyBy("discounted")).as("finalTotal") + .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo(Document.parse("{ $project:{ \"finalTotal\" : { \"$let\": {" + // - "\"vars\": {" + // - "\"total\": { \"$add\": [ \"$price\", \"$tax\" ] }," + // - "\"discounted\": { \"$cond\": { \"if\": \"$applyDiscount\", \"then\": 0.9, \"else\": 1.0 } }" + // - "}," + // - "\"in\": { \"$multiply\": [ \"$$total\", \"$$discounted\" ] }" + // + "\"vars\": {" + // + "\"total\": { \"$add\": [ \"$price\", \"$tax\" ] }," + // + "\"discounted\": { \"$cond\": { \"if\": \"$applyDiscount\", \"then\": 0.9, \"else\": 1.0 } }" + // + "}," + // + "\"in\": { \"$multiply\": [ \"$$total\", \"$$discounted\" ] }" + // "}}}}")); } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfBytesCorrectly() { + void shouldRenderIndexOfBytesCorrectly() { Document agg = project().and(StringOperators.valueOf("item").indexOf("foo")).as("byteLocation") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1465,9 +1693,11 @@ public void shouldRenderIndexOfBytesCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfBytesWithRangeCorrectly() { + void shouldRenderIndexOfBytesWithRangeCorrectly() { - Document agg = project().and(StringOperators.valueOf("item").indexOf("foo").within(new Range(5L, 9L))) + Document agg = project() + 
.and(StringOperators.valueOf("item").indexOf("foo") + .within(Range.from(Bound.inclusive(5L)).to(Bound.exclusive(9L)))) .as("byteLocation").toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).containsEntry("$project.byteLocation.$indexOfBytes.[2]", 5L) @@ -1475,7 +1705,7 @@ public void shouldRenderIndexOfBytesWithRangeCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfCPCorrectly() { + void shouldRenderIndexOfCPCorrectly() { Document agg = project().and(StringOperators.valueOf("item").indexOfCP("foo")).as("cpLocation") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1484,9 +1714,11 @@ public void shouldRenderIndexOfCPCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfCPWithRangeCorrectly() { + void shouldRenderIndexOfCPWithRangeCorrectly() { - Document agg = project().and(StringOperators.valueOf("item").indexOfCP("foo").within(new Range(5L, 9L))) + Document agg = project() + .and(StringOperators.valueOf("item").indexOfCP("foo") + .within(Range.from(Bound.inclusive(5L)).to(Bound.exclusive(9L)))) .as("cpLocation").toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).containsEntry("$project.cpLocation.$indexOfCP.[2]", 5L) @@ -1494,7 +1726,7 @@ public void shouldRenderIndexOfCPWithRangeCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderSplitCorrectly() { + void shouldRenderSplitCorrectly() { Document agg = project().and(StringOperators.valueOf("city").split(", ")).as("city_state") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1503,7 +1735,7 @@ public void shouldRenderSplitCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderStrLenBytesCorrectly() { + void shouldRenderStrLenBytesCorrectly() { Document agg = project().and(StringOperators.valueOf("name").length()).as("length") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1512,7 +1744,7 @@ public void shouldRenderStrLenBytesCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderStrLenCPCorrectly() { + void 
shouldRenderStrLenCPCorrectly() { Document agg = project().and(StringOperators.valueOf("name").lengthCP()).as("length") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1521,7 +1753,7 @@ public void shouldRenderStrLenCPCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderSubstrCPCorrectly() { + void shouldRenderSubstrCPCorrectly() { Document agg = project().and(StringOperators.valueOf("quarter").substringCP(0, 2)).as("yearSubstring") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1530,8 +1762,38 @@ public void shouldRenderSubstrCPCorrectly() { .isEqualTo(Document.parse("{ $project : { yearSubstring: { $substrCP: [ \"$quarter\", 0, 2 ] } } }")); } + @Test // GH-3725 + void shouldRenderRegexFindCorrectly() { + + Document agg = project().and(StringOperators.valueOf("field1").regexFind("e")).as("regex") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project : { regex: { $regexFind: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllCorrectly() { + + Document agg = project().and(StringOperators.valueOf("field1").regexFindAll("e")).as("regex") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project : { regex: { $regexFindAll: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); + } + + @Test // GH-3725 + void shouldRenderRegexMatchCorrectly() { + + Document agg = project().and(StringOperators.valueOf("field1").regexMatch("e")).as("regex") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project : { regex: { $regexMatch: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); + } + @Test // DATAMONGO-1548 - public void shouldRenderIndexOfArrayCorrectly() { + void shouldRenderIndexOfArrayCorrectly() { Document agg = project().and(ArrayOperators.arrayOf("items").indexOf(2)).as("index") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1540,7 +1802,7 @@ public 
void shouldRenderIndexOfArrayCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderRangeCorrectly() { + void shouldRenderRangeCorrectly() { Document agg = project().and(ArrayOperators.RangeOperator.rangeStartingAt(0L).to("distance").withStepSize(25L)) .as("rest_stops").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1551,7 +1813,7 @@ public void shouldRenderRangeCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderReverseArrayCorrectly() { + void shouldRenderReverseArrayCorrectly() { Document agg = project().and(ArrayOperators.arrayOf("favorites").reverse()).as("reverseFavorites") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1560,7 +1822,7 @@ public void shouldRenderReverseArrayCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderReduceWithSimpleObjectCorrectly() { + void shouldRenderReduceWithSimpleObjectCorrectly() { Document agg = project() .and(ArrayOperators.arrayOf("probabilityArr") @@ -1572,7 +1834,7 @@ public void shouldRenderReduceWithSimpleObjectCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderReduceWithComplexObjectCorrectly() { + void shouldRenderReduceWithComplexObjectCorrectly() { PropertyExpression sum = PropertyExpression.property("sum").definedAs( ArithmeticOperators.valueOf(Variable.VALUE.referringTo("sum").getName()).add(Variable.THIS.getName())); @@ -1589,7 +1851,7 @@ public void shouldRenderReduceWithComplexObjectCorrectly() { } @Test // DATAMONGO-1843 - public void shouldRenderReduceWithInputAndInExpressionsCorrectly() { + void shouldRenderReduceWithInputAndInExpressionsCorrectly() { Document expected = Document.parse( "{ \"$project\" : { \"results\" : { \"$reduce\" : { \"input\" : { \"$slice\" : [\"$array\", 5] }, \"initialValue\" : \"\", \"in\" : { \"$concat\" : [\"$$value\", \"/\", \"$$this\"] } } } } }"); @@ -1610,7 +1872,7 @@ public void shouldRenderReduceWithInputAndInExpressionsCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderZipCorrectly() { + void 
shouldRenderZipCorrectly() { AggregationExpression elemAt0 = ArrayOperators.arrayOf("matrix").elementAt(0); AggregationExpression elemAt1 = ArrayOperators.arrayOf("matrix").elementAt(1); @@ -1625,7 +1887,7 @@ public void shouldRenderZipCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderInCorrectly() { + void shouldRenderInCorrectly() { Document agg = project().and(ArrayOperators.arrayOf("in_stock").containsValue("bananas")).as("has_bananas") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1635,7 +1897,7 @@ public void shouldRenderInCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIsoDayOfWeekCorrectly() { + void shouldRenderIsoDayOfWeekCorrectly() { Document agg = project().and(DateOperators.dateOf("birthday").isoDayOfWeek()).as("dayOfWeek") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1643,8 +1905,19 @@ public void shouldRenderIsoDayOfWeekCorrectly() { assertThat(agg).isEqualTo(Document.parse("{ $project : { dayOfWeek: { $isoDayOfWeek: \"$birthday\" } } }")); } + @Test // DATAMONGO-1834 + void shouldRenderIsoDayOfWeekWithTimezoneCorrectly() { + + Document agg = project() + .and(DateOperators.dateOf("birthday").withTimezone(Timezone.valueOf("America/Chicago")).isoDayOfWeek()) + .as("dayOfWeek").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { dayOfWeek: { $isoDayOfWeek: { \"date\" : \"$birthday\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + @Test // DATAMONGO-1548 - public void shouldRenderIsoWeekCorrectly() { + void shouldRenderIsoWeekCorrectly() { Document agg = project().and(DateOperators.dateOf("date").isoWeek()).as("weekNumber") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1652,8 +1925,19 @@ public void shouldRenderIsoWeekCorrectly() { assertThat(agg).isEqualTo(Document.parse("{ $project : { weekNumber: { $isoWeek: \"$date\" } } }")); } + @Test // DATAMONGO-1834 + void shouldRenderIsoWeekWithTimezoneCorrectly() { + + Document agg = project() + 
.and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).isoWeek()).as("weekNumber") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { weekNumber: { $isoWeek: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + @Test // DATAMONGO-1548 - public void shouldRenderIsoWeekYearCorrectly() { + void shouldRenderIsoWeekYearCorrectly() { Document agg = project().and(DateOperators.dateOf("date").isoWeekYear()).as("yearNumber") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1661,15 +1945,26 @@ public void shouldRenderIsoWeekYearCorrectly() { assertThat(agg).isEqualTo(Document.parse("{ $project : { yearNumber: { $isoWeekYear: \"$date\" } } }")); } + @Test // DATAMONGO-1834 + void shouldRenderIsoWeekYearWithTimezoneCorrectly() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).isoWeekYear()) + .as("yearNumber").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { yearNumber: { $isoWeekYear: { \"date\" : \"$date\", \"timezone\" : \"America/Chicago\" } } } } }")); + } + @Test // DATAMONGO-1548 - public void shouldRenderSwitchCorrectly() { + void shouldRenderSwitchCorrectly() { String expected = "$switch:\n" + // "{\n" + // " branches: [\n" + // " {\n" + // " case: { $gte : [ { $avg : \"$scores\" }, 90 ] },\n" + // - " then: \"Doing great!\"\n" + // + " then: \"Doing great\"\n" + // " },\n" + // " {\n" + // " case: { $and : [ { $gte : [ { $avg : \"$scores\" }, 80 ] },\n" + // @@ -1687,7 +1982,7 @@ public void shouldRenderSwitchCorrectly() { CaseOperator cond1 = CaseOperator .when(ComparisonOperators.Gte.valueOf(AccumulatorOperators.Avg.avgOf("scores")).greaterThanEqualToValue(90)) - .then("Doing great!"); + .then("Doing great"); CaseOperator cond2 = CaseOperator .when(BooleanOperators.And.and( 
ComparisonOperators.Gte.valueOf(AccumulatorOperators.Avg.avgOf("scores")).greaterThanEqualToValue(80), @@ -1704,15 +1999,423 @@ public void shouldRenderSwitchCorrectly() { } @Test // DATAMONGO-1548 - public void shouldTypeCorrectly() { + void shouldTypeCorrectly() { Document agg = project().and(DataTypeOperators.Type.typeOf("a")).as("a").toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo(Document.parse("{ $project : { a: { $type: \"$a\" } } }")); } - private static Document exctractOperation(String field, Document fromProjectClause) { + @Test // DATAMONGO-1834 + void shouldRenderDateFromPartsWithJustTheYear() { + + Document agg = project().and(DateOperators.dateFromParts().year(2018)).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { newDate: { $dateFromParts: { year : 2018 } } } }")); + } + + @Test // DATAMONGO-1834, DATAMONGO-2671 + void shouldRenderDateFromParts() { + + Document agg = project() + .and(DateOperators.dateFromParts().year(2018).month(3).day(23).hour(14).minute(25).second(10).millisecond(2)) + .as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { newDate: { $dateFromParts: { year : 2018, month : 3, day : 23, hour : 14, minute : 25, second : 10, millisecond : 2 } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateFromPartsWithTimezone() { + + Document agg = project() + .and(DateOperators.dateFromParts().withTimezone(Timezone.valueOf("America/Chicago")).year(2018)).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project : { newDate: { $dateFromParts: { year : 2018, timezone : \"America/Chicago\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderIsoDateFromPartsWithJustTheYear() { + + Document agg = project().and(DateOperators.dateFromParts().isoWeekYear(2018)).as("newDate") + 
.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { newDate: { $dateFromParts: { isoWeekYear : 2018 } } } }")); + } + + @Test // DATAMONGO-1834, DATAMONGO-2671 + void shouldRenderIsoDateFromParts() { + + Document agg = project().and(DateOperators.dateFromParts().isoWeekYear(2018).isoWeek(12).isoDayOfWeek(5).hour(14) + .minute(30).second(42).millisecond(2)).as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { newDate: { $dateFromParts: { isoWeekYear : 2018, isoWeek : 12, isoDayOfWeek : 5, hour : 14, minute : 30, second : 42, millisecond : 2 } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderIsoDateFromPartsWithTimezone() { + + Document agg = project() + .and(DateOperators.dateFromParts().withTimezone(Timezone.valueOf("America/Chicago")).isoWeekYear(2018)) + .as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { newDate: { $dateFromParts: { isoWeekYear : 2018, timezone : \"America/Chicago\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateToParts() { + + Document agg = project().and(DateOperators.dateOf("date").toParts()).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { newDate: { $dateToParts: { date : \"$date\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateToIsoParts() { + + Document agg = project().and(DateOperators.dateOf("date").toParts().iso8601()).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo( + Document.parse("{ $project : { newDate: { $dateToParts: { date : \"$date\", iso8601 : true } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateToPartsWithTimezone() { + + Document agg = project() + .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).toParts()).as("newDate") + 
.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project : { newDate: { $dateToParts: { date : \"$date\", timezone : \"America/Chicago\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateFromString() { + + Document agg = project().and(DateOperators.dateFromString("2017-02-08T12:10:40.787")).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document + .parse("{ $project : { newDate: { $dateFromString: { dateString : \"2017-02-08T12:10:40.787\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateFromStringWithFieldReference() { + + Document agg = project().and(DateOperators.dateOf("date").fromString()).as("newDate") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg) + .isEqualTo(Document.parse("{ $project : { newDate: { $dateFromString: { dateString : \"$date\" } } } }")); + } + + @Test // DATAMONGO-1834 + void shouldRenderDateFromStringWithTimezone() { + + Document agg = project() + .and(DateOperators.dateFromString("2017-02-08T12:10:40.787").withTimezone(Timezone.valueOf("America/Chicago"))) + .as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { newDate: { $dateFromString: { dateString : \"2017-02-08T12:10:40.787\", timezone : \"America/Chicago\" } } } }")); + } + + @Test // DATAMONGO-2047 + void shouldRenderDateFromStringWithFormat() { + + Document agg = project().and(DateOperators.dateFromString("2017-02-08T12:10:40.787").withFormat("dd/mm/yyyy")) + .as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse( + "{ $project : { newDate: { $dateFromString: { dateString : \"2017-02-08T12:10:40.787\", format : \"dd/mm/yyyy\" } } } }")); + } + + @Test // DATAMONGO-2200 + void typeProjectionShouldIncludeTopLevelFieldsOfType() { + + ProjectionOperation operation = Aggregation.project(Book.class); + + Document document = 
operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + + assertThat(projectClause) // + .hasSize(2) // + .containsEntry("title", 1) // + .containsEntry("author", 1); + } + + @Test // DATAMONGO-2200 + void typeProjectionShouldMapFieldNames() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + MongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + + Document document = Aggregation.project(BookRenamed.class) + .toDocument(new TypeBasedAggregationOperationContext(Book.class, mappingContext, new QueryMapper(converter))); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + + assertThat(projectClause) // + .hasSize(2) // + .containsEntry("ti_tl_e", 1) // + .containsEntry("author", 1); + } + + @Test // DATAMONGO-2200 + void typeProjectionShouldIncludeInterfaceProjectionValues() { + + ProjectionOperation operation = Aggregation.project(ProjectionInterface.class); + + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + + assertThat(projectClause) // + .hasSize(1) // + .containsEntry("title", 1); + } + + @Test // DATAMONGO-2200 + void typeProjectionShouldBeEmptyIfNoPropertiesFound() { + + ProjectionOperation operation = Aggregation.project(EmptyType.class); + + Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + Document projectClause = DocumentTestUtils.getAsDocument(document, PROJECT); + + assertThat(projectClause).isEmpty(); + } + + @Test // DATAMONGO-2312 + void simpleFieldReferenceAsArray() { + + org.bson.Document doc = Aggregation.newAggregation(project("x", "y", "someField").asArray("myArray")) + .toDocument("coll", Aggregation.DEFAULT_CONTEXT); + + assertThat(doc).isEqualTo(Document.parse( + "{\"aggregate\":\"coll\", \"pipeline\":[ { $project: { myArray: [ \"$x\", \"$y\", 
\"$someField\" ] } } ] }")); + } + + @Test // DATAMONGO-2312 + void mappedFieldReferenceAsArray() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + + org.bson.Document doc = Aggregation + .newAggregation(BookWithFieldAnnotation.class, project("title", "author").asArray("myArray")) + .toDocument("coll", new TypeBasedAggregationOperationContext(BookWithFieldAnnotation.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)))); + + assertThat(doc).isEqualTo(Document + .parse("{\"aggregate\":\"coll\", \"pipeline\":[ { $project: { myArray: [ \"$ti_t_le\", \"$author\" ] } } ] }")); + } + + @Test // DATAMONGO-2312 + void arrayWithNullValue() { + + Document doc = project() // + .andArrayOf(Fields.field("field-1"), null, "value").as("myArray") // + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(doc).isEqualTo(Document.parse("{ $project: { \"myArray\" : [ \"$field-1\", null, \"value\" ] } }")); + } + + @Test // DATAMONGO-2312 + void nestedArrayField() { + + Document doc = project("_id", "value") // + .andArrayOf(Fields.field("field-1"), "plain - string", ArithmeticOperators.valueOf("field-1").sum().and(10)) + .as("myArray") // + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(doc).isEqualTo(Document.parse( + "{ $project: { \"_id\" : 1, \"value\" : 1, \"myArray\" : [ \"$field-1\", \"plain - string\", { \"$sum\" : [\"$field-1\", 10] } ] } } ] }")); + } + + @Test // DATAMONGO-2312 + void nestedMappedFieldReferenceInArrayField() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + + Document doc = project("author") // + .andArrayOf(Fields.field("title"), "plain - string", ArithmeticOperators.valueOf("title").sum().and(10)) + .as("myArray") // + .toDocument(new TypeBasedAggregationOperationContext(BookWithFieldAnnotation.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)))); + + 
assertThat(doc).isEqualTo(Document.parse( + "{ $project: { \"author\" : 1, \"myArray\" : [ \"$ti_t_le\", \"plain - string\", { \"$sum\" : [\"$ti_t_le\", 10] } ] } } ] }")); + } + + @Test // GH-4473 + void shouldRenderPercentileAggregationExpression() { + + Document agg = project() + .and(ArithmeticOperators.valueOf("score").percentile(0.3, 0.9)).as("scorePercentiles") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { scorePercentiles: { $percentile: { input: \"$score\", method: \"approximate\", p: [0.3, 0.9] } }} } }")); + } + + @Test // GH-4473 + void shouldRenderPercentileWithMultipleArgsAggregationExpression() { + + Document agg = project() + .and(ArithmeticOperators.valueOf("scoreOne").percentile(0.4).and("scoreTwo")).as("scorePercentiles") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project: { scorePercentiles: { $percentile: { input: [\"$scoreOne\", \"$scoreTwo\"], method: \"approximate\", p: [0.4] } }} } }")); + } + + @Test // GH-4472 + void shouldRenderMedianAggregationExpressions() { + + Document singleArgAgg = project() + .and(ArithmeticOperators.valueOf("score").median()).as("medianValue") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(singleArgAgg).isEqualTo(Document.parse("{ $project: { medianValue: { $median: { input: \"$score\", method: \"approximate\" } }} } }")); + + Document multipleArgsAgg = project() + .and(ArithmeticOperators.valueOf("score").median().and("scoreTwo")).as("medianValue") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(multipleArgsAgg).isEqualTo(Document.parse("{ $project: { medianValue: { $median: { input: [\"$score\", \"$scoreTwo\"], method: \"approximate\" } }} } }")); + } + + private static Document extractOperation(String field, Document fromProjectClause) { return (Document) fromProjectClause.get(field); } + static class Book { + + String title; + Author author; + + public Book() {} + + public String 
getTitle() { + return this.title; + } + + public Author getAuthor() { + return this.author; + } + + public void setTitle(String title) { + this.title = title; + } + + public void setAuthor(Author author) { + this.author = author; + } + + public String toString() { + return "ProjectionOperationUnitTests.Book(title=" + this.getTitle() + ", author=" + this.getAuthor() + ")"; + } + } + + static class BookWithFieldAnnotation { + + @Field("ti_t_le") String title; + Author author; + + public String getTitle() { + return this.title; + } + + public Author getAuthor() { + return this.author; + } + + public void setTitle(String title) { + this.title = title; + } + + public void setAuthor(Author author) { + this.author = author; + } + + public String toString() { + return "ProjectionOperationUnitTests.BookWithFieldAnnotation(title=" + this.getTitle() + ", author=" + + this.getAuthor() + ")"; + } + } + + static class BookRenamed { + + @Field("ti_tl_e") String title; + Author author; + + public String getTitle() { + return this.title; + } + + public Author getAuthor() { + return this.author; + } + + public void setTitle(String title) { + this.title = title; + } + + public void setAuthor(Author author) { + this.author = author; + } + + public String toString() { + return "ProjectionOperationUnitTests.BookRenamed(title=" + this.getTitle() + ", author=" + this.getAuthor() + ")"; + } + } + + static class Author { + + String first; + String last; + String middle; + + public String getFirst() { + return this.first; + } + + public String getLast() { + return this.last; + } + + public String getMiddle() { + return this.middle; + } + + public void setFirst(String first) { + this.first = first; + } + + public void setLast(String last) { + this.last = last; + } + + public void setMiddle(String middle) { + this.middle = middle; + } + + public String toString() { + return "ProjectionOperationUnitTests.Author(first=" + this.getFirst() + ", last=" + this.getLast() + ", middle=" + + 
this.getMiddle() + ")"; + } + } + + interface ProjectionInterface { + String getTitle(); + } + + private static class EmptyType { + + } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationTests.java index 6054489b57..55d6bf3b60 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -29,10 +29,15 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.geo.Box; +import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.TestEntities; +import org.springframework.data.mongodb.core.Venue; +import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration test for aggregation via {@link 
org.springframework.data.mongodb.core.ReactiveMongoTemplate}. @@ -40,7 +45,7 @@ * @author Mark Paluch * @author Christoph Strobl */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("classpath:reactive-infrastructure.xml") public class ReactiveAggregationTests { @@ -61,11 +66,11 @@ public void cleanUp() { private void cleanDb() { - StepVerifier - .create(reactiveMongoTemplate.dropCollection(INPUT_COLLECTION) // - .then(reactiveMongoTemplate.dropCollection(OUTPUT_COLLECTION)) // - .then(reactiveMongoTemplate.dropCollection(Product.class)) // - .then(reactiveMongoTemplate.dropCollection(City.class))) // + reactiveMongoTemplate.dropCollection(INPUT_COLLECTION) // + .then(reactiveMongoTemplate.dropCollection(OUTPUT_COLLECTION)) // + .then(reactiveMongoTemplate.dropCollection(Product.class)) // + .then(reactiveMongoTemplate.dropCollection(City.class)) // + .then(reactiveMongoTemplate.dropCollection(Venue.class)).as(StepVerifier::create) // .verifyComplete(); } @@ -73,7 +78,7 @@ private void cleanDb() { public void expressionsInProjectionExampleShowcase() { Product product = new Product("P1", "A", 1.99, 3, 0.05, 0.19); - StepVerifier.create(reactiveMongoTemplate.insert(product)).expectNextCount(1).verifyComplete(); + reactiveMongoTemplate.insert(product).as(StepVerifier::create).expectNextCount(1).verifyComplete(); double shippingCosts = 1.2; @@ -82,7 +87,7 @@ public void expressionsInProjectionExampleShowcase() { .andExpression("netPrice * 10", shippingCosts).as("salesPrice") // ); - StepVerifier.create(reactiveMongoTemplate.aggregate(agg, Document.class)).consumeNextWith(actual -> { + reactiveMongoTemplate.aggregate(agg, Document.class).as(StepVerifier::create).consumeNextWith(actual -> { assertThat(actual).containsEntry("_id", product.id); assertThat(actual).containsEntry("name", product.name); @@ -98,13 +103,13 @@ public void shouldProjectMultipleDocuments() { City braunschweig = new City("Braunschweig", 102); City weinheim 
= new City("Weinheim", 103); - StepVerifier.create(reactiveMongoTemplate.insertAll(Arrays.asList(dresden, linz, braunschweig, weinheim))) + reactiveMongoTemplate.insertAll(Arrays.asList(dresden, linz, braunschweig, weinheim)).as(StepVerifier::create) .expectNextCount(4).verifyComplete(); Aggregation agg = newAggregation( // match(where("population").lt(103))); - StepVerifier.create(reactiveMongoTemplate.aggregate(agg, "city", City.class).collectList()) + reactiveMongoTemplate.aggregate(agg, "city", City.class).collectList().as(StepVerifier::create) .consumeNextWith(actual -> { assertThat(actual).hasSize(3).contains(dresden, linz, braunschweig); }).verifyComplete(); @@ -118,14 +123,62 @@ public void shouldAggregateToOutCollection() { City braunschweig = new City("Braunschweig", 102); City weinheim = new City("Weinheim", 103); - StepVerifier.create(reactiveMongoTemplate.insertAll(Arrays.asList(dresden, linz, braunschweig, weinheim))) + reactiveMongoTemplate.insertAll(Arrays.asList(dresden, linz, braunschweig, weinheim)).as(StepVerifier::create) .expectNextCount(4).verifyComplete(); Aggregation agg = newAggregation( // out(OUTPUT_COLLECTION)); - StepVerifier.create(reactiveMongoTemplate.aggregate(agg, "city", City.class)).expectNextCount(4).verifyComplete(); - StepVerifier.create(reactiveMongoTemplate.find(new Query(), City.class, OUTPUT_COLLECTION)).expectNextCount(4) + reactiveMongoTemplate.aggregate(agg, "city", City.class).as(StepVerifier::create).expectNextCount(4) + .verifyComplete(); + reactiveMongoTemplate.find(new Query(), City.class, OUTPUT_COLLECTION).as(StepVerifier::create).expectNextCount(4) + .verifyComplete(); + } + + @Test // DATAMONGO-1986 + public void runMatchOperationCriteriaThroughQueryMapperForTypedAggregation() { + + reactiveMongoTemplate.insertAll(TestEntities.geolocation().newYork()).as(StepVerifier::create).expectNextCount(12) + .verifyComplete(); + + Aggregation aggregation = newAggregation(Venue.class, + match(Criteria.where("location") + 
.within(new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404)))), + project("id", "location", "name")); + + reactiveMongoTemplate.aggregate(aggregation, "newyork", Document.class).as(StepVerifier::create).expectNextCount(4) .verifyComplete(); } + + @Test // DATAMONGO-1986 + public void runMatchOperationCriteriaThroughQueryMapperForUntypedAggregation() { + + reactiveMongoTemplate.insertAll(TestEntities.geolocation().newYork()).as(StepVerifier::create).expectNextCount(12) + .verifyComplete(); + + Aggregation aggregation = newAggregation( + match(Criteria.where("location") + .within(new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404)))), + project("id", "location", "name")); + + reactiveMongoTemplate.aggregate(aggregation, "newyork", Document.class).as(StepVerifier::create).expectNextCount(4) + .verifyComplete(); + } + + @Test // DATAMONGO-2356 + public void skipOutputDoesNotReadBackAggregationResults() { + + Product product = new Product("P1", "A", 1.99, 3, 0.05, 0.19); + reactiveMongoTemplate.insert(product).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + double shippingCosts = 1.2; + + TypedAggregation agg = newAggregation(Product.class, // + project("name", "netPrice") // + .andExpression("netPrice * 10", shippingCosts).as("salesPrice") // + ).withOptions(AggregationOptions.builder().skipOutput().build()); + + reactiveMongoTemplate.aggregate(agg, Document.class).as(StepVerifier::create).verifyComplete(); + } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationUnitTests.java index 2c4ecad130..68afdb4bfa 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,20 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; import org.springframework.data.mongodb.core.ReactiveMongoTemplate; import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; @@ -40,82 +43,83 @@ * @author Christoph Strobl * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) -public class ReactiveAggregationUnitTests { +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class 
ReactiveAggregationUnitTests { - static final String INPUT_COLLECTION = "collection-1"; + private static final String INPUT_COLLECTION = "collection-1"; - ReactiveMongoTemplate template; - ReactiveMongoDatabaseFactory factory; + private ReactiveMongoTemplate template; + private ReactiveMongoDatabaseFactory factory; @Mock MongoClient mongoClient; @Mock MongoDatabase db; @Mock MongoCollection collection; @Mock AggregatePublisher publisher; - @Before - public void setUp() { + @BeforeEach + void setUp() { factory = new SimpleReactiveMongoDatabaseFactory(mongoClient, "db"); template = new ReactiveMongoTemplate(factory); when(mongoClient.getDatabase("db")).thenReturn(db); - when(db.getCollection(INPUT_COLLECTION)).thenReturn(collection); - when(collection.aggregate(any())).thenReturn(publisher); + when(db.getCollection(eq(INPUT_COLLECTION), any(Class.class))).thenReturn(collection); + when(collection.aggregate(anyList(), any(Class.class))).thenReturn(publisher); when(publisher.allowDiskUse(any())).thenReturn(publisher); - when(publisher.useCursor(any())).thenReturn(publisher); when(publisher.collation(any())).thenReturn(publisher); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1646 - public void shouldHandleMissingInputCollection() { - template.aggregate(newAggregation(), (String) null, TagCount.class); + @Test // DATAMONGO-1646 + void shouldHandleMissingInputCollection() { + assertThatIllegalArgumentException() + .isThrownBy(() -> template.aggregate(newAggregation(), (String) null, TagCount.class)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1646 - public void shouldHandleMissingAggregationPipeline() { - template.aggregate(null, INPUT_COLLECTION, TagCount.class); + @Test // DATAMONGO-1646 + void shouldHandleMissingAggregationPipeline() { + assertThatIllegalArgumentException().isThrownBy(() -> template.aggregate(null, INPUT_COLLECTION, TagCount.class)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1646 - 
public void shouldHandleMissingEntityClass() { - template.aggregate(newAggregation(), INPUT_COLLECTION, null); + @Test // DATAMONGO-1646 + void shouldHandleMissingEntityClass() { + assertThatIllegalArgumentException().isThrownBy(() -> template.aggregate(newAggregation(), INPUT_COLLECTION, null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1646 - public void errorsOnCursorBatchSizeUsage() { - - template.aggregate( - newAggregation(Product.class, // - project("name", "netPrice")) // - .withOptions(AggregationOptions.builder().cursorBatchSize(10).build()), - INPUT_COLLECTION, TagCount.class).subscribe(); + @Test // DATAMONGO-1646 + void errorsOnExplainUsage() { + assertThatIllegalArgumentException().isThrownBy(() -> template.aggregate(newAggregation(Product.class, // + project("name", "netPrice")) // + .withOptions(AggregationOptions.builder().explain(true).build()), + INPUT_COLLECTION, TagCount.class).subscribe()); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1646 - public void errorsOnExplainUsage() { + @Test // DATAMONGO-1646, DATAMONGO-1311 + void appliesBatchSizeWhenPresent() { - template - .aggregate(newAggregation(Product.class, // - project("name", "netPrice")) // - .withOptions(AggregationOptions.builder().explain(true).build()), - INPUT_COLLECTION, TagCount.class) - .subscribe(); + when(publisher.batchSize(anyInt())).thenReturn(publisher); + + AggregationOptions options = AggregationOptions.builder().cursorBatchSize(1234).build(); + template.aggregate(newAggregation(Product.class, // + project("name", "netPrice")) // + .withOptions(options), + INPUT_COLLECTION, TagCount.class).subscribe(); + + verify(publisher).batchSize(1234); } @Test // DATAMONGO-1646 - public void appliesCollationCorrectlyWhenPresent() { + void appliesCollationCorrectlyWhenPresent() { - template.aggregate( - newAggregation(Product.class, // - project("name", "netPrice")) // - 
.withOptions(AggregationOptions.builder().collation(Collation.of("en_US")).build()), + template.aggregate(newAggregation(Product.class, // + project("name", "netPrice")) // + .withOptions(AggregationOptions.builder().collation(Collation.of("en_US")).build()), INPUT_COLLECTION, TagCount.class).subscribe(); verify(publisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("en_US").build())); } @Test // DATAMONGO-1646 - public void doesNotSetCollationWhenNotPresent() { + void doesNotSetCollationWhenNotPresent() { template.aggregate(newAggregation(Product.class, // project("name", "netPrice")) // @@ -126,15 +130,12 @@ public void doesNotSetCollationWhenNotPresent() { } @Test // DATAMONGO-1646 - public void appliesDiskUsageCorrectly() { - - template - .aggregate( - newAggregation(Product.class, // - project("name", "netPrice")) // - .withOptions(AggregationOptions.builder().allowDiskUse(true).build()), - INPUT_COLLECTION, TagCount.class) - .subscribe(); + void appliesDiskUsageCorrectly() { + + template.aggregate(newAggregation(Product.class, // + project("name", "netPrice")) // + .withOptions(AggregationOptions.builder().allowDiskUse(true).build()), + INPUT_COLLECTION, TagCount.class).subscribe(); verify(publisher).allowDiskUse(eq(true)); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/RedactOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/RedactOperationUnitTests.java new file mode 100644 index 0000000000..24566089e7 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/RedactOperationUnitTests.java @@ -0,0 +1,105 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link RedactOperation}. 
+ * + * @author Christoph Strobl + */ +class RedactOperationUnitTests { + + Document expected = new Document("$redact", + new Document("$cond", new Document("if", new Document("$eq", Arrays.asList("$level", 5))) + .append("then", "$$PRUNE").append("else", "$$DESCEND"))); + Document expectedMapped = new Document("$redact", + new Document("$cond", new Document("if", new Document("$eq", Arrays.asList("$le_v_el", 5))) + .append("then", "$$PRUNE").append("else", "$$DESCEND"))); + + @Test // DATAMONGO-931 + void errorsOnNullExpression() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new RedactOperation(null)); + } + + @Test // DATAMONGO-931 + void mapsAggregationExpressionCorrectly() { + + assertThat(new RedactOperation(ConditionalOperators.when(Criteria.where("level").is(5)) // + .then(RedactOperation.PRUNE) // + .otherwise(RedactOperation.DESCEND)).toDocument(contextFor(null))).isEqualTo(expected); + } + + @Test // DATAMONGO-931 + void mapsAggregationExpressionViaBuilderCorrectly() { + + assertThat(RedactOperation.builder().when(Criteria.where("level").is(5)) // + .thenPrune() // + .otherwiseDescend().build().toDocument(contextFor(null))).isEqualTo(expected); + } + + @Test // DATAMONGO-931 + void mapsTypedAggregationExpressionCorrectly() { + + assertThat(new RedactOperation(ConditionalOperators.when(Criteria.where("level").is(5)) // + .then(RedactOperation.PRUNE) // + .otherwise(RedactOperation.DESCEND)).toDocument(contextFor(DomainType.class))).isEqualTo(expectedMapped); + } + + static class DomainType { + + @Field("le_v_el") String level; + + public String getLevel() { + return this.level; + } + + public void setLevel(String level) { + this.level = level; + } + + public String toString() { + return "RedactOperationUnitTests.DomainType(level=" + this.getLevel() + ")"; + } + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + 
MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)).continueOnMissingFieldReference(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java index 889e2bd38a..23480dd390 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,10 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootDocumentOperation; /** @@ -27,57 +26,57 @@ * * @author Mark Paluch */ -public class ReplaceRootOperationUnitTests { +class ReplaceRootOperationUnitTests { - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1550 - public void rejectsNullField() { - new ReplaceRootOperation((Field) null); + @Test // DATAMONGO-1550 + void rejectsNullField() { + assertThatIllegalArgumentException().isThrownBy(() -> new ReplaceRootOperation((Field) null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1550 - public void rejectsNullExpression() { - new ReplaceRootOperation((AggregationExpression) null); + @Test // DATAMONGO-1550 + void rejectsNullExpression() { + assertThatIllegalArgumentException().isThrownBy(() -> new ReplaceRootOperation((AggregationExpression) null)); } @Test // DATAMONGO-1550 - public void shouldRenderCorrectly() { + void shouldRenderCorrectly() { ReplaceRootOperation operation = ReplaceRootDocumentOperation.builder() .withDocument(new Document("hello", "world")); Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(dbObject, is(Document.parse("{ $replaceRoot : { newRoot: { hello: \"world\" } } }"))); + assertThat(dbObject).isEqualTo(Document.parse("{ $replaceRoot : { newRoot: { hello: \"world\" } } }")); } @Test // DATAMONGO-1550 - public void 
shouldRenderExpressionCorrectly() { + void shouldRenderExpressionCorrectly() { ReplaceRootOperation operation = new ReplaceRootOperation(VariableOperators // .mapItemsOf("array") // .as("element") // - .andApply(AggregationFunctionExpressions.MULTIPLY.of("$$element", 10))); + .andApply(ArithmeticOperators.valueOf("$$element").multiplyBy(10))); Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(dbObject, is(Document.parse("{ $replaceRoot : { newRoot : { " - + "$map : { input : \"$array\" , as : \"element\" , in : { $multiply : [ \"$$element\" , 10]} } " + "} } }"))); + assertThat(dbObject).isEqualTo(Document.parse("{ $replaceRoot : { newRoot : { " + + "$map : { input : \"$array\" , as : \"element\" , in : { $multiply : [ \"$$element\" , 10]} } " + "} } }")); } @Test // DATAMONGO-1550 - public void shouldComposeDocument() { + void shouldComposeDocument() { ReplaceRootOperation operation = ReplaceRootDocumentOperation.builder().withDocument() // .andValue("value").as("key") // - .and(AggregationFunctionExpressions.MULTIPLY.of("$$element", 10)).as("multiply"); + .and(ArithmeticOperators.valueOf("$$element").multiplyBy(10)).as("multiply"); Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(dbObject, is(Document - .parse("{ $replaceRoot : { newRoot: { key: \"value\", multiply: { $multiply : [ \"$$element\" , 10]} } } }"))); + assertThat(dbObject).isEqualTo(Document + .parse("{ $replaceRoot : { newRoot: { key: \"value\", multiply: { $multiply : [ \"$$element\" , 10]} } } }")); } @Test // DATAMONGO-1550 - public void shouldComposeSubDocument() { + void shouldComposeSubDocument() { Document partialReplacement = new Document("key", "override").append("key2", "value2"); @@ -87,15 +86,16 @@ public void shouldComposeSubDocument() { Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(dbObject, is(Document.parse("{ $replaceRoot : { newRoot: { key: \"override\", key2: \"value2\"} } } 
}"))); + assertThat(dbObject) + .isEqualTo(Document.parse("{ $replaceRoot : { newRoot: { key: \"override\", key2: \"value2\"} } } }")); } @Test // DATAMONGO-1550 - public void shouldNotExposeFields() { + void shouldNotExposeFields() { ReplaceRootOperation operation = new ReplaceRootOperation(Fields.field("field")); - assertThat(operation.getFields().exposesNoFields(), is(true)); - assertThat(operation.getFields().exposesSingleFieldOnly(), is(false)); + assertThat(operation.getFields().exposesNoFields()).isTrue(); + assertThat(operation.getFields().exposesSingleFieldOnly()).isFalse(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java new file mode 100644 index 0000000000..9d8a1502e2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java @@ -0,0 +1,57 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ReplaceWithOperation}. 
+ * + * @author Christoph Strobl + */ +class ReplaceWithOperationUnitTests { + + @Test // DATAMONGO-2331 + void rejectsNullField() { + assertThatIllegalArgumentException().isThrownBy(() -> new ReplaceWithOperation(null)); + } + + @Test // DATAMONGO-2331 + void shouldRenderValueCorrectly() { + + ReplaceWithOperation operation = ReplaceWithOperation.replaceWithValue(new Document("hello", "world")); + Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(dbObject).isEqualTo(Document.parse("{ $replaceWith : { hello: \"world\" } }")); + } + + @Test // DATAMONGO-2331 + void shouldRenderExpressionCorrectly() { + + ReplaceWithOperation operation = ReplaceWithOperation.replaceWithValueOf(VariableOperators // + .mapItemsOf("array") // + .as("element") // + .andApply(ArithmeticOperators.valueOf("$$element").multiplyBy(10))); + + Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(dbObject).isEqualTo(Document.parse("{ $replaceWith : { " + + "$map : { input : \"$array\" , as : \"element\" , in : { $multiply : [ \"$$element\" , 10]} } " + "} }")); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SampleOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SampleOperationUnitTests.java index 6d4e82b059..718a084f37 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SampleOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SampleOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,10 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link SampleOperation}. @@ -31,14 +30,14 @@ public class SampleOperationUnitTests { private static final String SIZE = "size"; private static final String OP = "$sample"; - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1325 + @Test // DATAMONGO-1325 public void rejectsNegativeSample() { - new SampleOperation(-1L); + assertThatIllegalArgumentException().isThrownBy(() -> new SampleOperation(-1L)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1325 + @Test // DATAMONGO-1325 public void rejectsZeroSample() { - new SampleOperation(0L); + assertThatIllegalArgumentException().isThrownBy(() -> new SampleOperation(0L)); } @Test // DATAMONGO-1325 @@ -50,10 +49,10 @@ public void rendersSampleOperation() { Document sampleOperationDocument = sampleOperation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertNotNull(sampleOperationDocument.get(OP)); - assertThat(sampleOperationDocument.get(OP), is(instanceOf(Document.class))); + assertThat(sampleOperationDocument.get(OP)).isNotNull(); + assertThat(sampleOperationDocument.get(OP)).isInstanceOf(Document.class); Document sampleSizeDocument = sampleOperationDocument.get(OP, Document.class); - assertEquals(sampleSize, sampleSizeDocument.get(SIZE)); + assertThat(sampleSizeDocument.get(SIZE)).isEqualTo(sampleSize); } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ScriptOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ScriptOperatorsUnitTests.java new file mode 100644 index 0000000000..60a8a6b013 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ScriptOperatorsUnitTests.java @@ -0,0 +1,96 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.ScriptOperators.*; + +import java.util.Collections; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ScriptOperators}. 
+ * + * @author Christoph Strobl + */ +class ScriptOperatorsUnitTests { + + private static final String FUNCTION_BODY = "function(name) { return hex_md5(name) == \"15b0a220baa16331e8d80e15367677ad\" }"; + private static final Document EMPTY_ARGS_FUNCTION_DOCUMENT = new Document("body", FUNCTION_BODY) + .append("args", Collections.emptyList()).append("lang", "js"); + + private static final String INIT_FUNCTION = "function() { return { count: 0, sum: 0 } }"; + private static final String ACC_FUNCTION = "function(state, numCopies) { return { count: state.count + 1, sum: state.sum + numCopies } }"; + private static final String MERGE_FUNCTION = "function(state1, state2) { return { count: state1.count + state2.count, sum: state1.sum + state2.sum } }"; + private static final String FINALIZE_FUNCTION = "function(state) { return (state.sum / state.count) }"; + + private static final Document $ACCUMULATOR = Document.parse("{" + // + " $accumulator:" + // + " {" + // + " init: '" + INIT_FUNCTION + "'," + // + " accumulate: '" + ACC_FUNCTION + "'," + // + " accumulateArgs: [\"$copies\"]," + // + " merge: '" + MERGE_FUNCTION + "'," + // + " finalize: '" + FINALIZE_FUNCTION + "'," + // + " lang: \"js\"" + // + " }" + // + " }" + // + " }"); + + @Test // DATAMONGO-2623 + void functionWithoutArgsShouldBeRenderedCorrectly() { + + assertThat(function(FUNCTION_BODY).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo($function(EMPTY_ARGS_FUNCTION_DOCUMENT)); + } + + @Test // DATAMONGO-2623 + void functionWithArgsShouldBeRenderedCorrectly() { + + assertThat(function(FUNCTION_BODY).args("$name").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + $function(new Document(EMPTY_ARGS_FUNCTION_DOCUMENT).append("args", Collections.singletonList("$name")))); + } + + @Test // DATAMONGO-2623 + void accumulatorWithStringInput() { + + Accumulator accumulator = accumulatorBuilder() // + .init(INIT_FUNCTION) // + .accumulate(ACC_FUNCTION).accumulateArgs("$copies") // + .merge(MERGE_FUNCTION) 
// + .finalize(FINALIZE_FUNCTION); + + assertThat(accumulator.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo($ACCUMULATOR); + } + + @Test // DATAMONGO-2623 + void accumulatorWithFunctionInput() { + + Accumulator accumulator = accumulatorBuilder() // + .init(function(INIT_FUNCTION)) // + .accumulate(function(ACC_FUNCTION).args("$copies")) // + .merge(MERGE_FUNCTION) // + .finalize(FINALIZE_FUNCTION); + + assertThat(accumulator.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo($ACCUMULATOR); + } + + static Document $function(Document source) { + return new Document("$function", source); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SelectionOperatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SelectionOperatorUnitTests.java new file mode 100644 index 0000000000..1737d35215 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SelectionOperatorUnitTests.java @@ -0,0 +1,180 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +/** + * @author Christoph Strobl + */ +class SelectionOperatorUnitTests { + + @Test // GH-4139 + void bottomRenderedCorrectly() { + + Document document = SelectionOperators.Bottom.bottom().output(Fields.fields("playerId", "score")) + .sortBy(Sort.by(Direction.DESC, "score")).toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).isEqualTo(Document.parse(""" + { + $bottom: + { + output: [ "$playerId", "$score" ], + sortBy: { "score": -1 } + } + } + """)); + } + + @Test // GH-4139 + void bottomMapsFieldNamesCorrectly() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + RelaxedTypeBasedAggregationOperationContext aggregationContext = new RelaxedTypeBasedAggregationOperationContext( + Player.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); + + Document document = SelectionOperators.Bottom.bottom().output(Fields.fields("playerId", "score")) + .sortBy(Sort.by(Direction.DESC, "score")).toDocument(aggregationContext); + + assertThat(document).isEqualTo(Document.parse(""" + { + $bottom: + { + output: [ "$player_id", "$s_cor_e" ], + sortBy: { "s_cor_e": -1 } + } + } + """)); + } + + @Test // GH-4139 + void bottomNRenderedCorrectly() { + + Document document = SelectionOperators.Bottom.bottom(3).output(Fields.fields("playerId", "score")) + 
.sortBy(Sort.by(Direction.DESC, "score")).toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).isEqualTo(Document.parse(""" + { + $bottomN: + { + n : 3, + output: [ "$playerId", "$score" ], + sortBy: { "score": -1 } + } + } + """)); + } + + @Test // GH-4139 + void topMapsFieldNamesCorrectly() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + RelaxedTypeBasedAggregationOperationContext aggregationContext = new RelaxedTypeBasedAggregationOperationContext( + Player.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); + + Document document = SelectionOperators.Top.top().output(Fields.fields("playerId", "score")) + .sortBy(Sort.by(Direction.DESC, "score")).toDocument(aggregationContext); + + assertThat(document).isEqualTo(Document.parse(""" + { + $top: + { + output: [ "$player_id", "$s_cor_e" ], + sortBy: { "s_cor_e": -1 } + } + } + """)); + } + + @Test // GH-4139 + void topNRenderedCorrectly() { + + Document document = SelectionOperators.Top.top(3).output(Fields.fields("playerId", "score")) + .sortBy(Sort.by(Direction.DESC, "score")).toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(document).isEqualTo(Document.parse(""" + { + $topN: + { + n : 3, + output: [ "$playerId", "$score" ], + sortBy: { "score": -1 } + } + } + """)); + } + + @Test // GH-4139 + void firstNMapsFieldNamesCorrectly() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + RelaxedTypeBasedAggregationOperationContext aggregationContext = new RelaxedTypeBasedAggregationOperationContext( + Player.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); + + Document document = SelectionOperators.First.first(3).of("score").toDocument(aggregationContext); + + assertThat(document).isEqualTo(Document.parse(""" + { + $firstN: + { + n: 3, + input: "$s_cor_e" + } + } + """)); + } + + @Test // GH-4139 + void lastNMapsFieldNamesCorrectly() 
{ + + MongoMappingContext mappingContext = new MongoMappingContext(); + RelaxedTypeBasedAggregationOperationContext aggregationContext = new RelaxedTypeBasedAggregationOperationContext( + Player.class, mappingContext, + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); + + Document document = SelectionOperators.Last.last(3).of("score").toDocument(aggregationContext); + + assertThat(document).isEqualTo(Document.parse(""" + { + $lastN: + { + n: 3, + input: "$s_cor_e" + } + } + """)); + } + + static class Player { + + @Field("player_id") String playerId; + + @Field("s_cor_e") Integer score; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java new file mode 100644 index 0000000000..d6f95216a5 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java @@ -0,0 +1,168 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link SetOperation}. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +class SetOperationUnitTests { + + @Test // DATAMONGO-2331 + void raisesErrorOnNullField() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new SetOperation(null, "value")); + } + + @Test // DATAMONGO-2331 + void rendersFieldReferenceCorrectly() { + + assertThat(new SetOperation("name", "value").toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$set\" : {\"name\":\"value\"}}")); + } + + @Test // DATAMONGO-2331 + void rendersMappedFieldReferenceCorrectly() { + + assertThat(new SetOperation("student", "value").toPipelineStages(contextFor(ScoresWithMappedField.class))) + .containsExactly(Document.parse("{\"$set\" : {\"student_name\":\"value\"}}")); + } + + @Test // DATAMONGO-2331 + void rendersNestedMappedFieldReferenceCorrectly() { + + assertThat( + new SetOperation("scoresWithMappedField.student", "value").toPipelineStages(contextFor(ScoresWrapper.class))) + .containsExactly(Document.parse("{\"$set\" : {\"scoresWithMappedField.student_name\":\"value\"}}")); + } + + @Test // DATAMONGO-2331 + void rendersTargetValueFieldReferenceCorrectly() { + + assertThat(new SetOperation("name", Fields.field("value")).toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$set\" : 
{\"name\":\"$value\"}}")); + } + + @Test // DATAMONGO-2331 + void rendersMappedTargetValueFieldReferenceCorrectly() { + + assertThat( + new SetOperation("student", Fields.field("homework")).toPipelineStages(contextFor(ScoresWithMappedField.class))) + .containsExactly(Document.parse("{\"$set\" : {\"student_name\":\"$home_work\"}}")); + } + + @Test // DATAMONGO-2331 + void rendersNestedMappedTargetValueFieldReferenceCorrectly() { + + assertThat(new SetOperation("scoresWithMappedField.student", Fields.field("scoresWithMappedField.homework")) + .toPipelineStages(contextFor(ScoresWrapper.class))) + .containsExactly(Document + .parse("{\"$set\" : {\"scoresWithMappedField.student_name\":\"$scoresWithMappedField.home_work\"}}")); + } + + @Test // DATAMONGO-2363 + void appliesSpelExpressionCorrectly() { + + SetOperation operation = SetOperation.builder().set("totalHomework").withValueOfExpression("sum(homework) * [0]", + 2); + + assertThat(operation.toPipelineStages(contextFor(AddFieldsOperationUnitTests.ScoresWrapper.class))).contains( + Document.parse("{\"$set\" : {\"totalHomework\": { $multiply : [{ \"$sum\" : [\"$homework\"] }, 2] }}}")); + } + + @Test // DATAMONGO-2331 + void rendersTargetValueExpressionCorrectly() { + + assertThat(SetOperation.builder().set("totalHomework").toValueOf(ArithmeticOperators.valueOf("homework").sum()) + .toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$set\" : {\"totalHomework\": { \"$sum\" : \"$homework\" }}}")); + } + + @Test // GH-4933 + void rendersTargetFieldReferenceCorrectly() { + + assertThat( + SetOperation.builder().set("totalHomework").toValueOf("homework").toPipelineStages(contextFor(Scores.class))) + .containsExactly(Document.parse("{\"$set\" : {\"totalHomework\": \"$homework\" }}")); + } + + @Test // GH-4933 + void rendersMappedTargetFieldReferenceCorrectly() { + + assertThat(SetOperation.builder().set("totalHomework").toValueOf("homework") + 
.toPipelineStages(contextFor(ScoresWithMappedField.class))) + .containsExactly(Document.parse("{\"$set\" : {\"totalHomework\": \"$home_work\" }}")); + } + + @Test // DATAMONGO-2331 + void exposesFieldsCorrectly() { + + ExposedFields fields = SetOperation.builder().set("totalHomework").toValue("A+") // + .and() // + .set("totalQuiz").toValue("B-") // + .getFields(); + + assertThat(fields.getField("totalHomework")).isNotNull(); + assertThat(fields.getField("totalQuiz")).isNotNull(); + assertThat(fields.getField("does-not-exist")).isNull(); + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new RelaxedTypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)); + } + + static class Scores { + + String student; + List homework; + } + + static class ScoresWithMappedField { + + @Field("student_name") String student; + @Field("home_work") List homework; + } + + static class ScoresWrapper { + + Scores scores; + ScoresWithMappedField scoresWithMappedField; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java new file mode 100644 index 0000000000..bc0edcea0b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java @@ -0,0 +1,201 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.Date; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Year; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.Windows; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +/** + * Integration tests for {@link SetWindowFieldsOperation}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MongoTemplateExtension.class) +@EnableIfMongoServerVersion(isGreaterThanEqual = "5.0") +class SetWindowFieldsOperationTests { + + @Template // + private static MongoTestTemplate mongoTemplate; + + @AfterEach + void afterEach() { + mongoTemplate.flush(CakeSale.class); + } + + @Test // GH-3711 + void executesSetWindowFieldsOperationCorrectly() { + + initCakeSales(); + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByField("state") // resolves to field ref "$state" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + AggregationResults results = mongoTemplate.aggregateAndReturn(Document.class) + .by(Aggregation.newAggregation(CakeSale.class, setWindowFieldsOperation)).all(); + + assertThat(results.getMappedResults()).map(it -> it.get("cumulativeQuantityForState")).contains(162, 282, 427, 134, + 238, 378); + } + + @Test // GH-4745 + void exposesFieldsToNextStageCorrectly() { + + initCakeSales(); + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByField("state") // resolves to field ref "$state" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + AggregationResults results = mongoTemplate.aggregateAndReturn(Document.class) + .by(Aggregation.newAggregation(CakeSale.class, setWindowFieldsOperation, + /* and now project on the field to see it can be referenced */ + Aggregation.project("cumulativeQuantityForState"))) + .all(); + + 
assertThat(results.getMappedResults()).map(it -> it.get("cumulativeQuantityForState")).contains(162, 282, 427, 134, + 238, 378); + } + + @Test // GH-3711 + void executesSetWindowFieldsOperationWithPartitionExpressionCorrectly() { + + initCakeSales(); + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByExpression(Year.yearOf("date")) // resolves to $year: "$orderDate" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + AggregationResults results = mongoTemplate.aggregateAndReturn(Document.class) + .by(Aggregation.newAggregation(CakeSale.class, setWindowFieldsOperation)).all(); + + assertThat(results.getMappedResults()).map(it -> it.get("cumulativeQuantityForState")).contains(134, 296, 104, 224, + 145, 285); + } + + void initCakeSales() { + + mongoTemplate.execute(CakeSale.class, collection -> { + + List source = Arrays.asList(Document.parse( + "{ _id: 0, type: \"chocolate\", orderDate: { $date : \"2020-05-18T14:10:30Z\" }, state: \"CA\", price: 13, quantity: 120 }"), + Document.parse( + "{ _id: 1, type: \"chocolate\", orderDate: { $date : \"2021-03-20T11:30:05Z\"}, state: \"WA\", price: 14, quantity: 140 }"), + Document.parse( + "{ _id: 2, type: \"vanilla\", orderDate: { $date : \"2021-01-11T06:31:15Z\"}, state: \"CA\", price: 12, quantity: 145 }"), + Document.parse( + "{ _id: 3, type: \"vanilla\", orderDate: { $date : \"2020-02-08T13:13:23Z\"}, state: \"WA\", price: 13, quantity: 104 }"), + Document.parse( + "{ _id: 4, type: \"strawberry\", orderDate: { $date : \"2019-05-18T16:09:01Z\"}, state: \"CA\", price: 41, quantity: 162 }"), + Document.parse( + "{ _id: 5, type: \"strawberry\", orderDate: { $date : \"2019-01-08T06:12:03Z\"}, state: \"WA\", price: 43, quantity: 134 }")); + + 
collection.insertMany(source); + return "OK"; + }); + } + + static class CakeSale { + + @Id Integer id; + + String state; + + @Field("orderDate") // + Date date; + + @Field("quantity") // + Integer qty; + + String type; + + public Integer getId() { + return this.id; + } + + public String getState() { + return this.state; + } + + public Date getDate() { + return this.date; + } + + public Integer getQty() { + return this.qty; + } + + public String getType() { + return this.type; + } + + public void setId(Integer id) { + this.id = id; + } + + public void setState(String state) { + this.state = state; + } + + public void setDate(Date date) { + this.date = date; + } + + public void setQty(Integer qty) { + this.qty = qty; + } + + public void setType(String type) { + this.type = type; + } + + public String toString() { + return "SetWindowFieldsOperationTests.CakeSale(id=" + this.getId() + ", state=" + this.getState() + ", date=" + + this.getDate() + ", qty=" + this.getQty() + ", type=" + this.getType() + ")"; + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java new file mode 100644 index 0000000000..b5f5f596e6 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java @@ -0,0 +1,113 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Date; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.Windows; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link SetWindowFieldsOperation}. 
+ * + * @author Christoph Strobl + */ +class SetWindowFieldsOperationUnitTests { + + @Test // GH-3711 + void rendersTargetFieldNamesCorrectly() { + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByField("state") // resolves to field ref "$state" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + Document document = setWindowFieldsOperation.toDocument(contextFor(CakeSale.class)); + assertThat(document).isEqualTo(Document.parse( + "{ $setWindowFields: { partitionBy: \"$state\", sortBy: { orderDate: 1 }, output: { cumulativeQuantityForState: { $sum: \"$quantity\", window: { documents: [ \"unbounded\", \"current\" ] } } } } }")); + } + + @Test // GH-3711 + void exposesTargetFieldNames() { + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("f1") // + .output(AccumulatorOperators.valueOf("qty").avg()) // resolves to "$quantity" + .within(Windows.documents().from(-1).to(0).build()) // + .as("f2") // + .build(); // + + assertThat(setWindowFieldsOperation.getFields()).map(ExposedField::getName).containsExactly("f1", "f2"); + } + + @Test // GH-3711 + void rendersMuiltipleOutputFields() { + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("f1") // + .output(AccumulatorOperators.valueOf("qty").avg()) // resolves to "$quantity" + .within(Windows.documents().from(-1).to(0).build()) // + .as("f2") // + .build(); 
// + + Document document = setWindowFieldsOperation.toDocument(contextFor(CakeSale.class)); + assertThat(document).isEqualTo(Document.parse( + "{ $setWindowFields: { output: { f1 : { $sum: \"$quantity\", window: { documents: [ \"unbounded\", \"current\" ] } }, f2 : { $avg: \"$quantity\", window: { documents: [ -1, 0 ] } } } } }")); + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)); + } + + static class CakeSale { + + String state; + + @Field("orderDate") Date date; + + @Field("quantity") Integer qty; + + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SkipOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SkipOperationUnitTests.java index cc17c85c49..73f78348fb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SkipOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SkipOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,10 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link SkipOperation}. @@ -30,9 +29,9 @@ public class SkipOperationUnitTests { static final String OP = "$skip"; - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNegativeSkip() { - new SkipOperation(-1L); + assertThatIllegalArgumentException().isThrownBy(() -> new SkipOperation(-1L)); } @Test @@ -41,6 +40,6 @@ public void rendersSkipOperation() { SkipOperation operation = new SkipOperation(10L); Document document = operation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(document.get(OP), is((Object) 10L)); + assertThat(document.get(OP)).isEqualTo((Object) 10L); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperationUnitTests.java index 3163c601b8..923778dea4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortByCountOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018-2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,7 +21,7 @@ import java.util.Arrays; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link SortByCountOperation}. diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortOperationUnitTests.java index 19a449493b..19f80f2d08 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SortOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,12 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.DocumentTestUtils.*; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; + import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; @@ -39,8 +39,8 @@ public void createsDocumentForAscendingSortCorrectly() { Document result = operation.toDocument(Aggregation.DEFAULT_CONTEXT); Document sortValue = getAsDocument(result, "$sort"); - assertThat(sortValue, is(notNullValue())); - assertThat(sortValue.get("foobar"), is((Object) 1)); + assertThat(sortValue).isNotNull(); + assertThat(sortValue.get("foobar")).isEqualTo((Object) 1); } @Test @@ -50,7 +50,7 @@ public void createsDocumentForDescendingSortCorrectly() { Document result = operation.toDocument(Aggregation.DEFAULT_CONTEXT); Document sortValue = getAsDocument(result, "$sort"); - assertThat(sortValue, is(notNullValue())); - assertThat(sortValue.get("foobar"), is((Object) (0 - 1))); + assertThat(sortValue).isNotNull(); + assertThat(sortValue.get("foobar")).isEqualTo((Object) (0 - 1)); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerIntegrationTests.java index 051e60451a..86f4ff03cf 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,37 +15,33 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.junit.Before; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.data.mapping.MappingException; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mapping.context.InvalidPersistentPropertyPath; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; 
+import org.springframework.test.context.junit4.SpringRunner; /** * Integration tests for {@link SpelExpressionTransformer}. * * @author Thomas Darimont */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("classpath:infrastructure.xml") public class SpelExpressionTransformerIntegrationTests { - @Autowired MongoDbFactory mongoDbFactory; - - @Rule public ExpectedException exception = ExpectedException.none(); + @Autowired MongoDatabaseFactory mongoDbFactory; SpelExpressionTransformer transformer; DbRefResolver dbRefResolver; @@ -62,19 +58,19 @@ public void shouldConvertCompoundExpressionToPropertyPath() { MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, new MongoMappingContext()); TypeBasedAggregationOperationContext ctxt = new TypeBasedAggregationOperationContext(Data.class, new MongoMappingContext(), new QueryMapper(converter)); - assertThat(transformer.transform("item.primitiveIntValue", ctxt, new Object[0]).toString(), - is("$item.primitiveIntValue")); + assertThat(transformer.transform("item.primitiveIntValue", ctxt, new Object[0]).toString()) + .isEqualTo("$item.primitiveIntValue"); } @Test // DATAMONGO-774 public void shouldThrowExceptionIfNestedPropertyCannotBeFound() { - exception.expect(MappingException.class); - exception.expectMessage("value2"); - MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, new MongoMappingContext()); TypeBasedAggregationOperationContext ctxt = new TypeBasedAggregationOperationContext(Data.class, new MongoMappingContext(), new QueryMapper(converter)); - assertThat(transformer.transform("item.value2", ctxt, new Object[0]).toString(), is("$item.value2")); + + assertThatExceptionOfType(InvalidPersistentPropertyPath.class).isThrownBy(() -> { + transformer.transform("item.value2", ctxt, new Object[0]).toString(); + }); } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index f08619e4e9..abe2fd5605 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,15 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; import org.bson.Document; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + import org.springframework.data.mongodb.core.Person; /** @@ -32,15 +32,17 @@ * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl + * @author Divya Srivastava + * @author Julia Lee */ public class SpelExpressionTransformerUnitTests { - SpelExpressionTransformer transformer = new SpelExpressionTransformer(); + private SpelExpressionTransformer transformer = new SpelExpressionTransformer(); - Data 
data; + private Data data; - @Before - public void setup() { + @BeforeEach + void beforeEach() { this.data = new Data(); this.data.primitiveLongValue = 42; @@ -51,675 +53,1237 @@ public void setup() { } @Test // DATAMONGO-774 - public void shouldRenderConstantExpression() { + void shouldRenderConstantExpression() { - assertThat(transform("1"), is((Object) "1")); - assertThat(transform("-1"), is((Object) "-1")); - assertThat(transform("1.0"), is((Object) "1.0")); - assertThat(transform("-1.0"), is((Object) "-1.0")); - assertThat(transform("null"), is(nullValue())); + assertThat(transformValue("1")).isEqualTo("1"); + assertThat(transformValue("-1")).isEqualTo("-1"); + assertThat(transformValue("1.0")).isEqualTo("1.0"); + assertThat(transformValue("-1.0")).isEqualTo("-1.0"); + assertThat(transformValue("null")).isNull(); } @Test // DATAMONGO-774 - public void shouldSupportKnownOperands() { + void shouldSupportKnownOperands() { - assertThat(transform("a + b"), is((Object) Document.parse("{ \"$add\" : [ \"$a\" , \"$b\"]}"))); - assertThat(transform("a - b"), is((Object) Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}"))); - assertThat(transform("a * b"), is((Object) Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}"))); - assertThat(transform("a / b"), is((Object) Document.parse("{ \"$divide\" : [ \"$a\" , \"$b\"]}"))); - assertThat(transform("a % b"), is((Object) Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}"))); + assertThat(transform("a + b")).isEqualTo("{ \"$add\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a - b")).isEqualTo("{ \"$subtract\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a * b")).isEqualTo("{ \"$multiply\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a / b")).isEqualTo("{ \"$divide\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a % b")).isEqualTo("{ \"$mod\" : [ \"$a\" , \"$b\"]}"); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-774 - public void shouldThrowExceptionOnUnknownOperand() { - 
transform("a++"); + @Test // DATAMONGO-774 + void shouldThrowExceptionOnUnknownOperand() { + assertThatIllegalArgumentException().isThrownBy(() -> transform("a++")); } @Test // DATAMONGO-774 - public void shouldRenderSumExpression() { - assertThat(transform("a + 1"), is((Object) Document.parse("{ \"$add\" : [ \"$a\" , 1]}"))); + void shouldRenderSumExpression() { + assertThat(transform("a + 1")).isEqualTo("{ \"$add\" : [ \"$a\" , 1]}"); } @Test // DATAMONGO-774 - public void shouldRenderFormula() { + void shouldRenderFormula() { - assertThat(transform("(netPrice + surCharge) * taxrate + 42"), is((Object) Document.parse( - "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}"))); + assertThat(transform("(netPrice + surCharge) * taxrate + 42")).isEqualTo( + "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}"); } @Test // DATAMONGO-774 - public void shouldRenderFormulaInCurlyBrackets() { + void shouldRenderFormulaInCurlyBrackets() { - assertThat(transform("{(netPrice + surCharge) * taxrate + 42}"), is((Object) Document.parse( - "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}"))); + assertThat(transform("{(netPrice + surCharge) * taxrate + 42}")).isEqualTo( + "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}"); } @Test // DATAMONGO-774 - public void shouldRenderFieldReference() { + void shouldRenderFieldReference() { - assertThat(transform("foo"), is((Object) "$foo")); - assertThat(transform("$foo"), is((Object) "$foo")); + assertThat(transformValue("foo")).isEqualTo("$foo"); + assertThat(transformValue("$foo")).isEqualTo("$foo"); } @Test // DATAMONGO-774 - public void shouldRenderNestedFieldReference() { + void shouldRenderNestedFieldReference() { - assertThat(transform("foo.bar"), is((Object) "$foo.bar")); - assertThat(transform("$foo.bar"), 
is((Object) "$foo.bar")); + assertThat(transformValue("foo.bar")).isEqualTo("$foo.bar"); + assertThat(transformValue("$foo.bar")).isEqualTo("$foo.bar"); } @Test // DATAMONGO-774 - @Ignore - public void shouldRenderNestedIndexedFieldReference() { + @Disabled + void shouldRenderNestedIndexedFieldReference() { // TODO add support for rendering nested indexed field references - assertThat(transform("foo[3].bar"), is((Object) "$foo[3].bar")); + assertThat(transformValue("foo[3].bar")).isEqualTo("$foo[3].bar"); } @Test // DATAMONGO-774 - public void shouldRenderConsecutiveOperation() { - assertThat(transform("1 + 1 + 1"), is((Object) Document.parse("{ \"$add\" : [ 1 , 1 , 1]}"))); + void shouldRenderConsecutiveOperation() { + assertThat(transform("1 + 1 + 1")).isEqualTo("{ \"$add\" : [ 1 , 1 , 1]}"); } @Test // DATAMONGO-774 - public void shouldRenderComplexExpression0() { + void shouldRenderComplexExpression0() { - assertThat(transform("-(1 + q)"), - is((Object) Document.parse("{ \"$multiply\" : [ -1 , { \"$add\" : [ 1 , \"$q\"]}]}"))); + assertThat(transform("-(1 + q)")) + .isEqualTo("{ \"$multiply\" : [ -1 , { \"$add\" : [ 1 , \"$q\"]}]}"); } @Test // DATAMONGO-774 - public void shouldRenderComplexExpression1() { + void shouldRenderComplexExpression1() { - assertThat(transform("1 + (q + 1) / (q - 1)"), is((Object) Document.parse( - "{ \"$add\" : [ 1 , { \"$divide\" : [ { \"$add\" : [ \"$q\" , 1]} , { \"$subtract\" : [ \"$q\" , 1]}]}]}"))); + assertThat(transform("1 + (q + 1) / (q - 1)")).isEqualTo( + "{ \"$add\" : [ 1 , { \"$divide\" : [ { \"$add\" : [ \"$q\" , 1]} , { \"$subtract\" : [ \"$q\" , 1]}]}]}"); } @Test // DATAMONGO-774 - public void shouldRenderComplexExpression2() { + void shouldRenderComplexExpression2() { - assertThat(transform("(q + 1 + 4 - 5) / (q + 1 + 3 + 4)"), is((Object) Document.parse( - "{ \"$divide\" : [ { \"$subtract\" : [ { \"$add\" : [ \"$q\" , 1 , 4]} , 5]} , { \"$add\" : [ \"$q\" , 1 , 3 , 4]}]}"))); + assertThat(transform("(q + 1 + 4 - 5) 
/ (q + 1 + 3 + 4)")).isEqualTo( + "{ \"$divide\" : [ { \"$subtract\" : [ { \"$add\" : [ \"$q\" , 1 , 4]} , 5]} , { \"$add\" : [ \"$q\" , 1 , 3 , 4]}]}"); } @Test // DATAMONGO-774 - public void shouldRenderBinaryExpressionWithMixedSignsCorrectly() { + void shouldRenderBinaryExpressionWithMixedSignsCorrectly() { - assertThat(transform("-4 + 1"), is((Object) Document.parse("{ \"$add\" : [ -4 , 1]}"))); - assertThat(transform("1 + -4"), is((Object) Document.parse("{ \"$add\" : [ 1 , -4]}"))); + assertThat(transform("-4 + 1")).isEqualTo("{ \"$add\" : [ -4 , 1]}"); + assertThat(transform("1 + -4")).isEqualTo("{ \"$add\" : [ 1 , -4]}"); } @Test // DATAMONGO-774 - public void shouldRenderConsecutiveOperationsInComplexExpression() { + void shouldRenderConsecutiveOperationsInComplexExpression() { - assertThat(transform("1 + 1 + (1 + 1 + 1) / q"), is( - (Object) Document.parse("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}"))); + assertThat(transform("1 + 1 + (1 + 1 + 1) / q")) + .isEqualTo("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}"); } @Test // DATAMONGO-774 - public void shouldRenderParameterExpressionResults() { - assertThat(transform("[0] + [1] + [2]", 1, 2, 3), is((Object) Document.parse("{ \"$add\" : [ 1 , 2 , 3]}"))); + void shouldRenderParameterExpressionResults() { + assertThat(transform("[0] + [1] + [2]", 1, 2, 3)).isEqualTo("{ \"$add\" : [ 1 , 2 , 3]}"); } @Test // DATAMONGO-774 - @Ignore("TODO: mongo3 renders this a bit strange") - public void shouldRenderNestedParameterExpressionResults() { + void shouldRenderNestedParameterExpressionResults() { assertThat( ((Document) transform("[0].primitiveLongValue + [0].primitiveDoubleValue + [0].doubleValue.longValue()", data)) - .toJson(), - is(Document.parse("{ \"$add\" : [ 42 , 1.2345 , 23]}").toJson())); + .toJson()) + .isEqualTo(Document + .parse("{ \"$add\" : [ { $numberLong : \"42\"} , 1.2345 , { $numberLong : \"23\" } ]}").toJson()); } @Test 
// DATAMONGO-774 - @Ignore("TODO: mongo3 renders this a bit strange") - public void shouldRenderNestedParameterExpressionResultsInNestedExpressions() { + void shouldRenderNestedParameterExpressionResultsInNestedExpressions() { + + Document target = ((Document) transform( + "((1 + [0].primitiveLongValue) + [0].primitiveDoubleValue) * [0].doubleValue.longValue()", data)); assertThat( ((Document) transform("((1 + [0].primitiveLongValue) + [0].primitiveDoubleValue) * [0].doubleValue.longValue()", - data)).toJson(), - is(new Document("$multiply", Arrays.asList(new Document("$add", Arrays.asList(1, 42L, 1.2345D, 23L)))) - .toJson())); + data))) + .isEqualTo(new Document("$multiply", + Arrays. asList(new Document("$add", Arrays. asList(1, 42L, 1.2345D)), 23L))); } @Test // DATAMONGO-840 - public void shouldRenderCompoundExpressionsWithIndexerAndFieldReference() { + void shouldRenderCompoundExpressionsWithIndexerAndFieldReference() { Person person = new Person(); person.setAge(10); - assertThat(transform("[0].age + a.c", person), is((Object) Document.parse("{ \"$add\" : [ 10 , \"$a.c\"] }"))); + assertThat(transform("[0].age + a.c", person)).isEqualTo("{ \"$add\" : [ 10 , \"$a.c\"] }"); } @Test // DATAMONGO-840 - public void shouldRenderCompoundExpressionsWithOnlyFieldReferences() { + void shouldRenderCompoundExpressionsWithOnlyFieldReferences() { - assertThat(transform("a.b + a.c"), is((Object) Document.parse("{ \"$add\" : [ \"$a.b\" , \"$a.c\"]}"))); + assertThat(transform("a.b + a.c")).isEqualTo("{ \"$add\" : [ \"$a.b\" , \"$a.c\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeAnd() { - assertThat(transform("and(a, b)"), is((Object) Document.parse("{ \"$and\" : [ \"$a\" , \"$b\"]}"))); + void shouldRenderMethodReferenceNodeAnd() { + assertThat(transform("and(a, b)")).isEqualTo("{ \"$and\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeOr() { - assertThat(transform("or(a, b)"), is((Object) 
Document.parse("{ \"$or\" : [ \"$a\" , \"$b\"]}"))); + void shouldRenderMethodReferenceNodeOr() { + assertThat(transform("or(a, b)")).isEqualTo("{ \"$or\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeNot() { - assertThat(transform("not(a)"), is((Object) Document.parse("{ \"$not\" : [ \"$a\"]}"))); + void shouldRenderMethodReferenceNodeNot() { + assertThat(transform("not(a)")).isEqualTo("{ \"$not\" : [ \"$a\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSetEquals() { - assertThat(transform("setEquals(a, b)"), is((Object) Document.parse("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}"))); + void shouldRenderMethodReferenceNodeSetEquals() { + assertThat(transform("setEquals(a, b)")).isEqualTo("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSetEqualsForArrays() { - assertThat(transform("setEquals(new int[]{1,2,3}, new int[]{4,5,6})"), - is((Object) Document.parse("{ \"$setEquals\" : [ [ 1 , 2 , 3] , [ 4 , 5 , 6]]}"))); + void shouldRenderMethodReferenceNodeSetEqualsForArrays() { + assertThat(transform("setEquals(new int[]{1,2,3}, new int[]{4,5,6})")) + .isEqualTo("{ \"$setEquals\" : [ [ 1 , 2 , 3] , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSetEqualsMixedArrays() { - assertThat(transform("setEquals(a, new int[]{4,5,6})"), - is((Object) Document.parse("{ \"$setEquals\" : [ \"$a\" , [ 4 , 5 , 6]]}"))); + void shouldRenderMethodReferenceNodeSetEqualsMixedArrays() { + assertThat(transform("setEquals(a, new int[]{4,5,6})")) + .isEqualTo("{ \"$setEquals\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSetIntersection() { - assertThat(transform("setIntersection(a, new int[]{4,5,6})"), - is((Object) Document.parse("{ \"$setIntersection\" : [ \"$a\" , [ 4 , 5 , 6]]}"))); + void shouldRenderMethodReferenceSetIntersection() { + 
assertThat(transform("setIntersection(a, new int[]{4,5,6})")) + .isEqualTo("{ \"$setIntersection\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSetUnion() { - assertThat(transform("setUnion(a, new int[]{4,5,6})"), - is((Object) Document.parse("{ \"$setUnion\" : [ \"$a\" , [ 4 , 5 , 6]]}"))); + void shouldRenderMethodReferenceSetUnion() { + assertThat(transform("setUnion(a, new int[]{4,5,6})")) + .isEqualTo("{ \"$setUnion\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSeDifference() { - assertThat(transform("setDifference(a, new int[]{4,5,6})"), - is((Object) Document.parse("{ \"$setDifference\" : [ \"$a\" , [ 4 , 5 , 6]]}"))); + void shouldRenderMethodReferenceSeDifference() { + assertThat(transform("setDifference(a, new int[]{4,5,6})")) + .isEqualTo("{ \"$setDifference\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSetIsSubset() { - assertThat(transform("setIsSubset(a, new int[]{4,5,6})"), - is((Object) Document.parse("{ \"$setIsSubset\" : [ \"$a\" , [ 4 , 5 , 6]]}"))); + void shouldRenderMethodReferenceSetIsSubset() { + assertThat(transform("setIsSubset(a, new int[]{4,5,6})")) + .isEqualTo("{ \"$setIsSubset\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAnyElementTrue() { - assertThat(transform("anyElementTrue(a)"), is((Object) Document.parse("{ \"$anyElementTrue\" : [ \"$a\"]}"))); + void shouldRenderMethodReferenceAnyElementTrue() { + assertThat(transform("anyElementTrue(a)")).isEqualTo("{ \"$anyElementTrue\" : [ \"$a\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAllElementsTrue() { - assertThat(transform("allElementsTrue(a, new int[]{4,5,6})"), - is((Object) Document.parse("{ \"$allElementsTrue\" : [ \"$a\" , [ 4 , 5 , 6]]}"))); + void shouldRenderMethodReferenceAllElementsTrue() { + assertThat(transform("allElementsTrue(a, 
new int[]{4,5,6})")) + .isEqualTo("{ \"$allElementsTrue\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceCmp() { - assertThat(transform("cmp(a, 250)"), is((Object) Document.parse("{ \"$cmp\" : [ \"$a\" , 250]}"))); + void shouldRenderMethodReferenceCmp() { + assertThat(transform("cmp(a, 250)")).isEqualTo("{ \"$cmp\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceEq() { - assertThat(transform("eq(a, 250)"), is((Object) Document.parse("{ \"$eq\" : [ \"$a\" , 250]}"))); + void shouldRenderMethodReferenceEq() { + assertThat(transform("eq(a, 250)")).isEqualTo("{ \"$eq\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceGt() { - assertThat(transform("gt(a, 250)"), is((Object) Document.parse("{ \"$gt\" : [ \"$a\" , 250]}"))); + void shouldRenderMethodReferenceGt() { + assertThat(transform("gt(a, 250)")).isEqualTo("{ \"$gt\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceGte() { - assertThat(transform("gte(a, 250)"), is((Object) Document.parse("{ \"$gte\" : [ \"$a\" , 250]}"))); + void shouldRenderMethodReferenceGte() { + assertThat(transform("gte(a, 250)")).isEqualTo("{ \"$gte\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLt() { - assertThat(transform("lt(a, 250)"), is((Object) Document.parse("{ \"$lt\" : [ \"$a\" , 250]}"))); + void shouldRenderMethodReferenceLt() { + assertThat(transform("lt(a, 250)")).isEqualTo("{ \"$lt\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLte() { - assertThat(transform("lte(a, 250)"), is((Object) Document.parse("{ \"$lte\" : [ \"$a\" , 250]}"))); + void shouldRenderMethodReferenceLte() { + assertThat(transform("lte(a, 250)")).isEqualTo("{ \"$lte\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNe() { - assertThat(transform("ne(a, 250)"), 
is((Object) Document.parse("{ \"$ne\" : [ \"$a\" , 250]}"))); + void shouldRenderMethodReferenceNe() { + assertThat(transform("ne(a, 250)")).isEqualTo("{ \"$ne\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAbs() { - assertThat(transform("abs(1)"), is((Object) Document.parse("{ \"$abs\" : 1}"))); + void shouldRenderMethodReferenceAbs() { + assertThat(transform("abs(1)")).isEqualTo("{ \"$abs\" : 1}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAdd() { - assertThat(transform("add(a, 250)"), is((Object) Document.parse("{ \"$add\" : [ \"$a\" , 250]}"))); + void shouldRenderMethodReferenceAdd() { + assertThat(transform("add(a, 250)")).isEqualTo("{ \"$add\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceCeil() { - assertThat(transform("ceil(7.8)"), is((Object) Document.parse("{ \"$ceil\" : 7.8}"))); + void shouldRenderMethodReferenceCeil() { + assertThat(transform("ceil(7.8)")).isEqualTo("{ \"$ceil\" : 7.8}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDivide() { - assertThat(transform("divide(a, 250)"), is((Object) Document.parse("{ \"$divide\" : [ \"$a\" , 250]}"))); + void shouldRenderMethodReferenceDivide() { + assertThat(transform("divide(a, 250)")).isEqualTo("{ \"$divide\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceExp() { - assertThat(transform("exp(2)"), is((Object) Document.parse("{ \"$exp\" : 2}"))); + void shouldRenderMethodReferenceExp() { + assertThat(transform("exp(2)")).isEqualTo("{ \"$exp\" : 2}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceFloor() { - assertThat(transform("floor(2)"), is((Object) Document.parse("{ \"$floor\" : 2}"))); + void shouldRenderMethodReferenceFloor() { + assertThat(transform("floor(2)")).isEqualTo("{ \"$floor\" : 2}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLn() { - assertThat(transform("ln(2)"), 
is((Object) Document.parse("{ \"$ln\" : 2}"))); + void shouldRenderMethodReferenceLn() { + assertThat(transform("ln(2)")).isEqualTo("{ \"$ln\" : 2}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLog() { - assertThat(transform("log(100, 10)"), is((Object) Document.parse("{ \"$log\" : [ 100 , 10]}"))); + void shouldRenderMethodReferenceLog() { + assertThat(transform("log(100, 10)")).isEqualTo("{ \"$log\" : [ 100 , 10]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLog10() { - assertThat(transform("log10(100)"), is((Object) Document.parse("{ \"$log10\" : 100}"))); + void shouldRenderMethodReferenceLog10() { + assertThat(transform("log10(100)")).isEqualTo("{ \"$log10\" : 100}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMod() { - assertThat(transform("mod(a, b)"), is((Object) Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}"))); + void shouldRenderMethodReferenceNodeMod() { + assertThat(transform("mod(a, b)")).isEqualTo("{ \"$mod\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMultiply() { - assertThat(transform("multiply(a, b)"), is((Object) Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}"))); + void shouldRenderMethodReferenceNodeMultiply() { + assertThat(transform("multiply(a, b)")).isEqualTo("{ \"$multiply\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodePow() { - assertThat(transform("pow(a, 2)"), is((Object) Document.parse("{ \"$pow\" : [ \"$a\" , 2]}"))); + void shouldRenderMethodReferenceNodePow() { + assertThat(transform("pow(a, 2)")).isEqualTo("{ \"$pow\" : [ \"$a\" , 2]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSqrt() { - assertThat(transform("sqrt(2)"), is((Object) Document.parse("{ \"$sqrt\" : 2}"))); + void shouldRenderMethodReferenceSqrt() { + assertThat(transform("sqrt(2)")).isEqualTo("{ \"$sqrt\" : 2}"); } @Test // DATAMONGO-1530 - public void 
shouldRenderMethodReferenceNodeSubtract() { - assertThat(transform("subtract(a, b)"), is((Object) Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}"))); + void shouldRenderMethodReferenceNodeSubtract() { + assertThat(transform("subtract(a, b)")).isEqualTo("{ \"$subtract\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceTrunc() { - assertThat(transform("trunc(2.1)"), is((Object) Document.parse("{ \"$trunc\" : 2.1}"))); + void shouldRenderMethodReferenceTrunc() { + assertThat(transform("trunc(2.1)")).isEqualTo("{ \"$trunc\" : 2.1}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeConcat() { - assertThat(transform("concat(a, b, 'c')"), - is((Object) Document.parse("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}"))); + void shouldRenderMethodReferenceNodeConcat() { + assertThat(transform("concat(a, b, 'c')")).isEqualTo("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSubstrc() { - assertThat(transform("substr(a, 0, 1)"), is((Object) Document.parse("{ \"$substr\" : [ \"$a\" , 0 , 1]}"))); + void shouldRenderMethodReferenceNodeSubstrc() { + assertThat(transform("substr(a, 0, 1)")).isEqualTo("{ \"$substr\" : [ \"$a\" , 0 , 1]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceToLower() { - assertThat(transform("toLower(a)"), is((Object) Document.parse("{ \"$toLower\" : \"$a\"}"))); + void shouldRenderMethodReferenceToLower() { + assertThat(transform("toLower(a)")).isEqualTo("{ \"$toLower\" : \"$a\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceToUpper() { - assertThat(transform("toUpper(a)"), is((Object) Document.parse("{ \"$toUpper\" : \"$a\"}"))); + void shouldRenderMethodReferenceToUpper() { + assertThat(transform("toUpper(a)")).isEqualTo("{ \"$toUpper\" : \"$a\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeStrCaseCmp() { - assertThat(transform("strcasecmp(a, 
b)"), is((Object) Document.parse("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}"))); + void shouldRenderMethodReferenceNodeStrCaseCmp() { + assertThat(transform("strcasecmp(a, b)")).isEqualTo("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceMeta() { - assertThat(transform("meta('textScore')"), is((Object) Document.parse("{ \"$meta\" : \"textScore\"}"))); + void shouldRenderMethodReferenceMeta() { + assertThat(transform("meta('textScore')")).isEqualTo("{ \"$meta\" : \"textScore\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeArrayElemAt() { - assertThat(transform("arrayElemAt(a, 10)"), is((Object) Document.parse("{ \"$arrayElemAt\" : [ \"$a\" , 10]}"))); + void shouldRenderMethodReferenceNodeArrayElemAt() { + assertThat(transform("arrayElemAt(a, 10)")).isEqualTo("{ \"$arrayElemAt\" : [ \"$a\" , 10]}"); + } + + @Test // GH-3694 + void shouldRenderMethodReferenceNodeFirst() { + assertThat(transform("first(a)")).isEqualTo("{ \"$first\" : \"$a\" }"); + } + + @Test // GH-3694 + void shouldRenderMethodReferenceNodeLast() { + assertThat(transform("last(a)")).isEqualTo("{ \"$last\" : \"$a\" }"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeConcatArrays() { - assertThat(transform("concatArrays(a, b, c)"), - is((Object) Document.parse("{ \"$concatArrays\" : [ \"$a\" , \"$b\" , \"$c\"]}"))); + void shouldRenderMethodReferenceNodeConcatArrays() { + assertThat(transform("concatArrays(a, b, c)")) + .isEqualTo("{ \"$concatArrays\" : [ \"$a\" , \"$b\" , \"$c\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeFilter() { - assertThat(transform("filter(a, 'num', '$$num' > 10)"), is((Object) Document.parse( - "{ \"$filter\" : { \"input\" : \"$a\" , \"as\" : \"num\" , \"cond\" : { \"$gt\" : [ \"$$num\" , 10]}}}"))); + void shouldRenderMethodReferenceNodeFilter() { + assertThat(transform("filter(a, 'num', '$$num' > 10)")).isEqualTo( + "{ \"$filter\" : 
{ \"input\" : \"$a\" , \"as\" : \"num\" , \"cond\" : { \"$gt\" : [ \"$$num\" , 10]}}}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceIsArray() { - assertThat(transform("isArray(a)"), is((Object) Document.parse("{ \"$isArray\" : \"$a\"}"))); + void shouldRenderMethodReferenceIsArray() { + assertThat(transform("isArray(a)")).isEqualTo("{ \"$isArray\" : \"$a\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceIsSize() { - assertThat(transform("size(a)"), is((Object) Document.parse("{ \"$size\" : \"$a\"}"))); + void shouldRenderMethodReferenceIsSize() { + assertThat(transform("size(a)")).isEqualTo("{ \"$size\" : \"$a\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSlice() { - assertThat(transform("slice(a, 10)"), is((Object) Document.parse("{ \"$slice\" : [ \"$a\" , 10]}"))); + void shouldRenderMethodReferenceNodeSlice() { + assertThat(transform("slice(a, 10)")).isEqualTo("{ \"$slice\" : [ \"$a\" , 10]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMap() { - assertThat(transform("map(quizzes, 'grade', '$$grade' + 2)"), is((Object) Document.parse( - "{ \"$map\" : { \"input\" : \"$quizzes\" , \"as\" : \"grade\" , \"in\" : { \"$add\" : [ \"$$grade\" , 2]}}}"))); + void shouldRenderMethodReferenceNodeMap() { + assertThat(transform("map(quizzes, 'grade', '$$grade' + 2)")).isEqualTo( + "{ \"$map\" : { \"input\" : \"$quizzes\" , \"as\" : \"grade\" , \"in\" : { \"$add\" : [ \"$$grade\" , 2]}}}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeLet() { - assertThat(transform("let({low:1, high:'$$low'}, gt('$$low', '$$high'))"), is((Object) Document.parse( - "{ \"$let\" : { \"vars\" : { \"low\" : 1 , \"high\" : \"$$low\"} , \"in\" : { \"$gt\" : [ \"$$low\" , \"$$high\"]}}}"))); + void shouldRenderMethodReferenceNodeLet() { + assertThat(transform("let({low:1, high:'$$low'}, gt('$$low', '$$high'))")).isEqualTo( + "{ \"$let\" : { \"vars\" : { \"low\" : 1 , 
\"high\" : \"$$low\"} , \"in\" : { \"$gt\" : [ \"$$low\" , \"$$high\"]}}}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLiteral() { - assertThat(transform("literal($1)"), is((Object) Document.parse("{ \"$literal\" : \"$1\"}"))); + void shouldRenderMethodReferenceLiteral() { + assertThat(transform("literal($1)")).isEqualTo("{ \"$literal\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDayOfYear() { - assertThat(transform("dayOfYear($1)"), is((Object) Document.parse("{ \"$dayOfYear\" : \"$1\"}"))); + void shouldRenderMethodReferenceDayOfYear() { + assertThat(transform("dayOfYear($1)")).isEqualTo("{ \"$dayOfYear\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDayOfMonth() { - assertThat(transform("dayOfMonth($1)"), is((Object) Document.parse("{ \"$dayOfMonth\" : \"$1\"}"))); + void shouldRenderMethodReferenceDayOfMonth() { + assertThat(transform("dayOfMonth($1)")).isEqualTo("{ \"$dayOfMonth\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDayOfWeek() { - assertThat(transform("dayOfWeek($1)"), is((Object) Document.parse("{ \"$dayOfWeek\" : \"$1\"}"))); + void shouldRenderMethodReferenceDayOfWeek() { + assertThat(transform("dayOfWeek($1)")).isEqualTo("{ \"$dayOfWeek\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceYear() { - assertThat(transform("year($1)"), is((Object) Document.parse("{ \"$year\" : \"$1\"}"))); + void shouldRenderMethodReferenceYear() { + assertThat(transform("year($1)")).isEqualTo("{ \"$year\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceMonth() { - assertThat(transform("month($1)"), is((Object) Document.parse("{ \"$month\" : \"$1\"}"))); + void shouldRenderMethodReferenceMonth() { + assertThat(transform("month($1)")).isEqualTo("{ \"$month\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceWeek() { - 
assertThat(transform("week($1)"), is((Object) Document.parse("{ \"$week\" : \"$1\"}"))); + void shouldRenderMethodReferenceWeek() { + assertThat(transform("week($1)")).isEqualTo("{ \"$week\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceHour() { - assertThat(transform("hour($1)"), is((Object) Document.parse("{ \"$hour\" : \"$1\"}"))); + void shouldRenderMethodReferenceHour() { + assertThat(transform("hour($1)")).isEqualTo("{ \"$hour\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceMinute() { - assertThat(transform("minute($1)"), is((Object) Document.parse("{ \"$minute\" : \"$1\"}"))); + void shouldRenderMethodReferenceMinute() { + assertThat(transform("minute($1)")).isEqualTo("{ \"$minute\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSecond() { - assertThat(transform("second($1)"), is((Object) Document.parse("{ \"$second\" : \"$1\"}"))); + void shouldRenderMethodReferenceSecond() { + assertThat(transform("second($1)")).isEqualTo("{ \"$second\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceMillisecond() { - assertThat(transform("millisecond($1)"), is((Object) Document.parse("{ \"$millisecond\" : \"$1\"}"))); + void shouldRenderMethodReferenceMillisecond() { + assertThat(transform("millisecond($1)")).isEqualTo("{ \"$millisecond\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDateToString() { - assertThat(transform("dateToString('%Y-%m-%d', $date)"), - is((Object) Document.parse("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}"))); + void shouldRenderMethodReferenceDateToString() { + assertThat(transform("dateToString('%Y-%m-%d', $date)")) + .isEqualTo("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceCond() { - assertThat(transform("cond(qty > 250, 30, 20)"), is((Object) 
Document - .parse("{ \"$cond\" : { \"if\" : { \"$gt\" : [ \"$qty\" , 250]} , \"then\" : 30 , \"else\" : 20}}"))); + void shouldRenderMethodReferenceCond() { + assertThat(transform("cond(qty > 250, 30, 20)")).isEqualTo( + "{ \"$cond\" : { \"if\" : { \"$gt\" : [ \"$qty\" , 250]} , \"then\" : 30 , \"else\" : 20}}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeIfNull() { - assertThat(transform("ifNull(a, 10)"), is((Object) Document.parse("{ \"$ifNull\" : [ \"$a\" , 10]}"))); + void shouldRenderMethodReferenceNodeIfNull() { + assertThat(transform("ifNull(a, 10)")).isEqualTo("{ \"$ifNull\" : [ \"$a\" , 10]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSum() { - assertThat(transform("sum(a, b)"), is((Object) Document.parse("{ \"$sum\" : [ \"$a\" , \"$b\"]}"))); + void shouldRenderMethodReferenceNodeSum() { + assertThat(transform("sum(a, b)")).isEqualTo("{ \"$sum\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeAvg() { - assertThat(transform("avg(a, b)"), is((Object) Document.parse("{ \"$avg\" : [ \"$a\" , \"$b\"]}"))); + void shouldRenderMethodReferenceNodeAvg() { + assertThat(transform("avg(a, b)")).isEqualTo("{ \"$avg\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceFirst() { - assertThat(transform("first($1)"), is((Object) Document.parse("{ \"$first\" : \"$1\"}"))); + void shouldRenderMethodReferenceFirst() { + assertThat(transform("first($1)")).isEqualTo("{ \"$first\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLast() { - assertThat(transform("last($1)"), is((Object) Document.parse("{ \"$last\" : \"$1\"}"))); + void shouldRenderMethodReferenceLast() { + assertThat(transform("last($1)")).isEqualTo("{ \"$last\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMax() { - assertThat(transform("max(a, b)"), is((Object) Document.parse("{ \"$max\" : [ \"$a\" 
, \"$b\"]}"))); + void shouldRenderMethodReferenceNodeMax() { + assertThat(transform("max(a, b)")).isEqualTo("{ \"$max\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMin() { - assertThat(transform("min(a, b)"), is((Object) Document.parse("{ \"$min\" : [ \"$a\" , \"$b\"]}"))); + void shouldRenderMethodReferenceNodeMin() { + assertThat(transform("min(a, b)")).isEqualTo("{ \"$min\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodePush() { - assertThat(transform("push({'item':'$item', 'quantity':'$qty'})"), - is((Object) Document.parse("{ \"$push\" : { \"item\" : \"$item\" , \"quantity\" : \"$qty\"}}"))); + void shouldRenderMethodReferenceNodePush() { + assertThat(transform("push({'item':'$item', 'quantity':'$qty'})")) + .isEqualTo("{ \"$push\" : { \"item\" : \"$item\" , \"quantity\" : \"$qty\"}}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAddToSet() { - assertThat(transform("addToSet($1)"), is((Object) Document.parse("{ \"$addToSet\" : \"$1\"}"))); + void shouldRenderMethodReferenceAddToSet() { + assertThat(transform("addToSet($1)")).isEqualTo("{ \"$addToSet\" : \"$1\"}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeStdDevPop() { - assertThat(transform("stdDevPop(scores.score)"), - is((Object) Document.parse("{ \"$stdDevPop\" : [ \"$scores.score\"]}"))); + void shouldRenderMethodReferenceNodeStdDevPop() { + assertThat(transform("stdDevPop(scores.score)")) + .isEqualTo("{ \"$stdDevPop\" : [ \"$scores.score\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeStdDevSamp() { - assertThat(transform("stdDevSamp(age)"), is((Object) Document.parse("{ \"$stdDevSamp\" : [ \"$age\"]}"))); + void shouldRenderMethodReferenceNodeStdDevSamp() { + assertThat(transform("stdDevSamp(age)")).isEqualTo("{ \"$stdDevSamp\" : [ \"$age\"]}"); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeEq() { - 
assertThat(transform("foo == 10"), is((Object) Document.parse("{ \"$eq\" : [ \"$foo\" , 10]}"))); + void shouldRenderOperationNodeEq() { + assertThat(transform("foo == 10")).isEqualTo("{ \"$eq\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeNe() { - assertThat(transform("foo != 10"), is((Object) Document.parse("{ \"$ne\" : [ \"$foo\" , 10]}"))); + void shouldRenderOperationNodeNe() { + assertThat(transform("foo != 10")).isEqualTo("{ \"$ne\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeGt() { - assertThat(transform("foo > 10"), is((Object) Document.parse("{ \"$gt\" : [ \"$foo\" , 10]}"))); + void shouldRenderOperationNodeGt() { + assertThat(transform("foo > 10")).isEqualTo("{ \"$gt\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeGte() { - assertThat(transform("foo >= 10"), is((Object) Document.parse("{ \"$gte\" : [ \"$foo\" , 10]}"))); + void shouldRenderOperationNodeGte() { + assertThat(transform("foo >= 10")).isEqualTo("{ \"$gte\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeLt() { - assertThat(transform("foo < 10"), is((Object) Document.parse("{ \"$lt\" : [ \"$foo\" , 10]}"))); + void shouldRenderOperationNodeLt() { + assertThat(transform("foo < 10")).isEqualTo("{ \"$lt\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeLte() { - assertThat(transform("foo <= 10"), is((Object) Document.parse("{ \"$lte\" : [ \"$foo\" , 10]}"))); + void shouldRenderOperationNodeLte() { + assertThat(transform("foo <= 10")).isEqualTo("{ \"$lte\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodePow() { - assertThat(transform("foo^2"), is((Object) Document.parse("{ \"$pow\" : [ \"$foo\" , 2]}"))); + void shouldRenderOperationNodePow() { + assertThat(transform("foo^2")).isEqualTo("{ \"$pow\" : [ \"$foo\" , 2]}"); } @Test // DATAMONGO-1530 - 
public void shouldRenderOperationNodeOr() { - assertThat(transform("true || false"), is((Object) Document.parse("{ \"$or\" : [ true , false]}"))); + void shouldRenderOperationNodeOr() { + assertThat(transform("true || false")).isEqualTo("{ \"$or\" : [ true , false]}"); } @Test // DATAMONGO-1530 - public void shouldRenderComplexOperationNodeOr() { - assertThat(transform("1+2 || concat(a, b) || true"), is((Object) Document - .parse("{ \"$or\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}"))); + void shouldRenderComplexOperationNodeOr() { + assertThat(transform("1+2 || concat(a, b) || true")).isEqualTo( + "{ \"$or\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}"); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeAnd() { - assertThat(transform("true && false"), is((Object) Document.parse("{ \"$and\" : [ true , false]}"))); + void shouldRenderOperationNodeAnd() { + assertThat(transform("true && false")).isEqualTo("{ \"$and\" : [ true , false]}"); } @Test // DATAMONGO-1530 - public void shouldRenderComplexOperationNodeAnd() { - assertThat(transform("1+2 && concat(a, b) && true"), is((Object) Document - .parse("{ \"$and\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}"))); + void shouldRenderComplexOperationNodeAnd() { + assertThat(transform("1+2 && concat(a, b) && true")).isEqualTo( + "{ \"$and\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}"); } @Test // DATAMONGO-1530 - public void shouldRenderNotCorrectly() { - assertThat(transform("!true"), is((Object) Document.parse("{ \"$not\" : [ true]}"))); + void shouldRenderNotCorrectly() { + assertThat(transform("!true")).isEqualTo("{ \"$not\" : [ true]}"); } @Test // DATAMONGO-1530 - public void shouldRenderComplexNotCorrectly() { - assertThat(transform("!(foo > 10)"), is((Object) Document.parse("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}"))); + void shouldRenderComplexNotCorrectly() { + 
assertThat(transform("!(foo > 10)")).isEqualTo("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceIndexOfBytes() { - assertThat(transform("indexOfBytes(item, 'foo')"), - is(Document.parse("{ \"$indexOfBytes\" : [ \"$item\" , \"foo\"]}"))); + void shouldRenderMethodReferenceIndexOfBytes() { + assertThat(transform("indexOfBytes(item, 'foo')")) + .isEqualTo("{ \"$indexOfBytes\" : [ \"$item\" , \"foo\"]}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceIndexOfCP() { - assertThat(transform("indexOfCP(item, 'foo')"), is(Document.parse("{ \"$indexOfCP\" : [ \"$item\" , \"foo\"]}"))); + void shouldRenderMethodReferenceIndexOfCP() { + assertThat(transform("indexOfCP(item, 'foo')")) + .isEqualTo("{ \"$indexOfCP\" : [ \"$item\" , \"foo\"]}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceSplit() { - assertThat(transform("split(item, ',')"), is(Document.parse("{ \"$split\" : [ \"$item\" , \",\"]}"))); + void shouldRenderMethodReferenceSplit() { + assertThat(transform("split(item, ',')")).isEqualTo("{ \"$split\" : [ \"$item\" , \",\"]}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceStrLenBytes() { - assertThat(transform("strLenBytes(item)"), is(Document.parse("{ \"$strLenBytes\" : \"$item\"}"))); + void shouldRenderMethodReferenceStrLenBytes() { + assertThat(transform("strLenBytes(item)")).isEqualTo("{ \"$strLenBytes\" : \"$item\"}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceStrLenCP() { - assertThat(transform("strLenCP(item)"), is(Document.parse("{ \"$strLenCP\" : \"$item\"}"))); + void shouldRenderMethodReferenceStrLenCP() { + assertThat(transform("strLenCP(item)")).isEqualTo("{ \"$strLenCP\" : \"$item\"}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodSubstrCP() { - assertThat(transform("substrCP(item, 0, 5)"), is(Document.parse("{ \"$substrCP\" : [ \"$item\" , 0 , 5]}"))); + void 
shouldRenderMethodSubstrCP() { + assertThat(transform("substrCP(item, 0, 5)")).isEqualTo("{ \"$substrCP\" : [ \"$item\" , 0 , 5]}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceReverseArray() { - assertThat(transform("reverseArray(array)"), is(Document.parse("{ \"$reverseArray\" : \"$array\"}"))); + void shouldRenderMethodReferenceReverseArray() { + assertThat(transform("reverseArray(array)")).isEqualTo("{ \"$reverseArray\" : \"$array\"}"); } @Test // DATAMONGO-1548 - @Ignore("Document API cannot render String[]") - public void shouldRenderMethodReferenceReduce() { - assertThat(transform("reduce(field, '', {'$concat':new String[]{'$$value','$$this'}})"), is(Document.parse( - "{ \"$reduce\" : { \"input\" : \"$field\" , \"initialValue\" : \"\" , \"in\" : { \"$concat\" : [ \"$$value\" , \"$$this\"]}}}"))); + void shouldRenderMethodReferenceReduce() { + assertThat(transform("reduce(field, '', {'$concat':{'$$value','$$this'}})")).isEqualTo( + "{ \"$reduce\" : { \"input\" : \"$field\" , \"initialValue\" : \"\" , \"in\" : { \"$concat\" : [ \"$$value\" , \"$$this\"]}}}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceZip() { - assertThat(transform("zip(new String[]{'$array1', '$array2'})"), - is(Document.parse("{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"]}}"))); + void shouldRenderMethodReferenceZip() { + assertThat(transform("zip(new String[]{'$array1', '$array2'})")) + .isEqualTo("{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"]}}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceZipWithOptionalArgs() { - assertThat(transform("zip(new String[]{'$array1', '$array2'}, true, new int[]{1,2})"), is(Document.parse( - "{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"] , \"useLongestLength\" : true , \"defaults\" : [ 1 , 2]}}"))); + void shouldRenderMethodReferenceZipWithOptionalArgs() { + assertThat(transform("zip(new String[]{'$array1', '$array2'}, true, new int[]{1,2})")).isEqualTo( + 
"{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"] , \"useLongestLength\" : true , \"defaults\" : [ 1 , 2]}}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodIn() { - assertThat(transform("in('item', array)"), is(Document.parse("{ \"$in\" : [ \"item\" , \"$array\"]}"))); + void shouldRenderMethodIn() { + assertThat(transform("in('item', array)")).isEqualTo("{ \"$in\" : [ \"item\" , \"$array\"]}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodRefereneIsoDayOfWeek() { - assertThat(transform("isoDayOfWeek(date)"), is(Document.parse("{ \"$isoDayOfWeek\" : \"$date\"}"))); + void shouldRenderMethodRefereneIsoDayOfWeek() { + assertThat(transform("isoDayOfWeek(date)")).isEqualTo("{ \"$isoDayOfWeek\" : \"$date\"}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodRefereneIsoWeek() { - assertThat(transform("isoWeek(date)"), is(Document.parse("{ \"$isoWeek\" : \"$date\"}"))); + void shouldRenderMethodRefereneIsoWeek() { + assertThat(transform("isoWeek(date)")).isEqualTo("{ \"$isoWeek\" : \"$date\"}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodRefereneIsoWeekYear() { - assertThat(transform("isoWeekYear(date)"), is(Document.parse("{ \"$isoWeekYear\" : \"$date\"}"))); + void shouldRenderMethodRefereneIsoWeekYear() { + assertThat(transform("isoWeekYear(date)")).isEqualTo("{ \"$isoWeekYear\" : \"$date\"}"); } @Test // DATAMONGO-1548 - public void shouldRenderMethodRefereneType() { - assertThat(transform("type(a)"), is(Document.parse("{ \"$type\" : \"$a\"}"))); + void shouldRenderMethodRefereneType() { + assertThat(transform("type(a)")).isEqualTo("{ \"$type\" : \"$a\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderArrayToObjectWithFieldReference() { + assertThat(transform("arrayToObject(field)")).isEqualTo("{ \"$arrayToObject\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderArrayToObjectWithArray() { + + assertThat(transform("arrayToObject(new String[]{'key', 'value'})")) + .isEqualTo("{ 
\"$arrayToObject\" : [\"key\", \"value\"]}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderObjectToArrayWithFieldReference() { + assertThat(transform("objectToArray(field)")).isEqualTo("{ \"$objectToArray\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderMergeObjects() { + + assertThat(transform("mergeObjects(field1, $$ROOT)")) + .isEqualTo("{ \"$mergeObjects\" : [\"$field1\", \"$$ROOT\"]}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderTrimWithoutChars() { + assertThat(transform("trim(field)")).isEqualTo("{ \"$trim\" : {\"input\" : \"$field\"}}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderTrimWithChars() { + + assertThat(transform("trim(field, 'ie')")) + .isEqualTo("{ \"$trim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderTrimWithCharsFromFieldReference() { + + assertThat(transform("trim(field1, field2)")) + .isEqualTo("{ \"$trim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderLtrimWithoutChars() { + assertThat(transform("ltrim(field)")).isEqualTo("{ \"$ltrim\" : {\"input\" : \"$field\"}}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderLtrimWithChars() { + + assertThat(transform("ltrim(field, 'ie')")) + .isEqualTo("{ \"$ltrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderLtrimWithCharsFromFieldReference() { + + assertThat(transform("ltrim(field1, field2)")) + .isEqualTo("{ \"$ltrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderRtrimWithoutChars() { + assertThat(transform("rtrim(field)")).isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field\"}}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderRtrimWithChars() { + + assertThat(transform("rtrim(field, 'ie')")) + .isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}"); + } + + @Test // DATAMONGO-2077 + void 
shouldRenderRtrimWithCharsFromFieldReference() { + + assertThat(transform("rtrim(field1, field2)")) + .isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); + } + + @Test // GH-3725 + void shouldRenderRegexFindWithoutOptions() { + + assertThat(transform("regexFind(field1,'e')")) + .isEqualTo("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexFindWithOptions() { + + assertThat(transform("regexFind(field1,'e','i')")) + .isEqualTo("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexFindWithOptionsFromFieldReference() { + + assertThat(transform("regexFind(field1,'e',field2)")) + .isEqualTo("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllWithoutOptions() { + + assertThat(transform("regexFindAll(field1,'e')")) + .isEqualTo("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptions() { + + assertThat(transform("regexFindAll(field1,'e','i')")) + .isEqualTo("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptionsFromFieldReference() { + + assertThat(transform("regexFindAll(field1,'e',field2)")) + .isEqualTo("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexMatchWithoutOptions() { + + assertThat(transform("regexMatch(field1,'e')")) + .isEqualTo("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptions() { + + assertThat(transform("regexMatch(field1,'e','i')")) + .isEqualTo("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" 
: \"e\" , \"options\" : \"i\"}}"); + } + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptionsFromFieldReference() { + + assertThat(transform("regexMatch(field1,'e',field2)")) + .isEqualTo("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}"); + } + + @Test // GH-3695 + void shouldRenderReplaceOne() { + + assertThat(transform("replaceOne(field, 'bar', 'baz')")) + .isEqualTo("{ \"$replaceOne\" : {\"input\" : \"$field\" , \"find\" : \"bar\" , \"replacement\" : \"baz\"}}"); + } + + @Test // GH-3695 + void shouldRenderReplaceAll() { + + assertThat(transform("replaceAll(field, 'bar', 'baz')")) + .isEqualTo("{ \"$replaceAll\" : {\"input\" : \"$field\" , \"find\" : \"bar\" , \"replacement\" : \"baz\"}}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderConvertWithoutOptionalParameters() { + + assertThat(transform("convert(field, 'string')")) + .isEqualTo("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"string\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderConvertWithOnError() { + + assertThat(transform("convert(field, 'int', 'Not an integer.')")) + .isEqualTo("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderConvertWithOnErrorOnNull() { + + assertThat(transform("convert(field, 'int', 'Not an integer.', -1)")).isEqualTo( + "{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\", \"onNull\" : -1 }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToBool() { + assertThat(transform("toBool(field)")).isEqualTo("{ \"$toBool\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToDate() { + assertThat(transform("toDate(field)")).isEqualTo("{ \"$toDate\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToDecimal() { + assertThat(transform("toDecimal(field)")).isEqualTo("{ \"$toDecimal\" : \"$field\"}"); + } + + @Test // 
DATAMONGO-2077 + void shouldRenderToDouble() { + assertThat(transform("toDouble(field)")).isEqualTo("{ \"$toDouble\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToInt() { + assertThat(transform("toInt(field)")).isEqualTo("{ \"$toInt\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToLong() { + assertThat(transform("toLong(field)")).isEqualTo("{ \"$toLong\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToObjectId() { + assertThat(transform("toObjectId(field)")).isEqualTo("{ \"$toObjectId\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderToString() { + assertThat(transform("toString(field)")).isEqualTo("{ \"$toString\" : \"$field\"}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderDateFromStringWithoutOptionalParameters() { + + assertThat(transform("dateFromString(field)")) + .isEqualTo("{ \"$dateFromString\" : {\"dateString\" : \"$field\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderDateFromStringWithFormat() { + + assertThat(transform("dateFromString(field, 'DD-MM-YYYY')")).isEqualTo( + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderDateFromStringWithFormatAndTimezone() { + + assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC')")).isEqualTo( + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\" }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderDateFromStringWithFormatTimezoneAndOnError() { + + assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1)")).isEqualTo( + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1 }}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderDateFromStringWithFormatTimezoneOnErrorAndOnNull() { + + assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1, -2)")).isEqualTo( + 
"{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1, \"onNull\" : -2}}"); + } + + @Test // DATAMONGO-2077, DATAMONGO-2671 + void shouldRenderDateFromParts() { + + assertThat(transform("dateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo( + "{ \"$dateFromParts\" : {\"year\" : \"$y\", \"month\" : \"$m\", \"day\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}"); + } + + @Test // DATAMONGO-2077, DATAMONGO-2671 + void shouldRenderIsoDateFromParts() { + + assertThat(transform("isoDateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo( + "{ \"$dateFromParts\" : {\"isoWeekYear\" : \"$y\", \"isoWeek\" : \"$m\", \"isoDayOfWeek\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderDateToParts() { + + assertThat(transform("dateToParts(field, 'UTC', false)")).isEqualTo( + "{ \"$dateToParts\" : {\"date\" : \"$field\", \"timezone\" : \"UTC\", \"iso8601\" : false}}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderIndexOfArray() { + + assertThat(transform("indexOfArray(field, 2)")) + .isEqualTo("{ \"$indexOfArray\" : [\"$field\", 2 ]}"); + } + + @Test // DATAMONGO-2077 + void shouldRenderRange() { + + assertThat(transform("range(0, 10, 2)")).isEqualTo("{ \"$range\" : [0, 10, 2 ]}"); + } + + @Test // DATAMONGO-2370 + void shouldRenderRound() { + assertThat(transform("round(field)")).isEqualTo("{ \"$round\" : [\"$field\"]}"); + } + + @Test // DATAMONGO-2370 + void shouldRenderRoundWithPlace() { + assertThat(transform("round(field, 2)")).isEqualTo("{ \"$round\" : [\"$field\", 2]}"); + } + + @Test // GH-3714 + void shouldRenderDegreesToRadians() { + assertThat(transform("degreesToRadians(angle_a)")).isEqualTo("{ \"$degreesToRadians\" : \"$angle_a\"}"); + } + + @Test // GH-3712 + void 
shouldRenderCovariancePop() { + assertThat(transform("covariancePop(field1, field2)")) + .isEqualTo("{ \"$covariancePop\" : [\"$field1\", \"$field2\"]}"); + } + + @Test // GH-3712 + void shouldRenderCovarianceSamp() { + assertThat(transform("covarianceSamp(field1, field2)")) + .isEqualTo("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}"); + } + + @Test // GH-3715 + void shouldRenderRank() { + assertThat(transform("rank()")).isEqualTo("{ $rank : {} }"); + } + + @Test // GH-3715 + void shouldRenderDenseRank() { + assertThat(transform("denseRank()")).isEqualTo("{ $denseRank : {} }"); + } + + @Test // GH-3717 + void shouldRenderDocumentNumber() { + assertThat(transform("documentNumber()")).isEqualTo("{ $documentNumber : {} }"); + } + + @Test // GH-3727 + void rendersShift() { + + assertThat(transform("shift(quantity, 1)")) + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1 } }"); + } + + @Test // GH-3727 + void rendersShiftWithDefault() { + + assertThat(transform("shift(quantity, 1, 'Not available')")) + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }"); + } + + @Test // GH-3716 + void shouldRenderDerivative() { + assertThat(transform("derivative(miles, 'hour')")) + .isEqualTo("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }"); + } + + @Test // GH-3721 + void shouldRenderIntegral() { + assertThat(transform("integral(field)")).isEqualTo("{ \"$integral\" : { \"input\" : \"$field\" }}"); + } + + @Test // GH-3721 + void shouldRenderIntegralWithUnit() { + assertThat(transform("integral(field, 'hour')")) + .isEqualTo("{ \"$integral\" : { \"input\" : \"$field\", \"unit\" : \"hour\" }}"); + } + + @Test // GH-3728 + void shouldRenderSin() { + assertThat(transform("sin(angle)")).isEqualTo("{ \"$sin\" : \"$angle\"}"); + } + + @Test // GH-3728 + void shouldRenderSinh() { + assertThat(transform("sinh(angle)")).isEqualTo("{ \"$sinh\" : \"$angle\"}"); + } + + @Test // GH-3708 + void shouldRenderASin() { + 
assertThat(transform("asin(number)")).isEqualTo("{ \"$asin\" : \"$number\"}"); + } + + @Test // GH-3708 + void shouldRenderASinh() { + assertThat(transform("asinh(number)")).isEqualTo("{ \"$asinh\" : \"$number\"}"); + } + + @Test // GH-3710 + void shouldRenderCos() { + assertThat(transform("cos(angle)")).isEqualTo("{ \"$cos\" : \"$angle\"}"); + } + + @Test // GH-3710 + void shouldRenderCosh() { + assertThat(transform("cosh(angle)")).isEqualTo("{ \"$cosh\" : \"$angle\"}"); + } + + @Test // GH-3707 + void shouldRenderACos() { + assertThat(transform("acos(angle)")).isEqualTo("{ \"$acos\" : \"$angle\"}"); + } + + @Test // GH-3707 + void shouldRenderACosh() { + assertThat(transform("acosh(angle)")).isEqualTo("{ \"$acosh\" : \"$angle\"}"); + } + + @Test // GH-3730 + void shouldRenderTan() { + assertThat(transform("tan(angle)")).isEqualTo("{ \"$tan\" : \"$angle\"}"); + } + + @Test // GH-3730 + void shouldRenderTanh() { + assertThat(transform("tanh(angle)")).isEqualTo("{ \"$tanh\" : \"$angle\"}"); + } + + @Test // GH-3709 + void shouldRenderATan() { + assertThat(transform("atan(number)")).isEqualTo("{ \"$atan\" : \"$number\"}"); + } + + @Test // GH-3709 + void shouldRenderATan2() { + assertThat(transform("atan2(number1,number2)")).isEqualTo("{ \"$atan2\" : [ \"$number1\" , \"$number2\" ] }"); + } + + @Test // GH-3709 + void shouldRenderATanh() { + assertThat(transform("atanh(number)")).isEqualTo("{ \"$atanh\" : \"$number\"}"); + } + + @Test // GH-3713 + void shouldRenderDateAdd() { + assertThat(transform("dateAdd(purchaseDate, 'day', 3)")) + .isEqualTo("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }"); + } + + @Test // GH-4139 + void shouldRenderDateSubtract() { + assertThat(transform("dateSubtract(purchaseDate, 'day', 3)")) + .isEqualTo("{ $dateSubtract: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }"); + } + + @Test // GH-3713 + void shouldRenderDateDiff() { + assertThat(transform("dateDiff(purchaseDate, 
delivered, 'day')")) + .isEqualTo("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }"); + } + + @Test // GH-3724 + void shouldRenderRand() { + assertThat(transform("rand()")).isEqualTo("{ $rand : {} }"); + } + + @Test // GH-4139 + void shouldRenderBottom() { + assertThat(transform("bottom(new String[]{\"$playerId\", \"$score\" }, { \"score\" : -1 })")).isEqualTo("{ $bottom : { output: [ \"$playerId\", \"$score\" ], sortBy: { \"score\": -1 }}}"); + } + + @Test // GH-4139 + void shouldRenderBottomN() { + assertThat(transform("bottomN(3, new String[]{\"$playerId\", \"$score\" }, { \"score\" : -1 })")).isEqualTo("{ $bottomN : { n : 3, output: [ \"$playerId\", \"$score\" ], sortBy: { \"score\": -1 }}}"); + } + + @Test // GH-4139 + void shouldRenderTop() { + assertThat(transform("top(new String[]{\"$playerId\", \"$score\" }, { \"score\" : -1 })")).isEqualTo("{ $top : { output: [ \"$playerId\", \"$score\" ], sortBy: { \"score\": -1 }}}"); + } + + @Test // GH-4139 + void shouldRenderTopN() { + assertThat(transform("topN(3, new String[]{\"$playerId\", \"$score\" }, { \"score\" : -1 })")).isEqualTo("{ $topN : { n : 3, output: [ \"$playerId\", \"$score\" ], sortBy: { \"score\": -1 }}}"); + } + + @Test // GH-4139 + void shouldRenderFirstN() { + assertThat(transform("firstN(3, \"$score\")")).isEqualTo("{ $firstN : { n : 3, input : \"$score\" }}"); + } + + @Test // GH-4139 + void shouldRenderLastN() { + assertThat(transform("lastN(3, \"$score\")")).isEqualTo("{ $lastN : { n : 3, input : \"$score\" }}"); + } + + @Test // GH-4139 + void shouldRenderMaxN() { + assertThat(transform("maxN(3, \"$score\")")).isEqualTo("{ $maxN : { n : 3, input : \"$score\" }}"); + } + + @Test // GH-4139 + void shouldRenderMinN() { + assertThat(transform("minN(3, \"$score\")")).isEqualTo("{ $minN : { n : 3, input : \"$score\" }}"); + } + + @Test // GH-4139 + void shouldRenderDateTrunc() { + assertThat(transform("dateTrunc(purchaseDate, \"week\", 2, 
\"monday\")")).isEqualTo("{ $dateTrunc : { date : \"$purchaseDate\", unit : \"week\", binSize : 2, startOfWeek : \"monday\" }}"); + } + + @Test // GH-4139 + void shouldRenderGetField() { + assertThat(transform("getField(\"score\", source)")).isEqualTo("{ $getField : { field : \"score\", input : \"$source\" }}"); + } + + @Test // GH-4139 + void shouldRenderSetField() { + assertThat(transform("setField(\"score\", 100, source)")).isEqualTo("{ $setField : { field : \"score\", value : 100, input : \"$source\" }}"); + } + + @Test // GH-4139 + void shouldRenderSortArray() { + assertThat(transform( + "sortArray(team, new org.bson.Document(\"name\" , 1))")).isEqualTo("{ $sortArray : { input : \"$team\", sortBy : {\"name\" : 1 } }}"); + } + + @Test // GH-4139 + void shouldTsIncrement() { + assertThat(transform("tsIncrement(saleTimestamp)")).isEqualTo("{ $tsIncrement: \"$saleTimestamp\" }"); + } + + @Test // GH-4139 + void shouldTsSecond() { + assertThat(transform("tsSecond(saleTimestamp)")).isEqualTo("{ $tsSecond: \"$saleTimestamp\" }"); + } + + @Test // GH-4139 + void shouldRenderLocf() { + assertThat(transform("locf(price)")).isEqualTo("{ $locf: \"$price\" }"); + } + + @Test // GH-4473 + void shouldRenderPercentile() { + assertThat(transform("percentile(new String[]{\"$scoreOne\", \"$scoreTwo\" }, new double[]{0.4}, \"approximate\")")) + .isEqualTo("{ $percentile : { input : [\"$scoreOne\", \"$scoreTwo\"], p : [0.4], method : \"approximate\" }}"); + + assertThat(transform("percentile(score, new double[]{0.4, 0.85}, \"approximate\")")) + .isEqualTo("{ $percentile : { input : \"$score\", p : [0.4, 0.85], method : \"approximate\" }}"); + + assertThat(transform("percentile(\"$score\", new double[]{0.4, 0.85}, \"approximate\")")) + .isEqualTo("{ $percentile : { input : \"$score\", p : [0.4, 0.85], method : \"approximate\" }}"); + } + + @Test // GH-4472 + void shouldRenderMedian() { + + assertThat(transform("median(new String[]{\"$scoreOne\", \"$scoreTwo\" }, \"approximate\")")) 
+ .isEqualTo("{ $median : { input : [\"$scoreOne\", \"$scoreTwo\"], method : \"approximate\" }}"); + + assertThat(transform("median(score, \"approximate\")")) + .isEqualTo("{ $median : { input : \"$score\", method : \"approximate\" }}"); + } + + private Document transform(String expression, Object... params) { + return (Document) transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); } - private Object transform(String expression, Object... params) { + private Object transformValue(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StateStats.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StateStats.java index b7d349a65d..a5a47ec85a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StateStats.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StateStats.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java new file mode 100644 index 0000000000..61d2951ebb --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java @@ -0,0 +1,312 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.regex.Pattern; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * Unit test for {@link StringOperators}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Divya Srivastava + * @currentRead Royal Assassin - Robin Hobb + */ +class StringOperatorsUnitTests { + + private static final String EXPRESSION_STRING = "{ \"$fitz\" : \"chivalry\" }"; + private static final Document EXPRESSION_DOC = Document.parse(EXPRESSION_STRING); + private static final AggregationExpression EXPRESSION = context -> EXPRESSION_DOC; + + @Test // DATAMONGO-2049 + void shouldRenderTrim() { + + assertThat(StringOperators.valueOf("shrewd").trim().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $trim: { \"input\" : \"$shrewd\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderTrimForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).trim().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $trim: { \"input\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderTrimWithChars() { + + assertThat(StringOperators.valueOf("shrewd").trim("sh").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $trim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderTrimWithCharsExpression() { + + assertThat(StringOperators.valueOf("shrewd").trim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $trim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderTrimLeft() { + + assertThat(StringOperators.valueOf("shrewd").trim().left().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderTrimLeftWithChars() { + + assertThat(StringOperators.valueOf("shrewd").trim("sh").left().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderTrimRight() { + + 
assertThat(StringOperators.valueOf("shrewd").trim().right().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderTrimRightWithChars() { + + assertThat(StringOperators.valueOf("shrewd").trim("sh").right().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderLTrim() { + + assertThat(StringOperators.valueOf("shrewd").ltrim().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderLTrimForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).ltrim().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $ltrim: { \"input\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderLTrimWithChars() { + + assertThat(StringOperators.valueOf("shrewd").ltrim("sh").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderLTrimWithCharsExpression() { + + assertThat(StringOperators.valueOf("shrewd").ltrim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderRTrim() { + + assertThat(StringOperators.valueOf("shrewd").rtrim().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderRTrimForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).rtrim().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $rtrim: { \"input\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderRTrimWithChars() { + + 
assertThat(StringOperators.valueOf("shrewd").rtrim("sh").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); + } + + @Test // DATAMONGO-2049 + void shouldRenderRTrimWithCharsExpression() { + + assertThat(StringOperators.valueOf("shrewd").rtrim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindAll() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }"); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).regexFindAll("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllForRegexExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllWithPattern() { + + assertThat(StringOperators.valueOf("shrewd") + .regexFindAll( + Pattern.compile("foo", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL | Pattern.COMMENTS)) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"foo\" , \"options\" : \"imsx\" } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptions() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } "); + } 
+ + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptionsExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexMatch() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }"); + } + + @Test // GH-3725 + void shouldRenderRegexMatchForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).regexMatch("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexMatch: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } "); + } + + @Test // GH-3725 + void shouldRenderRegexMatchForRegexExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexMatchForPattern() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch(Pattern.compile("foo", Pattern.CASE_INSENSITIVE)) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : \"foo\", \"options\" : \"i\"} } "); + } + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptions() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } "); + } + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptionsExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ 
$regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFind() { + + assertThat(StringOperators.valueOf("shrewd").regexFind("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }"); + } + + @Test // GH-3725 + void shouldRenderRegexFindForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).regexFind("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindForRegexExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFind(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindForPattern() { + + assertThat(StringOperators.valueOf("shrewd").regexFind(Pattern.compile("foo", Pattern.MULTILINE)) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : \"foo\", \"options\" : \"m\"} } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindWithOptions() { + + assertThat(StringOperators.valueOf("shrewd").regexFind("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindWithOptionsExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFind("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + + " } } "); + } + + @Test // GH-3695 + void shouldRenderReplaceOne() { + + 
assertThat(StringOperators.valueOf("bar").replaceOne("foobar","baz").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $replaceOne : {\"find\" : \"foobar\", \"input\" : \"$bar\", \"replacement\" : \"baz\"}}"); + } + + @Test // GH-3695 + void shouldRenderReplaceOneForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).replaceOne("a","s").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $replaceOne : {\"find\" : \"a\", \"input\" : " + EXPRESSION_STRING + ", \"replacement\" : \"s\"}}"); + } + + @Test // GH-3695 + void shouldRenderReplaceAll() { + + assertThat(StringOperators.valueOf("bar").replaceAll("foobar","baz").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $replaceAll : {\"find\" : \"foobar\", \"input\" : \"$bar\", \"replacement\" : \"baz\"}}"); + } + + @Test // GH-3695 + void shouldRenderReplaceAllForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).replaceAll("a","s").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $replaceAll : {\"find\" : \"a\", \"input\" : " + EXPRESSION_STRING + ", \"replacement\" : \"s\"}}"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContextUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContextUnitTests.java index 5efb95d60c..bcc0b6e17f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContextUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/TypeBasedAggregationOperationContextUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,37 +15,39 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; import static org.springframework.data.mongodb.core.DocumentTestUtils.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static org.springframework.data.mongodb.core.aggregation.Fields.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; import java.util.List; import org.bson.Document; import org.bson.types.ObjectId; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.core.convert.converter.Converter; import org.springframework.core.convert.support.GenericConversionService; import org.springframework.data.annotation.Id; -import org.springframework.data.annotation.PersistenceConstructor; import org.springframework.data.convert.CustomConversions; +import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mapping.MappingException; +import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce; +import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce.Variable; import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference; import 
org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.data.mongodb.core.aggregation.SetOperators.SetUnion; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.mongodb.core.query.Criteria; /** @@ -56,7 +58,7 @@ * @author Mark Paluch * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class TypeBasedAggregationOperationContextUnitTests { MongoMappingContext context; @@ -65,7 +67,7 @@ public class TypeBasedAggregationOperationContextUnitTests { @Mock DbRefResolver dbRefResolver; - @Before + @BeforeEach public void setUp() { this.context = new MongoMappingContext(); @@ -75,12 +77,12 @@ public void setUp() { @Test public void findsSimpleReference() { - assertThat(getContext(Foo.class).getReference("bar"), is(notNullValue())); + assertThat(getContext(Foo.class).getReference("bar")).isNotNull(); } - @Test(expected = MappingException.class) + @Test public void rejectsInvalidFieldReference() { - getContext(Foo.class).getReference("foo"); + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> getContext(Foo.class).getReference("foo")); } @Test // DATAMONGO-741 @@ -90,16 +92,17 @@ public void returnsReferencesToNestedFieldsCorrectly() { Field field = field("bar.name"); - assertThat(context.getReference("bar.name"), is(notNullValue())); - assertThat(context.getReference(field), is(notNullValue())); - assertThat(context.getReference(field), is(context.getReference("bar.name"))); + assertThat(context.getReference("bar.name")).isNotNull(); + 
assertThat(context.getReference(field)).isNotNull(); + assertThat(context.getReference(field)).isEqualTo(context.getReference("bar.name")); } @Test // DATAMONGO-806 public void aliasesIdFieldCorrectly() { AggregationOperationContext context = getContext(Foo.class); - assertThat(context.getReference("id"), is(new DirectFieldReference(new ExposedField(field("id", "_id"), true)))); + assertThat(context.getReference("id")) + .isEqualTo(new DirectFieldReference(new ExposedField(field("id", "_id"), true))); } @Test // DATAMONGO-912 @@ -117,7 +120,7 @@ public void shouldUseCustomConversionIfPresentAndConversionIsRequiredInFirstStag org.bson.Document agg = newAggregation(matchStage, projectStage).toDocument("test", context); org.bson.Document age = getValue(getValue(getPipelineElementFromAggregationAt(agg, 0), "$match"), "age"); - assertThat(age, is(new org.bson.Document("v", 10))); + assertThat(age).isEqualTo(new Document("v", 10)); } @Test // DATAMONGO-912 @@ -135,7 +138,7 @@ public void shouldUseCustomConversionIfPresentAndConversionIsRequiredInLaterStag org.bson.Document agg = newAggregation(projectStage, matchStage).toDocument("test", context); org.bson.Document age = getValue(getValue(getPipelineElementFromAggregationAt(agg, 1), "$match"), "age"); - assertThat(age, is(new org.bson.Document("v", 10))); + assertThat(age).isEqualTo(new Document("v", 10)); } @Test // DATAMONGO-960 @@ -149,13 +152,13 @@ public void rendersAggregationOptionsInTypedAggregationContextCorrectly() { org.bson.Document document = agg.toDocument("person", context); org.bson.Document projection = getPipelineElementFromAggregationAt(document, 0); - assertThat(projection.containsKey("$project"), is(true)); + assertThat(projection.containsKey("$project")).isTrue(); - assertThat(projection.get("$project"), is(new Document("name", 1).append("age", 1))); + assertThat(projection.get("$project")).isEqualTo(new Document("name", 1).append("age", 1)); - assertThat(document.get("allowDiskUse"), is(true)); - 
assertThat(document.get("explain"), is(true)); - assertThat(document.get("cursor"), is(new Document("foo", 1))); + assertThat(document.get("allowDiskUse")).isEqualTo(true); + assertThat(document.get("explain")).isEqualTo(true); + assertThat(document.get("cursor")).isEqualTo(new Document("foo", 1)); } @Test // DATAMONGO-1585 @@ -169,7 +172,7 @@ public void rendersSortOfProjectedFieldCorrectly() { Document sort = getPipelineElementFromAggregationAt(dbo, 1); Document definition = (Document) sort.get("$sort"); - assertThat(definition.get("counter"), is(equalTo(1))); + assertThat(definition.get("counter")).isEqualTo(1); } @Test // DATAMONGO-1586 @@ -183,9 +186,21 @@ public void rendersFieldAliasingProjectionCorrectly() { Document dbo = agg.toDocument("person", context); Document projection = getPipelineElementFromAggregationAt(dbo, 0); - assertThat(getAsDocument(projection, "$project"), isBsonObject() // - .containing("person_name", "$name") // - .containing("age", "$age.value")); + assertThat(getAsDocument(projection, "$project")).containsEntry("person_name", "$name") // + .containsEntry("age", "$age.value"); + } + + @Test // DATAMONGO-1893 + public void considersIncludedFieldsFromSingleExclusionsCorrectly() { + + AggregationOperationContext context = getContext(FooPerson.class); + TypedAggregation agg = newAggregation(FooPerson.class, project() // + .andExclude("name"), sort(Sort.by("age.value", "lastName"))); + + Document dbo = agg.toDocument("person", context); + + Document sort = getPipelineElementFromAggregationAt(dbo, 1); + assertThat(getAsDocument(sort, "$sort")).isEqualTo(new Document("age.value", 1).append("last_name", 1)); } @Test // DATAMONGO-1133 @@ -200,7 +215,7 @@ public void shouldHonorAliasedFieldsInGroupExpressions() { org.bson.Document definition = (org.bson.Document) group.get("$group"); - assertThat(definition.get("_id"), is(equalTo("$counter_name"))); + assertThat(definition.get("_id")).isEqualTo("$counter_name"); } @Test // DATAMONGO-1326, 
DATAMONGO-1585 @@ -216,8 +231,8 @@ public void lookupShouldInheritFieldsFromInheritingAggregationOperation() { org.bson.Document definition = (org.bson.Document) sort.get("$sort"); - assertThat(definition.get("resourceId"), is(equalTo(1))); - assertThat(definition.get("counter_name"), is(equalTo(1))); + assertThat(definition.get("resourceId")).isEqualTo(1); + assertThat(definition.get("counter_name")).isEqualTo(1); } @Test // DATAMONGO-1326 @@ -232,7 +247,7 @@ public void groupLookupShouldInheritFieldsFromPreviousAggregationOperation() { org.bson.Document definition = (org.bson.Document) sort.get("$sort"); - assertThat(definition.get("foreignKey"), is(equalTo(1))); + assertThat(definition.get("foreignKey")).isEqualTo(1); } @Test // DATAMONGO-1326 @@ -249,7 +264,7 @@ public void lookupGroupAggregationShouldUseCorrectGroupField() { org.bson.Document definition = (org.bson.Document) group.get("$group"); org.bson.Document field = (org.bson.Document) definition.get("something_totally_different"); - assertThat(field.get("$min"), is(equalTo("$lookup.otherkey"))); + assertThat(field.get("$min")).isEqualTo("$lookup.otherkey"); } @Test // DATAMONGO-1326 @@ -266,10 +281,10 @@ public void lookupGroupAggregationShouldOverwriteExposedFields() { org.bson.Document definition = (org.bson.Document) sort.get("$sort"); - assertThat(definition.get("something_totally_different"), is(equalTo(1))); + assertThat(definition.get("something_totally_different")).isEqualTo(1); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1326 + @Test // DATAMONGO-1326 public void lookupGroupAggregationShouldFailInvalidFieldReference() { TypeBasedAggregationOperationContext context = getContext(MeterData.class); @@ -277,7 +292,7 @@ public void lookupGroupAggregationShouldFailInvalidFieldReference() { lookup("OtherCollection", "resourceId", "otherId", "lookup"), group().min("lookup.otherkey").as("something_totally_different"), sort(Direction.ASC, "resourceId")); - agg.toDocument("meterData", 
context); + assertThatIllegalArgumentException().isThrownBy(() -> agg.toDocument("meterData", context)); } @Test // DATAMONGO-861 @@ -293,14 +308,14 @@ public void rendersAggregationConditionalInTypedAggregationContextCorrectly() { Document document = agg.toDocument("person", context); Document projection = getPipelineElementFromAggregationAt(document, 0); - assertThat(projection.containsKey("$project"), is(true)); + assertThat(projection.containsKey("$project")).isTrue(); Document project = getValue(projection, "$project"); Document age = getValue(project, "age"); - assertThat(getValue(age, "$cond"), isBsonObject().containing("then.value", 0)); - assertThat(getValue(age, "$cond"), isBsonObject().containing("then._class", Age.class.getName())); - assertThat(getValue(age, "$cond"), isBsonObject().containing("else", "$age")); + assertThat((Document) getValue(age, "$cond")).containsEntry("then.value", 0); + assertThat((Document) getValue(age, "$cond")).containsEntry("then._class", Age.class.getName()); + assertThat((Document) getValue(age, "$cond")).containsEntry("else", "$age"); } /** @@ -318,17 +333,17 @@ public void rendersAggregationIfNullInTypedAggregationContextCorrectly() { Document document = agg.toDocument("person", context); Document projection = getPipelineElementFromAggregationAt(document, 0); - assertThat(projection.containsKey("$project"), is(true)); + assertThat(projection.containsKey("$project")).isTrue(); Document project = getValue(projection, "$project"); Document age = getValue(project, "age"); - assertThat(age, is(Document.parse( - "{ $ifNull: [ \"$age\", { \"_class\":\"org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContextUnitTests$Age\", \"value\": 0} ] }"))); + assertThat(age).isEqualTo(Document.parse( + "{ $ifNull: [ \"$age\", { \"_class\":\"org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContextUnitTests$Age\", \"value\": 0} ] }")); - assertThat(age, 
isBsonObject().containing("$ifNull.[0]", "$age")); - assertThat(age, isBsonObject().containing("$ifNull.[1].value", 0)); - assertThat(age, isBsonObject().containing("$ifNull.[1]._class", Age.class.getName())); + assertThat(age).containsEntry("$ifNull.[0]", "$age"); + assertThat(age).containsEntry("$ifNull.[1].value", 0); + assertThat(age).containsEntry("$ifNull.[1]._class", Age.class.getName()); } @Test // DATAMONGO-1756 @@ -339,8 +354,130 @@ public void projectOperationShouldRenderNestedFieldNamesCorrectlyForTypedAggrega Document agg = newAggregation(Wrapper.class, project().and("nested1.value1").plus("nested2.value2").as("val")) .toDocument("collection", context); - assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project"), is( - equalTo(new Document("val", new Document("$add", Arrays.asList("$nested1.value1", "$field2.nestedValue2")))))); + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", new Document("$add", Arrays.asList("$nested1.value1", "$field2.nestedValue2")))); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnUnwrappableTypeFieldCorrectly() { + + AggregationOperationContext context = getContext(WithUnwrapped.class); + + Document agg = newAggregation(WithUnwrapped.class, project().and("unwrappedValue.stringValue").as("val")) + .toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$stringValue")); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnUnwrappedFieldWithAtFieldAnnotationCorrectly() { + + AggregationOperationContext context = getContext(WithUnwrapped.class); + + Document agg = newAggregation(WithUnwrapped.class, project().and("unwrappedValue.atFieldAnnotatedValue").as("val")) + .toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$with-at-field-annotation")); + } + + @Test // 
DATAMONGO-1902 + void rendersProjectOnPrefixedUnwrappedFieldCorrectly() { + + AggregationOperationContext context = getContext(WithUnwrapped.class); + + Document agg = newAggregation(WithUnwrapped.class, project().and("prefixedUnwrappedValue.stringValue").as("val")) + .toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$prefix-stringValue")); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnPrefixedUnwrappedFieldWithAtFieldAnnotationCorrectly() { + + AggregationOperationContext context = getContext(WithUnwrapped.class); + + Document agg = newAggregation(WithUnwrapped.class, + project().and("prefixedUnwrappedValue.atFieldAnnotatedValue").as("val")).toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$prefix-with-at-field-annotation")); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnNestedUnwrappedFieldCorrectly() { + + AggregationOperationContext context = getContext(WrapperAroundWithUnwrapped.class); + + Document agg = newAggregation(WrapperAroundWithUnwrapped.class, + project().and("withUnwrapped.unwrappedValue.stringValue").as("val")).toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$withUnwrapped.stringValue")); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnNestedUnwrappedFieldWithAtFieldAnnotationCorrectly() { + + AggregationOperationContext context = getContext(WrapperAroundWithUnwrapped.class); + + Document agg = newAggregation(WrapperAroundWithUnwrapped.class, + project().and("withUnwrapped.unwrappedValue.atFieldAnnotatedValue").as("val")).toDocument("collection", + context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$withUnwrapped.with-at-field-annotation")); + } + + @Test // DATAMONGO-1902 
+ void rendersProjectOnNestedPrefixedUnwrappedFieldCorrectly() { + + AggregationOperationContext context = getContext(WrapperAroundWithUnwrapped.class); + + Document agg = newAggregation(WrapperAroundWithUnwrapped.class, + project().and("withUnwrapped.prefixedUnwrappedValue.stringValue").as("val")).toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$withUnwrapped.prefix-stringValue")); + } + + @Test // DATAMONGO-1902 + void rendersProjectOnNestedPrefixedUnwrappedFieldWithAtFieldAnnotationCorrectly() { + + AggregationOperationContext context = getContext(WrapperAroundWithUnwrapped.class); + + Document agg = newAggregation(WrapperAroundWithUnwrapped.class, + project().and("withUnwrapped.prefixedUnwrappedValue.atFieldAnnotatedValue").as("val")).toDocument("collection", + context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")) + .isEqualTo(new Document("val", "$withUnwrapped.prefix-with-at-field-annotation")); + } + + @Test // GH-4070 + void rendersLocalVariables() { + + AggregationOperationContext context = getContext(WithLists.class); + + Document agg = newAggregation(WithLists.class, + project() + .and(Reduce.arrayOf("listOfListOfString").withInitialValue(field("listOfString")) + .reduce(SetUnion.arrayAsSet(Variable.VALUE.getTarget()).union(Variable.THIS.getTarget()))) + .as("listOfString")).toDocument("collection", context); + + assertThat(getPipelineElementFromAggregationAt(agg, 0).get("$project")).isEqualTo(Document.parse(""" + { + "listOfString" : { + "$reduce" : { + "in" : { "$setUnion" : ["$$value", "$$this"] }, + "initialValue" : "$listOfString", + "input" : "$listOfListOfString" + } + } + } + """)); } @org.springframework.data.mongodb.core.mapping.Document(collection = "person") @@ -348,12 +485,13 @@ public static class FooPerson { final ObjectId id; final String name; + @org.springframework.data.mongodb.core.mapping.Field("last_name") 
final String lastName; final Age age; - @PersistenceConstructor - FooPerson(ObjectId id, String name, Age age) { + public FooPerson(ObjectId id, String name, String lastName, Age age) { this.id = id; this.name = name; + this.lastName = lastName; this.age = age; } } @@ -424,4 +562,31 @@ static class Nested { String value1; @org.springframework.data.mongodb.core.mapping.Field("nestedValue2") String value2; } + + static class WrapperAroundWithUnwrapped { + + String id; + WithUnwrapped withUnwrapped; + } + + static class WithUnwrapped { + + String id; + + @Unwrapped.Nullable UnwrappableType unwrappedValue; + @Unwrapped.Nullable("prefix-") UnwrappableType prefixedUnwrappedValue; + } + + static class UnwrappableType { + + String stringValue; + + @org.springframework.data.mongodb.core.mapping.Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + } + + static class WithLists { + public List listOfString; + public List> listOfListOfString; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnionWithOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnionWithOperationUnitTests.java new file mode 100644 index 0000000000..e47fea289e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnionWithOperationUnitTests.java @@ -0,0 +1,130 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link UnionWithOperation}. + * + * @author Christoph Strobl + */ +class UnionWithOperationUnitTests { + + @Test // DATAMONGO-2622 + void throwsErrorWhenNoCollectionPresent() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> UnionWithOperation.unionWith(null)); + } + + @Test // DATAMONGO-2622 + void rendersJustCollectionCorrectly() { + + assertThat(UnionWithOperation.unionWith("coll-1").toPipelineStages(contextFor(Warehouse.class))) + .containsExactly(new Document("$unionWith", new Document("coll", "coll-1"))); + } + + @Test // DATAMONGO-2622 + void rendersPipelineCorrectly() { + + assertThat(UnionWithOperation.unionWith("coll-1").mapFieldsTo(Warehouse.class) + .pipeline(Aggregation.project().and("location").as("region")).toPipelineStages(contextFor(Warehouse.class))) + .containsExactly(new Document("$unionWith", new Document("coll", "coll-1").append("pipeline", + Arrays.asList(new Document("$project", new Document("region", 1)))))); + } + + @Test // DATAMONGO-2622 + void rendersPipelineCorrectlyForDifferentDomainType() { + + assertThat(UnionWithOperation.unionWith("coll-1").pipeline(Aggregation.project().and("name").as("name")) + 
.mapFieldsTo(Supplier.class).toPipelineStages(contextFor(Warehouse.class))) + .containsExactly(new Document("$unionWith", new Document("coll", "coll-1").append("pipeline", + Arrays.asList(new Document("$project", new Document("name", "$supplier")))))); + } + + @Test // DATAMONGO-2622 + void rendersPipelineCorrectlyForUntypedContext() { + + assertThat(UnionWithOperation.unionWith("coll-1").pipeline(Aggregation.project("region")) + .toPipelineStages(contextFor(null))) + .containsExactly(new Document("$unionWith", new Document("coll", "coll-1").append("pipeline", + Arrays.asList(new Document("$project", new Document("region", 1)))))); + } + + @Test // DATAMONGO-2622 + void doesNotMapAgainstFieldsFromAPreviousStage() { + + TypedAggregation agg = TypedAggregation.newAggregation(Supplier.class, + Aggregation.project().and("name").as("supplier"), + UnionWithOperation.unionWith("coll-1").pipeline(Aggregation.project().and("name").as("name"))); + + List pipeline = agg.toPipeline(contextFor(Supplier.class)); + assertThat(pipeline).containsExactly(new Document("$project", new Document("supplier", 1)), // + new Document("$unionWith", new Document("coll", "coll-1").append("pipeline", + Arrays.asList(new Document("$project", new Document("name", 1)))))); + } + + @Test // DATAMONGO-2622 + void mapAgainstUnionWithDomainTypeEvenWhenInsideTypedAggregation() { + + TypedAggregation agg = TypedAggregation.newAggregation(Supplier.class, + Aggregation.project().and("name").as("supplier"), UnionWithOperation.unionWith("coll-1") + .mapFieldsTo(Warehouse.class).pipeline(Aggregation.project().and("location").as("location"))); + + List pipeline = agg.toPipeline(contextFor(Supplier.class)); + assertThat(pipeline).containsExactly(new Document("$project", new Document("supplier", 1)), // + new Document("$unionWith", new Document("coll", "coll-1").append("pipeline", + Arrays.asList(new Document("$project", new Document("location", "$region")))))); + } + + private static 
AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)); + } + + static class Warehouse { + + String name; + @Field("region") String location; + String state; + } + + static class Supplier { + + @Field("supplier") String name; + String state; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnsetOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnsetOperationUnitTests.java new file mode 100644 index 0000000000..2f081cc9fc --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnsetOperationUnitTests.java @@ -0,0 +1,115 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * Unit tests for {@link UnsetOperation}. + * + * @author Christoph Strobl + */ +public class UnsetOperationUnitTests { + + @Test // DATAMONGO-2331 + public void raisesErrorOnNullField() { + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new UnsetOperation(null)); + } + + @Test // DATAMONGO-2331 + public void rendersSingleFieldReferenceCorrectly() { + + assertThat(new UnsetOperation(Collections.singletonList("title")).toPipelineStages(contextFor(Book.class))) + .containsExactly(Document.parse("{\"$unset\" : \"title\" }")); + } + + @Test // DATAMONGO-2331 + public void rendersSingleMappedFieldReferenceCorrectly() { + + assertThat(new UnsetOperation(Collections.singletonList("stock")).toPipelineStages(contextFor(Book.class))) + .containsExactly(Document.parse("{\"$unset\" : \"copies\" }")); + } + + @Test // DATAMONGO-2331 + public void rendersSingleNestedMappedFieldReferenceCorrectly() { + + assertThat( + new UnsetOperation(Collections.singletonList("author.firstname")).toPipelineStages(contextFor(Book.class))) + .containsExactly(Document.parse("{\"$unset\" : \"author.first\"}")); + } + + @Test // DATAMONGO-2331 + public void rendersMultipleFieldReferencesCorrectly() { + + assertThat(new UnsetOperation(Arrays.asList("title", 
"author.firstname", "stock.location")) + .toPipelineStages(contextFor(Book.class))) + .containsExactly(Document.parse("{\"$unset\" : [\"title\", \"author.first\", \"copies.warehouse\"] }")); + } + + @Test // DATAMONGO-2331 + public void exposesFieldsCorrectly() { + assertThat(UnsetOperation.unset("title").and("isbn").getFields()).isEqualTo(ExposedFields.from()); + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)); + } + + static class Book { + + @Id Integer id; + String title; + String isbn; + Author author; + @Field("copies") Collection stock; + } + + static class Author { + + @Field("first") String firstname; + @Field("last") String lastname; + } + + static class Warehouse { + + @Field("warehouse") String location; + Integer qty; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnwindOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnwindOperationUnitTests.java index 335f06a09b..8d46363c2d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnwindOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UnwindOperationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,11 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.assertj.core.api.Assertions.*; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; + import org.springframework.data.mongodb.core.DocumentTestUtils; /** @@ -38,7 +37,7 @@ public void unwindWithPathOnlyShouldUsePreMongo32Syntax() { Document pipeline = unwindOperation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(pipeline, isBsonObject().containing("$unwind", "$a")); + assertThat(pipeline).containsEntry("$unwind", "$a"); } @Test // DATAMONGO-1391 @@ -48,10 +47,9 @@ public void unwindWithArrayIndexShouldUseMongo32Syntax() { Document unwindClause = extractDocumentFromUnwindOperation(unwindOperation); - assertThat(unwindClause, - isBsonObject().containing("path", "$a").// - containing("preserveNullAndEmptyArrays", false).// - containing("includeArrayIndex", "index")); + assertThat(unwindClause).containsEntry("path", "$a").// + containsEntry("preserveNullAndEmptyArrays", false).// + containsEntry("includeArrayIndex", "index"); } @Test // DATAMONGO-1391 @@ -59,7 +57,7 @@ public void unwindWithArrayIndexShouldExposeArrayIndex() { UnwindOperation unwindOperation = Aggregation.unwind("a", "index"); - assertThat(unwindOperation.getFields().getField("index"), is(not(nullValue()))); + assertThat(unwindOperation.getFields().getField("index")).isNotNull(); } @Test // DATAMONGO-1391 @@ -67,7 +65,7 @@ public void plainUnwindShouldNotExposeIndex() { UnwindOperation unwindOperation = Aggregation.unwind("a"); - 
assertThat(unwindOperation.getFields().exposesNoFields(), is(true)); + assertThat(unwindOperation.getFields().exposesNoFields()).isTrue(); } @Test // DATAMONGO-1391 @@ -77,10 +75,9 @@ public void unwindWithPreserveNullShouldUseMongo32Syntax() { Document unwindClause = extractDocumentFromUnwindOperation(unwindOperation); - assertThat(unwindClause, - isBsonObject().containing("path", "$a").// - containing("preserveNullAndEmptyArrays", true).// - notContaining("includeArrayIndex")); + assertThat(unwindClause).containsEntry("path", "$a").// + containsEntry("preserveNullAndEmptyArrays", true).// + doesNotContainKey("includeArrayIndex"); } @Test // DATAMONGO-1391 @@ -89,7 +86,7 @@ public void lookupBuilderBuildsCorrectClause() { UnwindOperation unwindOperation = UnwindOperation.newUnwind().path("$foo").noArrayIndex().skipNullAndEmptyArrays(); Document pipeline = unwindOperation.toDocument(Aggregation.DEFAULT_CONTEXT); - assertThat(pipeline, isBsonObject().containing("$unwind", "$foo")); + assertThat(pipeline).containsEntry("$unwind", "$foo"); } @Test // DATAMONGO-1391 @@ -100,10 +97,9 @@ public void lookupBuilderBuildsCorrectClauseForMongo32() { Document unwindClause = extractDocumentFromUnwindOperation(unwindOperation); - assertThat(unwindClause, - isBsonObject().containing("path", "$foo").// - containing("preserveNullAndEmptyArrays", true).// - containing("includeArrayIndex", "myindex")); + assertThat(unwindClause).containsEntry("path", "$foo").// + containsEntry("preserveNullAndEmptyArrays", true).// + containsEntry("includeArrayIndex", "myindex"); } private Document extractDocumentFromUnwindOperation(UnwindOperation unwindOperation) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UserWithLikes.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UserWithLikes.java index 9a1cbd9590..3eb7f4f884 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UserWithLikes.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/UserWithLikes.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,27 +18,70 @@ import java.util.Arrays; import java.util.Date; import java.util.HashSet; +import java.util.Objects; import java.util.Set; -import lombok.Data; -import lombok.NoArgsConstructor; - /** * @author Thomas Darimont * @author Christoph Strobl */ -@Data -@NoArgsConstructor public class UserWithLikes { String id; Date joined; Set likes = new HashSet(); + public UserWithLikes() {} + public UserWithLikes(String id, Date joined, String... 
likes) { this.id = id; this.joined = joined; this.likes = new HashSet(Arrays.asList(likes)); } + + public String getId() { + return this.id; + } + + public Date getJoined() { + return this.joined; + } + + public Set getLikes() { + return this.likes; + } + + public void setId(String id) { + this.id = id; + } + + public void setJoined(Date joined) { + this.joined = joined; + } + + public void setLikes(Set likes) { + this.likes = likes; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + UserWithLikes that = (UserWithLikes) o; + return Objects.equals(id, that.id) && Objects.equals(joined, that.joined) && Objects.equals(likes, that.likes); + } + + @Override + public int hashCode() { + return Objects.hash(id, joined, likes); + } + + public String toString() { + return "UserWithLikes(id=" + this.getId() + ", joined=" + this.getJoined() + ", likes=" + this.getLikes() + ")"; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/VectorSearchOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/VectorSearchOperationUnitTests.java new file mode 100644 index 0000000000..c4628eda99 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/VectorSearchOperationUnitTests.java @@ -0,0 +1,124 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; + +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Limit; +import org.springframework.data.mongodb.core.aggregation.VectorSearchOperation.SearchType; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.util.aggregation.TestAggregationContext; + +/** + * Unit tests for {@link VectorSearchOperation}. + * + * @author Christoph Strobl + */ +class VectorSearchOperationUnitTests { + + static final Document $VECTOR_SEARCH = Document.parse( + "{'index' : 'vector_index', 'limit' : 10, 'path' : 'plot_embedding', 'queryVector' : [-0.0016261312, -0.028070757, -0.011342932]}"); + static final VectorSearchOperation SEARCH_OPERATION = VectorSearchOperation.search("vector_index") + .path("plot_embedding").vector(-0.0016261312, -0.028070757, -0.011342932).limit(10); + + @Test // GH-4706 + void requiredArgs() { + + List stages = SEARCH_OPERATION.toPipelineStages(Aggregation.DEFAULT_CONTEXT); + assertThat(stages).containsExactly(new Document("$vectorSearch", $VECTOR_SEARCH)); + } + + @Test // GH-4706 + void optionalArgs() { + + VectorSearchOperation $search = SEARCH_OPERATION.numCandidates(150).searchType(SearchType.ENN) + .filter(new Criteria().andOperator(Criteria.where("year").gt(1955), Criteria.where("year").lt(1975))); + + List stages = $search.toPipelineStages(Aggregation.DEFAULT_CONTEXT); + + Document filter = new Document("$and", + List.of(new Document("year", new Document("$gt", 1955)), new Document("year", new Document("$lt", 1975)))); + assertThat(stages).containsExactly(new 
Document("$vectorSearch", + new Document($VECTOR_SEARCH).append("exact", true).append("filter", filter).append("numCandidates", 150))); + } + + @Test // GH-4706 + void withScore() { + + List stages = SEARCH_OPERATION.withSearchScore().toPipelineStages(Aggregation.DEFAULT_CONTEXT); + assertThat(stages).containsExactly(new Document("$vectorSearch", $VECTOR_SEARCH), + new Document("$addFields", new Document("score", new Document("$meta", "vectorSearchScore")))); + } + + @Test // GH-4706 + void withScoreFilter() { + + List stages = SEARCH_OPERATION.withFilterBySore(score -> score.gt(50)) + .toPipelineStages(Aggregation.DEFAULT_CONTEXT); + assertThat(stages).containsExactly(new Document("$vectorSearch", $VECTOR_SEARCH), + new Document("$addFields", new Document("score", new Document("$meta", "vectorSearchScore"))), + new Document("$match", new Document("score", new Document("$gt", 50)))); + } + + @Test // GH-4706 + void withScoreFilterOnCustomFieldName() { + + List stages = SEARCH_OPERATION.withFilterBySore(score -> score.gt(50)).withSearchScore("s-c-o-r-e") + .toPipelineStages(Aggregation.DEFAULT_CONTEXT); + assertThat(stages).containsExactly(new Document("$vectorSearch", $VECTOR_SEARCH), + new Document("$addFields", new Document("s-c-o-r-e", new Document("$meta", "vectorSearchScore"))), + new Document("$match", new Document("s-c-o-r-e", new Document("$gt", 50)))); + } + + @Test // GH-4706 + void mapsCriteriaToDomainType() { + + VectorSearchOperation $search = SEARCH_OPERATION + .filter(new Criteria().andOperator(Criteria.where("y").gt(1955), Criteria.where("y").lt(1975))); + + List stages = $search.toPipelineStages(TestAggregationContext.contextFor(Movie.class)); + + Document filter = new Document("$and", + List.of(new Document("year", new Document("$gt", 1955)), new Document("year", new Document("$lt", 1975)))); + assertThat(stages) + .containsExactly(new Document("$vectorSearch", new Document($VECTOR_SEARCH).append("filter", filter))); + } + + @Test // GH-4963 + 
void shouldSkipLimitIfUnlimited() { + + VectorSearchOperation $search = VectorSearchOperation.search("vector_index").path("plot_embedding") + .vector(-0.0016261312, -0.028070757, -0.011342932).limit(Limit.unlimited()); + + List stages = $search.toPipelineStages(TestAggregationContext.contextFor(Movie.class)); + assertThat(stages.get(0)).doesNotContainKey("$vectorSearch.limit"); + } + + static class Movie { + + @Id String id; + String title; + + @Field("year") String y; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/VectorSearchTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/VectorSearchTests.java new file mode 100644 index 0000000000..18991c1768 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/VectorSearchTests.java @@ -0,0 +1,242 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.stream.IntStream; +import java.util.stream.Stream; + +import org.assertj.core.api.InstanceOfAssertFactories; +import org.bson.BinaryVector; +import org.bson.Document; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.data.domain.Vector; +import org.springframework.data.mongodb.core.aggregation.VectorSearchOperation.SearchType; +import org.springframework.data.mongodb.core.index.VectorIndex; +import org.springframework.data.mongodb.core.index.VectorIndex.SimilarityFunction; +import org.springframework.data.mongodb.core.mapping.MongoVector; +import org.springframework.data.mongodb.test.util.AtlasContainer; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; + +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; + +/** + * Integration tests using Vector Search and Vector Indexes through local MongoDB Atlas. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +@Testcontainers(disabledWithoutDocker = true) +public class VectorSearchTests { + + private static final String SCORE_FIELD = "vector-search-tests"; + private static final @Container AtlasContainer atlasLocal = AtlasContainer.bestMatch(); + private static final String COLLECTION_NAME = "collection-1"; + + static MongoClient client; + static MongoTestTemplate template; + + @BeforeAll + static void beforeAll() throws InterruptedException { + + client = MongoClients.create(atlasLocal.getConnectionString()); + template = new MongoTestTemplate(client, SCORE_FIELD); + + Thread.sleep(250); // just wait a little or the index will be broken + + initDocuments(); + initIndexes(); + } + + @AfterAll + static void afterAll() { + template.dropCollection(WithVectorFields.class); + } + + @ParameterizedTest // GH-4706 + @MethodSource("vectorAggregations") + void searchUsingArraysAddingScore(VectorSearchOperation searchOperation) { + + VectorSearchOperation $search = searchOperation.withSearchScore(SCORE_FIELD); + + AggregationResults results = template.aggregate(Aggregation.newAggregation($search), + WithVectorFields.class, Document.class); + + assertThat(results).hasSize(10); + assertScoreIsDecreasing(results); + assertThat(results.iterator().next()).containsKey(SCORE_FIELD) + .extracting(it -> it.get(SCORE_FIELD), InstanceOfAssertFactories.DOUBLE).isEqualByComparingTo(1D); + } + + @ParameterizedTest // GH-4706 + @MethodSource("binaryVectorAggregations") + void searchUsingBinaryVectorAddingScore(VectorSearchOperation searchOperation) { + + VectorSearchOperation $search = searchOperation.withSearchScore(SCORE_FIELD); + + AggregationResults results = template.aggregate(Aggregation.newAggregation($search), + WithVectorFields.class, Document.class); + + assertThat(results).hasSize(10); + assertScoreIsDecreasing(results); + assertThat(results.iterator().next()).containsKey(SCORE_FIELD) + .extracting(it -> 
it.get(SCORE_FIELD), InstanceOfAssertFactories.DOUBLE).isEqualByComparingTo(1D); + } + + private static Stream binaryVectorAggregations() { + + return Stream.of(// + Arguments.arguments(VectorSearchOperation.search("raw-index").path("rawInt8vector") // + .vector(new byte[] { 0, 1, 2, 3, 4 }) // + .limit(10)// + .numCandidates(20) // + .searchType(SearchType.ANN)), + Arguments.arguments(VectorSearchOperation.search("wrapper-index").path("int8vector") // + .vector(BinaryVector.int8Vector(new byte[] { 0, 1, 2, 3, 4 })) // + .limit(10)// + .numCandidates(20) // + .searchType(SearchType.ANN)), + Arguments.arguments(VectorSearchOperation.search("wrapper-index").path("float32vector") // + .vector(BinaryVector.floatVector(new float[] { 0.0001f, 1.12345f, 2.23456f, 3.34567f, 4.45678f })) // + .limit(10)// + .numCandidates(20) // + .searchType(SearchType.ANN))); + } + + private static Stream vectorAggregations() { + + return Stream.of(// + Arguments.arguments(VectorSearchOperation.search("raw-index").path("rawFloat32vector") // + .vector(0.0001f, 1.12345f, 2.23456f, 3.34567f, 4.45678f) // + .limit(10)// + .numCandidates(20) // + .searchType(SearchType.ANN)), + Arguments.arguments(VectorSearchOperation.search("raw-index").path("rawFloat64vector") // + .vector(1.0001d, 2.12345d, 3.23456d, 4.34567d, 5.45678d) // + .limit(10)// + .numCandidates(20) // + .searchType(SearchType.ANN)), + Arguments.arguments(VectorSearchOperation.search("wrapper-index").path("float64vector") // + .vector(Vector.of(1.0001d, 2.12345d, 3.23456d, 4.34567d, 5.45678d)) // + .limit(10)// + .numCandidates(20) // + .searchType(SearchType.ANN))); + } + + static void initDocuments() { + IntStream.range(0, 10).mapToObj(WithVectorFields::instance).forEach(template::save); + } + + static void initIndexes() { + + VectorIndex rawIndex = new VectorIndex("raw-index") + .addVector("rawInt8vector", it -> it.similarity(SimilarityFunction.COSINE).dimensions(5)) + .addVector("rawFloat32vector", it -> 
it.similarity(SimilarityFunction.COSINE).dimensions(5)) + .addVector("rawFloat64vector", it -> it.similarity(SimilarityFunction.COSINE).dimensions(5)) + .addFilter("justSomeArgument"); + + VectorIndex wrapperIndex = new VectorIndex("wrapper-index") + .addVector("int8vector", it -> it.similarity(SimilarityFunction.COSINE).dimensions(5)) + .addVector("float32vector", it -> it.similarity(SimilarityFunction.COSINE).dimensions(5)) + .addVector("float64vector", it -> it.similarity(SimilarityFunction.COSINE).dimensions(5)) + .addFilter("justSomeArgument"); + + template.searchIndexOps(WithVectorFields.class).createIndex(rawIndex); + template.searchIndexOps(WithVectorFields.class).createIndex(wrapperIndex); + + template.awaitIndexCreation(WithVectorFields.class, rawIndex.getName()); + template.awaitIndexCreation(WithVectorFields.class, wrapperIndex.getName()); + } + + private static void assertScoreIsDecreasing(Iterable documents) { + + double previousScore = Integer.MAX_VALUE; + for (Document document : documents) { + + Double vectorSearchScore = document.getDouble(SCORE_FIELD); + assertThat(vectorSearchScore).isGreaterThan(0D); + assertThat(vectorSearchScore).isLessThan(previousScore); + previousScore = vectorSearchScore; + } + } + + @org.springframework.data.mongodb.core.mapping.Document(COLLECTION_NAME) + static class WithVectorFields { + + String id; + + Vector int8vector; + Vector float32vector; + Vector float64vector; + + BinaryVector rawInt8vector; + float[] rawFloat32vector; + double[] rawFloat64vector; + + int justSomeArgument; + + static WithVectorFields instance(int offset) { + + WithVectorFields instance = new WithVectorFields(); + instance.id = "id-%s".formatted(offset); + instance.rawFloat32vector = new float[5]; + instance.rawFloat64vector = new double[5]; + + byte[] int8 = new byte[5]; + for (int i = 0; i < 5; i++) { + + int v = i + offset; + int8[i] = (byte) v; + } + instance.rawInt8vector = BinaryVector.int8Vector(int8); + + if (offset == 0) { + 
instance.rawFloat32vector[0] = 0.0001f; + instance.rawFloat64vector[0] = 0.0001d; + } else { + instance.rawFloat32vector[0] = Float.parseFloat("%s.000%s".formatted(offset, offset)); + instance.rawFloat64vector[0] = Double.parseDouble("%s.000%s".formatted(offset, offset)); + } + instance.rawFloat32vector[1] = Float.parseFloat("%s.12345".formatted(offset + 1)); + instance.rawFloat64vector[1] = Double.parseDouble("%s.12345".formatted(offset + 1)); + instance.rawFloat32vector[2] = Float.parseFloat("%s.23456".formatted(offset + 2)); + instance.rawFloat64vector[2] = Double.parseDouble("%s.23456".formatted(offset + 2)); + instance.rawFloat32vector[3] = Float.parseFloat("%s.34567".formatted(offset + 3)); + instance.rawFloat64vector[3] = Double.parseDouble("%s.34567".formatted(offset + 3)); + instance.rawFloat32vector[4] = Float.parseFloat("%s.45678".formatted(offset + 4)); + instance.rawFloat64vector[4] = Double.parseDouble("%s.45678".formatted(offset + 4)); + + instance.justSomeArgument = offset; + + instance.int8vector = MongoVector.of(instance.rawInt8vector); + instance.float32vector = MongoVector.of(BinaryVector.floatVector(instance.rawFloat32vector)); + instance.float64vector = Vector.of(instance.rawFloat64vector); + + return instance; + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ZipInfo.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ZipInfo.java index 1dd3503a1a..7cf01122f2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ZipInfo.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ZipInfo.java @@ -8,7 +8,7 @@ * Data model from mongodb reference data set * * @see Aggregation Examples - * @see zips.json */ class ZipInfo { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/auditing/MongoTemplateAuditingTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/auditing/MongoTemplateAuditingTests.java new file mode 100644 index 0000000000..e2e1937da6 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/auditing/MongoTemplateAuditingTests.java @@ -0,0 +1,146 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.auditing; + +import static org.assertj.core.api.Assertions.*; + +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.LastModifiedDate; +import org.springframework.data.annotation.Version; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.config.EnableMongoAuditing; +import org.springframework.data.mongodb.core.KAuditableVersionedEntity; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.query.Criteria; +import 
org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.client.MongoClient; + +/** + * @author Christoph Strobl + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +public class MongoTemplateAuditingTests { + + static @Client MongoClient mongoClient; + + @Configuration + @EnableMongoAuditing + static class Conf extends AbstractMongoClientConfiguration { + + @Override + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return "mongo-template-audit-tests"; + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return new HashSet<>(Arrays.asList(ImmutableAuditableEntityWithVersion.class, KAuditableVersionedEntity.class)); + } + } + + @Autowired MongoTemplate template; + + @Test // DATAMONGO-2346 + public void auditingSetsLastModifiedDateCorrectlyForImmutableVersionedEntityOnSave() throws InterruptedException { + + template.remove(new Query(), ImmutableAuditableEntityWithVersion.class); + + ImmutableAuditableEntityWithVersion entity = new ImmutableAuditableEntityWithVersion("id-1", "value", null, null); + ImmutableAuditableEntityWithVersion inserted = template.save(entity); + + TimeUnit.MILLISECONDS.sleep(500); + + ImmutableAuditableEntityWithVersion modified = inserted.withValue("changed-value"); + ImmutableAuditableEntityWithVersion updated = template.save(modified); + + ImmutableAuditableEntityWithVersion fetched = template.findOne(Query.query(Criteria.where("id").is(entity.id)), + ImmutableAuditableEntityWithVersion.class); + + assertThat(updated.modificationDate).isAfter(inserted.modificationDate); + assertThat(fetched.modificationDate).isAfter(inserted.modificationDate); + 
assertThat(fetched.modificationDate).isEqualTo(updated.modificationDate.truncatedTo(ChronoUnit.MILLIS)); + } + + @Test // DATAMONGO-2346 + public void auditingSetsLastModifiedDateCorrectlyForImmutableVersionedKotlinEntityOnSave() + throws InterruptedException { + + template.remove(new Query(), KAuditableVersionedEntity.class); + + KAuditableVersionedEntity entity = new KAuditableVersionedEntity("kId-1", "value", null, null); + KAuditableVersionedEntity inserted = template.save(entity); + + TimeUnit.MILLISECONDS.sleep(500); + + KAuditableVersionedEntity updated = template.save(inserted.withValue("changed-value")); + + KAuditableVersionedEntity fetched = template.findOne(Query.query(Criteria.where("id").is(entity.getId())), + KAuditableVersionedEntity.class); + + assertThat(updated.getModificationDate()).isAfter(inserted.getModificationDate()); + assertThat(fetched.getModificationDate()).isAfter(inserted.getModificationDate()); + assertThat(fetched.getModificationDate()).isEqualTo(updated.getModificationDate().truncatedTo(ChronoUnit.MILLIS)); + } + + static class ImmutableAuditableEntityWithVersion { + + final @Id String id; + final String value; + final @Version Integer version; + final @LastModifiedDate Instant modificationDate; + + ImmutableAuditableEntityWithVersion(String id, String value, Integer version, Instant modificationDate) { + + this.id = id; + this.value = value; + this.version = version; + this.modificationDate = modificationDate; + } + + ImmutableAuditableEntityWithVersion withValue(String value) { + return new ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + + ImmutableAuditableEntityWithVersion withModificationDate(Instant modificationDate) { + return new ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + + ImmutableAuditableEntityWithVersion withVersion(Integer version) { + return new ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + } +} diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/auditing/ReactiveMongoTemplateAuditingTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/auditing/ReactiveMongoTemplateAuditingTests.java new file mode 100644 index 0000000000..28429b53dc --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/auditing/ReactiveMongoTemplateAuditingTests.java @@ -0,0 +1,175 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.auditing; + +import static org.assertj.core.api.Assertions.*; + +import reactor.test.StepVerifier; +import reactor.util.function.Tuples; + +import java.time.Duration; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Collections; +import java.util.Set; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.LastModifiedDate; +import org.springframework.data.annotation.Version; +import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration; +import org.springframework.data.mongodb.config.EnableReactiveMongoAuditing; +import org.springframework.data.mongodb.core.KAuditableVersionedEntity; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * Integration tests for {@link EnableReactiveMongoAuditing} through {@link ReactiveMongoTemplate}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +class ReactiveMongoTemplateAuditingTests { + + static final String DB_NAME = "mongo-template-audit-tests"; + + static @Client MongoClient mongoClient; + + @Configuration + @EnableReactiveMongoAuditing + static class Conf extends AbstractReactiveMongoConfiguration { + + @Bean + @Override + public MongoClient reactiveMongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return DB_NAME; + } + + @Override + protected Set> getInitialEntitySet() { + return Collections.emptySet(); + } + } + + @Autowired ReactiveMongoTemplate template; + @Autowired MongoClient client; + + @BeforeEach + void setUp() { + + MongoTestUtils.flushCollection(DB_NAME, template.getCollectionName(ImmutableAuditableEntityWithVersion.class), + client); + MongoTestUtils.flushCollection(DB_NAME, template.getCollectionName(KAuditableVersionedEntity.class), client); + } + + @Test // DATAMONGO-2346 + void auditingSetsLastModifiedDateCorrectlyForImmutableVersionedEntityOnSave() { + + ImmutableAuditableEntityWithVersion entity = new ImmutableAuditableEntityWithVersion(null, "value", null, null); + + template.save(entity).delayElement(Duration.ofMillis(500)) // + .flatMap(inserted -> template.save(inserted.withValue("changed-value")) // + .map(updated -> Tuples.of(inserted, updated))) // + .flatMap(tuple2 -> template + .findOne(Query.query(Criteria.where("id").is(tuple2.getT1().id)), ImmutableAuditableEntityWithVersion.class) + .map(fetched -> Tuples.of(tuple2.getT1(), tuple2.getT2(), fetched))) // + .as(StepVerifier::create) // + .consumeNextWith(tuple3 -> { + + assertThat(tuple3.getT2().modificationDate).isAfter(tuple3.getT1().modificationDate); + assertThat(tuple3.getT3().modificationDate).isAfter(tuple3.getT1().modificationDate); + assertThat(tuple3.getT3().modificationDate) + 
.isEqualTo(tuple3.getT2().modificationDate.truncatedTo(ChronoUnit.MILLIS)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-2346 + void auditingSetsLastModifiedDateCorrectlyForImmutableVersionedKotlinEntityOnSave() { + + KAuditableVersionedEntity entity = new KAuditableVersionedEntity(null, "value", null, null); + + template.save(entity).delayElement(Duration.ofMillis(500)) // + .flatMap(inserted -> template.save(inserted.withValue("changed-value")) // + .map(updated -> Tuples.of(inserted, updated))) // + .flatMap(tuple2 -> template + .findOne(Query.query(Criteria.where("id").is(tuple2.getT1().getId())), KAuditableVersionedEntity.class) + .map(fetched -> Tuples.of(tuple2.getT1(), tuple2.getT2(), fetched))) // + .as(StepVerifier::create) // + .consumeNextWith(tuple3 -> { + + assertThat(tuple3.getT2().getModificationDate()).isAfter(tuple3.getT1().getModificationDate()); + assertThat(tuple3.getT3().getModificationDate()).isAfter(tuple3.getT1().getModificationDate()); + assertThat(tuple3.getT3().getModificationDate()) + .isEqualTo(tuple3.getT2().getModificationDate().truncatedTo(ChronoUnit.MILLIS)); + }) // + .verifyComplete(); + } + + @Document("versioned-auditable") + static class ImmutableAuditableEntityWithVersion { + + final @Id String id; + final String value; + final @Version Integer version; + final @LastModifiedDate Instant modificationDate; + + ImmutableAuditableEntityWithVersion(String id, String value, Integer version, Instant modificationDate) { + + this.id = id; + this.value = value; + this.version = version; + this.modificationDate = modificationDate; + } + + ImmutableAuditableEntityWithVersion withId(String id) { + return new ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + + ImmutableAuditableEntityWithVersion withValue(String value) { + return new ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + + ImmutableAuditableEntityWithVersion withModificationDate(Instant modificationDate) { 
+ return new ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + + ImmutableAuditableEntityWithVersion withVersion(Integer version) { + return new ImmutableAuditableEntityWithVersion(id, value, version, modificationDate); + } + + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverterUnitTests.java index b9ca85b6db..dfd6b0ab56 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/AbstractMongoConverterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,14 +18,18 @@ import static org.mockito.Mockito.*; import org.bson.conversions.Bson; -import org.junit.Test; +import org.junit.jupiter.api.Test; + import org.springframework.core.convert.support.DefaultConversionService; import org.springframework.core.convert.support.GenericConversionService; +import org.springframework.data.convert.CustomConversions; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToStringConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.StringToObjectIdConverter; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.util.TypeInformation; import com.mongodb.DBRef; @@ -59,6 +63,21 @@ public MongoTypeMapper getTypeMapper() { throw new UnsupportedOperationException(); } + @Override + public ProjectionFactory getProjectionFactory() { + return null; + } + + @Override + public CustomConversions getCustomConversions() { + return null; + } + + @Override + public R project(EntityProjection descriptor, Bson bson) { + return null; + } + @Override public MappingContext, MongoPersistentProperty> getMappingContext() { throw new UnsupportedOperationException(); @@ -80,7 +99,7 @@ public Object convertToMongoType(Object obj, TypeInformation typeInformation) } @Override - public DBRef toDBRef(Object object, MongoPersistentProperty referingProperty) { + public DBRef toDBRef(Object object, MongoPersistentProperty 
referringProperty) { throw new UnsupportedOperationException(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConvertersUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConvertersUnitTests.java index 5525d6a323..5da3e896e6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConvertersUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConvertersUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,23 +15,24 @@ */ package org.springframework.data.mongodb.core.convert; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import java.util.Arrays; import java.util.HashSet; import org.bson.Document; -import org.hamcrest.CoreMatchers; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; import 
org.springframework.data.convert.CustomConversions; -import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; /** @@ -39,20 +40,19 @@ * * @author Oliver Gierke */ -@RunWith(MockitoJUnitRunner.class) -public class CustomConvertersUnitTests { +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class CustomConvertersUnitTests { - MappingMongoConverter converter; + private MappingMongoConverter converter; @Mock BarToDocumentConverter barToDocumentConverter; @Mock DocumentToBarConverter documentToBarConverter; - @Mock MongoDbFactory mongoDbFactory; - MongoMappingContext context; + private MongoMappingContext context; - @Before - @SuppressWarnings("unchecked") - public void setUp() throws Exception { + @BeforeEach + void setUp() { when(barToDocumentConverter.convert(any(Bar.class))).thenReturn(new Document()); when(documentToBarConverter.convert(any(Document.class))).thenReturn(new Bar()); @@ -61,17 +61,17 @@ public void setUp() throws Exception { Arrays.asList(barToDocumentConverter, documentToBarConverter)); context = new MongoMappingContext(); - context.setInitialEntitySet(new HashSet>(Arrays.asList(Foo.class, Bar.class))); + context.setInitialEntitySet(new HashSet<>(Arrays.asList(Foo.class, Bar.class))); context.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); context.initialize(); - converter = new MappingMongoConverter(new DefaultDbRefResolver(mongoDbFactory), context); + converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); converter.setCustomConversions(conversions); converter.afterPropertiesSet(); } @Test // DATADOC-101 - public void nestedToDocumentConverterGetsInvoked() { + void nestedToDocumentConverterGetsInvoked() { Foo foo = new Foo(); foo.bar = new Bar(); @@ -81,7 +81,7 @@ public void nestedToDocumentConverterGetsInvoked() { } @Test // DATADOC-101 - public void nestedFromDocumentConverterGetsInvoked() { + void 
nestedFromDocumentConverterGetsInvoked() { Document document = new Document(); document.put("bar", new Document()); @@ -91,25 +91,25 @@ public void nestedFromDocumentConverterGetsInvoked() { } @Test // DATADOC-101 - public void toDocumentConverterGetsInvoked() { + void toDocumentConverterGetsInvoked() { converter.write(new Bar(), new Document()); verify(barToDocumentConverter).convert(any(Bar.class)); } @Test // DATADOC-101 - public void fromDocumentConverterGetsInvoked() { + void fromDocumentConverterGetsInvoked() { converter.read(Bar.class, new Document()); verify(documentToBarConverter).convert(any(Document.class)); } @Test // DATADOC-101 - public void foo() { + void foo() { Document document = new Document(); document.put("foo", null); - Assert.assertThat(document.containsKey("foo"), CoreMatchers.is(true)); + assertThat(document).containsKey("foo"); } public static class Foo { @@ -122,11 +122,7 @@ public static class Bar { public String foo; } - private interface BarToDocumentConverter extends Converter { - - } + private interface BarToDocumentConverter extends Converter {} - private interface DocumentToBarConverter extends Converter { - - } + private interface DocumentToBarConverter extends Converter {} } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DataMongo273Tests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DataMongo273Tests.java index 0ec68983db..92e5003628 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DataMongo273Tests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DataMongo273Tests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,7 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import java.util.ArrayList; @@ -24,9 +24,10 @@ import java.util.Map; import org.bson.Document; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + import org.springframework.data.mongodb.core.mapping.MongoMappingContext; /** @@ -39,7 +40,7 @@ public class DataMongo273Tests { MappingMongoConverter converter; - @Before + @BeforeEach public void setupMongoConverter() { MongoMappingContext mappingContext = new MongoMappingContext(); @@ -67,13 +68,13 @@ public void convertMapOfThings() { @SuppressWarnings("unchecked") Map mapOfThings2 = converter.read(Map.class, result); - assertTrue(mapOfThings2.get("plane") instanceof Plane); - assertTrue(mapOfThings2.get("train") instanceof Train); - assertTrue(mapOfThings2.get("automobile") instanceof Automobile); + assertThat(mapOfThings2.get("plane") instanceof Plane).isTrue(); + assertThat(mapOfThings2.get("train") instanceof Train).isTrue(); + assertThat(mapOfThings2.get("automobile") instanceof Automobile).isTrue(); } @Test // DATAMONGO-294 - @Ignore("TODO: Mongo3 - this is no longer supported as DBList is no Bson type :/") + @Disabled("TODO: Mongo3 - this is no longer supported as DBList is no Bson type :/") @SuppressWarnings({ "rawtypes", "unchecked" }) public void convertListOfThings() { Plane plane = new Plane("Boeing", 4); @@ -90,9 +91,9 @@ public void convertListOfThings() { List listOfThings2 = 
converter.read(List.class, result); - assertTrue(listOfThings2.get(0) instanceof Plane); - assertTrue(listOfThings2.get(1) instanceof Train); - assertTrue(listOfThings2.get(2) instanceof Automobile); + assertThat(listOfThings2.get(0) instanceof Plane).isTrue(); + assertThat(listOfThings2.get(1) instanceof Train).isTrue(); + assertThat(listOfThings2.get(2) instanceof Automobile).isTrue(); } @Test // DATAMONGO-294 @@ -120,9 +121,9 @@ public void convertListOfThings_NestedInMap() { List listOfThings2 = (List) shipment2.getBoxes().get("one"); - assertTrue(listOfThings2.get(0) instanceof Plane); - assertTrue(listOfThings2.get(1) instanceof Train); - assertTrue(listOfThings2.get(2) instanceof Automobile); + assertThat(listOfThings2.get(0) instanceof Plane).isTrue(); + assertThat(listOfThings2.get(1) instanceof Train).isTrue(); + assertThat(listOfThings2.get(2) instanceof Automobile).isTrue(); } static class Plane { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java index 793db31f7e..b53531f301 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,8 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; -import static org.mockito.Mockito.any; import static org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils.*; import java.io.Serializable; @@ -36,25 +34,28 @@ import org.bson.Document; import org.bson.conversions.Bson; import org.bson.types.ObjectId; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledForJreRange; +import org.junit.jupiter.api.condition.JRE; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.data.annotation.AccessType; import org.springframework.data.annotation.AccessType.Type; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.PersistenceConstructor; import org.springframework.data.mapping.PersistentPropertyAccessor; import org.springframework.data.mapping.PropertyPath; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.MongoExceptionTranslator; import org.springframework.data.mongodb.core.convert.MappingMongoConverterUnitTests.Person; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import 
org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.lang.Nullable; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.SerializationUtils; @@ -64,45 +65,46 @@ import com.mongodb.client.MongoDatabase; /** - * Unit tests for {@link DbRefMappingMongoConverter}. + * Unit tests for {@link MappingMongoConverter}. * * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) -public class DbRefMappingMongoConverterUnitTests { +@ExtendWith(MockitoExtension.class) +class DbRefMappingMongoConverterUnitTests { - MappingMongoConverter converter; - MongoMappingContext mappingContext; + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; - @Mock MongoDbFactory dbFactory; - DefaultDbRefResolver dbRefResolver; + @Mock MongoDatabaseFactory dbFactory; + private DefaultDbRefResolver dbRefResolver; - @Before - public void setUp() { + @BeforeEach + void setUp() { when(dbFactory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); this.dbRefResolver = spy(new DefaultDbRefResolver(dbFactory)); this.mappingContext = new MongoMappingContext(); + this.mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); this.converter = new MappingMongoConverter(dbRefResolver, mappingContext); } @Test // DATAMONGO-347 - public void createsSimpleDBRefCorrectly() { + void createsSimpleDBRefCorrectly() { Person person = new Person(); person.id = "foo"; DBRef dbRef = converter.toDBRef(person, null); - assertThat(dbRef.getId(), is("foo")); - assertThat(dbRef.getCollectionName(), is("person")); + assertThat(dbRef.getId()).isEqualTo("foo"); + assertThat(dbRef.getCollectionName()).isEqualTo("person"); } 
@Test // DATAMONGO-657 - public void convertDocumentWithMapDBRef() { + void convertDocumentWithMapDBRef() { Document mapValDocument = new Document(); mapValDocument.put("_id", BigInteger.ONE); @@ -111,18 +113,16 @@ public void convertDocumentWithMapDBRef() { when(dbRef.getId()).thenReturn(BigInteger.ONE); when(dbRef.getCollectionName()).thenReturn("collection-1"); - if (MongoClientVersion.isMongo3Driver()) { - MongoDatabase dbMock = mock(MongoDatabase.class); - MongoCollection collectionMock = mock(MongoCollection.class); - when(dbFactory.getDb()).thenReturn(dbMock); - when(dbMock.getCollection(anyString(), eq(Document.class))).thenReturn(collectionMock); - - FindIterable fi = mock(FindIterable.class); - when(fi.first()).thenReturn(mapValDocument); - when(collectionMock.find(Mockito.any(Bson.class))).thenReturn(fi); - } else { - when(dbRefResolver.fetch(dbRef)).thenReturn(mapValDocument); - } + MongoDatabase dbMock = mock(MongoDatabase.class); + MongoCollection collectionMock = mock(MongoCollection.class); + when(dbFactory.getMongoDatabase()).thenReturn(dbMock); + when(dbMock.getCollection(anyString(), eq(Document.class))).thenReturn(collectionMock); + + FindIterable fi = mock(FindIterable.class); + when(fi.limit(anyInt())).thenReturn(fi); + when(fi.sort(any())).thenReturn(fi); + when(fi.first()).thenReturn(mapValDocument); + when(collectionMock.find(Mockito.any(Bson.class))).thenReturn(fi); MapDBRef mapDBRef = new MapDBRef(); @@ -139,17 +139,17 @@ public void convertDocumentWithMapDBRef() { Document map = (Document) document.get("map"); - assertThat(map.get("test"), instanceOf(DBRef.class)); + assertThat(map.get("test")).isInstanceOf(DBRef.class); ((Document) document.get("map")).put("test", dbRef); MapDBRef read = converter.read(MapDBRef.class, document); - assertThat(read.map.get("test").id, is(BigInteger.ONE)); + assertThat(read.map.get("test").id).isEqualTo(BigInteger.ONE); } @Test // DATAMONGO-347 - public void createsDBRefWithClientSpecCorrectly() { + void 
createsDBRefWithClientSpecCorrectly() { PropertyPath path = PropertyPath.from("person", PersonClient.class); MongoPersistentProperty property = mappingContext.getPersistentPropertyPath(path).getLeafProperty(); @@ -158,12 +158,12 @@ public void createsDBRefWithClientSpecCorrectly() { person.id = "foo"; DBRef dbRef = converter.toDBRef(person, property); - assertThat(dbRef.getId(), is("foo")); - assertThat(dbRef.getCollectionName(), is("person")); + assertThat(dbRef.getId()).isEqualTo("foo"); + assertThat(dbRef.getCollectionName()).isEqualTo("person"); } @Test // DATAMONGO-348 - public void lazyLoadingProxyForLazyDbRefOnInterface() { + void lazyLoadingProxyForLazyDbRefOnInterface() { String id = "42"; String value = "bubu"; @@ -178,13 +178,14 @@ public void lazyLoadingProxyForLazyDbRefOnInterface() { ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); assertProxyIsResolved(result.dbRefToInterface, false); - assertThat(result.dbRefToInterface.get(0).getId(), is(id)); + assertThat(result.dbRefToInterface.get(0).getId()).isEqualTo(id); assertProxyIsResolved(result.dbRefToInterface, true); - assertThat(result.dbRefToInterface.get(0).getValue(), is(value)); + assertThat(result.dbRefToInterface.get(0).getValue()).isEqualTo(value); } @Test // DATAMONGO-348 - public void lazyLoadingProxyForLazyDbRefOnConcreteCollection() { + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg; ArrayList require to open java.util.") + void lazyLoadingProxyForLazyDbRefOnConcreteCollection() { String id = "42"; String value = "bubu"; @@ -199,13 +200,13 @@ public void lazyLoadingProxyForLazyDbRefOnConcreteCollection() { ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); assertProxyIsResolved(result.dbRefToConcreteCollection, false); - assertThat(result.dbRefToConcreteCollection.get(0).getId(), is(id)); + assertThat(result.dbRefToConcreteCollection.get(0).getId()).isEqualTo(id); 
assertProxyIsResolved(result.dbRefToConcreteCollection, true); - assertThat(result.dbRefToConcreteCollection.get(0).getValue(), is(value)); + assertThat(result.dbRefToConcreteCollection.get(0).getValue()).isEqualTo(value); } @Test // DATAMONGO-348 - public void lazyLoadingProxyForLazyDbRefOnConcreteType() { + void lazyLoadingProxyForLazyDbRefOnConcreteType() { String id = "42"; String value = "bubu"; @@ -220,13 +221,13 @@ public void lazyLoadingProxyForLazyDbRefOnConcreteType() { ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); assertProxyIsResolved(result.dbRefToConcreteType, false); - assertThat(result.dbRefToConcreteType.getId(), is(id)); + assertThat(result.dbRefToConcreteType.getId()).isEqualTo(id); assertProxyIsResolved(result.dbRefToConcreteType, true); - assertThat(result.dbRefToConcreteType.getValue(), is(value)); + assertThat(result.dbRefToConcreteType.getValue()).isEqualTo(value); } @Test // DATAMONGO-348 - public void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructor() { + void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructor() { String id = "42"; String value = "bubu"; @@ -241,13 +242,13 @@ public void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructor ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); assertProxyIsResolved(result.dbRefToConcreteTypeWithPersistenceConstructor, false); - assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor.getId(), is(id)); + assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor.getId()).isEqualTo(id); assertProxyIsResolved(result.dbRefToConcreteTypeWithPersistenceConstructor, true); - assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor.getValue(), is(value)); + assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor.getValue()).isEqualTo(value); } @Test // DATAMONGO-348 - public void 
lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructorButWithoutDefaultConstructor() { + void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructorButWithoutDefaultConstructor() { String id = "42"; String value = "bubu"; @@ -263,13 +264,14 @@ public void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructor ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); assertProxyIsResolved(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor, false); - assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor.getId(), is(id)); + assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor.getId()).isEqualTo(id); assertProxyIsResolved(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor, true); - assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor.getValue(), is(value)); + assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor.getValue()) + .isEqualTo(value); } @Test // DATAMONGO-348 - public void lazyLoadingProxyForSerializableLazyDbRefOnConcreteType() { + void lazyLoadingProxyForSerializableLazyDbRefOnConcreteType() { String id = "42"; String value = "bubu"; @@ -285,13 +287,13 @@ public void lazyLoadingProxyForSerializableLazyDbRefOnConcreteType() { SerializableClassWithLazyDbRefs deserializedResult = (SerializableClassWithLazyDbRefs) transport(result); - assertThat(deserializedResult.dbRefToSerializableTarget.getId(), is(id)); + assertThat(deserializedResult.dbRefToSerializableTarget.getId()).isEqualTo(id); assertProxyIsResolved(deserializedResult.dbRefToSerializableTarget, true); - assertThat(deserializedResult.dbRefToSerializableTarget.getValue(), is(value)); + assertThat(deserializedResult.dbRefToSerializableTarget.getValue()).isEqualTo(value); } @Test // DATAMONGO-884 - public void 
lazyLoadingProxyForToStringObjectMethodOverridingDbref() { + void lazyLoadingProxyForToStringObjectMethodOverridingDbref() { String id = "42"; String value = "bubu"; @@ -305,14 +307,14 @@ public void lazyLoadingProxyForToStringObjectMethodOverridingDbref() { WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, document); - assertThat(result.dbRefToToStringObjectMethodOverride, is(notNullValue())); + assertThat(result.dbRefToToStringObjectMethodOverride).isNotNull(); assertProxyIsResolved(result.dbRefToToStringObjectMethodOverride, false); - assertThat(result.dbRefToToStringObjectMethodOverride.toString(), is(id + ":" + value)); + assertThat(result.dbRefToToStringObjectMethodOverride.toString()).isEqualTo(id + ":" + value); assertProxyIsResolved(result.dbRefToToStringObjectMethodOverride, true); } @Test // DATAMONGO-884 - public void callingToStringObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { + void callingToStringObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { String id = "42"; String value = "bubu"; @@ -326,21 +328,21 @@ public void callingToStringObjectMethodOnLazyLoadingDbrefShouldNotInitializeProx WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, document); - assertThat(result.dbRefToPlainObject, is(notNullValue())); + assertThat(result.dbRefToPlainObject).isNotNull(); assertProxyIsResolved(result.dbRefToPlainObject, false); // calling Object#toString does not initialize the proxy. String proxyString = result.dbRefToPlainObject.toString(); - assertThat(proxyString, is("lazyDbRefTarget" + ":" + id + "$LazyLoadingProxy")); + assertThat(proxyString).isEqualTo("lazyDbRefTarget" + ":" + id + "$LazyLoadingProxy"); assertProxyIsResolved(result.dbRefToPlainObject, false); // calling another method not declared on object triggers proxy initialization. 
- assertThat(result.dbRefToPlainObject.getValue(), is(value)); + assertThat(result.dbRefToPlainObject.getValue()).isEqualTo(value); assertProxyIsResolved(result.dbRefToPlainObject, true); } @Test // DATAMONGO-884 - public void equalsObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { + void equalsObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { String id = "42"; String value = "bubu"; @@ -354,18 +356,18 @@ public void equalsObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, document); - assertThat(result.dbRefToPlainObject, is(notNullValue())); + assertThat(result.dbRefToPlainObject).isNotNull(); assertProxyIsResolved(result.dbRefToPlainObject, false); - assertThat(result.dbRefToPlainObject, is(equalTo(result.dbRefToPlainObject))); - assertThat(result.dbRefToPlainObject, is(not(equalTo(null)))); - assertThat(result.dbRefToPlainObject, is(not(equalTo((Object) lazyDbRefs.dbRefToToStringObjectMethodOverride)))); + assertThat(result.dbRefToPlainObject).isEqualTo(result.dbRefToPlainObject); + assertThat(result.dbRefToPlainObject).isNotEqualTo(null); + assertThat(result.dbRefToPlainObject).isNotEqualTo((Object) lazyDbRefs.dbRefToToStringObjectMethodOverride); assertProxyIsResolved(result.dbRefToPlainObject, false); } @Test // DATAMONGO-884 - public void hashcodeObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { + void hashcodeObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { String id = "42"; String value = "bubu"; @@ -379,16 +381,16 @@ public void hashcodeObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, document); - assertThat(result.dbRefToPlainObject, is(notNullValue())); + assertThat(result.dbRefToPlainObject).isNotNull(); assertProxyIsResolved(result.dbRefToPlainObject, false); - 
assertThat(result.dbRefToPlainObject.hashCode(), is(311365444)); + assertThat(result.dbRefToPlainObject.hashCode()).isEqualTo(311365444); assertProxyIsResolved(result.dbRefToPlainObject, false); } @Test // DATAMONGO-884 - public void lazyLoadingProxyForEqualsAndHashcodeObjectMethodOverridingDbref() { + void lazyLoadingProxyForEqualsAndHashcodeObjectMethodOverridingDbref() { String id = "42"; String value = "bubu"; @@ -406,18 +408,18 @@ public void lazyLoadingProxyForEqualsAndHashcodeObjectMethodOverridingDbref() { WithObjectMethodOverrideLazyDbRefs result = converterSpy.read(WithObjectMethodOverrideLazyDbRefs.class, document); assertProxyIsResolved(result.dbRefEqualsAndHashcodeObjectMethodOverride1, false); - assertThat(result.dbRefEqualsAndHashcodeObjectMethodOverride1, is(notNullValue())); + assertThat(result.dbRefEqualsAndHashcodeObjectMethodOverride1).isNotNull(); result.dbRefEqualsAndHashcodeObjectMethodOverride1.equals(null); assertProxyIsResolved(result.dbRefEqualsAndHashcodeObjectMethodOverride1, true); assertProxyIsResolved(result.dbRefEqualsAndHashcodeObjectMethodOverride2, false); - assertThat(result.dbRefEqualsAndHashcodeObjectMethodOverride2, is(notNullValue())); + assertThat(result.dbRefEqualsAndHashcodeObjectMethodOverride2).isNotNull(); result.dbRefEqualsAndHashcodeObjectMethodOverride2.hashCode(); assertProxyIsResolved(result.dbRefEqualsAndHashcodeObjectMethodOverride2, true); } @Test // DATAMONGO-987 - public void shouldNotGenerateLazyLoadingProxyForNullValues() { + void shouldNotGenerateLazyLoadingProxyForNullValues() { Document document = new Document(); ClassWithLazyDbRefs lazyDbRefs = new ClassWithLazyDbRefs(); @@ -426,16 +428,16 @@ public void shouldNotGenerateLazyLoadingProxyForNullValues() { ClassWithLazyDbRefs result = converter.read(ClassWithLazyDbRefs.class, document); - assertThat(result.id, is(lazyDbRefs.id)); - assertThat(result.dbRefToInterface, is(nullValue())); - assertThat(result.dbRefToConcreteCollection, is(nullValue())); - 
assertThat(result.dbRefToConcreteType, is(nullValue())); - assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor, is(nullValue())); - assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor, is(nullValue())); + assertThat(result.id).isEqualTo(lazyDbRefs.id); + assertThat(result.dbRefToInterface).isNull(); + assertThat(result.dbRefToConcreteCollection).isNull(); + assertThat(result.dbRefToConcreteType).isNull(); + assertThat(result.dbRefToConcreteTypeWithPersistenceConstructor).isNull(); + assertThat(result.dbRefToConcreteTypeWithPersistenceConstructorWithoutDefaultConstructor).isNull(); } @Test // DATAMONGO-1005 - public void shouldBeAbleToStoreDirectReferencesToSelf() { + void shouldBeAbleToStoreDirectReferencesToSelf() { Document document = new Document(); @@ -446,12 +448,12 @@ public void shouldBeAbleToStoreDirectReferencesToSelf() { ClassWithDbRefField found = converter.read(ClassWithDbRefField.class, document); - assertThat(found, is(notNullValue())); - assertThat(found.reference, is(found)); + assertThat(found).isNotNull(); + assertThat(found.reference).isEqualTo(found); } @Test // DATAMONGO-1005 - public void shouldBeAbleToStoreNestedReferencesToSelf() { + void shouldBeAbleToStoreNestedReferencesToSelf() { Document document = new Document(); @@ -464,13 +466,13 @@ public void shouldBeAbleToStoreNestedReferencesToSelf() { ClassWithNestedDbRefField found = converter.read(ClassWithNestedDbRefField.class, document); - assertThat(found, is(notNullValue())); - assertThat(found.nested, is(notNullValue())); - assertThat(found.nested.reference, is(found)); + assertThat(found).isNotNull(); + assertThat(found.nested).isNotNull(); + assertThat(found.nested.reference).isEqualTo(found); } @Test // DATAMONGO-1012 - public void shouldEagerlyResolveIdPropertyWithFieldAccess() { + void shouldEagerlyResolveIdPropertyWithFieldAccess() { MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(ClassWithLazyDbRefs.class); 
MongoPersistentProperty property = entity.getRequiredPersistentProperty("dbRefToConcreteType"); @@ -487,12 +489,12 @@ public void shouldEagerlyResolveIdPropertyWithFieldAccess() { MongoPersistentProperty idProperty = mappingContext.getRequiredPersistentEntity(LazyDbRefTarget.class) .getIdProperty(); - assertThat(accessor.getProperty(idProperty), is(notNullValue())); + assertThat(accessor.getProperty(idProperty)).isNotNull(); assertProxyIsResolved(result.dbRefToConcreteType, false); } @Test // DATAMONGO-1012 - public void shouldNotEagerlyResolveIdPropertyWithPropertyAccess() { + void shouldNotEagerlyResolveIdPropertyWithPropertyAccess() { MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(ClassWithLazyDbRefs.class); MongoPersistentProperty property = entity.getRequiredPersistentProperty("dbRefToConcreteTypeWithPropertyAccess"); @@ -505,12 +507,13 @@ public void shouldNotEagerlyResolveIdPropertyWithPropertyAccess() { ClassWithLazyDbRefs result = converter.read(ClassWithLazyDbRefs.class, object); LazyDbRefTargetPropertyAccess proxy = result.dbRefToConcreteTypeWithPropertyAccess; - assertThat(ReflectionTestUtils.getField(proxy, "id"), is(nullValue())); + assertThat(ReflectionTestUtils.getField(proxy, "id")).isNull(); assertProxyIsResolved(proxy, false); } @Test // DATAMONGO-1076 - public void shouldNotTriggerResolvingOfLazyLoadedProxyWhenFinalizeMethodIsInvoked() throws Exception { + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg; ArrayList require to open java.util.") + void shouldNotTriggerResolvingOfLazyLoadedProxyWhenFinalizeMethodIsInvoked() throws Exception { MongoPersistentEntity entity = mappingContext .getRequiredPersistentEntity(WithObjectMethodOverrideLazyDbRefs.class); @@ -528,7 +531,8 @@ public void shouldNotTriggerResolvingOfLazyLoadedProxyWhenFinalizeMethodIsInvoke } @Test // DATAMONGO-1194 - public void shouldBulkFetchListOfReferences() { + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = 
"Class Proxies for eg; ArrayList require to open java.util.") + void shouldBulkFetchListOfReferences() { String id1 = "1"; String id2 = "2"; @@ -548,15 +552,15 @@ public void shouldBulkFetchListOfReferences() { ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); assertProxyIsResolved(result.dbRefToConcreteCollection, false); - assertThat(result.dbRefToConcreteCollection.get(0).getId(), is(id1)); + assertThat(result.dbRefToConcreteCollection.get(0).getId()).isEqualTo(id1); assertProxyIsResolved(result.dbRefToConcreteCollection, true); - assertThat(result.dbRefToConcreteCollection.get(1).getId(), is(id2)); + assertThat(result.dbRefToConcreteCollection.get(1).getId()).isEqualTo(id2); verify(converterSpy, never()).readRef(Mockito.any(DBRef.class)); } @Test // DATAMONGO-1666 - public void shouldBulkFetchSetOfReferencesForConstructorCreation() { + void shouldBulkFetchSetOfReferencesForConstructorCreation() { String id1 = "1"; String id2 = "2"; @@ -572,13 +576,14 @@ public void shouldBulkFetchSetOfReferencesForConstructorCreation() { ClassWithDbRefSetConstructor result = converterSpy.read(ClassWithDbRefSetConstructor.class, document); - assertThat(result.dbRefToInterface, is(instanceOf(Set.class))); + assertThat(result.dbRefToInterface).isInstanceOf(Set.class); verify(converterSpy, never()).readRef(Mockito.any(DBRef.class)); } @Test // DATAMONGO-1194 - public void shouldFallbackToOneByOneFetchingWhenElementsInListOfReferencesPointToDifferentCollections() { + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg; ArrayList require to open java.util.") + void shouldFallbackToOneByOneFetchingWhenElementsInListOfReferencesPointToDifferentCollections() { String id1 = "1"; String id2 = "2"; @@ -597,16 +602,16 @@ public void shouldFallbackToOneByOneFetchingWhenElementsInListOfReferencesPointT ClassWithLazyDbRefs result = converterSpy.read(ClassWithLazyDbRefs.class, document); 
assertProxyIsResolved(result.dbRefToConcreteCollection, false); - assertThat(result.dbRefToConcreteCollection.get(0).getId(), is(id1)); + assertThat(result.dbRefToConcreteCollection.get(0).getId()).isEqualTo(id1); assertProxyIsResolved(result.dbRefToConcreteCollection, true); - assertThat(result.dbRefToConcreteCollection.get(1).getId(), is(id2)); + assertThat(result.dbRefToConcreteCollection.get(1).getId()).isEqualTo(id2); verify(converterSpy, times(2)).readRef(Mockito.any(DBRef.class)); verify(converterSpy, never()).bulkReadRefs(anyList()); } @Test // DATAMONGO-1194 - public void shouldBulkFetchMapOfReferences() { + void shouldBulkFetchMapOfReferences() { MapDBRefVal val1 = new MapDBRefVal(); val1.id = BigInteger.ONE; @@ -629,16 +634,16 @@ public void shouldBulkFetchMapOfReferences() { MapDBRef result = converterSpy.read(MapDBRef.class, document); // assertProxyIsResolved(result.map, false); - assertThat(result.map.get("one").id, is(val1.id)); + assertThat(result.map.get("one").id).isEqualTo(val1.id); // assertProxyIsResolved(result.map, true); - assertThat(result.map.get("two").id, is(val2.id)); + assertThat(result.map.get("two").id).isEqualTo(val2.id); verify(converterSpy, times(1)).bulkReadRefs(anyList()); verify(converterSpy, never()).readRef(Mockito.any(DBRef.class)); } @Test // DATAMONGO-1194 - public void shouldBulkFetchLazyMapOfReferences() { + void shouldBulkFetchLazyMapOfReferences() { MapDBRefVal val1 = new MapDBRefVal(); val1.id = BigInteger.ONE; @@ -661,9 +666,9 @@ public void shouldBulkFetchLazyMapOfReferences() { MapDBRef result = converterSpy.read(MapDBRef.class, document); assertProxyIsResolved(result.lazyMap, false); - assertThat(result.lazyMap.get("one").id, is(val1.id)); + assertThat(result.lazyMap.get("one").id).isEqualTo(val1.id); assertProxyIsResolved(result.lazyMap, true); - assertThat(result.lazyMap.get("two").id, is(val2.id)); + assertThat(result.lazyMap.get("two").id).isEqualTo(val2.id); verify(converterSpy, 
times(1)).bulkReadRefs(anyList()); verify(converterSpy, never()).readRef(any()); @@ -725,15 +730,15 @@ static class LazyDbRefTarget implements Serializable { @Id String id; String value; - public LazyDbRefTarget() { + LazyDbRefTarget() { this(null); } - public LazyDbRefTarget(String id) { + LazyDbRefTarget(String id) { this(id, null); } - public LazyDbRefTarget(String id, String value) { + LazyDbRefTarget(String id, String value) { this.id = id; this.value = value; } @@ -753,7 +758,7 @@ static class LazyDbRefTargetPropertyAccess implements Serializable { @Id @AccessType(Type.PROPERTY) String id; - public LazyDbRefTargetPropertyAccess(String id) { + LazyDbRefTargetPropertyAccess(String id) { this.id = id; } @@ -770,7 +775,7 @@ static class LazyDbRefTargetWithPeristenceConstructor extends LazyDbRefTarget { public LazyDbRefTargetWithPeristenceConstructor() {} @PersistenceConstructor - public LazyDbRefTargetWithPeristenceConstructor(String id, String value) { + LazyDbRefTargetWithPeristenceConstructor(String id, String value) { super(id, value); this.persistenceConstructorCalled = true; } @@ -786,7 +791,7 @@ static class LazyDbRefTargetWithPeristenceConstructorWithoutDefaultConstructor e boolean persistenceConstructorCalled; @PersistenceConstructor - public LazyDbRefTargetWithPeristenceConstructorWithoutDefaultConstructor(String id, String value) { + LazyDbRefTargetWithPeristenceConstructorWithoutDefaultConstructor(String id, String value) { super(id, value); this.persistenceConstructorCalled = true; } @@ -800,7 +805,7 @@ static class SerializableLazyDbRefTarget extends LazyDbRefTarget implements Seri public SerializableLazyDbRefTarget() {} - public SerializableLazyDbRefTarget(String id, String value) { + SerializableLazyDbRefTarget(String id, String value) { super(id, value); } @@ -813,14 +818,10 @@ static class ToStringObjectMethodOverrideLazyDbRefTarget extends LazyDbRefTarget public ToStringObjectMethodOverrideLazyDbRefTarget() {} - public 
ToStringObjectMethodOverrideLazyDbRefTarget(String id, String value) { + ToStringObjectMethodOverrideLazyDbRefTarget(String id, String value) { super(id, value); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return this.id + ":" + this.value; @@ -833,7 +834,7 @@ static class EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget extends LazyDb public EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget() {} - public EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget(String id, String value) { + EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget(String id, String value) { super(id, value); } @@ -847,7 +848,7 @@ public int hashCode() { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) return true; if (obj == null) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java index 00117f0e78..75c7cc4366 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,16 +24,20 @@ import org.bson.Document; import org.bson.types.ObjectId; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + import org.springframework.dao.InvalidDataAccessApiUsageException; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; import com.mongodb.DBRef; import com.mongodb.client.FindIterable; @@ -46,28 +50,30 @@ * @author Christoph Strobl * @author Oliver Gierke */ -@RunWith(MockitoJUnitRunner.class) -public class DefaultDbRefResolverUnitTests { +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class DefaultDbRefResolverUnitTests { - @Mock MongoDbFactory factoryMock; + @Mock MongoDatabaseFactory factoryMock; @Mock MongoDatabase dbMock; @Mock MongoCollection collectionMock; @Mock FindIterable cursorMock; - DefaultDbRefResolver resolver; + private DefaultDbRefResolver resolver; - @Before - public void setUp() { + @BeforeEach + void setUp() { - when(factoryMock.getDb()).thenReturn(dbMock); - when(dbMock.getCollection(anyString())).thenReturn(collectionMock); - 
when(collectionMock.find(Mockito.any(Document.class))).thenReturn(cursorMock); + when(factoryMock.getMongoDatabase()).thenReturn(dbMock); + when(factoryMock.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + when(dbMock.getCollection(anyString(), any(Class.class))).thenReturn(collectionMock); + when(collectionMock.find(any(Document.class))).thenReturn(cursorMock); resolver = new DefaultDbRefResolver(factoryMock); } @Test // DATAMONGO-1194 @SuppressWarnings("unchecked") - public void bulkFetchShouldLoadDbRefsCorrectly() { + void bulkFetchShouldLoadDbRefsCorrectly() { DBRef ref1 = new DBRef("collection-1", new ObjectId()); DBRef ref2 = new DBRef("collection-1", new ObjectId()); @@ -84,25 +90,26 @@ public void bulkFetchShouldLoadDbRefsCorrectly() { assertThat($in).hasSize(2); } - @Test(expected = InvalidDataAccessApiUsageException.class) // DATAMONGO-1194 - public void bulkFetchShouldThrowExceptionWhenUsingDifferntCollectionsWithinSetOfReferences() { + @Test // DATAMONGO-1194 + void bulkFetchShouldThrowExceptionWhenUsingDifferntCollectionsWithinSetOfReferences() { DBRef ref1 = new DBRef("collection-1", new ObjectId()); DBRef ref2 = new DBRef("collection-2", new ObjectId()); - resolver.bulkFetch(Arrays.asList(ref1, ref2)); + assertThatThrownBy(() -> resolver.bulkFetch(Arrays.asList(ref1, ref2))) + .isInstanceOf(InvalidDataAccessApiUsageException.class); } @Test // DATAMONGO-1194 - public void bulkFetchShouldReturnEarlyForEmptyLists() { + void bulkFetchShouldReturnEarlyForEmptyLists() { - resolver.bulkFetch(Collections. 
emptyList()); + resolver.bulkFetch(Collections.emptyList()); verify(collectionMock, never()).find(Mockito.any(Document.class)); } @Test // DATAMONGO-1194 - public void bulkFetchShouldRestoreOriginalOrder() { + void bulkFetchShouldRestoreOriginalOrder() { Document o1 = new Document("_id", new ObjectId()); Document o2 = new Document("_id", new ObjectId()); @@ -116,7 +123,7 @@ public void bulkFetchShouldRestoreOriginalOrder() { } @Test // DATAMONGO-1765 - public void bulkFetchContainsDuplicates() { + void bulkFetchContainsDuplicates() { Document document = new Document("_id", new ObjectId()); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapperUnitTests.java index 02ee9f0cc4..75fca5b267 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapperUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,16 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + import org.springframework.data.convert.ConfigurableTypeInformationMapper; import org.springframework.data.convert.SimpleTypeInformationMapper; import org.springframework.data.mongodb.core.DocumentTestUtils; @@ -42,7 +42,7 @@ public class DefaultMongoTypeMapperUnitTests { DefaultMongoTypeMapper typeMapper; - @Before + @BeforeEach public void setUp() { configurableTypeInformationMapper = new ConfigurableTypeInformationMapper( @@ -115,8 +115,8 @@ public void writesTypeRestrictionsCorrectly() { Document typeInfo = DocumentTestUtils.getAsDocument(result, DefaultMongoTypeMapper.DEFAULT_TYPE_KEY); List aliases = DocumentTestUtils.getAsDBList(typeInfo, "$in"); - assertThat(aliases, hasSize(1)); - assertThat(aliases.get(0), is((Object) String.class.getName())); + assertThat(aliases).hasSize(1); + assertThat(aliases.get(0)).isEqualTo((Object) String.class.getName()); } @Test @@ -173,15 +173,15 @@ public void returnsNullIfTypeKeySetToNull() { @Test public void returnsCorrectTypeKey() { - assertThat(typeMapper.isTypeKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(true)); + assertThat(typeMapper.isTypeKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isTrue(); typeMapper = new DefaultMongoTypeMapper("_custom"); - assertThat(typeMapper.isTypeKey("_custom"), is(true)); - 
assertThat(typeMapper.isTypeKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(false)); + assertThat(typeMapper.isTypeKey("_custom")).isTrue(); + assertThat(typeMapper.isTypeKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isFalse(); typeMapper = new DefaultMongoTypeMapper(null); - assertThat(typeMapper.isTypeKey("_custom"), is(false)); - assertThat(typeMapper.isTypeKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(false)); + assertThat(typeMapper.isTypeKey("_custom")).isFalse(); + assertThat(typeMapper.isTypeKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isFalse(); } private void readsTypeFromField(Document document, Class type) { @@ -189,10 +189,10 @@ private void readsTypeFromField(Document document, Class type) { TypeInformation typeInfo = typeMapper.readType(document); if (type != null) { - assertThat(typeInfo, is(notNullValue())); - assertThat(typeInfo.getType(), is(typeCompatibleWith(type))); + assertThat(typeInfo).isNotNull(); + assertThat(typeInfo.getType()).isAssignableFrom(type); } else { - assertThat(typeInfo, is(nullValue())); + assertThat(typeInfo).isNull(); } } @@ -201,10 +201,10 @@ private void writesTypeToField(String field, Document document, Class type) { typeMapper.writeType(type, document); if (field == null) { - assertThat(document.keySet().isEmpty(), is(true)); + assertThat(document.keySet().isEmpty()).isTrue(); } else { - assertThat(document.containsKey(field), is(true)); - assertThat(document.get(field), is((Object) type.getName())); + assertThat(document.containsKey(field)).isTrue(); + assertThat(document.get(field)).isEqualTo((Object) type.getName()); } } @@ -213,10 +213,10 @@ private void writesTypeToField(Document document, Class type, Object value) { typeMapper.writeType(type, document); if (value == null) { - assertThat(document.keySet().isEmpty(), is(true)); + assertThat(document.keySet().isEmpty()).isTrue(); } else { - assertThat(document.containsKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(true)); - 
assertThat(document.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(value)); + assertThat(document.containsKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isTrue(); + assertThat(document.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isEqualTo(value); } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentAccessorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentAccessorUnitTests.java index 5f1cc4b7d2..4f46283b74 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentAccessorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentAccessorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,12 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.bson.BsonDocument; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; + import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; @@ -49,7 +49,7 @@ public void putsNestedFieldCorrectly() { accessor.put(fooProperty, "FooBar"); Document aDocument = DocumentTestUtils.getAsDocument(document, "a"); - assertThat(aDocument.get("b"), is((Object) "FooBar")); + assertThat(aDocument.get("b")).isEqualTo((Object) "FooBar"); } @Test // DATAMONGO-766 @@ -58,24 +58,24 @@ public void getsNestedFieldCorrectly() { Document source = new Document("a", new Document("b", "FooBar")); DocumentAccessor accessor = new DocumentAccessor(source); - assertThat(accessor.get(fooProperty), is((Object) "FooBar")); + assertThat(accessor.get(fooProperty)).isEqualTo((Object) "FooBar"); } @Test // DATAMONGO-766 public void returnsNullForNonExistingFieldPath() { DocumentAccessor accessor = new DocumentAccessor(new Document()); - assertThat(accessor.get(fooProperty), is(nullValue())); + assertThat(accessor.get(fooProperty)).isNull(); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-766 + @Test // DATAMONGO-766 public void rejectsNonDocuments() { - new DocumentAccessor(new BsonDocument()); + assertThatIllegalArgumentException().isThrownBy(() -> new DocumentAccessor(new BsonDocument())); } - @Test(expected = 
IllegalArgumentException.class) // DATAMONGO-766 + @Test // DATAMONGO-766 public void rejectsNullDocument() { - new DocumentAccessor(null); + assertThatIllegalArgumentException().isThrownBy(() -> new DocumentAccessor(null)); } @Test // DATAMONGO-1335 @@ -92,9 +92,9 @@ public void writesAllNestingsCorrectly() { Document nestedA = DocumentTestUtils.getAsDocument(target, "a"); - assertThat(nestedA, is(notNullValue())); - assertThat(nestedA.get("b"), is((Object) "b")); - assertThat(nestedA.get("c"), is((Object) "c")); + assertThat(nestedA).isNotNull(); + assertThat(nestedA.get("b")).isEqualTo((Object) "b"); + assertThat(nestedA.get("c")).isEqualTo((Object) "c"); } @Test // DATAMONGO-1471 @@ -103,9 +103,9 @@ public void exposesAvailabilityOfFields() { DocumentAccessor accessor = new DocumentAccessor(new Document("a", new BasicDBObject("c", "d"))); MongoPersistentEntity entity = context.getRequiredPersistentEntity(ProjectingType.class); - assertThat(accessor.hasValue(entity.getRequiredPersistentProperty("foo")), is(false)); - assertThat(accessor.hasValue(entity.getRequiredPersistentProperty("a")), is(true)); - assertThat(accessor.hasValue(entity.getRequiredPersistentProperty("name")), is(false)); + assertThat(accessor.hasValue(entity.getRequiredPersistentProperty("foo"))).isFalse(); + assertThat(accessor.hasValue(entity.getRequiredPersistentProperty("a"))).isTrue(); + assertThat(accessor.hasValue(entity.getRequiredPersistentProperty("name"))).isFalse(); } static class ProjectingType { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactoryUnitTests.java new file mode 100644 index 0000000000..ce6cfc6517 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactoryUnitTests.java @@ -0,0 +1,210 @@ +/* + * Copyright 2021-2025 the original 
author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import static org.assertj.core.api.Assertions.*; + +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.mongodb.core.convert.DocumentPointerFactory.LinkageDocument; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; + +/** + * @author Christoph Strobl + */ +public class DocumentPointerFactoryUnitTests { + + @Test // GH-3602 + void errorsOnMongoOperatorUsage() { + + LinkageDocument source = LinkageDocument.from("{ '_id' : { '$eq' : 1 } }"); + + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> getPointerValue(source, new Book())) // + .withMessageContaining("$eq"); + } + + @Test // GH-3602 + void computesStaticPointer() { + + LinkageDocument source = LinkageDocument.from("{ '_id' : 1 }"); + + assertThat(getPointerValue(source, new Book())).isEqualTo(new Document("_id", 1)); + } + + @Test // GH-3602 + void computesPointerWithIdValuePlaceholder() { + + LinkageDocument source = LinkageDocument.from("{ '_id' : ?#{id} }"); + + assertThat(getPointerValue(source, new Book("book-1", null, null))).isEqualTo(new Document("id", "book-1")); + } + + @Test // GH-3602 + 
void computesPointerForNonIdValuePlaceholder() { + + LinkageDocument source = LinkageDocument.from("{ 'title' : ?#{book_title} }"); + + assertThat(getPointerValue(source, new Book("book-1", "Living With A Seal", null))) + .isEqualTo(new Document("book_title", "Living With A Seal")); + } + + @Test // GH-3602 + void computesPlaceholderFromNestedPathValue() { + + LinkageDocument source = LinkageDocument.from("{ 'metadata.pages' : ?#{p} } }"); + + assertThat(getPointerValue(source, new Book("book-1", "Living With A Seal", null, new Metadata(272)))) + .isEqualTo(new Document("p", 272)); + } + + @Test // GH-3602 + void computesNestedPlaceholderPathValue() { + + LinkageDocument source = LinkageDocument.from("{ 'metadata' : { 'pages' : ?#{metadata.pages} } }"); + + assertThat(getPointerValue(source, new Book("book-1", "Living With A Seal", null, new Metadata(272)))) + .isEqualTo(new Document("metadata", new Document("pages", 272))); + } + + Object getPointerValue(LinkageDocument linkageDocument, Object value) { + + MongoMappingContext mappingContext = new MongoMappingContext(); + MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(value.getClass()); + return linkageDocument + .getDocumentPointer(mappingContext, persistentEntity, persistentEntity.getPropertyPathAccessor(value)) + .getPointer(); + } + + static class Book { + + String id; + String title; + List author; + Metadata metadata; + + public Book() {} + + public Book(String id, String title, List author) { + this.id = id; + this.title = title; + this.author = author; + } + + public Book(String id, String title, List author, Metadata metadata) { + this.id = id; + this.title = title; + this.author = author; + this.metadata = metadata; + } + + public String getId() { + return this.id; + } + + public String getTitle() { + return this.title; + } + + public List getAuthor() { + return this.author; + } + + public Metadata getMetadata() { + return this.metadata; + } + + public void setId(String id) { 
+ this.id = id; + } + + public void setTitle(String title) { + this.title = title; + } + + public void setAuthor(List author) { + this.author = author; + } + + public void setMetadata(Metadata metadata) { + this.metadata = metadata; + } + + public String toString() { + return "DocumentPointerFactoryUnitTests.Book(id=" + this.getId() + ", title=" + this.getTitle() + ", author=" + + this.getAuthor() + ", metadata=" + this.getMetadata() + ")"; + } + } + + static class Metadata { + + int pages; + + public Metadata(int pages) { + this.pages = pages; + } + + public int getPages() { + return pages; + } + + public void setPages(int pages) { + this.pages = pages; + } + } + + static class Author { + + String id; + String firstname; + String lastname; + + public Author() {} + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public String toString() { + return "DocumentPointerFactoryUnitTests.Author(id=" + this.getId() + ", firstname=" + this.getFirstname() + + ", lastname=" + this.getLastname() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoConvertersUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoConvertersUnitTests.java index 71ccc9a8c0..7fb664b00c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoConvertersUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoConvertersUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,21 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Arrays; import java.util.List; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; + import org.springframework.data.geo.Box; import org.springframework.data.geo.Circle; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; import org.springframework.data.geo.Polygon; -import org.springframework.data.mongodb.core.convert.GeoConverters.BoxToDocumentConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.CircleToDocumentConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DocumentToBoxConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DocumentToCircleConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DocumentToPointConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DocumentToPolygonConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.DocumentToSphereConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.GeoCommandToDocumentConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.PointToDocumentConverter; -import org.springframework.data.mongodb.core.convert.GeoConverters.PolygonToDocumentConverter; -import 
org.springframework.data.mongodb.core.convert.GeoConverters.SphereToDocumentConverter; +import org.springframework.data.mongodb.core.convert.GeoConverters.*; import org.springframework.data.mongodb.core.geo.Sphere; import org.springframework.data.mongodb.core.query.GeoCommand; @@ -61,8 +51,8 @@ public void convertsBoxToDocumentAndBackCorrectly() { Document document = BoxToDocumentConverter.INSTANCE.convert(box); Box result = DocumentToBoxConverter.INSTANCE.convert(document); - assertThat(result, is(box)); - assertThat(result.getClass().equals(Box.class), is(true)); + assertThat(result).isEqualTo(box); + assertThat(result.getClass().equals(Box.class)).isTrue(); } @Test // DATAMONGO-858 @@ -73,7 +63,7 @@ public void convertsCircleToDocumentAndBackCorrectlyNeutralDistance() { Document document = CircleToDocumentConverter.INSTANCE.convert(circle); Circle result = DocumentToCircleConverter.INSTANCE.convert(document); - assertThat(result, is(circle)); + assertThat(result).isEqualTo(circle); } @Test // DATAMONGO-858 @@ -85,8 +75,8 @@ public void convertsCircleToDocumentAndBackCorrectlyMilesDistance() { Document document = CircleToDocumentConverter.INSTANCE.convert(circle); Circle result = DocumentToCircleConverter.INSTANCE.convert(document); - assertThat(result, is(circle)); - assertThat(result.getRadius(), is(radius)); + assertThat(result).isEqualTo(circle); + assertThat(result.getRadius()).isEqualTo(radius); } @Test // DATAMONGO-858 @@ -97,8 +87,8 @@ public void convertsPolygonToDocumentAndBackCorrectly() { Document document = PolygonToDocumentConverter.INSTANCE.convert(polygon); Polygon result = DocumentToPolygonConverter.INSTANCE.convert(document); - assertThat(result, is(polygon)); - assertThat(result.getClass().equals(Polygon.class), is(true)); + assertThat(result).isEqualTo(polygon); + assertThat(result.getClass().equals(Polygon.class)).isTrue(); } @Test // DATAMONGO-858 @@ -109,8 +99,8 @@ public void convertsSphereToDocumentAndBackCorrectlyWithNeutralDistance() { 
Document document = SphereToDocumentConverter.INSTANCE.convert(sphere); Sphere result = DocumentToSphereConverter.INSTANCE.convert(document); - assertThat(result, is(sphere)); - assertThat(result.getClass().equals(Sphere.class), is(true)); + assertThat(result).isEqualTo(sphere); + assertThat(result.getClass().equals(Sphere.class)).isTrue(); } @Test // DATAMONGO-858 @@ -122,9 +112,9 @@ public void convertsSphereToDocumentAndBackCorrectlyWithKilometerDistance() { Document document = SphereToDocumentConverter.INSTANCE.convert(sphere); Sphere result = DocumentToSphereConverter.INSTANCE.convert(document); - assertThat(result, is(sphere)); - assertThat(result.getRadius(), is(radius)); - assertThat(result.getClass().equals(org.springframework.data.mongodb.core.geo.Sphere.class), is(true)); + assertThat(result).isEqualTo(sphere); + assertThat(result.getRadius()).isEqualTo(radius); + assertThat(result.getClass().equals(Sphere.class)).isTrue(); } @Test // DATAMONGO-858 @@ -135,8 +125,8 @@ public void convertsPointToListAndBackCorrectly() { Document document = PointToDocumentConverter.INSTANCE.convert(point); Point result = DocumentToPointConverter.INSTANCE.convert(document); - assertThat(result, is(point)); - assertThat(result.getClass().equals(Point.class), is(true)); + assertThat(result).isEqualTo(point); + assertThat(result.getClass().equals(Point.class)).isTrue(); } @Test // DATAMONGO-858 @@ -147,19 +137,19 @@ public void convertsGeoCommandToDocumentCorrectly() { Document document = GeoCommandToDocumentConverter.INSTANCE.convert(cmd); - assertThat(document, is(notNullValue())); + assertThat(document).isNotNull(); List boxObject = (List) document.get("$box"); - assertThat(boxObject, - is((Object) Arrays.asList(GeoConverters.toList(box.getFirst()), GeoConverters.toList(box.getSecond())))); + assertThat(boxObject) + .isEqualTo((Object) Arrays.asList(GeoConverters.toList(box.getFirst()), GeoConverters.toList(box.getSecond()))); } @Test // DATAMONGO-1607 public void 
convertsPointCorrectlyWhenUsingNonDoubleForCoordinates() { - assertThat(DocumentToPointConverter.INSTANCE.convert(new Document().append("x", 1L).append("y", 2L)), - is(new Point(1, 2))); + assertThat(DocumentToPointConverter.INSTANCE.convert(new Document().append("x", 1L).append("y", 2L))) + .isEqualTo(new Point(1, 2)); } @Test // DATAMONGO-1607 @@ -169,7 +159,8 @@ public void convertsCircleCorrectlyWhenUsingNonDoubleForCoordinates() { circle.put("center", new Document().append("x", 1).append("y", 2)); circle.put("radius", 3L); - assertThat(DocumentToCircleConverter.INSTANCE.convert(circle), is(new Circle(new Point(1, 2), new Distance(3)))); + assertThat(DocumentToCircleConverter.INSTANCE.convert(circle)) + .isEqualTo(new Circle(new Point(1, 2), new Distance(3))); } @Test // DATAMONGO-1607 @@ -179,7 +170,8 @@ public void convertsSphereCorrectlyWhenUsingNonDoubleForCoordinates() { sphere.put("center", new Document().append("x", 1).append("y", 2)); sphere.put("radius", 3L); - assertThat(DocumentToSphereConverter.INSTANCE.convert(sphere), is(new Sphere(new Point(1, 2), new Distance(3)))); + assertThat(DocumentToSphereConverter.INSTANCE.convert(sphere)) + .isEqualTo(new Sphere(new Point(1, 2), new Distance(3))); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoJsonConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoJsonConverterUnitTests.java index 6ae8d9df06..36d69d5c71 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoJsonConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/GeoJsonConverterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,16 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.core.IsEqual.*; -import static org.hamcrest.core.IsNull.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; import org.bson.Document; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses; + import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.convert.GeoConverters.DocumentToGeoJsonLineStringConverter; import org.springframework.data.mongodb.core.convert.GeoConverters.DocumentToGeoJsonMultiLineStringConverter; @@ -177,30 +174,25 @@ public class GeoJsonConverterUnitTests { public static class DocumentToGeoJsonPolygonConverterUnitTests { DocumentToGeoJsonPolygonConverter converter = DocumentToGeoJsonPolygonConverter.INSTANCE; - public @Rule ExpectedException expectedException = ExpectedException.none(); @Test // DATAMONGO-1137 public void shouldConvertDboCorrectly() { - assertThat(converter.convert(POLYGON_DOC), equalTo(POLYGON)); + assertThat(converter.convert(POLYGON_DOC)).isEqualTo(POLYGON); } @Test // DATAMONGO-1137 public void shouldReturnNullWhenConvertIsGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } @Test // DATAMONGO-1137 public void shouldThrowExceptionWhenTypeDoesNotMatchPolygon() { - 
- expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("'YouDontKonwMe' to Polygon"); - - converter.convert(new Document("type", "YouDontKonwMe")); + assertThatIllegalArgumentException().isThrownBy(() -> converter.convert(new Document("type", "YouDontKonwMe"))); } @Test // DATAMONGO-1399 public void shouldConvertDboWithMultipleRingsCorrectly() { - assertThat(converter.convert(POLYGON_WITH_2_RINGS_DOC), equalTo(POLYGON_WITH_2_RINGS)); + assertThat(converter.convert(POLYGON_WITH_2_RINGS_DOC)).isEqualTo(POLYGON_WITH_2_RINGS); } } @@ -211,25 +203,21 @@ public void shouldConvertDboWithMultipleRingsCorrectly() { public static class DocumentToGeoJsonPointConverterUnitTests { DocumentToGeoJsonPointConverter converter = DocumentToGeoJsonPointConverter.INSTANCE; - public @Rule ExpectedException expectedException = ExpectedException.none(); @Test // DATAMONGO-1137 public void shouldConvertDboCorrectly() { - assertThat(converter.convert(SINGLE_POINT_DOC), equalTo(SINGLE_POINT)); + assertThat(converter.convert(SINGLE_POINT_DOC)).isEqualTo(SINGLE_POINT); } @Test // DATAMONGO-1137 public void shouldReturnNullWhenConvertIsGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } @Test // DATAMONGO-1137 public void shouldThrowExceptionWhenTypeDoesNotMatchPoint() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("'YouDontKonwMe' to Point"); - - converter.convert(new Document("type", "YouDontKonwMe")); + assertThatIllegalArgumentException().isThrownBy(() -> converter.convert(new Document("type", "YouDontKonwMe"))); } } @@ -239,25 +227,20 @@ public void shouldThrowExceptionWhenTypeDoesNotMatchPoint() { public static class DocumentToGeoJsonLineStringConverterUnitTests { DocumentToGeoJsonLineStringConverter converter = DocumentToGeoJsonLineStringConverter.INSTANCE; - public @Rule ExpectedException expectedException = ExpectedException.none(); 
@Test // DATAMONGO-1137 public void shouldConvertDboCorrectly() { - assertThat(converter.convert(LINE_STRING_DOC), equalTo(LINE_STRING)); + assertThat(converter.convert(LINE_STRING_DOC)).isEqualTo(LINE_STRING); } @Test // DATAMONGO-1137 public void shouldReturnNullWhenConvertIsGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } @Test // DATAMONGO-1137 public void shouldThrowExceptionWhenTypeDoesNotMatchPoint() { - - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("'YouDontKonwMe' to LineString"); - - converter.convert(new Document("type", "YouDontKonwMe")); + assertThatIllegalArgumentException().isThrownBy(() -> converter.convert(new Document("type", "YouDontKonwMe"))); } } @@ -267,25 +250,20 @@ public void shouldThrowExceptionWhenTypeDoesNotMatchPoint() { public static class DocumentToGeoJsonMultiLineStringConverterUnitTests { DocumentToGeoJsonMultiLineStringConverter converter = DocumentToGeoJsonMultiLineStringConverter.INSTANCE; - public @Rule ExpectedException expectedException = ExpectedException.none(); @Test // DATAMONGO-1137 public void shouldConvertDboCorrectly() { - assertThat(converter.convert(MULTI_LINE_STRING_DOC), equalTo(MULTI_LINE_STRING)); + assertThat(converter.convert(MULTI_LINE_STRING_DOC)).isEqualTo(MULTI_LINE_STRING); } @Test // DATAMONGO-1137 public void shouldReturnNullWhenConvertIsGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } @Test // DATAMONGO-1137 public void shouldThrowExceptionWhenTypeDoesNotMatchPoint() { - - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("'YouDontKonwMe' to MultiLineString"); - - converter.convert(new Document("type", "YouDontKonwMe")); + assertThatIllegalArgumentException().isThrownBy(() -> converter.convert(new Document("type", "YouDontKonwMe"))); } } @@ -295,25 +273,20 @@ public void 
shouldThrowExceptionWhenTypeDoesNotMatchPoint() { public static class DocumentToGeoJsonMultiPointConverterUnitTests { DocumentToGeoJsonMultiPointConverter converter = DocumentToGeoJsonMultiPointConverter.INSTANCE; - public @Rule ExpectedException expectedException = ExpectedException.none(); @Test // DATAMONGO-1137 public void shouldConvertDboCorrectly() { - assertThat(converter.convert(MULTI_POINT_DOC), equalTo(MULTI_POINT)); + assertThat(converter.convert(MULTI_POINT_DOC)).isEqualTo(MULTI_POINT); } @Test // DATAMONGO-1137 public void shouldReturnNullWhenConvertIsGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } @Test // DATAMONGO-1137 public void shouldThrowExceptionWhenTypeDoesNotMatchPoint() { - - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("'YouDontKonwMe' to MultiPoint"); - - converter.convert(new Document("type", "YouDontKonwMe")); + assertThatIllegalArgumentException().isThrownBy(() -> converter.convert(new Document("type", "YouDontKonwMe"))); } } @@ -323,25 +296,20 @@ public void shouldThrowExceptionWhenTypeDoesNotMatchPoint() { public static class DocumentToGeoJsonMultiPolygonConverterUnitTests { DocumentToGeoJsonMultiPolygonConverter converter = DocumentToGeoJsonMultiPolygonConverter.INSTANCE; - public @Rule ExpectedException expectedException = ExpectedException.none(); @Test // DATAMONGO-1137 public void shouldConvertDboCorrectly() { - assertThat(converter.convert(MULTI_POLYGON_DOC), equalTo(MULTI_POLYGON)); + assertThat(converter.convert(MULTI_POLYGON_DOC)).isEqualTo(MULTI_POLYGON); } @Test // DATAMONGO-1137 public void shouldReturnNullWhenConvertIsGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } @Test // DATAMONGO-1137 public void shouldThrowExceptionWhenTypeDoesNotMatchPoint() { - - expectedException.expect(IllegalArgumentException.class); - 
expectedException.expectMessage("'YouDontKonwMe' to MultiPolygon"); - - converter.convert(new Document("type", "YouDontKonwMe")); + assertThatIllegalArgumentException().isThrownBy(() -> converter.convert(new Document("type", "YouDontKonwMe"))); } } @@ -354,47 +322,47 @@ public static class GeoJsonToDocumentConverterUnitTests { // DATAMONGO-1135 public void convertShouldReturnNullWhenGivenNull() { - assertThat(converter.convert(null), nullValue()); + assertThat(converter.convert(null)).isNull(); } @Test // DATAMONGO-1135 public void shouldConvertGeoJsonPointCorrectly() { - assertThat(converter.convert(SINGLE_POINT), equalTo(SINGLE_POINT_DOC)); + assertThat(converter.convert(SINGLE_POINT)).isEqualTo(SINGLE_POINT_DOC); } @Test // DATAMONGO-1135 public void shouldConvertGeoJsonPolygonCorrectly() { - assertThat(converter.convert(POLYGON), equalTo(POLYGON_DOC)); + assertThat(converter.convert(POLYGON)).isEqualTo(POLYGON_DOC); } @Test // DATAMONGO-1137 public void shouldConvertGeoJsonLineStringCorrectly() { - assertThat(converter.convert(LINE_STRING), equalTo(LINE_STRING_DOC)); + assertThat(converter.convert(LINE_STRING)).isEqualTo(LINE_STRING_DOC); } @Test // DATAMONGO-1137 public void shouldConvertGeoJsonMultiLineStringCorrectly() { - assertThat(converter.convert(MULTI_LINE_STRING), equalTo(MULTI_LINE_STRING_DOC)); + assertThat(converter.convert(MULTI_LINE_STRING)).isEqualTo(MULTI_LINE_STRING_DOC); } @Test // DATAMONGO-1137 public void shouldConvertGeoJsonMultiPointCorrectly() { - assertThat(converter.convert(MULTI_POINT), equalTo(MULTI_POINT_DOC)); + assertThat(converter.convert(MULTI_POINT)).isEqualTo(MULTI_POINT_DOC); } @Test // DATAMONGO-1137 public void shouldConvertGeoJsonMultiPolygonCorrectly() { - assertThat(converter.convert(MULTI_POLYGON), equalTo(MULTI_POLYGON_DOC)); + assertThat(converter.convert(MULTI_POLYGON)).isEqualTo(MULTI_POLYGON_DOC); } @Test // DATAMONGO-1137 public void shouldConvertGeometryCollectionCorrectly() { - 
assertThat(converter.convert(GEOMETRY_COLLECTION), equalTo(GEOMETRY_COLLECTION_DOC)); + assertThat(converter.convert(GEOMETRY_COLLECTION)).isEqualTo(GEOMETRY_COLLECTION_DOC); } @Test // DATAMONGO-1399 public void shouldConvertGeoJsonPolygonWithMultipleRingsCorrectly() { - assertThat(converter.convert(POLYGON_WITH_2_RINGS), equalTo(POLYGON_WITH_2_RINGS_DOC)); + assertThat(converter.convert(POLYGON_WITH_2_RINGS)).isEqualTo(POLYGON_WITH_2_RINGS_DOC); } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingInterceptorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingInterceptorUnitTests.java index 9a1f3937a5..43ea9f3a64 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingInterceptorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingInterceptorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,20 +15,18 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.core.Is.*; -import static org.hamcrest.core.IsEqual.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.dao.DataAccessException; import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.mongodb.LazyLoadingException; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor; +import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.LazyLoadingInterceptor; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import com.mongodb.DBRef; @@ -38,26 +36,23 @@ * * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) -public class LazyLoadingInterceptorUnitTests { - - public @Rule ExpectedException exception = ExpectedException.none(); +@ExtendWith(MockitoExtension.class) +class LazyLoadingInterceptorUnitTests { @Mock MongoPersistentProperty propertyMock; @Mock DBRef dbrefMock; @Mock DbRefResolverCallback callbackMock; @Test // DATAMONGO-1437 - public void shouldPreserveCauseForNonTranslatableExceptions() throws Throwable { + void shouldPreserveCauseForNonTranslatableExceptions() throws Throwable { NullPointerException npe = new NullPointerException("Some 
Exception we did not think about."); when(callbackMock.resolve(propertyMock)).thenThrow(npe); - exception.expect(LazyLoadingException.class); - exception.expectCause(is(equalTo(npe))); - - new LazyLoadingInterceptor(propertyMock, dbrefMock, new NullExceptionTranslator(), callbackMock).intercept(null, - LazyLoadingProxy.class.getMethod("getTarget"), null, null); + assertThatExceptionOfType(LazyLoadingException.class).isThrownBy(() -> { + new LazyLoadingInterceptor(propertyMock, callbackMock, dbrefMock, new NullExceptionTranslator()).intercept(null, + LazyLoadingProxy.class.getMethod("getTarget"), null, null); + }).withCause(npe); } static class NullExceptionTranslator implements PersistenceExceptionTranslator { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java index 1d777cec07..54f82f6921 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,14 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.*; + +import java.util.function.Consumer; import org.springframework.aop.framework.Advised; import org.springframework.cglib.proxy.Factory; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor; +import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.test.util.ReflectionTestUtils; /** @@ -40,12 +42,46 @@ public class LazyLoadingTestUtils { public static void assertProxyIsResolved(Object target, boolean expected) { LazyLoadingInterceptor interceptor = extractInterceptor(target); - assertThat(ReflectionTestUtils.getField(interceptor, "resolved"), is((Object) expected)); - assertThat(ReflectionTestUtils.getField(interceptor, "result"), is(expected ? notNullValue() : nullValue())); + assertThat(ReflectionTestUtils.getField(interceptor, "resolved")).isEqualTo((Object) expected); + + if (expected) { + assertThat(ReflectionTestUtils.getField(interceptor, "result")).isNotNull(); + } else { + assertThat(ReflectionTestUtils.getField(interceptor, "result")).isNull(); + + } + } + + public static void assertProxy(Object proxy, Consumer verification) { + + LazyLoadingInterceptor interceptor = (LazyLoadingInterceptor) (proxy instanceof Advised + ? 
((Advised) proxy).getAdvisors()[0].getAdvice() + : ((Factory) proxy).getCallback(0)); + + verification.accept(new LazyLoadingProxyValueRetriever(interceptor)); } private static LazyLoadingInterceptor extractInterceptor(Object proxy) { return (LazyLoadingInterceptor) (proxy instanceof Advised ? ((Advised) proxy).getAdvisors()[0].getAdvice() : ((Factory) proxy).getCallback(0)); } + + public static class LazyLoadingProxyValueRetriever { + + LazyLoadingInterceptor interceptor; + + public LazyLoadingProxyValueRetriever(LazyLoadingInterceptor interceptor) { + this.interceptor = interceptor; + } + + public boolean isResolved() { + return (boolean) ReflectionTestUtils.getField(interceptor, "resolved"); + } + + @Unwrapped.Nullable + public Object currentValue() { + return ReflectionTestUtils.getField(interceptor, "result"); + } + + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java new file mode 100644 index 0000000000..1ce58eeb47 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java @@ -0,0 +1,483 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; + +/** + * Integration tests for {@link MappingMongoConverter}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +public class MappingMongoConverterTests { + + private static final String DATABASE = "mapping-converter-tests"; + + private static @Client MongoClient client; + + private MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(client, DATABASE); + + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; + private DbRefResolver dbRefResolver; + + @BeforeEach + void setUp() { + + MongoDatabase database = client.getDatabase(DATABASE); + + database.getCollection("samples").deleteMany(new Document()); + database.getCollection("java-time-types").deleteMany(new Document()); + + dbRefResolver = spy(new DefaultDbRefResolver(factory)); + + mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); + mappingContext.setInitialEntitySet(Set.of(WithLazyDBRefAsConstructorArg.class, WithLazyDBRef.class, WithJavaTimeTypes.class)); + mappingContext.setAutoIndexCreation(false); + mappingContext.afterPropertiesSet(); + + converter = new MappingMongoConverter(dbRefResolver, mappingContext); + converter.afterPropertiesSet(); + } + + @Test // DATAMONGO-2004 + void resolvesLazyDBRefOnAccess() { + + client.getDatabase(DATABASE).getCollection("samples") + .insertMany(Arrays.asList(new Document("_id", "sample-1").append("value", "one"), + new Document("_id", "sample-2").append("value", "two"))); + + Document source = new Document("_id", "id-1").append("lazyList", + Arrays.asList(new com.mongodb.DBRef("samples", "sample-1"), new com.mongodb.DBRef("samples", "sample-2"))); + + WithLazyDBRef target = converter.read(WithLazyDBRef.class, source); + + verify(dbRefResolver).resolveDbRef(any(), isNull(), any(), any()); + + assertThat(target.lazyList).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.getLazyList()).contains(new Sample("sample-1", "one"), new 
Sample("sample-2", "two")); + + verify(dbRefResolver).bulkFetch(any()); + } + + @Test // GH-4312 + void conversionShouldAllowReadingAlreadyResolvedReferences() { + + Document sampleSource = new Document("_id", "sample-1").append("value", "one"); + Document source = new Document("_id", "id-1").append("sample", sampleSource); + + WithSingleValueDbRef read = converter.read(WithSingleValueDbRef.class, source); + + assertThat(read.sample).isEqualTo(converter.read(Sample.class, sampleSource)); + verifyNoInteractions(dbRefResolver); + } + + @Test // GH-4312 + void conversionShouldAllowReadingAlreadyResolvedListOfReferences() { + + Document sample1Source = new Document("_id", "sample-1").append("value", "one"); + Document sample2Source = new Document("_id", "sample-2").append("value", "two"); + Document source = new Document("_id", "id-1").append("lazyList", List.of(sample1Source, sample2Source)); + + WithLazyDBRef read = converter.read(WithLazyDBRef.class, source); + + assertThat(read.lazyList).containsExactly(converter.read(Sample.class, sample1Source), + converter.read(Sample.class, sample2Source)); + verifyNoInteractions(dbRefResolver); + } + + @Test // GH-4312 + void conversionShouldAllowReadingAlreadyResolvedMapOfReferences() { + + Document sample1Source = new Document("_id", "sample-1").append("value", "one"); + Document sample2Source = new Document("_id", "sample-2").append("value", "two"); + Document source = new Document("_id", "id-1").append("sampleMap", + new Document("s1", sample1Source).append("s2", sample2Source)); + + WithMapValueDbRef read = converter.read(WithMapValueDbRef.class, source); + + assertThat(read.sampleMap) // + .containsEntry("s1", converter.read(Sample.class, sample1Source)) // + .containsEntry("s2", converter.read(Sample.class, sample2Source)); + verifyNoInteractions(dbRefResolver); + } + + @Test // GH-4312 + void conversionShouldAllowReadingAlreadyResolvedMapOfLazyReferences() { + + Document sample1Source = new Document("_id", 
"sample-1").append("value", "one"); + Document sample2Source = new Document("_id", "sample-2").append("value", "two"); + Document source = new Document("_id", "id-1").append("sampleMapLazy", + new Document("s1", sample1Source).append("s2", sample2Source)); + + WithMapValueDbRef read = converter.read(WithMapValueDbRef.class, source); + + assertThat(read.sampleMapLazy) // + .containsEntry("s1", converter.read(Sample.class, sample1Source)) // + .containsEntry("s2", converter.read(Sample.class, sample2Source)); + verifyNoInteractions(dbRefResolver); + } + + @Test // GH-4312 + void resolvesLazyDBRefMapOnAccess() { + + client.getDatabase(DATABASE).getCollection("samples") + .insertMany(Arrays.asList(new Document("_id", "sample-1").append("value", "one"), + new Document("_id", "sample-2").append("value", "two"))); + + Document source = new Document("_id", "id-1").append("sampleMapLazy", + new Document("s1", new com.mongodb.DBRef("samples", "sample-1")).append("s2", + new com.mongodb.DBRef("samples", "sample-2"))); + + WithMapValueDbRef target = converter.read(WithMapValueDbRef.class, source); + + verify(dbRefResolver).resolveDbRef(any(), isNull(), any(), any()); + + assertThat(target.sampleMapLazy).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.getSampleMapLazy()).containsEntry("s1", new Sample("sample-1", "one")).containsEntry("s2", + new Sample("sample-2", "two")); + + verify(dbRefResolver).bulkFetch(any()); + } + + @Test // GH-4312 + void conversionShouldAllowReadingAlreadyResolvedLazyReferences() { + + Document sampleSource = new Document("_id", "sample-1").append("value", "one"); + Document source = new Document("_id", "id-1").append("sampleLazy", sampleSource); + + WithSingleValueDbRef read = converter.read(WithSingleValueDbRef.class, source); + + assertThat(read.sampleLazy).isEqualTo(converter.read(Sample.class, sampleSource)); + verifyNoInteractions(dbRefResolver); + } + + @Test // DATAMONGO-2004 + void resolvesLazyDBRefConstructorArgOnAccess() { + + 
client.getDatabase(DATABASE).getCollection("samples") + .insertMany(Arrays.asList(new Document("_id", "sample-1").append("value", "one"), + new Document("_id", "sample-2").append("value", "two"))); + + Document source = new Document("_id", "id-1").append("lazyList", + Arrays.asList(new com.mongodb.DBRef("samples", "sample-1"), new com.mongodb.DBRef("samples", "sample-2"))); + + WithLazyDBRefAsConstructorArg target = converter.read(WithLazyDBRefAsConstructorArg.class, source); + + verify(dbRefResolver).resolveDbRef(any(), isNull(), any(), any()); + + assertThat(target.lazyList).isInstanceOf(LazyLoadingProxy.class); + assertThat(target.getLazyList()).contains(new Sample("sample-1", "one"), new Sample("sample-2", "two")); + + verify(dbRefResolver).bulkFetch(any()); + } + + @Test // DATAMONGO-2400 + void readJavaTimeValuesWrittenViaCodec() { + + configureConverterWithNativeJavaTimeCodec(); + MongoCollection mongoCollection = client.getDatabase(DATABASE).getCollection("java-time-types"); + + Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); + WithJavaTimeTypes source = WithJavaTimeTypes.withJavaTimeTypes(now); + source.id = "id-1"; + + mongoCollection.insertOne(source.toDocument()); + + assertThat(converter.read(WithJavaTimeTypes.class, mongoCollection.find(new Document("_id", source.id)).first())) + .isEqualTo(source); + } + + void configureConverterWithNativeJavaTimeCodec() { + + converter = new MappingMongoConverter(dbRefResolver, mappingContext); + converter.setCustomConversions( + MongoCustomConversions.create(MongoConverterConfigurationAdapter::useNativeDriverJavaTimeCodecs)); + converter.afterPropertiesSet(); + } + + public static class WithLazyDBRef { + + @Id String id; + @DBRef(lazy = true) List lazyList; + + List getLazyList() { + return lazyList; + } + } + + public static class WithSingleValueDbRef { + + @Id // + String id; + + @DBRef // + Sample sample; + + @DBRef(lazy = true) // + Sample sampleLazy; + + public String getId() { + return this.id; + 
} + + public Sample getSample() { + return this.sample; + } + + public Sample getSampleLazy() { + return this.sampleLazy; + } + + public void setId(String id) { + this.id = id; + } + + public void setSample(Sample sample) { + this.sample = sample; + } + + public void setSampleLazy(Sample sampleLazy) { + this.sampleLazy = sampleLazy; + } + + public String toString() { + return "MappingMongoConverterTests.WithSingleValueDbRef(id=" + this.getId() + ", sample=" + this.getSample() + + ", sampleLazy=" + this.getSampleLazy() + ")"; + } + } + + public static class WithMapValueDbRef { + + @Id String id; + + @DBRef // + Map sampleMap; + + @DBRef(lazy = true) // + Map sampleMapLazy; + + public String getId() { + return this.id; + } + + public Map getSampleMap() { + return this.sampleMap; + } + + public Map getSampleMapLazy() { + return this.sampleMapLazy; + } + + public void setId(String id) { + this.id = id; + } + + public void setSampleMap(Map sampleMap) { + this.sampleMap = sampleMap; + } + + public void setSampleMapLazy(Map sampleMapLazy) { + this.sampleMapLazy = sampleMapLazy; + } + + public String toString() { + return "MappingMongoConverterTests.WithMapValueDbRef(id=" + this.getId() + ", sampleMap=" + this.getSampleMap() + + ", sampleMapLazy=" + this.getSampleMapLazy() + ")"; + } + } + + public static class WithLazyDBRefAsConstructorArg { + + @Id String id; + @DBRef(lazy = true) List lazyList; + + public WithLazyDBRefAsConstructorArg(String id, List lazyList) { + + this.id = id; + this.lazyList = lazyList; + } + + List getLazyList() { + return lazyList; + } + } + + static class Sample { + + @Id String id; + String value; + + public Sample(String id, String value) { + + this.id = id; + this.value = value; + } + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean 
equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Sample sample = (Sample) o; + return Objects.equals(id, sample.id) && Objects.equals(value, sample.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "MappingMongoConverterTests.Sample(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + static class WithJavaTimeTypes { + + @Id String id; + LocalDate localDate; + LocalTime localTime; + LocalDateTime localDateTime; + + public WithJavaTimeTypes() {} + + static WithJavaTimeTypes withJavaTimeTypes(Instant instant) { + + WithJavaTimeTypes instance = new WithJavaTimeTypes(); + + instance.localDate = LocalDate.from(instant.atZone(ZoneId.of("CET"))); + instance.localTime = LocalTime.from(instant.atZone(ZoneId.of("CET"))); + instance.localDateTime = LocalDateTime.from(instant.atZone(ZoneId.of("CET"))); + + return instance; + } + + Document toDocument() { + return new Document("_id", id).append("localDate", localDate).append("localTime", localTime) + .append("localDateTime", localDateTime); + } + + public String getId() { + return this.id; + } + + public LocalDate getLocalDate() { + return this.localDate; + } + + public LocalTime getLocalTime() { + return this.localTime; + } + + public LocalDateTime getLocalDateTime() { + return this.localDateTime; + } + + public void setId(String id) { + this.id = id; + } + + public void setLocalDate(LocalDate localDate) { + this.localDate = localDate; + } + + public void setLocalTime(LocalTime localTime) { + this.localTime = localTime; + } + + public void setLocalDateTime(LocalDateTime localDateTime) { + this.localDateTime = localDateTime; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithJavaTimeTypes that = (WithJavaTimeTypes) o; + return 
Objects.equals(id, that.id) && Objects.equals(localDate, that.localDate) + && Objects.equals(localTime, that.localTime) && Objects.equals(localDateTime, that.localDateTime); + } + + @Override + public int hashCode() { + return Objects.hash(id, localDate, localTime, localDateTime); + } + + public String toString() { + return "MappingMongoConverterTests.WithJavaTimeTypes(id=" + this.getId() + ", localDate=" + this.getLocalDate() + + ", localTime=" + this.getLocalTime() + ", localDateTime=" + this.getLocalDateTime() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index 7715450873..cf6d69c6c3 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,58 +16,64 @@ package org.springframework.data.mongodb.core.convert; import static java.time.ZoneId.*; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; import static org.mockito.Mockito.*; import static org.springframework.data.mongodb.core.DocumentTestUtils.*; - -import lombok.RequiredArgsConstructor; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.math.BigDecimal; import java.math.BigInteger; import java.net.URL; +import java.nio.ByteBuffer; +import java.time.LocalDate; import java.time.LocalDateTime; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.EnumMap; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.SortedMap; -import java.util.TreeMap; - +import java.time.temporal.ChronoUnit; +import java.util.*; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Stream; + +import org.assertj.core.api.Assertions; +import org.assertj.core.data.Percentage; +import org.bson.BsonDouble; +import org.bson.BsonUndefined; +import org.bson.types.Binary; +import org.bson.types.Code; +import org.bson.types.Decimal128; import org.bson.types.ObjectId; -import org.hamcrest.Matcher; -import org.hamcrest.Matchers; -import org.joda.time.LocalDate; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import 
org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; import org.mockito.Mock; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.aop.framework.ProxyFactory; import org.springframework.beans.ConversionNotSupportedException; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.context.ApplicationContext; +import org.springframework.context.support.StaticApplicationContext; +import org.springframework.core.convert.ConverterNotFoundException; import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.PersistenceConstructor; +import org.springframework.data.annotation.Transient; import org.springframework.data.annotation.TypeAlias; +import org.springframework.data.convert.ConverterBuilder; import org.springframework.data.convert.CustomConversions; +import org.springframework.data.convert.PropertyValueConverter; +import org.springframework.data.convert.PropertyValueConverterFactory; import org.springframework.data.convert.ReadingConverter; +import org.springframework.data.convert.ValueConverter; import org.springframework.data.convert.WritingConverter; +import org.springframework.data.domain.Vector; import org.springframework.data.geo.Box; import org.springframework.data.geo.Circle; 
import org.springframework.data.geo.Distance; @@ -76,6 +82,7 @@ import org.springframework.data.geo.Polygon; import org.springframework.data.geo.Shape; import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.callback.EntityCallbacks; import org.springframework.data.mapping.model.MappingInstantiationException; import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.mongodb.core.convert.DocumentAccessorUnitTests.NestedType; @@ -84,14 +91,27 @@ import org.springframework.data.mongodb.core.geo.Sphere; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.FieldName.Type; +import org.springframework.data.mongodb.core.mapping.FieldType; +import org.springframework.data.mongodb.core.mapping.MongoField; +import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.PersonPojoStringId; import org.springframework.data.mongodb.core.mapping.TextScore; +import org.springframework.data.mongodb.core.mapping.Unwrapped; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.EntityProjectionIntrospector; import org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.NonNull; +import org.springframework.lang.Nullable; import org.springframework.test.util.ReflectionTestUtils; import com.mongodb.BasicDBList; +import com.mongodb.BasicDBObject; +import com.mongodb.DBObject; import com.mongodb.DBRef; /** @@ -101,21 +121,23 @@ * @author Patrik Wasik * @author Christoph 
Strobl * @author Mark Paluch + * @author Roman Puchkovskiy + * @author Heesu Jung + * @author Julia Lee */ -@RunWith(MockitoJUnitRunner.class) -public class MappingMongoConverterUnitTests { +@ExtendWith(MockitoExtension.class) +class MappingMongoConverterUnitTests { - MappingMongoConverter converter; - MongoMappingContext mappingContext; + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; @Mock ApplicationContext context; @Mock DbRefResolver resolver; - public @Rule ExpectedException exception = ExpectedException.none(); - - @Before - public void setUp() { + @BeforeEach + void beforeEach() { - MongoCustomConversions conversions = new MongoCustomConversions(); + MongoCustomConversions conversions = new MongoCustomConversions( + Arrays.asList(new ByteBufferToDoubleHolderConverter())); mappingContext = new MongoMappingContext(); mappingContext.setApplicationContext(context); @@ -130,7 +152,7 @@ public void setUp() { } @Test - public void convertsAddressCorrectly() { + void convertsAddressCorrectly() { Address address = new Address(); address.city = "New York"; @@ -140,93 +162,65 @@ public void convertsAddressCorrectly() { converter.write(address, document); - assertThat(document.get("city").toString(), is("New York")); - assertThat(document.get("street").toString(), is("Broadway")); - } - - @Test - public void convertsJodaTimeTypesCorrectly() { - - converter = new MappingMongoConverter(resolver, mappingContext); - converter.afterPropertiesSet(); - - Person person = new Person(); - person.birthDate = new LocalDate(); - - org.bson.Document document = new org.bson.Document(); - converter.write(person, document); - - assertThat(document.get("birthDate"), is(instanceOf(Date.class))); - - Person result = converter.read(Person.class, document); - assertThat(result.birthDate, is(notNullValue())); - } - - @Test - public void convertsCustomTypeOnConvertToMongoType() { - - converter = new MappingMongoConverter(resolver, mappingContext); - 
converter.afterPropertiesSet(); - - LocalDate date = new LocalDate(); - converter.convertToMongoType(date); + assertThat(document.get("city").toString()).isEqualTo("New York"); + assertThat(document.get("s").toString()).isEqualTo("Broadway"); } @Test // DATAMONGO-130 - public void writesMapTypeCorrectly() { + void writesMapTypeCorrectly() { Map map = Collections.singletonMap(Locale.US, "Foo"); org.bson.Document document = new org.bson.Document(); converter.write(map, document); - assertThat(document.get(Locale.US.toString()).toString(), is("Foo")); + assertThat(document.get(Locale.US.toString()).toString()).isEqualTo("Foo"); } @Test // DATAMONGO-130 - public void readsMapWithCustomKeyTypeCorrectly() { + void readsMapWithCustomKeyTypeCorrectly() { org.bson.Document mapObject = new org.bson.Document(Locale.US.toString(), "Value"); org.bson.Document document = new org.bson.Document("map", mapObject); ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, document); - assertThat(result.map.get(Locale.US), is("Value")); + assertThat(result.map.get(Locale.US)).isEqualTo("Value"); } @Test // DATAMONGO-128 - public void usesDocumentsStoredTypeIfSubtypeOfRequest() { + void usesDocumentsStoredTypeIfSubtypeOfRequest() { org.bson.Document document = new org.bson.Document(); - document.put("birthDate", new LocalDate()); + document.put("birthDate", new Date()); document.put(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Person.class.getName()); - assertThat(converter.read(Contact.class, document), is(instanceOf(Person.class))); + assertThat(converter.read(Contact.class, document)).isInstanceOf(Person.class); } @Test // DATAMONGO-128 - public void ignoresDocumentsStoredTypeIfCompletelyDifferentTypeRequested() { + void ignoresDocumentsStoredTypeIfCompletelyDifferentTypeRequested() { org.bson.Document document = new org.bson.Document(); - document.put("birthDate", new LocalDate()); + document.put("birthDate", new Date()); 
document.put(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Person.class.getName()); - assertThat(converter.read(BirthDateContainer.class, document), is(instanceOf(BirthDateContainer.class))); + assertThat(converter.read(BirthDateContainer.class, document)).isInstanceOf(BirthDateContainer.class); } @Test - public void writesTypeDiscriminatorIntoRootObject() { + void writesTypeDiscriminatorIntoRootObject() { Person person = new Person(); org.bson.Document result = new org.bson.Document(); converter.write(person, result); - assertThat(result.containsKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(true)); - assertThat(result.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY).toString(), is(Person.class.getName())); + assertThat(result.containsKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isTrue(); + assertThat(result.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY).toString()).isEqualTo(Person.class.getName()); } @Test // DATAMONGO-136 - public void writesEnumsCorrectly() { + void writesEnumsCorrectly() { ClassWithEnumProperty value = new ClassWithEnumProperty(); value.sampleEnum = SampleEnum.FIRST; @@ -234,12 +228,12 @@ public void writesEnumsCorrectly() { org.bson.Document result = new org.bson.Document(); converter.write(value, result); - assertThat(result.get("sampleEnum"), is(instanceOf(String.class))); - assertThat(result.get("sampleEnum").toString(), is("FIRST")); + assertThat(result.get("sampleEnum")).isInstanceOf(String.class); + assertThat(result.get("sampleEnum").toString()).isEqualTo("FIRST"); } @Test // DATAMONGO-209 - public void writesEnumCollectionCorrectly() { + void writesEnumCollectionCorrectly() { ClassWithEnumProperty value = new ClassWithEnumProperty(); value.enums = Arrays.asList(SampleEnum.FIRST); @@ -247,23 +241,23 @@ public void writesEnumCollectionCorrectly() { org.bson.Document result = new org.bson.Document(); converter.write(value, result); - assertThat(result.get("enums"), is(instanceOf(List.class))); + 
assertThat(result.get("enums")).isInstanceOf(List.class); List enums = (List) result.get("enums"); - assertThat(enums.size(), is(1)); - assertThat(enums.get(0), is("FIRST")); + assertThat(enums.size()).isEqualTo(1); + assertThat(enums.get(0)).isEqualTo("FIRST"); } @Test // DATAMONGO-136 - public void readsEnumsCorrectly() { + void readsEnumsCorrectly() { org.bson.Document document = new org.bson.Document("sampleEnum", "FIRST"); ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class, document); - assertThat(result.sampleEnum, is(SampleEnum.FIRST)); + assertThat(result.sampleEnum).isEqualTo(SampleEnum.FIRST); } @Test // DATAMONGO-209 - public void readsEnumCollectionsCorrectly() { + void readsEnumCollectionsCorrectly() { BasicDBList enums = new BasicDBList(); enums.add("FIRST"); @@ -271,13 +265,13 @@ public void readsEnumCollectionsCorrectly() { ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class, document); - assertThat(result.enums, is(instanceOf(List.class))); - assertThat(result.enums.size(), is(1)); - assertThat(result.enums, hasItem(SampleEnum.FIRST)); + assertThat(result.enums).isInstanceOf(List.class); + assertThat(result.enums.size()).isEqualTo(1); + assertThat(result.enums).contains(SampleEnum.FIRST); } @Test // DATAMONGO-144 - public void considersFieldNameWhenWriting() { + void considersFieldNameWhenWriting() { Person person = new Person(); person.firstname = "Oliver"; @@ -285,21 +279,21 @@ public void considersFieldNameWhenWriting() { org.bson.Document result = new org.bson.Document(); converter.write(person, result); - assertThat(result.containsKey("foo"), is(true)); - assertThat(result.containsKey("firstname"), is(false)); + assertThat(result.containsKey("foo")).isTrue(); + assertThat(result.containsKey("firstname")).isFalse(); } @Test // DATAMONGO-144 - public void considersFieldNameWhenReading() { + void considersFieldNameWhenReading() { org.bson.Document document = new org.bson.Document("foo", "Oliver"); 
Person result = converter.read(Person.class, document); - assertThat(result.firstname, is("Oliver")); + assertThat(result.firstname).isEqualTo("Oliver"); } @Test - public void resolvesNestedComplexTypeForConstructorCorrectly() { + void resolvesNestedComplexTypeForConstructorCorrectly() { org.bson.Document address = new org.bson.Document("street", "110 Southwark Street"); address.put("city", "London"); @@ -311,11 +305,11 @@ public void resolvesNestedComplexTypeForConstructorCorrectly() { person.put("addresses", addresses); Person result = converter.read(Person.class, person); - assertThat(result.addresses, is(notNullValue())); + assertThat(result.addresses).isNotNull(); } @Test // DATAMONGO-145 - public void writesCollectionWithInterfaceCorrectly() { + void writesCollectionWithInterfaceCorrectly() { Person person = new Person(); person.firstname = "Oliver"; @@ -327,15 +321,15 @@ public void writesCollectionWithInterfaceCorrectly() { converter.write(wrapper, document); Object result = document.get("contacts"); - assertThat(result, is(instanceOf(List.class))); + assertThat(result).isInstanceOf(List.class); List contacts = (List) result; org.bson.Document personDocument = (org.bson.Document) contacts.get(0); - assertThat(personDocument.get("foo").toString(), is("Oliver")); - assertThat((String) personDocument.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(Person.class.getName())); + assertThat(personDocument.get("foo").toString()).isEqualTo("Oliver"); + assertThat((String) personDocument.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isEqualTo(Person.class.getName()); } @Test // DATAMONGO-145 - public void readsCollectionWithInterfaceCorrectly() { + void readsCollectionWithInterfaceCorrectly() { org.bson.Document person = new org.bson.Document(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, Person.class.getName()); person.put("foo", "Oliver"); @@ -344,15 +338,15 @@ public void readsCollectionWithInterfaceCorrectly() { contacts.add(person); CollectionWrapper result = 
converter.read(CollectionWrapper.class, new org.bson.Document("contacts", contacts)); - assertThat(result.contacts, is(notNullValue())); - assertThat(result.contacts.size(), is(1)); + assertThat(result.contacts).isNotNull(); + assertThat(result.contacts.size()).isEqualTo(1); Contact contact = result.contacts.get(0); - assertThat(contact, is(instanceOf(Person.class))); - assertThat(((Person) contact).firstname, is("Oliver")); + assertThat(contact).isInstanceOf(Person.class); + assertThat(((Person) contact).firstname).isEqualTo("Oliver"); } @Test - public void convertsLocalesOutOfTheBox() { + void convertsLocalesOutOfTheBox() { LocaleWrapper wrapper = new LocaleWrapper(); wrapper.locale = Locale.US; @@ -360,15 +354,15 @@ public void convertsLocalesOutOfTheBox() { converter.write(wrapper, document); Object localeField = document.get("locale"); - assertThat(localeField, is(instanceOf(String.class))); - assertThat(localeField, is("en_US")); + assertThat(localeField).isInstanceOf(String.class); + assertThat(localeField).isEqualTo("en_US"); LocaleWrapper read = converter.read(LocaleWrapper.class, document); - assertThat(read.locale, is(Locale.US)); + assertThat(read.locale).isEqualTo(Locale.US); } @Test // DATAMONGO-161 - public void readsNestedMapsCorrectly() { + void readsNestedMapsCorrectly() { Map secondLevel = new HashMap(); secondLevel.put("key1", "value1"); @@ -387,12 +381,12 @@ public void readsNestedMapsCorrectly() { ClassWithNestedMaps result = converter.read(ClassWithNestedMaps.class, document); Map>> nestedMap = result.nestedMaps; - assertThat(nestedMap, is(notNullValue())); - assertThat(nestedMap.get("afield"), is(firstLevel)); + assertThat(nestedMap).isNotNull(); + assertThat(nestedMap.get("afield")).isEqualTo(firstLevel); } @Test // DATACMNS-42, DATAMONGO-171 - public void writesClassWithBigDecimal() { + void writesClassWithBigDecimal() { BigDecimalContainer container = new BigDecimalContainer(); container.value = BigDecimal.valueOf(2.5d); @@ -401,13 
+395,13 @@ public void writesClassWithBigDecimal() { org.bson.Document document = new org.bson.Document(); converter.write(container, document); - assertThat(document.get("value"), is(instanceOf(String.class))); - assertThat((String) document.get("value"), is("2.5")); - assertThat(((org.bson.Document) document.get("map")).get("foo"), is(instanceOf(String.class))); + assertThat(document.get("value")).isInstanceOf(String.class); + assertThat((String) document.get("value")).isEqualTo("2.5"); + assertThat(((org.bson.Document) document.get("map")).get("foo")).isInstanceOf(String.class); } @Test // DATACMNS-42, DATAMONGO-171 - public void readsClassWithBigDecimal() { + void readsClassWithBigDecimal() { org.bson.Document document = new org.bson.Document("value", "2.5"); document.put("map", new org.bson.Document("foo", "2.5")); @@ -417,13 +411,13 @@ public void readsClassWithBigDecimal() { document.put("collection", list); BigDecimalContainer result = converter.read(BigDecimalContainer.class, document); - assertThat(result.value, is(BigDecimal.valueOf(2.5d))); - assertThat(result.map.get("foo"), is(BigDecimal.valueOf(2.5d))); - assertThat(result.collection.get(0), is(BigDecimal.valueOf(2.5d))); + assertThat(result.value).isEqualTo(BigDecimal.valueOf(2.5d)); + assertThat(result.map.get("foo")).isEqualTo(BigDecimal.valueOf(2.5d)); + assertThat(result.collection.get(0)).isEqualTo(BigDecimal.valueOf(2.5d)); } @Test - public void writesNestedCollectionsCorrectly() { + void writesNestedCollectionsCorrectly() { CollectionWrapper wrapper = new CollectionWrapper(); wrapper.strings = Arrays.asList(Arrays.asList("Foo")); @@ -432,14 +426,14 @@ public void writesNestedCollectionsCorrectly() { converter.write(wrapper, document); Object outerStrings = document.get("strings"); - assertThat(outerStrings, is(instanceOf(List.class))); + assertThat(outerStrings).isInstanceOf(List.class); List typedOuterString = (List) outerStrings; - assertThat(typedOuterString.size(), is(1)); + 
assertThat(typedOuterString.size()).isEqualTo(1); } @Test // DATAMONGO-192 - public void readsEmptySetsCorrectly() { + void readsEmptySetsCorrectly() { Person person = new Person(); person.addresses = Collections.emptySet(); @@ -450,39 +444,39 @@ public void readsEmptySetsCorrectly() { } @Test - public void convertsObjectIdStringsToObjectIdCorrectly() { + void convertsObjectIdStringsToObjectIdCorrectly() { PersonPojoStringId p1 = new PersonPojoStringId("1234567890", "Text-1"); org.bson.Document doc1 = new org.bson.Document(); converter.write(p1, doc1); - assertThat(doc1.get("_id"), is(instanceOf(String.class))); + assertThat(doc1.get("_id")).isInstanceOf(String.class); PersonPojoStringId p2 = new PersonPojoStringId(new ObjectId().toString(), "Text-1"); org.bson.Document doc2 = new org.bson.Document(); converter.write(p2, doc2); - assertThat(doc2.get("_id"), is(instanceOf(ObjectId.class))); + assertThat(doc2.get("_id")).isInstanceOf(ObjectId.class); } @Test // DATAMONGO-207 - public void convertsCustomEmptyMapCorrectly() { + void convertsCustomEmptyMapCorrectly() { org.bson.Document map = new org.bson.Document(); org.bson.Document wrapper = new org.bson.Document("map", map); ClassWithSortedMap result = converter.read(ClassWithSortedMap.class, wrapper); - assertThat(result, is(instanceOf(ClassWithSortedMap.class))); - assertThat(result.map, is(instanceOf(SortedMap.class))); + assertThat(result).isInstanceOf(ClassWithSortedMap.class); + assertThat(result.map).isInstanceOf(SortedMap.class); } @Test // DATAMONGO-211 - public void maybeConvertHandlesNullValuesCorrectly() { - assertThat(converter.convertToMongoType(null), is(nullValue())); + void maybeConvertHandlesNullValuesCorrectly() { + assertThat(converter.convertToMongoType(null)).isNull(); } @Test // DATAMONGO-1509 - public void writesGenericTypeCorrectly() { + void writesGenericTypeCorrectly() { GenericType
          type = new GenericType
          (); type.content = new Address(); @@ -493,21 +487,21 @@ public void writesGenericTypeCorrectly() { org.bson.Document content = (org.bson.Document) result.get("content"); assertTypeHint(content, Address.class); - assertThat(content.get("city"), is(notNullValue())); + assertThat(content.get("city")).isNotNull(); } @Test - public void readsGenericTypeCorrectly() { + void readsGenericTypeCorrectly() { org.bson.Document address = new org.bson.Document("_class", Address.class.getName()); address.put("city", "London"); GenericType result = converter.read(GenericType.class, new org.bson.Document("content", address)); - assertThat(result.content, is(instanceOf(Address.class))); + assertThat(result.content).isInstanceOf(Address.class); } @Test // DATAMONGO-228 - public void writesNullValuesForMaps() { + void writesNullValuesForMaps() { ClassWithMapProperty foo = new ClassWithMapProperty(); foo.map = Collections.singletonMap(Locale.US, null); @@ -516,12 +510,12 @@ public void writesNullValuesForMaps() { converter.write(foo, result); Object map = result.get("map"); - assertThat(map, is(instanceOf(org.bson.Document.class))); - assertThat(((org.bson.Document) map).keySet(), hasItem("en_US")); + assertThat(map).isInstanceOf(org.bson.Document.class); + assertThat(((org.bson.Document) map).keySet()).contains("en_US"); } @Test - public void writesBigIntegerIdCorrectly() { + void writesBigIntegerIdCorrectly() { ClassWithBigIntegerId foo = new ClassWithBigIntegerId(); foo.id = BigInteger.valueOf(23L); @@ -529,17 +523,18 @@ public void writesBigIntegerIdCorrectly() { org.bson.Document result = new org.bson.Document(); converter.write(foo, result); - assertThat(result.get("_id"), is(instanceOf(String.class))); + assertThat(result.get("_id")).isInstanceOf(String.class); } - public void convertsObjectsIfNecessary() { + @Test + void convertsObjectsIfNecessary() { ObjectId id = new ObjectId(); - assertThat(converter.convertToMongoType(id), is(id)); + 
assertThat(converter.convertToMongoType(id)).isEqualTo(id); } @Test // DATAMONGO-235 - public void writesMapOfListsCorrectly() { + void writesMapOfListsCorrectly() { ClassWithMapProperty input = new ClassWithMapProperty(); input.mapOfLists = Collections.singletonMap("Foo", Arrays.asList("Bar")); @@ -548,30 +543,30 @@ public void writesMapOfListsCorrectly() { converter.write(input, result); Object field = result.get("mapOfLists"); - assertThat(field, is(instanceOf(org.bson.Document.class))); + assertThat(field).isInstanceOf(org.bson.Document.class); org.bson.Document map = (org.bson.Document) field; Object foo = map.get("Foo"); - assertThat(foo, is(instanceOf(List.class))); + assertThat(foo).isInstanceOf(List.class); List value = (List) foo; - assertThat(value.size(), is(1)); - assertThat(value.get(0), is("Bar")); + assertThat(value.size()).isEqualTo(1); + assertThat(value.get(0)).isEqualTo("Bar"); } @Test // DATAMONGO-235 - public void readsMapListValuesCorrectly() { + void readsMapListValuesCorrectly() { BasicDBList list = new BasicDBList(); list.add("Bar"); org.bson.Document source = new org.bson.Document("mapOfLists", new org.bson.Document("Foo", list)); ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, source); - assertThat(result.mapOfLists, is(not(nullValue()))); + assertThat(result.mapOfLists).isNotNull(); } @Test // DATAMONGO-235 - public void writesMapsOfObjectsCorrectly() { + void writesMapsOfObjectsCorrectly() { ClassWithMapProperty input = new ClassWithMapProperty(); input.mapOfObjects = new HashMap(); @@ -581,30 +576,30 @@ public void writesMapsOfObjectsCorrectly() { converter.write(input, result); Object field = result.get("mapOfObjects"); - assertThat(field, is(instanceOf(org.bson.Document.class))); + assertThat(field).isInstanceOf(org.bson.Document.class); org.bson.Document map = (org.bson.Document) field; Object foo = map.get("Foo"); - assertThat(foo, is(instanceOf(BasicDBList.class))); + 
assertThat(foo).isInstanceOf(List.class); - BasicDBList value = (BasicDBList) foo; - assertThat(value.size(), is(1)); - assertThat(value.get(0), is("Bar")); + List value = (List) foo; + assertThat(value.size()).isEqualTo(1); + assertThat(value.get(0)).isEqualTo("Bar"); } @Test // DATAMONGO-235 - public void readsMapOfObjectsListValuesCorrectly() { + void readsMapOfObjectsListValuesCorrectly() { BasicDBList list = new BasicDBList(); list.add("Bar"); org.bson.Document source = new org.bson.Document("mapOfObjects", new org.bson.Document("Foo", list)); ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, source); - assertThat(result.mapOfObjects, is(not(nullValue()))); + assertThat(result.mapOfObjects).isNotNull(); } @Test // DATAMONGO-245 - public void readsMapListNestedValuesCorrectly() { + void readsMapListNestedValuesCorrectly() { BasicDBList list = new BasicDBList(); list.add(new org.bson.Document("Hello", "World")); @@ -612,12 +607,12 @@ public void readsMapListNestedValuesCorrectly() { ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, source); Object firstObjectInFoo = ((List) result.mapOfObjects.get("Foo")).get(0); - assertThat(firstObjectInFoo, is(instanceOf(Map.class))); - assertThat(((Map) firstObjectInFoo).get("Hello"), is(equalTo("World"))); + assertThat(firstObjectInFoo).isInstanceOf(Map.class); + assertThat(((Map) firstObjectInFoo).get("Hello")).isEqualTo("World"); } @Test // DATAMONGO-245 - public void readsMapDoublyNestedValuesCorrectly() { + void readsMapDoublyNestedValuesCorrectly() { org.bson.Document nested = new org.bson.Document(); org.bson.Document doubly = new org.bson.Document(); @@ -627,14 +622,14 @@ public void readsMapDoublyNestedValuesCorrectly() { ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, source); Object foo = result.mapOfObjects.get("Foo"); - assertThat(foo, is(instanceOf(Map.class))); + assertThat(foo).isInstanceOf(Map.class); Object doublyNestedObject = ((Map) 
foo).get("nested"); - assertThat(doublyNestedObject, is(instanceOf(Map.class))); - assertThat(((Map) doublyNestedObject).get("Hello"), is(equalTo("World"))); + assertThat(doublyNestedObject).isInstanceOf(Map.class); + assertThat(((Map) doublyNestedObject).get("Hello")).isEqualTo("World"); } @Test // DATAMONGO-245 - public void readsMapListDoublyNestedValuesCorrectly() { + void readsMapListDoublyNestedValuesCorrectly() { BasicDBList list = new BasicDBList(); org.bson.Document nested = new org.bson.Document(); @@ -646,14 +641,14 @@ public void readsMapListDoublyNestedValuesCorrectly() { ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, source); Object firstObjectInFoo = ((List) result.mapOfObjects.get("Foo")).get(0); - assertThat(firstObjectInFoo, is(instanceOf(Map.class))); + assertThat(firstObjectInFoo).isInstanceOf(Map.class); Object doublyNestedObject = ((Map) firstObjectInFoo).get("nested"); - assertThat(doublyNestedObject, is(instanceOf(Map.class))); - assertThat(((Map) doublyNestedObject).get("Hello"), is(equalTo("World"))); + assertThat(doublyNestedObject).isInstanceOf(Map.class); + assertThat(((Map) doublyNestedObject).get("Hello")).isEqualTo("World"); } @Test // DATAMONGO-259 - public void writesListOfMapsCorrectly() { + void writesListOfMapsCorrectly() { Map map = Collections.singletonMap("Foo", Locale.ENGLISH); @@ -665,16 +660,16 @@ public void writesListOfMapsCorrectly() { converter.write(wrapper, result); List list = (List) result.get("listOfMaps"); - assertThat(list, is(notNullValue())); - assertThat(list.size(), is(1)); + assertThat(list).isNotNull(); + assertThat(list.size()).isEqualTo(1); org.bson.Document document = (org.bson.Document) list.get(0); - assertThat(document.containsKey("Foo"), is(true)); - assertThat((String) document.get("Foo"), is(Locale.ENGLISH.toString())); + assertThat(document.containsKey("Foo")).isTrue(); + assertThat((String) document.get("Foo")).isEqualTo(Locale.ENGLISH.toString()); } @Test // 
DATAMONGO-259 - public void readsListOfMapsCorrectly() { + void readsListOfMapsCorrectly() { org.bson.Document map = new org.bson.Document("Foo", "en"); @@ -685,32 +680,170 @@ public void readsListOfMapsCorrectly() { CollectionWrapper wrapper = converter.read(CollectionWrapper.class, wrapperSource); - assertThat(wrapper.listOfMaps, is(notNullValue())); - assertThat(wrapper.listOfMaps.size(), is(1)); - assertThat(wrapper.listOfMaps.get(0), is(notNullValue())); - assertThat(wrapper.listOfMaps.get(0).get("Foo"), is(Locale.ENGLISH)); + assertThat(wrapper.listOfMaps).isNotNull(); + assertThat(wrapper.listOfMaps.size()).isEqualTo(1); + assertThat(wrapper.listOfMaps.get(0)).isNotNull(); + assertThat(wrapper.listOfMaps.get(0).get("Foo")).isEqualTo(Locale.ENGLISH); + } + + @ParameterizedTest(name = "{4}") // GH-4571 + @MethodSource("listMapSetReadingSource") + void initializesListMapSetPropertiesIfRequiredOnRead(org.bson.Document source, Class type, + Function valueFunction, Object expectedValue, String displayName) { + + T target = converter.read(type, source); + assertThat(target).extracting(valueFunction).isEqualTo(expectedValue); + } + + private static Stream listMapSetReadingSource() { + + Stream initialList = fixtureFor("contacts", CollectionWrapper.class, CollectionWrapper::getContacts, + builder -> { + + builder.onValue(Collections.emptyList()).expect(Collections.emptyList()); + builder.onNull().expect(null); + builder.onEmpty().expect(null); + }); + + Stream initializedList = fixtureFor("autoInitList", CollectionWrapper.class, + CollectionWrapper::getAutoInitList, builder -> { + + builder.onValue(Collections.emptyList()).expect(Collections.emptyList()); + builder.onNull().expect(null); + builder.onEmpty().expect(Collections.singletonList("spring")); + }); + + Stream initialSet = fixtureFor("contactsSet", CollectionWrapper.class, CollectionWrapper::getContactsSet, + builder -> { + + builder.onValue(Collections.emptyList()).expect(Collections.emptySet()); + 
builder.onNull().expect(null); + builder.onEmpty().expect(null); + }); + + Stream initialMap = fixtureFor("map", ClassWithMapProperty.class, ClassWithMapProperty::getMap, + builder -> { + + builder.onValue(new org.bson.Document()).expect(Collections.emptyMap()); + builder.onNull().expect(null); + builder.onEmpty().expect(null); + }); + + Stream initializedMap = fixtureFor("autoInitMap", ClassWithMapProperty.class, + ClassWithMapProperty::getAutoInitMap, builder -> { + + builder.onValue(new org.bson.Document()).expect(Collections.emptyMap()); + builder.onNull().expect(null); + builder.onEmpty().expect(Collections.singletonMap("spring", "data")); + }); + + return Stream.of(initialList, initializedList, initialSet, initialMap, initializedMap).flatMap(Function.identity()); + } + + static Stream fixtureFor(String field, Class type, Function valueFunction, + Consumer builderConsumer) { + + FixtureBuilder builder = new FixtureBuilder(field, type, valueFunction); + + builderConsumer.accept(builder); + + return builder.fixtures.stream(); + } + + /** + * Builder for fixtures. + */ + static class FixtureBuilder { + + private final String field; + private final Class typeUnderTest; + private final Function valueMappingFunction; + final List fixtures = new ArrayList<>(); + + FixtureBuilder(String field, Class typeUnderTest, Function valueMappingFunction) { + this.field = field; + this.typeUnderTest = typeUnderTest; + this.valueMappingFunction = valueMappingFunction; + } + + /** + * If the document value is {@code null}. + */ + FixtureStep onNull() { + return new FixtureStep(false, null); + } + + /** + * If the document value is {@code value}. + */ + FixtureStep onValue(@Nullable Object value) { + return new FixtureStep(false, value); + } + + /** + * If the document does not contain the field. 
+ */ + FixtureStep onEmpty() { + return new FixtureStep(true, null); + } + + class FixtureStep { + + private final boolean empty; + private final @Nullable Object documentValue; + + public FixtureStep(boolean empty, @Nullable Object documentValue) { + this.empty = empty; + this.documentValue = documentValue; + } + + /** + * Then expect {@code expectedValue}. + * + * @param expectedValue + */ + void expect(@Nullable Object expectedValue) { + + Arguments fixture; + if (empty) { + fixture = Arguments.of(new org.bson.Document(), typeUnderTest, valueMappingFunction, expectedValue, + "Empty document expecting '%s' at type %s".formatted(expectedValue, typeUnderTest.getSimpleName())); + } else { + + String valueDescription = (documentValue == null ? "null" + : (documentValue + " (" + documentValue.getClass().getSimpleName()) + ")"); + + fixture = Arguments.of(new org.bson.Document(field, documentValue), typeUnderTest, valueMappingFunction, + expectedValue, "Field '%s' with value %s expecting '%s' at type %s".formatted(field, valueDescription, + expectedValue, typeUnderTest.getSimpleName())); + } + + fixtures.add(fixture); + } + } + } @Test // DATAMONGO-259 - public void writesPlainMapOfCollectionsCorrectly() { + void writesPlainMapOfCollectionsCorrectly() { Map> map = Collections.singletonMap("Foo", Arrays.asList(Locale.US)); org.bson.Document result = new org.bson.Document(); converter.write(map, result); - assertThat(result.containsKey("Foo"), is(true)); - assertThat(result.get("Foo"), is(notNullValue())); - assertThat(result.get("Foo"), is(instanceOf(BasicDBList.class))); + assertThat(result.containsKey("Foo")).isTrue(); + assertThat(result.get("Foo")).isNotNull(); + assertThat(result.get("Foo")).isInstanceOf(List.class); - BasicDBList list = (BasicDBList) result.get("Foo"); + List list = (List) result.get("Foo"); - assertThat(list.size(), is(1)); - assertThat(list.get(0), is(Locale.US.toString())); + assertThat(list.size()).isEqualTo(1); + 
assertThat(list.get(0)).isEqualTo(Locale.US.toString()); } @Test // DATAMONGO-285 @SuppressWarnings({ "unchecked", "rawtypes" }) - public void testSaveMapWithACollectionAsValue() { + void testSaveMapWithACollectionAsValue() { Map keyValues = new HashMap(); keyValues.put("string", "hello"); @@ -724,18 +857,18 @@ public void testSaveMapWithACollectionAsValue() { Map keyValuesFromMongo = converter.read(Map.class, document); - assertEquals(keyValues.size(), keyValuesFromMongo.size()); - assertEquals(keyValues.get("string"), keyValuesFromMongo.get("string")); - assertTrue(List.class.isAssignableFrom(keyValuesFromMongo.get("list").getClass())); + assertThat(keyValuesFromMongo.size()).isEqualTo(keyValues.size()); + assertThat(keyValuesFromMongo.get("string")).isEqualTo(keyValues.get("string")); + assertThat(List.class.isAssignableFrom(keyValuesFromMongo.get("list").getClass())).isTrue(); List listFromMongo = (List) keyValuesFromMongo.get("list"); - assertEquals(list.size(), listFromMongo.size()); - assertEquals(list.get(0), listFromMongo.get(0)); - assertEquals(list.get(1), listFromMongo.get(1)); + assertThat(listFromMongo.size()).isEqualTo(list.size()); + assertThat(listFromMongo.get(0)).isEqualTo(list.get(0)); + assertThat(listFromMongo.get(1)).isEqualTo(list.get(1)); } @Test // DATAMONGO-309 @SuppressWarnings({ "unchecked" }) - public void writesArraysAsMapValuesCorrectly() { + void writesArraysAsMapValuesCorrectly() { ClassWithMapProperty wrapper = new ClassWithMapProperty(); wrapper.mapOfObjects = new HashMap(); @@ -745,19 +878,19 @@ public void writesArraysAsMapValuesCorrectly() { converter.write(wrapper, result); Object mapObject = result.get("mapOfObjects"); - assertThat(mapObject, is(instanceOf(org.bson.Document.class))); + assertThat(mapObject).isInstanceOf(org.bson.Document.class); org.bson.Document map = (org.bson.Document) mapObject; Object valueObject = map.get("foo"); - assertThat(valueObject, is(instanceOf(BasicDBList.class))); + 
assertThat(valueObject).isInstanceOf(List.class); List list = (List) valueObject; - assertThat(list.size(), is(1)); - assertThat(list, hasItem((Object) "bar")); + assertThat(list.size()).isEqualTo(1); + assertThat(list).contains((Object) "bar"); } @Test // DATAMONGO-324 - public void writesDocumentCorrectly() { + void writesDocumentCorrectly() { org.bson.Document document = new org.bson.Document(); document.put("foo", "bar"); @@ -767,22 +900,22 @@ public void writesDocumentCorrectly() { converter.write(document, result); result.remove(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY); - assertThat(document, is(result)); + assertThat(document).isEqualTo(result); } @Test // DATAMONGO-324 - public void readsDocumentCorrectly() { + void readsDocumentCorrectly() { org.bson.Document document = new org.bson.Document(); document.put("foo", "bar"); org.bson.Document result = converter.read(org.bson.Document.class, document); - assertThat(result, is(document)); + assertThat(result).isEqualTo(document); } @Test // DATAMONGO-329 - public void writesMapAsGenericFieldCorrectly() { + void writesMapAsGenericFieldCorrectly() { Map> objectToSave = new HashMap>(); objectToSave.put("test", new A("testValue")); @@ -792,21 +925,21 @@ public void writesMapAsGenericFieldCorrectly() { converter.write(a, result); - assertThat(result.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(A.class.getName())); - assertThat(result.get("valueType"), is(HashMap.class.getName())); + assertThat(result.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isEqualTo(A.class.getName()); + assertThat(result.get("valueType")).isEqualTo(HashMap.class.getName()); org.bson.Document object = (org.bson.Document) result.get("value"); - assertThat(object, is(notNullValue())); + assertThat(object).isNotNull(); org.bson.Document inner = (org.bson.Document) object.get("test"); - assertThat(inner, is(notNullValue())); - assertThat(inner.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY), is(A.class.getName())); - 
assertThat(inner.get("valueType"), is(String.class.getName())); - assertThat(inner.get("value"), is("testValue")); + assertThat(inner).isNotNull(); + assertThat(inner.get(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isEqualTo(A.class.getName()); + assertThat(inner.get("valueType")).isEqualTo(String.class.getName()); + assertThat(inner.get("value")).isEqualTo("testValue"); } @Test - public void writesIntIdCorrectly() { + void writesIntIdCorrectly() { ClassWithIntId value = new ClassWithIntId(); value.id = 5; @@ -814,12 +947,12 @@ public void writesIntIdCorrectly() { org.bson.Document result = new org.bson.Document(); converter.write(value, result); - assertThat(result.get("_id"), is(5)); + assertThat(result.get("_id")).isEqualTo(5); } @Test // DATAMONGO-368 @SuppressWarnings("unchecked") - public void writesNullValuesForCollection() { + void writesNullValuesForCollection() { CollectionWrapper wrapper = new CollectionWrapper(); wrapper.contacts = Arrays.asList(new Person(), null); @@ -828,42 +961,43 @@ public void writesNullValuesForCollection() { converter.write(wrapper, result); Object contacts = result.get("contacts"); - assertThat(contacts, is(instanceOf(Collection.class))); - assertThat(((Collection) contacts).size(), is(2)); - assertThat((Collection) contacts, hasItem(nullValue())); + assertThat(contacts).isInstanceOf(Collection.class); + assertThat(((Collection) contacts).size()).isEqualTo(2); + assertThat((Collection) contacts).containsNull(); } @Test // DATAMONGO-379 - public void considersDefaultingExpressionsAtConstructorArguments() { + void considersDefaultingExpressionsAtConstructorArguments() { org.bson.Document document = new org.bson.Document("foo", "bar"); document.put("foobar", 2.5); DefaultedConstructorArgument result = converter.read(DefaultedConstructorArgument.class, document); - assertThat(result.bar, is(-1)); + assertThat(result.bar).isEqualTo(-1); } @Test // DATAMONGO-379 - public void usesDocumentFieldIfReferencedInAtValue() { + void 
usesDocumentFieldIfReferencedInAtValue() { org.bson.Document document = new org.bson.Document("foo", "bar"); document.put("something", 37); document.put("foobar", 2.5); DefaultedConstructorArgument result = converter.read(DefaultedConstructorArgument.class, document); - assertThat(result.bar, is(37)); + assertThat(result.bar).isEqualTo(37); } - @Test(expected = MappingInstantiationException.class) // DATAMONGO-379 - public void rejectsNotFoundConstructorParameterForPrimitiveType() { + @Test // DATAMONGO-379 + void rejectsNotFoundConstructorParameterForPrimitiveType() { org.bson.Document document = new org.bson.Document("foo", "bar"); - converter.read(DefaultedConstructorArgument.class, document); + assertThatThrownBy(() -> converter.read(DefaultedConstructorArgument.class, document)) + .isInstanceOf(MappingInstantiationException.class); } @Test // DATAMONGO-358 - public void writesListForObjectPropertyCorrectly() { + void writesListForObjectPropertyCorrectly() { Attribute attribute = new Attribute(); attribute.key = "key"; @@ -877,96 +1011,98 @@ public void writesListForObjectPropertyCorrectly() { converter.write(item, result); Item read = converter.read(Item.class, result); - assertThat(read.attributes.size(), is(1)); - assertThat(read.attributes.get(0).key, is(attribute.key)); - assertThat(read.attributes.get(0).value, is(instanceOf(Collection.class))); + assertThat(read.attributes.size()).isEqualTo(1); + assertThat(read.attributes.get(0).key).isEqualTo(attribute.key); + assertThat(read.attributes.get(0).value).isInstanceOf(Collection.class); @SuppressWarnings("unchecked") Collection values = (Collection) read.attributes.get(0).value; - assertThat(values.size(), is(2)); - assertThat(values, hasItems("1", "2")); + assertThat(values.size()).isEqualTo(2); + assertThat(values).contains("1", "2"); } - @Test(expected = MappingException.class) // DATAMONGO-380 - public void rejectsMapWithKeyContainingDotsByDefault() { - 
converter.write(Collections.singletonMap("foo.bar", "foobar"), new org.bson.Document()); + @Test // DATAMONGO-380 + void rejectsMapWithKeyContainingDotsByDefault() { + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> converter.write(Collections.singletonMap("foo.bar", "foobar"), new org.bson.Document())); } @Test // DATAMONGO-380 - public void escapesDotInMapKeysIfReplacementConfigured() { + void escapesDotInMapKeysIfReplacementConfigured() { converter.setMapKeyDotReplacement("~"); org.bson.Document document = new org.bson.Document(); converter.write(Collections.singletonMap("foo.bar", "foobar"), document); - assertThat((String) document.get("foo~bar"), is("foobar")); - assertThat(document.containsKey("foo.bar"), is(false)); + assertThat((String) document.get("foo~bar")).isEqualTo("foobar"); + assertThat(document.containsKey("foo.bar")).isFalse(); } @Test // DATAMONGO-380 @SuppressWarnings("unchecked") - public void unescapesDotInMapKeysIfReplacementConfigured() { + void unescapesDotInMapKeysIfReplacementConfigured() { converter.setMapKeyDotReplacement("~"); org.bson.Document document = new org.bson.Document("foo~bar", "foobar"); Map result = converter.read(Map.class, document); - assertThat(result.get("foo.bar"), is("foobar")); - assertThat(result.containsKey("foobar"), is(false)); + assertThat(result.get("foo.bar")).isEqualTo("foobar"); + assertThat(result.containsKey("foobar")).isFalse(); } @Test // DATAMONGO-382 - @Ignore("mongo3 - no longer supported") - public void convertsSetToBasicDBList() { + @Disabled("mongo3 - no longer supported") + void convertsSetToBasicDBList() { Address address = new Address(); address.city = "London"; address.street = "Foo"; Object result = converter.convertToMongoType(Collections.singleton(address), ClassTypeInformation.OBJECT); - assertThat(result, is(instanceOf(List.class))); + assertThat(result).isInstanceOf(List.class); Set readResult = converter.read(Set.class, (org.bson.Document) result); - 
assertThat(readResult.size(), is(1)); - assertThat(readResult.iterator().next(), is(instanceOf(Address.class))); + assertThat(readResult.size()).isEqualTo(1); + assertThat(readResult.iterator().next()).isInstanceOf(Address.class); } - @Test // DATAMONGO-402 - public void readsMemberClassCorrectly() { + @Test // DATAMONGO-402, GH-3702 + void readsMemberClassCorrectly() { - org.bson.Document document = new org.bson.Document("inner", new org.bson.Document("value", "FOO!")); + org.bson.Document document = new org.bson.Document("inner", + new LinkedHashMap<>(new org.bson.Document("value", "FOO!"))); Outer outer = converter.read(Outer.class, document); - assertThat(outer.inner, is(notNullValue())); - assertThat(outer.inner.value, is("FOO!")); + assertThat(outer.inner).isNotNull(); + assertThat(outer.inner.value).isEqualTo("FOO!"); assertSyntheticFieldValueOf(outer.inner, outer); } @Test // DATAMONGO-458 - public void readEmptyCollectionIsModifiable() { + void readEmptyCollectionIsModifiable() { org.bson.Document document = new org.bson.Document("contactsSet", new BasicDBList()); CollectionWrapper wrapper = converter.read(CollectionWrapper.class, document); - assertThat(wrapper.contactsSet, is(notNullValue())); + assertThat(wrapper.contactsSet).isNotNull(); wrapper.contactsSet.add(new Contact() {}); } @Test // DATAMONGO-424 - public void readsPlainDBRefObject() { + void readsPlainDBRefObject() { DBRef dbRef = new DBRef("foo", 2); org.bson.Document document = new org.bson.Document("ref", dbRef); DBRefWrapper result = converter.read(DBRefWrapper.class, document); - assertThat(result.ref, is(dbRef)); + assertThat(result.ref).isEqualTo(dbRef); } @Test // DATAMONGO-424 - public void readsCollectionOfDBRefs() { + void readsCollectionOfDBRefs() { DBRef dbRef = new DBRef("foo", 2); BasicDBList refs = new BasicDBList(); @@ -975,12 +1111,12 @@ public void readsCollectionOfDBRefs() { org.bson.Document document = new org.bson.Document("refs", refs); DBRefWrapper result = 
converter.read(DBRefWrapper.class, document); - assertThat(result.refs, hasSize(1)); - assertThat(result.refs, hasItem(dbRef)); + assertThat(result.refs).hasSize(1); + assertThat(result.refs).contains(dbRef); } @Test // DATAMONGO-424 - public void readsDBRefMap() { + void readsDBRefMap() { DBRef dbRef = mock(DBRef.class); org.bson.Document refMap = new org.bson.Document("foo", dbRef); @@ -988,13 +1124,13 @@ public void readsDBRefMap() { DBRefWrapper result = converter.read(DBRefWrapper.class, document); - assertThat(result.refMap.entrySet(), hasSize(1)); - assertThat(result.refMap.values(), hasItem(dbRef)); + assertThat(result.refMap.entrySet()).hasSize(1); + assertThat(result.refMap.values()).contains(dbRef); } @Test // DATAMONGO-424 @SuppressWarnings({ "rawtypes", "unchecked" }) - public void resolvesDBRefMapValue() { + void resolvesDBRefMapValue() { when(resolver.fetch(Mockito.any(DBRef.class))).thenReturn(new org.bson.Document()); DBRef dbRef = mock(DBRef.class); @@ -1004,33 +1140,31 @@ public void resolvesDBRefMapValue() { DBRefWrapper result = converter.read(DBRefWrapper.class, document); - Matcher isPerson = instanceOf(Person.class); - - assertThat(result.personMap.entrySet(), hasSize(1)); - assertThat(result.personMap.values(), hasItem(isPerson)); + assertThat(result.personMap.entrySet()).hasSize(1); + assertThat(result.personMap.values()).anyMatch(Person.class::isInstance); } @Test // DATAMONGO-462 - public void writesURLsAsStringOutOfTheBox() throws Exception { + void writesURLsAsStringOutOfTheBox() throws Exception { URLWrapper wrapper = new URLWrapper(); - wrapper.url = new URL("http://springsource.org"); + wrapper.url = new URL("https://springsource.org"); org.bson.Document sink = new org.bson.Document(); converter.write(wrapper, sink); - assertThat(sink.get("url"), is("http://springsource.org")); + assertThat(sink.get("url")).isEqualTo("https://springsource.org"); } @Test // DATAMONGO-462 - public void readsURLFromStringOutOfTheBox() throws Exception 
{ - org.bson.Document document = new org.bson.Document("url", "http://springsource.org"); + void readsURLFromStringOutOfTheBox() throws Exception { + org.bson.Document document = new org.bson.Document("url", "https://springsource.org"); URLWrapper result = converter.read(URLWrapper.class, document); - assertThat(result.url, is(new URL("http://springsource.org"))); + assertThat(result.url).isEqualTo(new URL("https://springsource.org")); } @Test // DATAMONGO-485 - public void writesComplexIdCorrectly() { + void writesComplexIdCorrectly() { ComplexId id = new ComplexId(); id.innerId = 4711L; @@ -1042,25 +1176,25 @@ public void writesComplexIdCorrectly() { converter.write(entity, document); Object idField = document.get("_id"); - assertThat(idField, is(notNullValue())); - assertThat(idField, is(instanceOf(org.bson.Document.class))); - assertThat(((org.bson.Document) idField).get("innerId"), is(4711L)); + assertThat(idField).isNotNull(); + assertThat(idField).isInstanceOf(org.bson.Document.class); + assertThat(((org.bson.Document) idField).get("innerId")).isEqualTo(4711L); } @Test // DATAMONGO-485 - public void readsComplexIdCorrectly() { + void readsComplexIdCorrectly() { org.bson.Document innerId = new org.bson.Document("innerId", 4711L); org.bson.Document entity = new org.bson.Document("_id", innerId); ClassWithComplexId result = converter.read(ClassWithComplexId.class, entity); - assertThat(result.complexId, is(notNullValue())); - assertThat(result.complexId.innerId, is(4711L)); + assertThat(result.complexId).isNotNull(); + assertThat(result.complexId.innerId).isEqualTo(4711L); } @Test // DATAMONGO-489 - public void readsArraysAsMapValuesCorrectly() { + void readsArraysAsMapValuesCorrectly() { BasicDBList list = new BasicDBList(); list.add("Foo"); @@ -1070,48 +1204,69 @@ public void readsArraysAsMapValuesCorrectly() { org.bson.Document wrapper = new org.bson.Document("mapOfStrings", map); ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, 
wrapper); - assertThat(result.mapOfStrings, is(notNullValue())); + assertThat(result.mapOfStrings).isNotNull(); String[] values = result.mapOfStrings.get("key"); - assertThat(values, is(notNullValue())); - assertThat(values, is(arrayWithSize(2))); + assertThat(values).isNotNull(); + assertThat(values).hasSize(2); } @Test // DATAMONGO-497 - public void readsEmptyCollectionIntoConstructorCorrectly() { + void readsEmptyCollectionIntoConstructorCorrectly() { org.bson.Document source = new org.bson.Document("attributes", new BasicDBList()); TypWithCollectionConstructor result = converter.read(TypWithCollectionConstructor.class, source); - assertThat(result.attributes, is(notNullValue())); + assertThat(result.attributes).isNotNull(); + } + + @Test // DATAMONGO-2400 + void writeJavaTimeValuesViaCodec() { + + configureConverterWithNativeJavaTimeCodec(); + TypeWithLocalDateTime source = new TypeWithLocalDateTime(); + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target).containsEntry("date", source.date); + } + + void configureConverterWithNativeJavaTimeCodec() { + + converter = new MappingMongoConverter(resolver, mappingContext); + + converter.setCustomConversions(MongoCustomConversions + .create(MongoCustomConversions.MongoConverterConfigurationAdapter::useNativeDriverJavaTimeCodecs)); + converter.afterPropertiesSet(); } private static void assertSyntheticFieldValueOf(Object target, Object expected) { for (int i = 0; i < 10; i++) { try { - assertThat(ReflectionTestUtils.getField(target, "this$" + i), is(expected)); + assertThat(ReflectionTestUtils.getField(target, "this$" + i)).isEqualTo(expected); return; } catch (IllegalArgumentException e) { // Suppress and try next } } - fail(String.format("Didn't find synthetic field on %s!", target)); + fail(String.format("Didn't find synthetic field on %s", target)); } @Test // DATAMGONGO-508 - public void eagerlyReturnsDBRefObjectIfTargetAlreadyIsOne() { + void 
eagerlyReturnsDBRefObjectIfTargetAlreadyIsOne() { DBRef dbRef = new DBRef("collection", "id"); MongoPersistentProperty property = mock(MongoPersistentProperty.class); - assertThat(converter.createDBRef(dbRef, property), is(dbRef)); + assertThat(converter.createDBRef(dbRef, property)).isEqualTo(dbRef); } @Test // DATAMONGO-523, DATAMONGO-1509 - public void considersTypeAliasAnnotation() { + void considersTypeAliasAnnotation() { Aliased aliased = new Aliased(); aliased.name = "foo"; @@ -1123,7 +1278,7 @@ public void considersTypeAliasAnnotation() { } @Test // DATAMONGO-533 - public void marshalsThrowableCorrectly() { + void marshalsThrowableCorrectly() { ThrowableWrapper wrapper = new ThrowableWrapper(); wrapper.throwable = new Exception(); @@ -1133,7 +1288,7 @@ public void marshalsThrowableCorrectly() { } @Test // DATAMONGO-592 - public void recursivelyConvertsSpELReadValue() { + void recursivelyConvertsSpELReadValue() { org.bson.Document input = org.bson.Document.parse( "{ \"_id\" : { \"$oid\" : \"50ca271c4566a2b08f2d667a\" }, \"_class\" : \"com.recorder.TestRecorder2$ObjectContainer\", \"property\" : { \"property\" : 100 } }"); @@ -1142,7 +1297,7 @@ public void recursivelyConvertsSpELReadValue() { } @Test // DATAMONGO-724 - public void mappingConsidersCustomConvertersNotWritingTypeInformation() { + void mappingConsidersCustomConvertersNotWritingTypeInformation() { Person person = new Person(); person.firstname = "Dave"; @@ -1186,28 +1341,32 @@ public Person convert(org.bson.Document source) { ClassWithMapProperty result = mongoConverter.read(ClassWithMapProperty.class, document); - assertThat(result.mapOfPersons, is(notNullValue())); + assertThat(result.mapOfPersons).isNotNull(); Person personCandidate = result.mapOfPersons.get("foo"); - assertThat(personCandidate, is(notNullValue())); - assertThat(personCandidate.firstname, is("Dave")); + assertThat(personCandidate).isNotNull(); + assertThat(personCandidate.firstname).isEqualTo("Dave"); - 
assertThat(result.mapOfObjects, is(notNullValue())); + assertThat(result.mapOfObjects).isNotNull(); Object value = result.mapOfObjects.get("foo"); - assertThat(value, is(notNullValue())); - assertThat(value, is(instanceOf(Person.class))); - assertThat(((Person) value).firstname, is("Dave")); - assertThat(((Person) value).lastname, is("converter")); + assertThat(value).isNotNull(); + assertThat(value).isInstanceOf(Person.class); + assertThat(((Person) value).firstname).isEqualTo("Dave"); + assertThat(((Person) value).lastname).isEqualTo("converter"); } - @Test // DATAMONGO-743 - public void readsIntoStringsOutOfTheBox() { + @Test // DATAMONGO-743, DATAMONGO-2198 + void readsIntoStringsOutOfTheBox() { + + String target = converter.read(String.class, new org.bson.Document("firstname", "Dave")); - org.bson.Document document = new org.bson.Document("firstname", "Dave"); - assertThat(converter.read(String.class, document), is("{ \"firstname\" : \"Dave\" }")); + assertThat(target).startsWith("{"); + assertThat(target).endsWith("}"); + assertThat(target).contains("\"firstname\""); + assertThat(target).contains("\"Dave\""); } @Test // DATAMONGO-766 - public void writesProjectingTypeCorrectly() { + void writesProjectingTypeCorrectly() { NestedType nested = new NestedType(); nested.c = "C"; @@ -1220,14 +1379,14 @@ public void writesProjectingTypeCorrectly() { org.bson.Document result = new org.bson.Document(); converter.write(type, result); - assertThat(result.get("name"), is((Object) "name")); + assertThat(result.get("name")).isEqualTo((Object) "name"); org.bson.Document aValue = DocumentTestUtils.getAsDocument(result, "a"); - assertThat(aValue.get("b"), is((Object) "bar")); - assertThat(aValue.get("c"), is((Object) "C")); + assertThat(aValue.get("b")).isEqualTo((Object) "bar"); + assertThat(aValue.get("c")).isEqualTo((Object) "C"); } @Test // DATAMONGO-812, DATAMONGO-893, DATAMONGO-1509 - public void convertsListToBasicDBListAndRetainsTypeInformationForComplexObjects() { + 
void convertsListToBasicDBListAndRetainsTypeInformationForComplexObjects() { Address address = new Address(); address.city = "London"; @@ -1236,27 +1395,27 @@ public void convertsListToBasicDBListAndRetainsTypeInformationForComplexObjects( Object result = converter.convertToMongoType(Collections.singletonList(address), ClassTypeInformation.from(InterfaceType.class)); - assertThat(result, is(instanceOf(List.class))); + assertThat(result).isInstanceOf(List.class); List dbList = (List) result; - assertThat(dbList, hasSize(1)); + assertThat(dbList).hasSize(1); assertTypeHint(getAsDocument(dbList, 0), Address.class); } @Test // DATAMONGO-812 - public void convertsListToBasicDBListWithoutTypeInformationForSimpleTypes() { + void convertsListToBasicDBListWithoutTypeInformationForSimpleTypes() { Object result = converter.convertToMongoType(Collections.singletonList("foo")); - assertThat(result, is(instanceOf(List.class))); + assertThat(result).isInstanceOf(List.class); List dbList = (List) result; - assertThat(dbList, hasSize(1)); - assertThat(dbList.get(0), instanceOf(String.class)); + assertThat(dbList).hasSize(1); + assertThat(dbList.get(0)).isInstanceOf(String.class); } @Test // DATAMONGO-812, DATAMONGO-1509 - public void convertsArrayToBasicDBListAndRetainsTypeInformationForComplexObjects() { + void convertsArrayToBasicDBListAndRetainsTypeInformationForComplexObjects() { Address address = new Address(); address.city = "London"; @@ -1264,27 +1423,27 @@ public void convertsArrayToBasicDBListAndRetainsTypeInformationForComplexObjects Object result = converter.convertToMongoType(new Address[] { address }, ClassTypeInformation.OBJECT); - assertThat(result, is(instanceOf(List.class))); + assertThat(result).isInstanceOf(List.class); List dbList = (List) result; - assertThat(dbList, hasSize(1)); + assertThat(dbList).hasSize(1); assertTypeHint(getAsDocument(dbList, 0), Address.class); } @Test // DATAMONGO-812 - public void 
convertsArrayToBasicDBListWithoutTypeInformationForSimpleTypes() { + void convertsArrayToBasicDBListWithoutTypeInformationForSimpleTypes() { Object result = converter.convertToMongoType(new String[] { "foo" }); - assertThat(result, is(instanceOf(List.class))); + assertThat(result).isInstanceOf(List.class); List dbList = (List) result; - assertThat(dbList, hasSize(1)); - assertThat(dbList.get(0), instanceOf(String.class)); + assertThat(dbList).hasSize(1); + assertThat(dbList.get(0)).isInstanceOf(String.class); } @Test // DATAMONGO-833 - public void readsEnumSetCorrectly() { + void readsEnumSetCorrectly() { BasicDBList enumSet = new BasicDBList(); enumSet.add("SECOND"); @@ -1292,25 +1451,25 @@ public void readsEnumSetCorrectly() { ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class, document); - assertThat(result.enumSet, is(instanceOf(EnumSet.class))); - assertThat(result.enumSet.size(), is(1)); - assertThat(result.enumSet, hasItem(SampleEnum.SECOND)); + assertThat(result.enumSet).isInstanceOf(EnumSet.class); + assertThat(result.enumSet.size()).isEqualTo(1); + assertThat(result.enumSet).contains(SampleEnum.SECOND); } @Test // DATAMONGO-833 - public void readsEnumMapCorrectly() { + void readsEnumMapCorrectly() { org.bson.Document enumMap = new org.bson.Document("FIRST", "Dave"); ClassWithEnumProperty result = converter.read(ClassWithEnumProperty.class, new org.bson.Document("enumMap", enumMap)); - assertThat(result.enumMap, is(instanceOf(EnumMap.class))); - assertThat(result.enumMap.size(), is(1)); - assertThat(result.enumMap.get(SampleEnum.FIRST), is("Dave")); + assertThat(result.enumMap).isInstanceOf(EnumMap.class); + assertThat(result.enumMap.size()).isEqualTo(1); + assertThat(result.enumMap.get(SampleEnum.FIRST)).isEqualTo("Dave"); } @Test // DATAMONGO-887 - public void readsTreeMapCorrectly() { + void readsTreeMapCorrectly() { org.bson.Document person = new org.bson.Document("foo", "Dave"); org.bson.Document treeMapOfPerson = new 
org.bson.Document("key", person); @@ -1318,13 +1477,13 @@ public void readsTreeMapCorrectly() { ClassWithMapProperty result = converter.read(ClassWithMapProperty.class, document); - assertThat(result.treeMapOfPersons, is(notNullValue())); - assertThat(result.treeMapOfPersons.get("key"), is(notNullValue())); - assertThat(result.treeMapOfPersons.get("key").firstname, is("Dave")); + assertThat(result.treeMapOfPersons).isNotNull(); + assertThat(result.treeMapOfPersons.get("key")).isNotNull(); + assertThat(result.treeMapOfPersons.get("key").firstname).isEqualTo("Dave"); } @Test // DATAMONGO-887 - public void writesTreeMapCorrectly() { + void writesTreeMapCorrectly() { Person person = new Person(); person.firstname = "Dave"; @@ -1339,11 +1498,11 @@ public void writesTreeMapCorrectly() { org.bson.Document map = getAsDocument(result, "treeMapOfPersons"); org.bson.Document entry = getAsDocument(map, "key"); - assertThat(entry.get("foo"), is("Dave")); + assertThat(entry.get("foo")).isEqualTo("Dave"); } @Test // DATAMONGO-858 - public void shouldWriteEntityWithGeoBoxCorrectly() { + void shouldWriteEntityWithGeoBoxCorrectly() { ClassWithGeoBox object = new ClassWithGeoBox(); object.box = new Box(new Point(1, 2), new Point(3, 4)); @@ -1351,10 +1510,10 @@ public void shouldWriteEntityWithGeoBoxCorrectly() { org.bson.Document document = new org.bson.Document(); converter.write(object, document); - assertThat(document, is(notNullValue())); - assertThat(document.get("box"), is(instanceOf(org.bson.Document.class))); - assertThat(document.get("box"), is((Object) new org.bson.Document() - .append("first", toDocument(object.box.getFirst())).append("second", toDocument(object.box.getSecond())))); + assertThat(document).isNotNull(); + assertThat(document.get("box")).isInstanceOf(org.bson.Document.class); + assertThat(document.get("box")).isEqualTo((Object) new org.bson.Document() + .append("first", toDocument(object.box.getFirst())).append("second", toDocument(object.box.getSecond()))); 
} private static org.bson.Document toDocument(Point point) { @@ -1362,7 +1521,7 @@ private static org.bson.Document toDocument(Point point) { } @Test // DATAMONGO-858 - public void shouldReadEntityWithGeoBoxCorrectly() { + void shouldReadEntityWithGeoBoxCorrectly() { ClassWithGeoBox object = new ClassWithGeoBox(); object.box = new Box(new Point(1, 2), new Point(3, 4)); @@ -1372,12 +1531,12 @@ public void shouldReadEntityWithGeoBoxCorrectly() { ClassWithGeoBox result = converter.read(ClassWithGeoBox.class, document); - assertThat(result, is(notNullValue())); - assertThat(result.box, is(object.box)); + assertThat(result).isNotNull(); + assertThat(result.box).isEqualTo(object.box); } @Test // DATAMONGO-858 - public void shouldWriteEntityWithGeoPolygonCorrectly() { + void shouldWriteEntityWithGeoPolygonCorrectly() { ClassWithGeoPolygon object = new ClassWithGeoPolygon(); object.polygon = new Polygon(new Point(1, 2), new Point(3, 4), new Point(4, 5)); @@ -1385,21 +1544,21 @@ public void shouldWriteEntityWithGeoPolygonCorrectly() { org.bson.Document document = new org.bson.Document(); converter.write(object, document); - assertThat(document, is(notNullValue())); + assertThat(document).isNotNull(); - assertThat(document.get("polygon"), is(instanceOf(org.bson.Document.class))); + assertThat(document.get("polygon")).isInstanceOf(org.bson.Document.class); org.bson.Document polygonDoc = (org.bson.Document) document.get("polygon"); @SuppressWarnings("unchecked") List points = (List) polygonDoc.get("points"); - assertThat(points, hasSize(3)); - assertThat(points, Matchers. 
hasItems(toDocument(object.polygon.getPoints().get(0)), - toDocument(object.polygon.getPoints().get(1)), toDocument(object.polygon.getPoints().get(2)))); + assertThat(points).hasSize(3); + assertThat(points).contains(toDocument(object.polygon.getPoints().get(0)), + toDocument(object.polygon.getPoints().get(1)), toDocument(object.polygon.getPoints().get(2))); } @Test // DATAMONGO-858 - public void shouldReadEntityWithGeoPolygonCorrectly() { + void shouldReadEntityWithGeoPolygonCorrectly() { ClassWithGeoPolygon object = new ClassWithGeoPolygon(); object.polygon = new Polygon(new Point(1, 2), new Point(3, 4), new Point(4, 5)); @@ -1409,12 +1568,12 @@ public void shouldReadEntityWithGeoPolygonCorrectly() { ClassWithGeoPolygon result = converter.read(ClassWithGeoPolygon.class, document); - assertThat(result, is(notNullValue())); - assertThat(result.polygon, is(object.polygon)); + assertThat(result).isNotNull(); + assertThat(result.polygon).isEqualTo(object.polygon); } @Test // DATAMONGO-858 - public void shouldWriteEntityWithGeoCircleCorrectly() { + void shouldWriteEntityWithGeoCircleCorrectly() { ClassWithGeoCircle object = new ClassWithGeoCircle(); Circle circle = new Circle(new Point(1, 2), 3); @@ -1424,16 +1583,15 @@ public void shouldWriteEntityWithGeoCircleCorrectly() { org.bson.Document document = new org.bson.Document(); converter.write(object, document); - assertThat(document, is(notNullValue())); - assertThat(document.get("circle"), is(instanceOf(org.bson.Document.class))); - assertThat(document.get("circle"), - is((Object) new org.bson.Document("center", - new org.bson.Document("x", circle.getCenter().getX()).append("y", circle.getCenter().getY())) - .append("radius", radius.getNormalizedValue()).append("metric", radius.getMetric().toString()))); + assertThat(document).isNotNull(); + assertThat(document.get("circle")).isInstanceOf(org.bson.Document.class); + assertThat(document.get("circle")).isEqualTo((Object) new org.bson.Document("center", + new 
org.bson.Document("x", circle.getCenter().getX()).append("y", circle.getCenter().getY())) + .append("radius", radius.getNormalizedValue()).append("metric", radius.getMetric().toString())); } @Test // DATAMONGO-858 - public void shouldReadEntityWithGeoCircleCorrectly() { + void shouldReadEntityWithGeoCircleCorrectly() { ClassWithGeoCircle object = new ClassWithGeoCircle(); object.circle = new Circle(new Point(1, 2), 3); @@ -1443,12 +1601,12 @@ public void shouldReadEntityWithGeoCircleCorrectly() { ClassWithGeoCircle result = converter.read(ClassWithGeoCircle.class, document); - assertThat(result, is(notNullValue())); - assertThat(result.circle, is(result.circle)); + assertThat(result).isNotNull(); + assertThat(result.circle).isEqualTo(result.circle); } @Test // DATAMONGO-858 - public void shouldWriteEntityWithGeoSphereCorrectly() { + void shouldWriteEntityWithGeoSphereCorrectly() { ClassWithGeoSphere object = new ClassWithGeoSphere(); Sphere sphere = new Sphere(new Point(1, 2), 3); @@ -1458,16 +1616,15 @@ public void shouldWriteEntityWithGeoSphereCorrectly() { org.bson.Document document = new org.bson.Document(); converter.write(object, document); - assertThat(document, is(notNullValue())); - assertThat(document.get("sphere"), is(instanceOf(org.bson.Document.class))); - assertThat(document.get("sphere"), - is((Object) new org.bson.Document("center", - new org.bson.Document("x", sphere.getCenter().getX()).append("y", sphere.getCenter().getY())) - .append("radius", radius.getNormalizedValue()).append("metric", radius.getMetric().toString()))); + assertThat(document).isNotNull(); + assertThat(document.get("sphere")).isInstanceOf(org.bson.Document.class); + assertThat(document.get("sphere")).isEqualTo((Object) new org.bson.Document("center", + new org.bson.Document("x", sphere.getCenter().getX()).append("y", sphere.getCenter().getY())) + .append("radius", radius.getNormalizedValue()).append("metric", radius.getMetric().toString())); } @Test // DATAMONGO-858 - public 
void shouldWriteEntityWithGeoSphereWithMetricDistanceCorrectly() { + void shouldWriteEntityWithGeoSphereWithMetricDistanceCorrectly() { ClassWithGeoSphere object = new ClassWithGeoSphere(); Sphere sphere = new Sphere(new Point(1, 2), new Distance(3, Metrics.KILOMETERS)); @@ -1477,16 +1634,15 @@ public void shouldWriteEntityWithGeoSphereWithMetricDistanceCorrectly() { org.bson.Document document = new org.bson.Document(); converter.write(object, document); - assertThat(document, is(notNullValue())); - assertThat(document.get("sphere"), is(instanceOf(org.bson.Document.class))); - assertThat(document.get("sphere"), - is((Object) new org.bson.Document("center", - new org.bson.Document("x", sphere.getCenter().getX()).append("y", sphere.getCenter().getY())) - .append("radius", radius.getNormalizedValue()).append("metric", radius.getMetric().toString()))); + assertThat(document).isNotNull(); + assertThat(document.get("sphere")).isInstanceOf(org.bson.Document.class); + assertThat(document.get("sphere")).isEqualTo((Object) new org.bson.Document("center", + new org.bson.Document("x", sphere.getCenter().getX()).append("y", sphere.getCenter().getY())) + .append("radius", radius.getNormalizedValue()).append("metric", radius.getMetric().toString())); } @Test // DATAMONGO-858 - public void shouldReadEntityWithGeoSphereCorrectly() { + void shouldReadEntityWithGeoSphereCorrectly() { ClassWithGeoSphere object = new ClassWithGeoSphere(); object.sphere = new Sphere(new Point(1, 2), 3); @@ -1496,12 +1652,12 @@ public void shouldReadEntityWithGeoSphereCorrectly() { ClassWithGeoSphere result = converter.read(ClassWithGeoSphere.class, document); - assertThat(result, is(notNullValue())); - assertThat(result.sphere, is(object.sphere)); + assertThat(result).isNotNull(); + assertThat(result.sphere).isEqualTo(object.sphere); } @Test // DATAMONGO-858 - public void shouldWriteEntityWithGeoShapeCorrectly() { + void shouldWriteEntityWithGeoShapeCorrectly() { ClassWithGeoShape object = new 
ClassWithGeoShape(); Sphere sphere = new Sphere(new Point(1, 2), 3); @@ -1511,17 +1667,16 @@ public void shouldWriteEntityWithGeoShapeCorrectly() { org.bson.Document document = new org.bson.Document(); converter.write(object, document); - assertThat(document, is(notNullValue())); - assertThat(document.get("shape"), is(instanceOf(org.bson.Document.class))); - assertThat(document.get("shape"), - is((Object) new org.bson.Document("center", - new org.bson.Document("x", sphere.getCenter().getX()).append("y", sphere.getCenter().getY())) - .append("radius", radius.getNormalizedValue()).append("metric", radius.getMetric().toString()))); + assertThat(document).isNotNull(); + assertThat(document.get("shape")).isInstanceOf(org.bson.Document.class); + assertThat(document.get("shape")).isEqualTo((Object) new org.bson.Document("center", + new org.bson.Document("x", sphere.getCenter().getX()).append("y", sphere.getCenter().getY())) + .append("radius", radius.getNormalizedValue()).append("metric", radius.getMetric().toString())); } @Test // DATAMONGO-858 - @Ignore - public void shouldReadEntityWithGeoShapeCorrectly() { + @Disabled + void shouldReadEntityWithGeoShapeCorrectly() { ClassWithGeoShape object = new ClassWithGeoShape(); Sphere sphere = new Sphere(new Point(1, 2), 3); @@ -1532,12 +1687,12 @@ public void shouldReadEntityWithGeoShapeCorrectly() { ClassWithGeoShape result = converter.read(ClassWithGeoShape.class, document); - assertThat(result, is(notNullValue())); - assertThat(result.shape, is(sphere)); + assertThat(result).isNotNull(); + assertThat(result.shape).isEqualTo(sphere); } @Test // DATAMONGO-976 - public void shouldIgnoreTextScorePropertyWhenWriting() { + void shouldIgnoreTextScorePropertyWhenWriting() { ClassWithTextScoreProperty source = new ClassWithTextScoreProperty(); source.score = Float.MAX_VALUE; @@ -1545,19 +1700,19 @@ public void shouldIgnoreTextScorePropertyWhenWriting() { org.bson.Document document = new org.bson.Document(); converter.write(source, 
document); - assertThat(document.get("score"), nullValue()); + assertThat(document.get("score")).isNull(); } @Test // DATAMONGO-976 - public void shouldIncludeTextScorePropertyWhenReading() { + void shouldIncludeTextScorePropertyWhenReading() { ClassWithTextScoreProperty entity = converter.read(ClassWithTextScoreProperty.class, new org.bson.Document("score", 5F)); - assertThat(entity.score, equalTo(5F)); + assertThat(entity.score).isEqualTo(5F); } @Test // DATAMONGO-1001, DATAMONGO-1509 - public void shouldWriteCglibProxiedClassTypeInformationCorrectly() { + void shouldWriteCglibProxiedClassTypeInformationCorrectly() { ProxyFactory factory = new ProxyFactory(); factory.setTargetClass(GenericType.class); @@ -1571,7 +1726,7 @@ public void shouldWriteCglibProxiedClassTypeInformationCorrectly() { } @Test // DATAMONGO-1001 - public void shouldUseTargetObjectOfLazyLoadingProxyWhenWriting() { + void shouldUseTargetObjectOfLazyLoadingProxyWhenWriting() { LazyLoadingProxy mock = mock(LazyLoadingProxy.class); @@ -1582,27 +1737,23 @@ public void shouldUseTargetObjectOfLazyLoadingProxyWhenWriting() { } @Test // DATAMONGO-1034 - public void rejectsBasicDbListToBeConvertedIntoComplexType() { + void rejectsBasicDbListToBeConvertedIntoComplexType() { - List inner = new ArrayList(); + List inner = new ArrayList<>(); inner.add("key"); inner.add("value"); - List outer = new ArrayList(); + List outer = new ArrayList<>(); outer.add(inner); outer.add(inner); org.bson.Document source = new org.bson.Document("attributes", outer); - exception.expect(MappingException.class); - exception.expectMessage(Item.class.getName()); - exception.expectMessage(ArrayList.class.getName()); - - converter.read(Item.class, source); + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> converter.read(Item.class, source)); } @Test // DATAMONGO-1058 - public void readShouldRespectExplicitFieldNameForDbRef() { + void readShouldRespectExplicitFieldNameForDbRef() { org.bson.Document source = new 
org.bson.Document(); source.append("explict-name-for-db-ref", new DBRef("foo", "1")); @@ -1614,7 +1765,7 @@ public void readShouldRespectExplicitFieldNameForDbRef() { } @Test // DATAMONGO-1050 - public void writeShouldUseExplicitFieldnameForIdPropertyWhenAnnotated() { + void writeShouldUseExplicitFieldnameForIdPropertyWhenAnnotated() { RootForClassWithExplicitlyRenamedIdField source = new RootForClassWithExplicitlyRenamedIdField(); source.id = "rootId"; @@ -1624,12 +1775,12 @@ public void writeShouldUseExplicitFieldnameForIdPropertyWhenAnnotated() { org.bson.Document sink = new org.bson.Document(); converter.write(source, sink); - assertThat(sink.get("_id"), is("rootId")); - assertThat(sink.get("nested"), is(new org.bson.Document().append("id", "nestedId"))); + assertThat(sink.get("_id")).isEqualTo("rootId"); + assertThat(sink.get("nested")).isEqualTo(new org.bson.Document().append("id", "nestedId")); } @Test // DATAMONGO-1050 - public void readShouldUseExplicitFieldnameForIdPropertyWhenAnnotated() { + void readShouldUseExplicitFieldnameForIdPropertyWhenAnnotated() { org.bson.Document source = new org.bson.Document().append("_id", "rootId").append("nested", new org.bson.Document("id", "nestedId")); @@ -1637,44 +1788,44 @@ public void readShouldUseExplicitFieldnameForIdPropertyWhenAnnotated() { RootForClassWithExplicitlyRenamedIdField sink = converter.read(RootForClassWithExplicitlyRenamedIdField.class, source); - assertThat(sink.id, is("rootId")); - assertThat(sink.nested, notNullValue()); - assertThat(sink.nested.id, is("nestedId")); + assertThat(sink.id).isEqualTo("rootId"); + assertThat(sink.nested).isNotNull(); + assertThat(sink.nested.id).isEqualTo("nestedId"); } @Test // DATAMONGO-1050 - public void namedIdFieldShouldExtractValueFromUnderscoreIdField() { + void namedIdFieldShouldExtractValueFromUnderscoreIdField() { org.bson.Document document = new org.bson.Document().append("_id", "A").append("id", "B"); ClassWithNamedIdField withNamedIdField = 
converter.read(ClassWithNamedIdField.class, document); - assertThat(withNamedIdField.id, is("A")); + assertThat(withNamedIdField.id).isEqualTo("A"); } @Test // DATAMONGO-1050 - public void explicitlyRenamedIfFieldShouldExtractValueFromIdField() { + void explicitlyRenamedIfFieldShouldExtractValueFromIdField() { org.bson.Document document = new org.bson.Document().append("_id", "A").append("id", "B"); ClassWithExplicitlyRenamedField withExplicitlyRenamedField = converter.read(ClassWithExplicitlyRenamedField.class, document); - assertThat(withExplicitlyRenamedField.id, is("B")); + assertThat(withExplicitlyRenamedField.id).isEqualTo("B"); } @Test // DATAMONGO-1050 - public void annotatedIdFieldShouldExtractValueFromUnderscoreIdField() { + void annotatedIdFieldShouldExtractValueFromUnderscoreIdField() { org.bson.Document document = new org.bson.Document().append("_id", "A").append("id", "B"); ClassWithAnnotatedIdField withAnnotatedIdField = converter.read(ClassWithAnnotatedIdField.class, document); - assertThat(withAnnotatedIdField.key, is("A")); + assertThat(withAnnotatedIdField.key).isEqualTo("A"); } @Test // DATAMONGO-1102 - public void convertsJava8DateTimeTypesToDateAndBack() { + void convertsJava8DateTimeTypesToDateAndBack() { TypeWithLocalDateTime source = new TypeWithLocalDateTime(); LocalDateTime reference = source.date; @@ -1682,12 +1833,14 @@ public void convertsJava8DateTimeTypesToDateAndBack() { converter.write(source, result); - assertThat(result.get("date"), is(instanceOf(Date.class))); - assertThat(converter.read(TypeWithLocalDateTime.class, result).date, is(reference)); + assertThat(result.get("date")).isInstanceOf(Date.class); + assertThat(converter.read(TypeWithLocalDateTime.class, result).date) + .isEqualTo(reference.truncatedTo(ChronoUnit.MILLIS)); } @Test // DATAMONGO-1128 - public void writesOptionalsCorrectly() { + @Disabled("really we should find a solution for this") + void writesOptionalsCorrectly() { TypeWithOptional type = new 
TypeWithOptional(); type.localDateTime = Optional.of(LocalDateTime.now()); @@ -1696,14 +1849,15 @@ public void writesOptionalsCorrectly() { converter.write(type, result); - assertThat(getAsDocument(result, "string"), is(new org.bson.Document())); + assertThat(getAsDocument(result, "string")).isEqualTo(new org.bson.Document()); org.bson.Document localDateTime = getAsDocument(result, "localDateTime"); - assertThat(localDateTime.get("value"), is(instanceOf(Date.class))); + assertThat(localDateTime.get("value")).isInstanceOf(Date.class); } @Test // DATAMONGO-1128 - public void readsOptionalsCorrectly() { + @Disabled("Broken by DATAMONGO-1992 - In fact, storing Optional fields seems an anti-pattern.") + void readsOptionalsCorrectly() { LocalDateTime now = LocalDateTime.now(); Date reference = Date.from(now.atZone(systemDefault()).toInstant()); @@ -1713,12 +1867,12 @@ public void readsOptionalsCorrectly() { TypeWithOptional read = converter.read(TypeWithOptional.class, result); - assertThat(read.string, is(Optional. 
empty())); - assertThat(read.localDateTime, is(Optional.of(now))); + assertThat(read.string).isEmpty(); + assertThat(read.localDateTime).isEqualTo(Optional.of(now)); } @Test // DATAMONGO-1118 - public void convertsMapKeyUsingCustomConverterForAndBackwards() { + void convertsMapKeyUsingCustomConverterForAndBackwards() { MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext); converter.setCustomConversions( @@ -1732,11 +1886,11 @@ public void convertsMapKeyUsingCustomConverterForAndBackwards() { org.bson.Document target = new org.bson.Document(); converter.write(source, target); - assertThat(converter.read(ClassWithMapUsingEnumAsKey.class, target).map, is(source.map)); + assertThat(converter.read(ClassWithMapUsingEnumAsKey.class, target).map).isEqualTo(source.map); } @Test // DATAMONGO-1118 - public void writesMapKeyUsingCustomConverter() { + void writesMapKeyUsingCustomConverter() { MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext); converter.setCustomConversions(new MongoCustomConversions(Arrays.asList(new FooBarEnumToStringConverter()))); @@ -1752,12 +1906,12 @@ public void writesMapKeyUsingCustomConverter() { org.bson.Document map = DocumentTestUtils.getAsDocument(target, "map"); - assertThat(map.containsKey("foo-enum-value"), is(true)); - assertThat(map.containsKey("bar-enum-value"), is(true)); + assertThat(map.containsKey("foo-enum-value")).isTrue(); + assertThat(map.containsKey("bar-enum-value")).isTrue(); } @Test // DATAMONGO-1118 - public void readsMapKeyUsingCustomConverter() { + void readsMapKeyUsingCustomConverter() { MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext); converter.setCustomConversions(new MongoCustomConversions(Arrays.asList(new StringToFooNumConverter()))); @@ -1767,419 +1921,2746 @@ public void readsMapKeyUsingCustomConverter() { ClassWithMapUsingEnumAsKey target = converter.read(ClassWithMapUsingEnumAsKey.class, source); - 
assertThat(target.map.get(FooBarEnum.FOO), is("spring")); + assertThat(target.map.get(FooBarEnum.FOO)).isEqualTo("spring"); } @Test // DATAMONGO-1471 - public void readsDocumentWithPrimitiveIdButNoValue() { - assertThat(converter.read(ClassWithIntId.class, new org.bson.Document()), is(notNullValue())); + void readsDocumentWithPrimitiveIdButNoValue() { + assertThat(converter.read(ClassWithIntId.class, new org.bson.Document())).isNotNull(); } @Test // DATAMONGO-1497 - public void readsPropertyFromNestedFieldCorrectly() { + void readsPropertyFromNestedFieldCorrectly() { org.bson.Document source = new org.bson.Document("nested", new org.bson.Document("sample", "value")); TypeWithPropertyInNestedField result = converter.read(TypeWithPropertyInNestedField.class, source); - assertThat(result.sample, is("value")); + assertThat(result.sample).isEqualTo("value"); } @Test // DATAMONGO-1525 - public void readsEmptyEnumSet() { + void readsEmptyEnumSet() { org.bson.Document source = new org.bson.Document("enumSet", Collections.emptyList()); - assertThat(converter.read(ClassWithEnumProperty.class, source).enumSet, is(EnumSet.noneOf(SampleEnum.class))); + assertThat(converter.read(ClassWithEnumProperty.class, source).enumSet).isEqualTo(EnumSet.noneOf(SampleEnum.class)); } @Test // DATAMONGO-1757 - public void failsReadingDocumentIntoSimpleType() { + void failsReadingDocumentIntoSimpleType() { org.bson.Document nested = new org.bson.Document("key", "value"); org.bson.Document source = new org.bson.Document("map", new org.bson.Document("key", nested)); - exception.expect(MappingException.class); - exception.expectMessage(nested.toString()); - exception.expectMessage(Long.class.getName()); - - converter.read(TypeWithMapOfLongValues.class, source); + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> converter.read(TypeWithMapOfLongValues.class, source)); } @Test // DATAMONGO-1831 - public void shouldConvertArrayInConstructorCorrectly() { + void 
shouldConvertArrayInConstructorCorrectly() { org.bson.Document source = new org.bson.Document("array", Collections.emptyList()); - assertThat(converter.read(WithArrayInConstructor.class, source).array, is(emptyArray())); + assertThat(converter.read(WithArrayInConstructor.class, source).array).isEmpty(); } @Test // DATAMONGO-1831 - public void shouldConvertNullForArrayInConstructorCorrectly() { + void shouldConvertNullForArrayInConstructorCorrectly() { org.bson.Document source = new org.bson.Document(); - assertThat(converter.read(WithArrayInConstructor.class, source).array, is(nullValue())); + assertThat(converter.read(WithArrayInConstructor.class, source).array).isNull(); } - static class GenericType { - T content; + @Test // DATAMONGO-1898 + void writesInterfaceBackedEnumsToSimpleNameByDefault() { + + org.bson.Document document = new org.bson.Document(); + + DocWithInterfacedEnum source = new DocWithInterfacedEnum(); + source.property = InterfacedEnum.INSTANCE; + + converter.write(source, document); + + assertThat(document) // + .hasSize(2) // + .hasEntrySatisfying("_class", __ -> {}) // + .hasEntrySatisfying("property", value -> InterfacedEnum.INSTANCE.name().equals(value)); } - static class ClassWithEnumProperty { + @Test // DATAMONGO-1898 + void rejectsConversionFromStringToEnumBackedInterface() { - SampleEnum sampleEnum; - List enums; - EnumSet enumSet; - EnumMap enumMap; + org.bson.Document document = new org.bson.Document("property", InterfacedEnum.INSTANCE.name()); + + assertThatExceptionOfType(ConverterNotFoundException.class) // + .isThrownBy(() -> converter.read(DocWithInterfacedEnum.class, document)); } - enum SampleEnum { - FIRST { - @Override - void method() {} - }, - SECOND { - @Override - void method() { + @Test // DATAMONGO-1898 + void readsInterfacedEnumIfConverterIsRegistered() { - } + org.bson.Document document = new org.bson.Document("property", InterfacedEnum.INSTANCE.name()); + + Converter enumConverter = new Converter() { + + @Override + 
public SomeInterface convert(String source) { + return InterfacedEnum.valueOf(source); + } }; - abstract void method(); - } + converter.setCustomConversions(new MongoCustomConversions(Collections.singletonList(enumConverter))); + converter.afterPropertiesSet(); - interface InterfaceType { + DocWithInterfacedEnum result = converter.read(DocWithInterfacedEnum.class, document); + assertThat(result.property).isEqualTo(InterfacedEnum.INSTANCE); } - static class Address implements InterfaceType { - String street; - String city; - } + @Test // DATAMONGO-1904 + void readsNestedArraysCorrectly() { - interface Contact { + List>> floats = Collections.singletonList(Collections.singletonList(Arrays.asList(1.0f, 2.0f))); + + org.bson.Document document = new org.bson.Document("nestedFloats", floats); + WithNestedLists result = converter.read(WithNestedLists.class, document); + + assertThat(result.nestedFloats).hasDimensions(1, 1).isEqualTo(new float[][][] { { { 1.0f, 2.0f } } }); } - static class Person implements Contact { + @Test // DATAMONGO-1992 + void readsImmutableObjectCorrectly() { - @Id String id; + org.bson.Document document = new org.bson.Document("_id", "foo"); - LocalDate birthDate; + ImmutableObject result = converter.read(ImmutableObject.class, document); - @Field("foo") String firstname; - String lastname; + assertThat(result.id).isEqualTo("foo"); + assertThat(result.witherUsed).isTrue(); + } - Set
          addresses; + @Test // DATAMONGO-2026 + void readsImmutableObjectWithConstructorIdPropertyCorrectly() { - public Person() { + org.bson.Document source = new org.bson.Document("_id", "spring").append("value", "data"); - } + ImmutableObjectWithIdConstructorPropertyAndNoIdWitherMethod target = converter + .read(ImmutableObjectWithIdConstructorPropertyAndNoIdWitherMethod.class, source); - @PersistenceConstructor - public Person(Set
          addresses) { - this.addresses = addresses; - } + assertThat(target.id).isEqualTo("spring"); + assertThat(target.value).isEqualTo("data"); } - static class ClassWithSortedMap { - SortedMap map; - } + @Test // DATAMONGO-2011 + void readsNestedListsToObjectCorrectly() { - static class ClassWithMapProperty { - Map map; - Map> mapOfLists; - Map mapOfObjects; - Map mapOfStrings; - Map mapOfPersons; - TreeMap treeMapOfPersons; - } + List values = Arrays.asList("ONE", "TWO"); + org.bson.Document source = new org.bson.Document("value", Collections.singletonList(values)); - static class ClassWithNestedMaps { - Map>> nestedMaps; + assertThat(converter.read(Attribute.class, source).value).isInstanceOf(List.class); } - static class BirthDateContainer { - LocalDate birthDate; - } + @Test // DATAMONGO-2043 + void omitsTypeHintWhenWritingSimpleTypes() { - static class BigDecimalContainer { - BigDecimal value; - Map map; - List collection; + org.bson.Document target = new org.bson.Document(); + converter.write(new org.bson.Document("value", "FitzChivalry"), target); + + assertThat(target).doesNotContainKeys("_class"); } - static class CollectionWrapper { - List contacts; - List> strings; - List> listOfMaps; - Set contactsSet; + @Test // DATAMONGO-1798 + void convertStringIdThatIsAnObjectIdHexToObjectIdIfTargetIsObjectId() { + + ObjectId source = new ObjectId(); + assertThat(converter.convertId(source.toHexString(), ObjectId.class)).isEqualTo(source); } - static class LocaleWrapper { - Locale locale; + @Test // DATAMONGO-1798 + void donNotConvertStringIdThatIsAnObjectIdHexToObjectIdIfTargetIsString() { + + ObjectId source = new ObjectId(); + assertThat(converter.convertId(source.toHexString(), String.class)).isEqualTo(source.toHexString()); } - static class ClassWithBigIntegerId { - @Id BigInteger id; + @Test // DATAMONGO-1798 + void donNotConvertStringIdThatIsAnObjectIdHexToObjectIdIfTargetIsObject() { + + ObjectId source = new ObjectId(); + 
assertThat(converter.convertId(source.toHexString(), Object.class)).isEqualTo(source.toHexString()); } - static class A { + @Test // DATAMONGO-2135 + void addsEqualObjectsToCollection() { - String valueType; - T value; + org.bson.Document itemDocument = new org.bson.Document("itemKey", "123"); + org.bson.Document orderDocument = new org.bson.Document("items", + Arrays.asList(itemDocument, itemDocument, itemDocument)); - public A(T value) { - this.valueType = value.getClass().getName(); - this.value = value; - } + Order order = converter.read(Order.class, orderDocument); + + assertThat(order.items).hasSize(3); } - static class ClassWithIntId { + @Test // DATAMONGO-1849 + void mapsValueToExplicitTargetType() { - @Id int id; + WithExplicitTargetTypes source = new WithExplicitTargetTypes(); + source.script = "if (a > b) a else b"; + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target.get("script")).isEqualTo(new Code(source.script)); } - static class DefaultedConstructorArgument { + @Test // DATAMONGO-2328 + void readsScriptAsStringWhenAnnotatedWithFieldTargetType() { - String foo; - int bar; - double foobar; + String reference = "if (a > b) a else b"; + WithExplicitTargetTypes target = converter.read(WithExplicitTargetTypes.class, + new org.bson.Document("script", new Code(reference))); - DefaultedConstructorArgument(String foo, @Value("#root.something ?: -1") int bar, double foobar) { - this.foo = foo; - this.bar = bar; - this.foobar = foobar; - } + assertThat(target.script).isEqualTo(reference); } - static class Item { - List attributes; - } + @Test // DATAMONGO-1849 + void mapsCollectionValueToExplicitTargetType() { - static class Attribute { - String key; - Object value; + String script = "if (a > b) a else b"; + WithExplicitTargetTypes source = new WithExplicitTargetTypes(); + source.scripts = Collections.singletonList(script); + + org.bson.Document target = new org.bson.Document(); + 
converter.write(source, target); + + assertThat(target.get("scripts", List.class)).containsExactly(new Code(script)); } - static class Outer { + @Test // DATAMONGO-1849 + void mapsBigDecimalToDecimal128WhenAnnotatedWithFieldTargetType() { - class Inner { - String value; - } + WithExplicitTargetTypes source = new WithExplicitTargetTypes(); + source.bigDecimal = BigDecimal.valueOf(3.14159D); - Inner inner; + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target.get("bigDecimal")).isEqualTo(new Decimal128(source.bigDecimal)); } - static class DBRefWrapper { + @Test // DATAMONGO-2328 + void mapsDateToLongWhenAnnotatedWithFieldTargetType() { - DBRef ref; - List refs; - Map refMap; - Map personMap; - } + WithExplicitTargetTypes source = new WithExplicitTargetTypes(); + source.dateAsLong = new Date(); - static class URLWrapper { - URL url; + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target.get("dateAsLong")).isEqualTo(source.dateAsLong.getTime()); } - static class ClassWithComplexId { + @Test // DATAMONGO-2328 + void readsLongAsDateWhenAnnotatedWithFieldTargetType() { - @Id ComplexId complexId; - } + Date reference = new Date(); + WithExplicitTargetTypes target = converter.read(WithExplicitTargetTypes.class, + new org.bson.Document("dateAsLong", reference.getTime())); - static class ComplexId { - Long innerId; + assertThat(target.dateAsLong).isEqualTo(reference); } - static class TypWithCollectionConstructor { + @Test // DATAMONGO-2328 + void mapsLongToDateWhenAnnotatedWithFieldTargetType() { - List attributes; + Date date = new Date(); + WithExplicitTargetTypes source = new WithExplicitTargetTypes(); + source.longAsDate = date.getTime(); - public TypWithCollectionConstructor(List attributes) { - this.attributes = attributes; - } - } + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); - @TypeAlias("_") - static class 
Aliased { - String name; + assertThat(target.get("longAsDate")).isEqualTo(date); } - static class ThrowableWrapper { + @Test // DATAMONGO-2328 + void readsDateAsLongWhenAnnotatedWithFieldTargetType() { - Throwable throwable; + Date reference = new Date(); + WithExplicitTargetTypes target = converter.read(WithExplicitTargetTypes.class, + new org.bson.Document("longAsDate", reference)); + + assertThat(target.longAsDate).isEqualTo(reference.getTime()); } - @Document - static class PrimitiveContainer { + @Test // DATAMONGO-2328 + void mapsStringAsBooleanWhenAnnotatedWithFieldTargetType() { - @Field("property") private final int m_property; + WithExplicitTargetTypes source = new WithExplicitTargetTypes(); + source.stringAsBoolean = "true"; - @PersistenceConstructor - public PrimitiveContainer(@Value("#root.property") int a_property) { - m_property = a_property; - } + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); - public int property() { - return m_property; - } + assertThat(target.get("stringAsBoolean")).isEqualTo(true); } - @Document - static class ObjectContainer { + @Test // DATAMONGO-2328 + void readsBooleanAsStringWhenAnnotatedWithFieldTargetType() { - @Field("property") private final PrimitiveContainer m_property; + WithExplicitTargetTypes target = converter.read(WithExplicitTargetTypes.class, + new org.bson.Document("stringAsBoolean", true)); + + assertThat(target.stringAsBoolean).isEqualTo("true"); + } + + @Test // DATAMONGO-2328 + void mapsDateAsObjectIdWhenAnnotatedWithFieldTargetType() { + + WithExplicitTargetTypes source = new WithExplicitTargetTypes(); + source.dateAsObjectId = new Date(); + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + // need to compare the the timestamp as ObjectId has an internal counter + assertThat(target.get("dateAsObjectId", ObjectId.class).getTimestamp()) + .isEqualTo(new ObjectId(source.dateAsObjectId).getTimestamp()); + } + + @Test // 
DATAMONGO-2328 + void readsObjectIdAsDateWhenAnnotatedWithFieldTargetType() { + + ObjectId reference = new ObjectId(); + WithExplicitTargetTypes target = converter.read(WithExplicitTargetTypes.class, + new org.bson.Document("dateAsObjectId", reference)); + + assertThat(target.dateAsObjectId).isEqualTo(new Date(reference.getTimestamp())); + } + + @Test // DATAMONGO-2410 + void shouldAllowReadingBackDbObject() { + + assertThat(converter.read(BasicDBObject.class, new org.bson.Document("property", "value"))) + .isEqualTo(new BasicDBObject("property", "value")); + assertThat(converter.read(DBObject.class, new org.bson.Document("property", "value"))) + .isEqualTo(new BasicDBObject("property", "value")); + } + + @Test // DATAMONGO-2479 + void entityCallbacksAreNotSetByDefault() { + assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isNull(); + } + + @Test // DATAMONGO-2479 + void entityCallbacksShouldBeInitiatedOnSettingApplicationContext() { + + ApplicationContext ctx = new StaticApplicationContext(); + converter.setApplicationContext(ctx); + + assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isNotNull(); + } + + @Test // DATAMONGO-2479 + void setterForEntityCallbackOverridesContextInitializedOnes() { + + ApplicationContext ctx = new StaticApplicationContext(); + converter.setApplicationContext(ctx); + + EntityCallbacks callbacks = EntityCallbacks.create(); + converter.setEntityCallbacks(callbacks); + + assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isSameAs(callbacks); + } + + @Test // DATAMONGO-2479 + void setterForApplicationContextShouldNotOverrideAlreadySetEntityCallbacks() { + + EntityCallbacks callbacks = EntityCallbacks.create(); + ApplicationContext ctx = new StaticApplicationContext(); + + converter.setEntityCallbacks(callbacks); + converter.setApplicationContext(ctx); + + assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isSameAs(callbacks); + } + + @Test // DATAMONGO-2479 + 
void resolveDBRefMapValueShouldInvokeCallbacks() { + + AfterConvertCallback afterConvertCallback = spy(new ReturningAfterConvertCallback()); + converter.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); + + when(resolver.fetch(Mockito.any(DBRef.class))).thenReturn(new org.bson.Document()); + DBRef dbRef = mock(DBRef.class); + + org.bson.Document refMap = new org.bson.Document("foo", dbRef); + org.bson.Document document = new org.bson.Document("personMap", refMap); + + DBRefWrapper result = converter.read(DBRefWrapper.class, document); + + verify(afterConvertCallback).onAfterConvert(eq(result.personMap.get("foo")), eq(new org.bson.Document()), any()); + } + + @Test // DATAMONGO-2300 + void readAndConvertDBRefNestedByMapCorrectly() { + + org.bson.Document cluster = new org.bson.Document("_id", 100L); + DBRef dbRef = new DBRef("clusters", 100L); + + org.bson.Document data = new org.bson.Document("_id", 3L); + data.append("cluster", dbRef); + + MappingMongoConverter spyConverter = spy(converter); + Mockito.doReturn(cluster).when(spyConverter).readRef(dbRef); + + Map result = spyConverter.readMap(spyConverter.getConversionContext(ObjectPath.ROOT), data, + ClassTypeInformation.MAP); + + assertThat(((Map) result.get("cluster")).get("_id")).isEqualTo(100L); + } + + @Test // GH-3546 + void readFlattensNestedDocumentToStringIfNecessary() { + + org.bson.Document source = new org.bson.Document("s", + new org.bson.Document("json", "string").append("_id", UUID.randomUUID())); + + Address target = converter.read(Address.class, source); + assertThat(target.street).isNotNull(); + } + + @Test // DATAMONGO-1902 + void writeFlattensUnwrappedType() { + + WithNullableUnwrapped source = new WithNullableUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new EmbeddableType(); + source.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.embeddableValue.stringValue = "string-val"; + source.embeddableValue.transientValue = 
"must-not-be-written"; + source.embeddableValue.atFieldAnnotatedValue = "@Field"; + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target).containsEntry("_id", "id-1") // + .containsEntry("stringValue", "string-val") // + .containsEntry("listValue", Arrays.asList("list-val-1", "list-val-2")) // + .containsEntry("with-at-field-annotation", "@Field") // + .doesNotContainKey("embeddableValue") // + .doesNotContainKey("transientValue"); + } + + @Test // DATAMONGO-1902 + void writePrefixesUnwrappedType() { + + WithPrefixedNullableUnwrapped source = new WithPrefixedNullableUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new EmbeddableType(); + source.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.embeddableValue.stringValue = "string-val"; + source.embeddableValue.transientValue = "must-not-be-written"; + source.embeddableValue.atFieldAnnotatedValue = "@Field"; + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target).containsEntry("_id", "id-1") // + .containsEntry("prefix-stringValue", "string-val") // + .containsEntry("prefix-listValue", Arrays.asList("list-val-1", "list-val-2")) // + .containsEntry("prefix-with-at-field-annotation", "@Field") // + .doesNotContainKey("embeddableValue") // + .doesNotContainKey("transientValue") // + .doesNotContainKey("prefix-transientValue"); + } + + @Test // DATAMONGO-1902 + void writeNullUnwrappedType() { + + WithNullableUnwrapped source = new WithNullableUnwrapped(); + source.id = "id-1"; + source.embeddableValue = null; + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target) // + .doesNotContainKey("prefix-stringValue").doesNotContainKey("prefix-listValue") + .doesNotContainKey("embeddableValue"); + } + + @Test // DATAMONGO-1902 + void writeDeepNestedUnwrappedType() { + + WrapperAroundWithUnwrapped source = new 
WrapperAroundWithUnwrapped(); + source.someValue = "root-level-value"; + source.nullableEmbedded = new WithNullableUnwrapped(); + source.nullableEmbedded.id = "id-1"; + source.nullableEmbedded.embeddableValue = new EmbeddableType(); + source.nullableEmbedded.embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + source.nullableEmbedded.embeddableValue.stringValue = "string-val"; + source.nullableEmbedded.embeddableValue.transientValue = "must-not-be-written"; + source.nullableEmbedded.embeddableValue.atFieldAnnotatedValue = "@Field"; + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target).containsEntry("someValue", "root-level-value") // + .containsEntry("nullableEmbedded", new org.bson.Document("_id", "id-1").append("stringValue", "string-val") // + .append("listValue", Arrays.asList("list-val-1", "list-val-2")) // + .append("with-at-field-annotation", "@Field")); // + } + + @Test // DATAMONGO-1902 + void readUnwrappedType() { + + org.bson.Document source = new org.bson.Document("_id", "id-1") // + .append("stringValue", "string-val") // + .append("listValue", Arrays.asList("list-val-1", "list-val-2")) // + .append("with-at-field-annotation", "@Field"); + + EmbeddableType embeddableValue = new EmbeddableType(); + embeddableValue.stringValue = "string-val"; + embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + embeddableValue.atFieldAnnotatedValue = "@Field"; + + WithNullableUnwrapped target = converter.read(WithNullableUnwrapped.class, source); + assertThat(target.embeddableValue).isEqualTo(embeddableValue); + } + + @Test // DATAMONGO-1902 + void readPrefixedUnwrappedType() { + + org.bson.Document source = new org.bson.Document("_id", "id-1") // + .append("prefix-stringValue", "string-val") // + .append("prefix-listValue", Arrays.asList("list-val-1", "list-val-2")) // + .append("prefix-with-at-field-annotation", "@Field"); + + EmbeddableType embeddableValue = new 
EmbeddableType(); + embeddableValue.stringValue = "string-val"; + embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + embeddableValue.atFieldAnnotatedValue = "@Field"; + + WithPrefixedNullableUnwrapped target = converter.read(WithPrefixedNullableUnwrapped.class, source); + assertThat(target.embeddableValue).isEqualTo(embeddableValue); + } + + @Test // DATAMONGO-1902 + void readNullableUnwrappedTypeWhenSourceDoesNotContainValues() { + + org.bson.Document source = new org.bson.Document("_id", "id-1"); + + WithNullableUnwrapped target = converter.read(WithNullableUnwrapped.class, source); + assertThat(target.embeddableValue).isNull(); + } + + @Test // DATAMONGO-1902 + void readEmptyUnwrappedTypeWhenSourceDoesNotContainValues() { + + org.bson.Document source = new org.bson.Document("_id", "id-1"); + + WithEmptyUnwrappedType target = converter.read(WithEmptyUnwrappedType.class, source); + assertThat(target.embeddableValue).isNotNull(); + } + + @Test // DATAMONGO-1902 + void readDeepNestedUnwrappedType() { + + org.bson.Document source = new org.bson.Document("someValue", "root-level-value").append("nullableEmbedded", + new org.bson.Document("_id", "id-1").append("stringValue", "string-val") // + .append("listValue", Arrays.asList("list-val-1", "list-val-2")) // + .append("with-at-field-annotation", "@Field")); + + WrapperAroundWithUnwrapped target = converter.read(WrapperAroundWithUnwrapped.class, source); + + EmbeddableType embeddableValue = new EmbeddableType(); + embeddableValue.stringValue = "string-val"; + embeddableValue.listValue = Arrays.asList("list-val-1", "list-val-2"); + embeddableValue.atFieldAnnotatedValue = "@Field"; + + assertThat(target.someValue).isEqualTo("root-level-value"); + assertThat(target.nullableEmbedded).isNotNull(); + assertThat(target.nullableEmbedded.embeddableValue).isEqualTo(embeddableValue); + } + + @Test // DATAMONGO-1902 + void readUnwrappedTypeWithComplexValue() { + + org.bson.Document source = new 
org.bson.Document("_id", "id-1").append("address", + new org.bson.Document("s", "1007 Mountain Drive").append("city", "Gotham")); + + WithNullableUnwrapped target = converter.read(WithNullableUnwrapped.class, source); + + Address expected = new Address(); + expected.city = "Gotham"; + expected.street = "1007 Mountain Drive"; + + assertThat(target.embeddableValue.address) // + .isEqualTo(expected); + } + + @Test // GH-4491 + void readUnwrappedTypeWithComplexValueUsingConstructor() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("stringValue", "hello").append("address", + new org.bson.Document("s", "1007 Mountain Drive").append("city", "Gotham")); + + WithUnwrappedConstructor target = converter.read(WithUnwrappedConstructor.class, source); + + Address expected = new Address(); + expected.city = "Gotham"; + expected.street = "1007 Mountain Drive"; + + assertThat(target.embeddableValue.stringValue) // + .isEqualTo("hello"); + assertThat(target.embeddableValue.address) // + .isEqualTo(expected); + } + + @Test // GH-4491 + void readUnwrappedTypeWithComplexValueUsingConstructorWhenUnwrappedPropertiesNotPresent() { + + org.bson.Document source = new org.bson.Document("_id", "id-1"); + + WithUnwrappedConstructor target = converter.read(WithUnwrappedConstructor.class, source); + + assertThat(target.id).isEqualTo("id-1"); + assertThat(target.embeddableValue).isNotNull(); // it's defined as Empty + assertThat(target.embeddableValue.stringValue) // + .isNull(); + assertThat(target.embeddableValue.address) // + .isNull(); + } + + @Test // DATAMONGO-1902 + void writeUnwrappedTypeWithComplexValue() { + + WithNullableUnwrapped source = new WithNullableUnwrapped(); + source.id = "id-1"; + source.embeddableValue = new EmbeddableType(); + source.embeddableValue.address = new Address(); + source.embeddableValue.address.city = "Gotham"; + source.embeddableValue.address.street = "1007 Mountain Drive"; + + org.bson.Document target = new org.bson.Document(); + 
converter.write(source, target); + + assertThat(target) // + .containsEntry("address", new org.bson.Document("s", "1007 Mountain Drive").append("city", "Gotham")) // + .doesNotContainKey("street") // + .doesNotContainKey("city"); // + + // use exact key matching, do not dive into nested documents + Assertions.assertThat(target) // + .doesNotContainKey("address.s") // + .doesNotContainKey("address.city"); + } + + @Test // GH-3580 + void shouldFallbackToConfiguredCustomConversionTargetOnRead() { + + GenericTypeConverter genericTypeConverter = spy(new GenericTypeConverter()); + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(genericTypeConverter); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("_class", SubTypeOfGenericType.class.getName()).append("value", + "v1"); + GenericType target = converter.read(GenericType.class, source); + + assertThat(target).isInstanceOf(GenericType.class); + assertThat(target.content).isEqualTo("v1"); + + verify(genericTypeConverter).convert(eq(source)); + } + + @Test // GH-3580 + void shouldUseMostConcreteCustomConversionTargetOnRead() { + + GenericTypeConverter genericTypeConverter = spy(new GenericTypeConverter()); + SubTypeOfGenericTypeConverter subTypeOfGenericTypeConverter = spy(new SubTypeOfGenericTypeConverter()); + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(genericTypeConverter); + it.registerConverter(subTypeOfGenericTypeConverter); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("_class", SubTypeOfGenericType.class.getName()).append("value", + "v1"); + GenericType target = converter.read(GenericType.class, source); + + assertThat(target).isInstanceOf(SubTypeOfGenericType.class); + 
assertThat(target.content).isEqualTo("v1_s"); + + verify(genericTypeConverter, never()).convert(any()); + verify(subTypeOfGenericTypeConverter).convert(eq(source)); + } + + @Test // GH-3660 + void usesCustomConverterForMapTypesOnWrite() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new TypeImplementingMapToDocumentConverter()); + })); + converter.afterPropertiesSet(); + + TypeImplementingMap source = new TypeImplementingMap("one", 2); + org.bson.Document target = new org.bson.Document(); + + converter.write(source, target); + + assertThat(target).containsEntry("1st", "one").containsEntry("2nd", 2); + } + + @Test // GH-3660 + void usesCustomConverterForTypesImplementingMapOnWrite() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new TypeImplementingMapToDocumentConverter()); + })); + converter.afterPropertiesSet(); + + TypeImplementingMap source = new TypeImplementingMap("one", 2); + org.bson.Document target = new org.bson.Document(); + + converter.write(source, target); + + assertThat(target).containsEntry("1st", "one").containsEntry("2nd", 2); + } + + @Test // GH-3660 + void usesCustomConverterForTypesImplementingMapOnRead() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new DocumentToTypeImplementingMapConverter()); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("1st", "one").append("2nd", 2).append("_class", + TypeImplementingMap.class.getName()); + + TypeImplementingMap target = converter.read(TypeImplementingMap.class, source); + + assertThat(target).isEqualTo(new TypeImplementingMap("one", 2)); + } + + @Test // GH-3660 + void 
usesCustomConverterForPropertiesUsingTypesThatImplementMapOnWrite() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new TypeImplementingMapToDocumentConverter()); + })); + converter.afterPropertiesSet(); + + TypeWrappingTypeImplementingMap source = new TypeWrappingTypeImplementingMap(); + source.typeImplementingMap = new TypeImplementingMap("one", 2); + org.bson.Document target = new org.bson.Document(); + + converter.write(source, target); + + assertThat(target).containsEntry("typeImplementingMap", new org.bson.Document("1st", "one").append("2nd", 2)); + } + + @Test // GH-3660 + void usesCustomConverterForPropertiesUsingTypesImplementingMapOnRead() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new DocumentToTypeImplementingMapConverter()); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("typeImplementingMap", + new org.bson.Document("1st", "one").append("2nd", 2)) + .append("_class", TypeWrappingTypeImplementingMap.class.getName()); + + TypeWrappingTypeImplementingMap target = converter.read(TypeWrappingTypeImplementingMap.class, source); + + assertThat(target.typeImplementingMap).isEqualTo(new TypeImplementingMap("one", 2)); + } + + @Test // GH-3407 + void shouldWriteNullPropertyCorrectly() { + + WithFieldWrite fieldWrite = new WithFieldWrite(); + + org.bson.Document document = new org.bson.Document(); + converter.write(fieldWrite, document); + + assertThat(document).containsEntry("writeAlways", null).doesNotContainKey("writeNonNull"); + assertThat(document).containsEntry("writeAlwaysPersonDBRef", null).doesNotContainKey("writeNonNullPersonDBRef"); + } + + @Test // GH-4710 + void shouldWriteSimplePropertyCorrectlyAfterConversionReturnsNull() { + + MongoCustomConversions conversions = 
new MongoCustomConversions(ConverterBuilder + .writing(Integer.class, String.class, it -> null).andReading(it -> null).getConverters().stream().toList()); + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(conversions); + converter.afterPropertiesSet(); + + WithFieldWrite fieldWrite = new WithFieldWrite(); + fieldWrite.writeAlways = 10; + fieldWrite.writeNonNull = 20; + + org.bson.Document document = new org.bson.Document(); + converter.write(fieldWrite, document); + + assertThat(document).containsEntry("writeAlways", null).doesNotContainKey("writeNonNull"); + } + + @Test // GH-4710 + void shouldWriteComplexPropertyCorrectlyAfterConversionReturnsNull() { + + MongoCustomConversions conversions = new MongoCustomConversions(ConverterBuilder + .writing(Person.class, String.class, it -> null).andReading(it -> null).getConverters().stream().toList()); + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(conversions); + converter.afterPropertiesSet(); + + WithFieldWrite fieldWrite = new WithFieldWrite(); + fieldWrite.writeAlwaysPerson = new Person(); + fieldWrite.writeNonNullPerson = new Person(); + + org.bson.Document document = new org.bson.Document(); + converter.write(fieldWrite, document); + + assertThat(document).containsEntry("writeAlwaysPerson", null).doesNotContainKey("writeNonNullPerson"); + } + + @Test // GH-4710 + void shouldDelegateWriteOfDBRefToCustomConversionIfConfigured() { + + MongoCustomConversions conversions = new MongoCustomConversions( + ConverterBuilder.writing(Person.class, DBRef.class, it -> new DBRef("persons", "n/a")).andReading(it -> null) + .getConverters().stream().toList()); + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(conversions); + converter.afterPropertiesSet(); + + WithFieldWrite fieldWrite = new WithFieldWrite(); + fieldWrite.writeAlwaysPersonDBRef = new Person(); + 
fieldWrite.writeNonNullPersonDBRef = new Person(); + + org.bson.Document document = new org.bson.Document(); + converter.write(fieldWrite, document); + + assertThat(document).containsEntry("writeAlwaysPersonDBRef", new DBRef("persons", "n/a"));// .doesNotContainKey("writeNonNullPersonDBRef"); + } + + @Test // GH-4710 + void shouldDelegateWriteOfDBRefToCustomConversionIfConfiguredAndCheckNulls() { + + MongoCustomConversions conversions = new MongoCustomConversions(ConverterBuilder + .writing(Person.class, DBRef.class, it -> null).andReading(it -> null).getConverters().stream().toList()); + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(conversions); + converter.afterPropertiesSet(); + + WithFieldWrite fieldWrite = new WithFieldWrite(); + fieldWrite.writeAlwaysPersonDBRef = new Person(); + fieldWrite.writeNonNullPersonDBRef = new Person(); + + org.bson.Document document = new org.bson.Document(); + converter.write(fieldWrite, document); + + assertThat(document).containsEntry("writeAlwaysPersonDBRef", null).doesNotContainKey("writeNonNullPersonDBRef"); + } + + @Test // GH-4710 + void shouldApplyNullConversionToPropertyValueConverters() { + + MongoCustomConversions conversions = new MongoCustomConversions( + MongoCustomConversions.MongoConverterConfigurationAdapter.from(Collections.emptyList()) + .configurePropertyConversions(registrar -> { + registrar.registerConverter(Person.class, "firstname", new MongoValueConverter() { + @Override + public String readNull(MongoConversionContext context) { + return "NULL"; + } + + @Override + public String writeNull(MongoConversionContext context) { + return "NULL"; + } + + @Override + public String read(String value, MongoConversionContext context) { + return ""; + } + + @Override + public String write(String value, MongoConversionContext context) { + return ""; + } + }); + })); + + converter = new MappingMongoConverter(resolver, mappingContext); + 
converter.setCustomConversions(conversions); + converter.afterPropertiesSet(); + + org.bson.Document document = new org.bson.Document(); + converter.write(new Person(), document); + + assertThat(document).containsEntry("foo", "NULL"); + + document = new org.bson.Document("foo", null); + Person result = converter.read(Person.class, document); + + assertThat(result.firstname).isEqualTo("NULL"); + } + + @Test // GH-3686 + void readsCollectionContainingNullValue() { + + org.bson.Document source = new org.bson.Document("items", + Arrays.asList(new org.bson.Document("itemKey", "i1"), null, new org.bson.Document("itemKey", "i3"))); + + Order target = converter.read(Order.class, source); + + assertThat(target.items).map(it -> it != null ? it.itemKey : null).containsExactly("i1", null, "i3"); + } + + @Test // GH-3686 + void readsArrayContainingNullValue() { + + org.bson.Document source = new org.bson.Document("arrayOfStrings", Arrays.asList("i1", null, "i3")); + + WithArrays target = converter.read(WithArrays.class, source); + + assertThat(target.arrayOfStrings).containsExactly("i1", null, "i3"); + } + + @Test // GH-3686 + void readsMapContainingNullValue() { + + org.bson.Document source = new org.bson.Document("mapOfObjects", + new org.bson.Document("item1", "i1").append("item2", null).append("item3", "i3")); + + ClassWithMapProperty target = converter.read(ClassWithMapProperty.class, source); + + assertThat(target.mapOfObjects).containsEntry("item1", "i1").containsEntry("item2", null).containsEntry("item3", + "i3"); + } + + @Test // GH-3670 + void appliesCustomConverterEvenToSimpleTypes() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new MongoSimpleTypeConverter()); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("content", new Binary(new byte[] { 0x00, 0x42 })); + + GenericType target = 
converter.read(GenericType.class, source); + assertThat(target.content).isInstanceOf(byte[].class); + } + + @Test // GH-3702 + void readsRawDocument() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("raw", + new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1))); + + WithRawDocumentProperties target = converter.read(WithRawDocumentProperties.class, source); + + assertThat(target.raw).isInstanceOf(org.bson.Document.class) + .isEqualTo(new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1))); + } + + @Test // GH-3702 + void readsListOfRawDocument() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("listOfRaw", + Arrays.asList(new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1)))); + + WithRawDocumentProperties target = converter.read(WithRawDocumentProperties.class, source); + + assertThat(target.listOfRaw) + .containsExactly(new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1))); + } + + @Test // GH-3692 + void readsMapThatDoesNotComeAsDocument() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("mapOfObjects", + Collections.singletonMap("simple", 1)); + + ClassWithMapProperty target = converter.read(ClassWithMapProperty.class, source); + + assertThat(target.mapOfObjects).containsEntry("simple", 1); + } + + @Test // GH-3851 + void associationMappingShouldFallBackToDefaultIfNoAtReferenceAnnotationPresent/* as done via jmolecules */() { + + UUID id = UUID.randomUUID(); + Person sourceValue = new Person(); + sourceValue.id = id.toString(); + + DocumentAccessor accessor = new DocumentAccessor(new org.bson.Document()); + MongoPersistentProperty persistentProperty = mock(MongoPersistentProperty.class); + when(persistentProperty.isAssociation()).thenReturn(true); + 
when(persistentProperty.getMongoField()).thenReturn(MongoField.fromKey("pName")); + doReturn(TypeInformation.of(Person.class)).when(persistentProperty).getTypeInformation(); + doReturn(Person.class).when(persistentProperty).getType(); + doReturn(Person.class).when(persistentProperty).getRawType(); + + converter.writePropertyInternal(sourceValue, accessor, persistentProperty, null); + + assertThat(accessor.getDocument()) + .isEqualTo(new org.bson.Document("pName", new org.bson.Document("_id", id.toString()))); + } + + @Test // GH-2860 + void projectShouldReadSimpleInterfaceProjection() { + + org.bson.Document source = new org.bson.Document("birthDate", + Date.from(LocalDate.of(1999, 12, 1).atStartOfDay(systemDefault()).toInstant())).append("foo", "Walter"); + + EntityProjectionIntrospector discoverer = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection projection = discoverer.introspect(PersonProjection.class, Person.class); + PersonProjection person = converter.project(projection, source); + + assertThat(person.getBirthDate()).isEqualTo(LocalDate.of(1999, 12, 1)); + assertThat(person.getFirstname()).isEqualTo("Walter"); + } + + @Test // GH-2860 + void projectShouldReadSimpleDtoProjection() { + + org.bson.Document source = new org.bson.Document("birthDate", + Date.from(LocalDate.of(1999, 12, 1).atStartOfDay(systemDefault()).toInstant())).append("foo", "Walter"); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection projection = introspector.introspect(PersonDto.class, Person.class); + PersonDto person = 
converter.project(projection, source); + + assertThat(person.getBirthDate()).isEqualTo(LocalDate.of(1999, 12, 1)); + assertThat(person.getFirstname()).isEqualTo("Walter"); + } + + @Test // GH-2860 + void projectShouldReadNestedProjection() { + + org.bson.Document source = new org.bson.Document("addresses", + Collections.singletonList(new org.bson.Document("s", "hwy"))); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection projection = introspector.introspect(WithNestedProjection.class, + Person.class); + WithNestedProjection person = converter.project(projection, source); + + assertThat(person.getAddresses()).extracting(AddressProjection::getStreet).hasSize(1).containsOnly("hwy"); + } + + @Test // GH-4609 + void projectShouldReadNestedInterfaceProjection() { + + org.bson.Document source = new org.bson.Document("foo", "spring").append("address", + new org.bson.Document("s", "data").append("city", "mongodb")); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection projection = introspector + .introspect(WithNestedInterfaceProjection.class, Person.class); + WithNestedInterfaceProjection person = converter.project(projection, source); + + assertThat(person.getFirstname()).isEqualTo("spring"); + assertThat(person.getAddress().getStreet()).isEqualTo("data"); + } + + @Test // GH-4609 + void projectShouldReadNestedDtoProjection() { + + org.bson.Document source = new org.bson.Document("foo", "spring").append("address", + new org.bson.Document("s", "data").append("city", 
"mongodb")); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection projection = introspector + .introspect(WithNestedDtoProjection.class, Person.class); + WithNestedDtoProjection person = converter.project(projection, source); + + assertThat(person.getFirstname()).isEqualTo("spring"); + assertThat(person.getAddress().getStreet()).isEqualTo("data"); + } + + @Test // GH-4626 + void projectShouldReadDtoProjectionPropertiesOnlyOnce() { + + ByteBuffer number = ByteBuffer.allocate(8); + number.putDouble(1.2d); + number.flip(); + + org.bson.Document source = new org.bson.Document("number", number); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection projection = introspector.introspect(DoubleHolderDto.class, + WithDoubleHolder.class); + DoubleHolderDto result = converter.project(projection, source); + + assertThat(result.number.number).isCloseTo(1.2, Percentage.withPercentage(1)); + } + + @Test // GH-2860 + void projectShouldReadProjectionWithNestedEntity() { + + org.bson.Document source = new org.bson.Document("addresses", + Collections.singletonList(new org.bson.Document("s", "hwy"))); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + EntityProjection projection = introspector + .introspect(ProjectionWithNestedEntity.class, Person.class); 
+ ProjectionWithNestedEntity person = converter.project(projection, source); + + assertThat(person.getAddresses()).extracting(Address::getStreet).hasSize(1).containsOnly("hwy"); + } + + @Test // GH-3998 + void shouldReadOpenProjection() { + + org.bson.Document author = new org.bson.Document("firstName", "Walter").append("lastName", "White"); + org.bson.Document book = new org.bson.Document("_id", "foo").append("name", "my-book").append("author", author); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + BookProjection projection = converter.project(introspector.introspect(BookProjection.class, Book.class), book); + + assertThat(projection.getName()).isEqualTo("my-book by Walter White"); + } + + @Test // GH-4120 + void shouldReadDtoProjection() { + + org.bson.Document author = new org.bson.Document("firstName", "Walter").append("lastName", "White"); + org.bson.Document book = new org.bson.Document("_id", "foo").append("name", "my-book").append("author", author); + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + AuthorOnly projection = converter.project(introspector.introspect(AuthorOnly.class, Book.class), book); + + assertThat(projection.getAuthor().getFirstName()).isEqualTo("Walter"); + assertThat(projection.getAuthor().getLastName()).isEqualTo("White"); + } + + @Test // GH-3596 + void simpleConverter() { + + WithValueConverters wvc = new WithValueConverters(); + wvc.converterWithDefaultCtor = "spring"; + + org.bson.Document target = new org.bson.Document(); + converter.write(wvc, target); + + 
assertThat(target).containsEntry("converterWithDefaultCtor", new org.bson.Document("foo", "spring")); + + WithValueConverters read = converter.read(WithValueConverters.class, target); + assertThat(read.converterWithDefaultCtor).startsWith("spring"); + } + + @Test // GH-3596 + void enumConverter() { + + WithValueConverters wvc = new WithValueConverters(); + wvc.converterEnum = "spring"; + + org.bson.Document target = new org.bson.Document(); + converter.write(wvc, target); + + assertThat(target).containsEntry("converterEnum", new org.bson.Document("bar", "spring")); + + WithValueConverters read = converter.read(WithValueConverters.class, target); + assertThat(read.converterEnum).isEqualTo("spring"); + } + + @Test // GH-3596 + void beanConverter() { + + DefaultListableBeanFactory defaultListableBeanFactory = new DefaultListableBeanFactory(); + defaultListableBeanFactory.registerBeanDefinition("someDependency", + BeanDefinitionBuilder.rootBeanDefinition(SomeDependency.class).getBeanDefinition()); + + converter = new MappingMongoConverter(resolver, mappingContext); + + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerPropertyValueConverterFactory( + PropertyValueConverterFactory.beanFactoryAware(defaultListableBeanFactory)); + })); + converter.afterPropertiesSet(); + + WithContextValueConverters wvc = new WithContextValueConverters(); + wvc.converterBean = "spring"; + + org.bson.Document target = new org.bson.Document(); + converter.write(wvc, target); + + assertThat(target.get("converterBean", org.bson.Document.class)).satisfies(it -> { + assertThat(it).containsKey("ooo"); + assertThat((String) it.get("ooo")).startsWith("spring - "); + }); + + WithContextValueConverters read = converter.read(WithContextValueConverters.class, target); + assertThat(read.converterBean).startsWith("spring -"); + } + + @Test // GH-3596 + void pathConfiguredConverter/*no annotation required*/() { + + converter = new MappingMongoConverter(resolver, 
mappingContext); + + converter.setCustomConversions(MongoCustomConversions.create(it -> { + + it.configurePropertyConversions(registrar -> { + registrar.registerConverter(WithValueConverters.class, "viaRegisteredConverter", + new PropertyValueConverter() { + + @Nullable + @Override + public String read(@Nullable org.bson.Document nativeValue, MongoConversionContext context) { + return nativeValue.getString("bar"); + } + + @Nullable + @Override + public org.bson.Document write(@Nullable String domainValue, MongoConversionContext context) { + return new org.bson.Document("bar", domainValue); + } + }); + }); + })); + + WithValueConverters wvc = new WithValueConverters(); + wvc.viaRegisteredConverter = "spring"; + + org.bson.Document target = new org.bson.Document(); + converter.write(wvc, target); + + assertThat(target).containsEntry("viaRegisteredConverter", new org.bson.Document("bar", "spring")); + + WithValueConverters read = converter.read(WithValueConverters.class, target); + assertThat(read.viaRegisteredConverter).isEqualTo("spring"); + } + + @Test // GH-4098 + void resolvesCyclicNonAssociationValueFromSource/* and does not attempt to be smart and look up id values in context */() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("value", "v1").append("cycle", + new org.bson.Document("_id", "id-1").append("value", "v2")); + + assertThat(converter.read(Cyclic.class, source).cycle.value).isEqualTo("v2"); + } + + @Test // GH-4371 + void shouldConvertTypesToStringTargetType() { + + org.bson.Document source = org.bson.Document.parse(""" + { + city : ["Gotham", "Metropolis"] + } + """); + + assertThat(converter.read(Address.class, source).city).isEqualTo("Gotham,Metropolis"); + } + + @Test // GH-2350 + void shouldConvertBsonUndefinedToNull() { + + org.bson.Document source = new org.bson.Document("s", "hallway drive").append("city", new BsonUndefined()); + assertThat(converter.read(Address.class, source).city).isNull(); + } + + @Test // 
GH-4464 + void shouldNotSplitKeyNamesWithDotOnWriteIfFieldTypeIsKey() { + + WithPropertyHavingDotsInFieldName source = new WithPropertyHavingDotsInFieldName(); + source.value = "A"; + + assertThat(write(source)).containsEntry("field.name.with.dots", "A"); + } + + @Test // GH-4464 + void shouldNotSplitKeyNamesWithDotOnReadIfFieldTypeIsKey() { + + org.bson.Document source = new org.bson.Document("field.name.with.dots", "A"); + + WithPropertyHavingDotsInFieldName target = converter.read(WithPropertyHavingDotsInFieldName.class, source); + assertThat(target.value).isEqualTo("A"); + } + + @Test // GH-4464 + void shouldNotSplitKeyNamesWithDotOnWriteOfNestedPropertyIfFieldTypeIsKey() { + + WrapperForTypeWithPropertyHavingDotsInFieldName source = new WrapperForTypeWithPropertyHavingDotsInFieldName(); + source.nested = new WithPropertyHavingDotsInFieldName(); + source.nested.value = "A"; + + assertThat(write(source).get("nested", org.bson.Document.class)).containsEntry("field.name.with.dots", "A"); + } + + @Test // GH-4464 + void shouldNotSplitKeyNamesWithDotOnReadOfNestedIfFieldTypeIsKey() { + + org.bson.Document source = new org.bson.Document("nested", new org.bson.Document("field.name.with.dots", "A")); + + WrapperForTypeWithPropertyHavingDotsInFieldName target = converter + .read(WrapperForTypeWithPropertyHavingDotsInFieldName.class, source); + assertThat(target.nested).isNotNull(); + assertThat(target.nested.value).isEqualTo("A"); + } + + @Test // GH-4464 + void writeShouldAllowDotsInMapKeyNameIfConfigured() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.preserveMapKeys(true); + converter.afterPropertiesSet(); + + Person person = new Person(); + person.firstname = "bart"; + person.lastname = "simpson"; + + ClassWithMapProperty source = new ClassWithMapProperty(); + source.mapOfPersons = Map.of("map.key.with.dots", person); + + assertThat(write(source).get("mapOfPersons", org.bson.Document.class)).containsKey("map.key.with.dots"); + } 
+ + @Test // GH-4464 + void readShouldAllowDotsInMapKeyNameIfConfigured() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.preserveMapKeys(true); + converter.afterPropertiesSet(); + + Person person = new Person(); + person.firstname = "bart"; + person.lastname = "simpson"; + + org.bson.Document source = new org.bson.Document("mapOfPersons", + new org.bson.Document("map.key.with.dots", write(person))); + + ClassWithMapProperty target = converter.read(ClassWithMapProperty.class, source); + + assertThat(target.mapOfPersons).containsEntry("map.key.with.dots", person); + } + + @ValueSource(classes = { ComplexIdAndNoAnnotation.class, ComplexIdAndIdAnnotation.class, + ComplexIdAndMongoIdAnnotation.class, ComplexIdAndFieldAnnotation.class }) + @ParameterizedTest // GH-4524 + void projectShouldReadComplexIdType(Class projectionTargetType) { + + EntityProjectionIntrospector introspector = EntityProjectionIntrospector.create(converter.getProjectionFactory(), + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and((target, underlyingType) -> !converter.conversions.isSimpleType(target)), + mappingContext); + + ComplexId idValue = ComplexId.of(101L); + org.bson.Document source = new org.bson.Document("_id", new org.bson.Document("innerId", idValue.innerId)) + .append("value", "abc").append("_class", ComplexIdAndNoAnnotation.class.getName()); + + EntityProjection projection = introspector.introspect(projectionTargetType, + ComplexIdAndNoAnnotation.class); + + assertThat(converter.project(projection, source)) // + .isInstanceOf(projectionTargetType) // + .extracting("id").isEqualTo(idValue); + } + + @Test // GH-4877 + void shouldReadNonIdFieldCalledIdFromSource() { + + WithRenamedIdPropertyAndAnotherPropertyNamedId source = new WithRenamedIdPropertyAndAnotherPropertyNamedId(); + source.abc = "actual-id-value"; + source.id = "just-a-field"; + + org.bson.Document document = write(source); + assertThat(document).containsEntry("_id", 
source.abc).containsEntry("id", source.id); + + WithRenamedIdPropertyAndAnotherPropertyNamedId target = converter + .read(WithRenamedIdPropertyAndAnotherPropertyNamedId.class, document); + assertThat(target.abc).isEqualTo(source.abc); + assertThat(target.id).isEqualTo(source.id); + } + + @Test // GH-4706 + void shouldWriteVectorValues() { + + WithVector source = new WithVector(); + source.embeddings = Vector.of(1.1d, 2.2d, 3.3d); + + org.bson.Document document = write(source); + assertThat(document.getList("embeddings", BsonDouble.class)).hasSize(3); + } + + @Test // GH-4706 + void shouldReadVectorValues() { + + org.bson.Document document = new org.bson.Document("embeddings", List.of(1.1d, 2.2d, 3.3d)); + WithVector withVector = converter.read(WithVector.class, document); + assertThat(withVector.embeddings.toDoubleArray()).contains(1.1d, 2.2d, 3.3d); + } + + @Test // GH-4706 + void writesByteArrayAsIsIfNoFieldInstructionsGiven() { + + WithArrays source = new WithArrays(); + source.arrayOfPrimitiveBytes = new byte[] { 0, 1, 2 }; + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target.get("arrayOfPrimitiveBytes", byte[].class)).isSameAs(source.arrayOfPrimitiveBytes); + } + + @Test // GH-3444 + void convertsBigIntegerToDecimal128IfFieldTypeIndicatesConversion() { + + WithExplicitTargetTypes source = new WithExplicitTargetTypes(); + source.bigInteger = BigInteger.valueOf(101); + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target.get("bigInteger")).isEqualTo(new Decimal128(source.bigInteger.longValueExact())); + } + + @Test // GH-3444 + void usesDecimal128NumericFormat() { + + MappingMongoConverter converter = createConverter(MongoCustomConversions.BigDecimalRepresentation.DECIMAL128); + + BigDecimalContainer container = new BigDecimalContainer(); + container.value = BigDecimal.valueOf(2.5d); + container.map = Collections.singletonMap("foo", 
container.value); + + org.bson.Document document = new org.bson.Document(); + converter.write(container, document); + + assertThat(document.get("value")).isInstanceOf(Decimal128.class); + assertThat(((org.bson.Document) document.get("map")).get("foo")).isInstanceOf(Decimal128.class); + } + + @Test // GH-3444 + void usesStringNumericFormat() { + + MappingMongoConverter converter = createConverter(MongoCustomConversions.BigDecimalRepresentation.STRING); + + BigDecimalContainer container = new BigDecimalContainer(); + container.value = BigDecimal.valueOf(2.5d); + container.map = Collections.singletonMap("foo", container.value); + + org.bson.Document document = new org.bson.Document(); + converter.write(container, document); + + assertThat(document).containsEntry("value", "2.5"); + assertThat(document).containsEntry("map.foo", "2.5"); + } + + private MappingMongoConverter createConverter( + MongoCustomConversions.BigDecimalRepresentation bigDecimalRepresentation) { + + MongoCustomConversions conversions = MongoCustomConversions.create( + it -> it.registerConverter(new ByteBufferToDoubleHolderConverter()).bigDecimal(bigDecimalRepresentation)); + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setApplicationContext(context); + mappingContext.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); + mappingContext.afterPropertiesSet(); + + mappingContext.getPersistentEntity(Address.class); + + MappingMongoConverter converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(conversions); + converter.afterPropertiesSet(); + + return converter; + } + + org.bson.Document write(Object source) { + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + return target; + } + + static class WithVector { + + Vector embeddings; + } + + static class GenericType { + T content; + } + + static class ClassWithEnumProperty { + + SampleEnum sampleEnum; + List enums; + EnumSet 
enumSet; + EnumMap enumMap; + } + + enum SampleEnum { + FIRST { + @Override + void method() {} + }, + SECOND { + @Override + void method() {} + }; + + abstract void method(); + } + + interface InterfaceType { + + } + + static class Address implements InterfaceType { + + @Field("s") String street; + String city; + + public String getStreet() { + return street; + } + + public String getCity() { + return city; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Address address = (Address) o; + return Objects.equals(street, address.street) && Objects.equals(city, address.city); + } + + @Override + public int hashCode() { + return Objects.hash(street, city); + } + } + + interface Contact { + + } + + static class Person implements Contact { + + @Id String id; + + Date birthDate; + + @Field("foo") String firstname; + String lastname; + + Set
          addresses; + Address address; + + Person() { + + } + + @PersistenceConstructor + public Person(Set
          addresses) { + this.addresses = addresses; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(birthDate, person.birthDate) + && Objects.equals(firstname, person.firstname) && Objects.equals(lastname, person.lastname) + && Objects.equals(addresses, person.addresses); + } + + @Override + public int hashCode() { + return Objects.hash(id, birthDate, firstname, lastname, addresses); + } + } + + interface PersonProjection { + + LocalDate getBirthDate(); + + String getFirstname(); + } + + interface WithNestedProjection { + + Set getAddresses(); + } + + interface WithNestedInterfaceProjection { + String getFirstname(); + + AddressProjection getAddress(); + } + + interface WithNestedDtoProjection { + String getFirstname(); + + AddressDto getAddress(); + } + + interface ProjectionWithNestedEntity { + + Set
          getAddresses(); + } + + interface AddressProjection { + + String getStreet(); + } + + class AddressDto { + + String street; + + public String getStreet() { + return street; + } + + public void setStreet(String street) { + this.street = street; + } + } + + static class PersonDto { + + LocalDate birthDate; + + @Field("foo") String firstname; + String lastname; + + public PersonDto(LocalDate birthDate, String firstname, String lastname) { + this.birthDate = birthDate; + this.firstname = firstname; + this.lastname = lastname; + } + + public LocalDate getBirthDate() { + return birthDate; + } + + public String getFirstname() { + return firstname; + } + + public String getLastname() { + return lastname; + } + } + + static class ClassWithSortedMap { + SortedMap map; + } + + static class ClassWithMapProperty { + Map map; + Map autoInitMap = Collections.singletonMap("spring", "data"); + Map> mapOfLists; + Map mapOfObjects; + Map mapOfStrings; + Map mapOfPersons; + TreeMap treeMapOfPersons; + + public Map getMap() { + return map; + } + + public Map getAutoInitMap() { + return this.autoInitMap; + } + } + + static class ClassWithNestedMaps { + Map>> nestedMaps; + } + + static class BirthDateContainer { + Date birthDate; + } + + static class BigDecimalContainer { + BigDecimal value; + Map map; + List collection; + } + + static class CollectionWrapper { + List contacts; + List> strings; + List> listOfMaps; + Set contactsSet; + List autoInitList = Collections.singletonList("spring"); + + public List getContacts() { + return contacts; + } + + public Set getContactsSet() { + return contactsSet; + } + + public List getAutoInitList() { + return autoInitList; + } + } + + static class LocaleWrapper { + Locale locale; + } + + static class ClassWithBigIntegerId { + @Id BigInteger id; + } + + static class A { + + String valueType; + T value; + + A(T value) { + this.valueType = value.getClass().getName(); + this.value = value; + } + } + + static class ClassWithIntId { + + @Id int 
id; + } + + static class DefaultedConstructorArgument { + + String foo; + int bar; + double foobar; + + DefaultedConstructorArgument(String foo, @Value("#root.something ?: -1") int bar, double foobar) { + this.foo = foo; + this.bar = bar; + this.foobar = foobar; + } + } + + static class Item { + List attributes; + } + + static class Attribute { + String key; + Object value; + } + + static class Outer { + + class Inner { + String value; + } + + Inner inner; + } + + static class DBRefWrapper { + + DBRef ref; + List refs; + Map refMap; + Map personMap; + } + + static class URLWrapper { + URL url; + } + + static class ClassWithComplexId { + + @Id ComplexId complexId; + } + + static class ComplexId { + + Long innerId; + + static ComplexId of(Long value) { + + ComplexId id = new ComplexId(); + id.innerId = value; + return id; + } + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ComplexId complexId = (ComplexId) o; + return Objects.equals(innerId, complexId.innerId); + } + + @Override + public int hashCode() { + return Objects.hash(innerId); + } + } + + static class TypWithCollectionConstructor { + + List attributes; + + public TypWithCollectionConstructor(List attributes) { + this.attributes = attributes; + } + } + + @TypeAlias("_") + static class Aliased { + String name; + } + + static class ThrowableWrapper { + + Throwable throwable; + } + + @Document + static class PrimitiveContainer { + + @Field("property") private int m_property; + + @PersistenceConstructor + public PrimitiveContainer(@Value("#root.property") int a_property) { + m_property = a_property; + } + + public int property() { + return m_property; + } + } + + @Document + static class ObjectContainer { + + @Field("property") private PrimitiveContainer m_property; @PersistenceConstructor public ObjectContainer(@Value("#root.property") PrimitiveContainer a_property) { m_property = a_property; } - 
public PrimitiveContainer property() { - return m_property; + public PrimitiveContainer property() { + return m_property; + } + } + + class ClassWithGeoBox { + + Box box; + } + + class ClassWithGeoCircle { + + Circle circle; + } + + class ClassWithGeoSphere { + + Sphere sphere; + } + + class ClassWithGeoPolygon { + + Polygon polygon; + } + + class ClassWithGeoShape { + + Shape shape; + } + + class ClassWithTextScoreProperty { + + @TextScore Float score; + } + + class ClassWithExplicitlyNamedDBRefProperty { + + @Field("explict-name-for-db-ref") // + @org.springframework.data.mongodb.core.mapping.DBRef // + ClassWithIntId dbRefProperty; + + public ClassWithIntId getDbRefProperty() { + return dbRefProperty; + } + } + + static class RootForClassWithExplicitlyRenamedIdField { + + @Id String id; + ClassWithExplicitlyRenamedField nested; + } + + static class ClassWithExplicitlyRenamedField { + + @Field("id") String id; + } + + static class RootForClassWithNamedIdField { + + String id; + ClassWithNamedIdField nested; + } + + static class ClassWithNamedIdField { + + String id; + } + + static class ClassWithAnnotatedIdField { + + @Id String key; + } + + static class TypeWithLocalDateTime { + + LocalDateTime date; + + TypeWithLocalDateTime() { + this.date = LocalDateTime.now(); + } + } + + static class TypeWithOptional { + + Optional string = Optional.empty(); + Optional localDateTime = Optional.empty(); + } + + static class ClassWithMapUsingEnumAsKey { + + enum FooBarEnum { + FOO, BAR + } + + Map map; + } + + @WritingConverter + static class FooBarEnumToStringConverter implements Converter { + + @Override + public String convert(FooBarEnum source) { + + if (source == null) { + return null; + } + + return FooBarEnum.FOO.equals(source) ? 
"foo-enum-value" : "bar-enum-value"; + } + } + + @ReadingConverter + static class StringToFooNumConverter implements Converter { + + @Override + public FooBarEnum convert(String source) { + + if (source == null) { + return null; + } + + if ("foo-enum-value".equals(source)) { + return FooBarEnum.FOO; + } + if ("bar-enum-value".equals(source)) { + return FooBarEnum.BAR; + } + + throw new ConversionNotSupportedException(source, String.class, null); + } + } + + static class TypeWithPropertyInNestedField { + @Field("nested.sample") String sample; + } + + static class TypeWithMapOfLongValues { + Map map; + } + + static class WithArrayInConstructor { + + final String[] array; + + public WithArrayInConstructor(String[] array) { + this.array = array; + } + } + + static class WithArrays { + String[] arrayOfStrings; + byte[] arrayOfPrimitiveBytes; + } + + // DATAMONGO-1898 + + // DATACMNS-1278 + static interface SomeInterface {} + + static enum InterfacedEnum implements SomeInterface { + INSTANCE; + } + + static class DocWithInterfacedEnum { + SomeInterface property; + } + + // DATAMONGO-1904 + + static class WithNestedLists { + float[][][] nestedFloats; + } + + static class ImmutableObject { + + final String id; + final String name; + final boolean witherUsed; + + private ImmutableObject(String id) { + this.id = id; + this.name = null; + this.witherUsed = false; + } + + private ImmutableObject(String id, String name, boolean witherUsed) { + this.id = id; + this.name = name; + this.witherUsed = witherUsed; + } + + public ImmutableObject() { + this.id = null; + this.name = null; + witherUsed = false; + } + + public ImmutableObject withId(String id) { + return new ImmutableObject(id, name, true); + } + + public String getId() { + return id; + } + + public String getName() { + return name; + } + + public boolean isWitherUsed() { + return witherUsed; + } + } + + static class ImmutableObjectWithIdConstructorPropertyAndNoIdWitherMethod { + + final @Id String id; + String value; + + 
public ImmutableObjectWithIdConstructorPropertyAndNoIdWitherMethod(String id) { + this.id = id; + } + } + + // DATAMONGO-2135 + static class SomeItem { + + String itemKey; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SomeItem someItem = (SomeItem) o; + return Objects.equals(itemKey, someItem.itemKey); + } + + @Override + public int hashCode() { + return Objects.hash(itemKey); + } + } + + static class Order { + Collection items = new ArrayList<>(); + } + + static class WithExplicitTargetTypes { + + @Field(targetType = FieldType.SCRIPT) // + String script; + + @Field(targetType = FieldType.SCRIPT) // + List scripts; + + @Field(targetType = FieldType.DECIMAL128) // + BigDecimal bigDecimal; + + @Field(targetType = FieldType.DECIMAL128) + BigInteger bigInteger; + + @Field(targetType = FieldType.INT64) // + Date dateAsLong; + + @Field(targetType = FieldType.DATE_TIME) // + Long longAsDate; + + @Field(targetType = FieldType.BOOLEAN) // + String stringAsBoolean; + + @Field(targetType = FieldType.OBJECT_ID) // + Date dateAsObjectId; + } + + static class WrapperAroundWithUnwrapped { + + String someValue; + WithNullableUnwrapped nullableEmbedded; + WithEmptyUnwrappedType emptyEmbedded; + WithPrefixedNullableUnwrapped prefixedEmbedded; + } + + static class WithNullableUnwrapped { + + String id; + + @Unwrapped.Nullable EmbeddableType embeddableValue; + } + + static class WithUnwrappedConstructor { + + private final String id; + + private final @Unwrapped.Empty EmbeddableType embeddableValue; + + public WithUnwrappedConstructor(String id, EmbeddableType embeddableValue) { + this.id = id; + this.embeddableValue = embeddableValue; + } + } + + static class WithPrefixedNullableUnwrapped { + + String id; + + @Unwrapped.Nullable("prefix-") EmbeddableType embeddableValue; + } + + static class WithEmptyUnwrappedType { + + String id; + + @Unwrapped.Empty EmbeddableType 
embeddableValue; + } + + static class EmbeddableType { + + String stringValue; + List listValue; + + @Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + + @Transient // + String transientValue; + + Address address; + + @Override + public boolean equals(Object o) { + + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + EmbeddableType that = (EmbeddableType) o; + return Objects.equals(stringValue, that.stringValue) && Objects.equals(listValue, that.listValue) + && Objects.equals(atFieldAnnotatedValue, that.atFieldAnnotatedValue) + && Objects.equals(transientValue, that.transientValue) && Objects.equals(address, that.address); + } + + @Override + public int hashCode() { + return Objects.hash(stringValue, listValue, atFieldAnnotatedValue, transientValue, address); + } + } + + static class ReturningAfterConvertCallback implements AfterConvertCallback { + + @Override + public Person onAfterConvert(Person entity, org.bson.Document document, String collection) { + + return entity; + } + } + + static class SubTypeOfGenericType extends GenericType { + + } + + @ReadingConverter + static class GenericTypeConverter implements Converter> { + + @Override + public GenericType convert(org.bson.Document source) { + + GenericType target = new GenericType<>(); + target.content = source.get("value"); + return target; + } + } + + @ReadingConverter + static class SubTypeOfGenericTypeConverter implements Converter { + + @Override + public SubTypeOfGenericType convert(org.bson.Document source) { + + SubTypeOfGenericType target = new SubTypeOfGenericType(); + target.content = source.getString("value") + "_s"; + return target; } } - class ClassWithGeoBox { + @WritingConverter + static class TypeImplementingMapToDocumentConverter implements Converter { - Box box; + @Nullable + @Override + public org.bson.Document convert(TypeImplementingMap source) { + return new org.bson.Document("1st", source.val1).append("2nd", 
source.val2); + } } - class ClassWithGeoCircle { + @ReadingConverter + static class DocumentToTypeImplementingMapConverter implements Converter { - Circle circle; + @Nullable + @Override + public TypeImplementingMap convert(org.bson.Document source) { + return new TypeImplementingMap(source.getString("1st"), source.getInteger("2nd")); + } } - class ClassWithGeoSphere { + @ReadingConverter + public static class MongoSimpleTypeConverter implements Converter { - Sphere sphere; + @Override + public byte[] convert(Binary source) { + return source.getData(); + } } - class ClassWithGeoPolygon { + static class TypeWrappingTypeImplementingMap { - Polygon polygon; + String id; + TypeImplementingMap typeImplementingMap; } - class ClassWithGeoShape { + static class TypeImplementingMap implements Map { - Shape shape; + String val1; + int val2; + + TypeImplementingMap(String val1, int val2) { + this.val1 = val1; + this.val2 = val2; + } + + @Override + public int size() { + return 0; + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public boolean containsKey(Object key) { + return false; + } + + @Override + public boolean containsValue(Object value) { + return false; + } + + @Override + public String get(Object key) { + return null; + } + + @Nullable + @Override + public String put(String key, String value) { + return null; + } + + @Override + public String remove(Object key) { + return null; + } + + @Override + public void putAll(@NonNull Map m) { + + } + + @Override + public void clear() { + + } + + @NonNull + @Override + public Set keySet() { + return null; + } + + @NonNull + @Override + public Collection values() { + return null; + } + + @NonNull + @Override + public Set> entrySet() { + return null; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + TypeImplementingMap that = (TypeImplementingMap) o; + return val2 == that.val2 && 
Objects.equals(val1, that.val1); + } + + @Override + public int hashCode() { + return Objects.hash(val1, val2); + } } - class ClassWithTextScoreProperty { + static class WithRawDocumentProperties { - @TextScore Float score; + String id; + org.bson.Document raw; + List listOfRaw; } - class ClassWithExplicitlyNamedDBRefProperty { + static class WithFieldWrite { - @Field("explict-name-for-db-ref") // - @org.springframework.data.mongodb.core.mapping.DBRef // - ClassWithIntId dbRefProperty; + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.NON_NULL) Integer writeNonNull; + + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.ALWAYS) Integer writeAlways; + + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.NON_NULL) Person writeNonNullPerson; + + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.ALWAYS) Person writeAlwaysPerson; + + @org.springframework.data.mongodb.core.mapping.DBRef + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.NON_NULL) Person writeNonNullPersonDBRef; + + @org.springframework.data.mongodb.core.mapping.DBRef + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.ALWAYS) Person writeAlwaysPersonDBRef; - public ClassWithIntId getDbRefProperty() { - return dbRefProperty; - } } - static class RootForClassWithExplicitlyRenamedIdField { + static class WithValueConverters { - @Id String id; - ClassWithExplicitlyRenamedField nested; + @ValueConverter(Converter1.class) String converterWithDefaultCtor; + + @ValueConverter(Converter2.class) String converterEnum; + + String viaRegisteredConverter; } - static class 
ClassWithExplicitlyRenamedField { + static class WithContextValueConverters { - @Field("id") String id; + @ValueConverter(Converter3.class) String converterBean; + + String viaRegisteredConverter; } - static class RootForClassWithNamedIdField { + static class Converter3 implements MongoValueConverter { - String id; - ClassWithNamedIdField nested; + private final SomeDependency someDependency; + + public Converter3(@Autowired SomeDependency someDependency) { + this.someDependency = someDependency; + } + + @Override + public Object read(org.bson.Document value, MongoConversionContext context) { + return value.get("ooo"); + } + + @Override + public org.bson.Document write(Object value, MongoConversionContext context) { + return new org.bson.Document("ooo", value + " - " + someDependency.toString()); + } } - static class ClassWithNamedIdField { + static class SomeDependency { - String id; } - static class ClassWithAnnotatedIdField { + enum Converter2 implements MongoValueConverter { - @Id String key; + INSTANCE; + + @Nullable + @Override + public String read(@Nullable org.bson.Document value, MongoConversionContext context) { + return value.getString("bar"); + } + + @Nullable + @Override + public org.bson.Document write(@Nullable String value, MongoConversionContext context) { + return new org.bson.Document("bar", value); + } } - static class TypeWithLocalDateTime { + static class Converter1 implements MongoValueConverter { - LocalDateTime date; + @Nullable + @Override + public String read(@Nullable org.bson.Document value, MongoConversionContext context) { + return value.getString("foo"); + } - TypeWithLocalDateTime() { - this.date = LocalDateTime.now(); + @Nullable + @Override + public org.bson.Document write(@Nullable String value, MongoConversionContext context) { + return new org.bson.Document("foo", value); } } - static class TypeWithOptional { + interface BookProjection { - Optional string = Optional.empty(); - Optional localDateTime = Optional.empty(); + 
@Value("#{target.name + ' by ' + target.author.firstName + ' ' + target.author.lastName}") + String getName(); } - static class ClassWithMapUsingEnumAsKey { + static class AuthorOnly { - enum FooBarEnum { - FOO, BAR + final AuthorNameOnly author; + + public AuthorOnly(AuthorNameOnly author) { + this.author = author; } - Map map; + public AuthorNameOnly getAuthor() { + return author; + } } - @WritingConverter - static class FooBarEnumToStringConverter implements Converter { + static class AuthorNameOnly { - @Override - public String convert(FooBarEnum source) { + final String firstName; - if (source == null) { - return null; - } + final String lastName; - return FooBarEnum.FOO.equals(source) ? "foo-enum-value" : "bar-enum-value"; + public AuthorNameOnly(String firstName, String lastName) { + this.firstName = firstName; + this.lastName = lastName; + } + + public String getFirstName() { + return firstName; + } + + public String getLastName() { + return lastName; } } - @ReadingConverter - static class StringToFooNumConverter implements Converter { + static class Book { - @Override - public FooBarEnum convert(String source) { + @Id String id; - if (source == null) { - return null; - } + String name; - if ("foo-enum-value".equals(source)) { - return FooBarEnum.FOO; + Author author = new Author(); + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Author getAuthor() { + return author; + } + + public void setAuthor(Author author) { + this.author = author; + } + } + + static class Author { + + @Id String id; + + String firstName; + + String lastName; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getFirstName() { + return firstName; + } + + public void setFirstName(String firstName) { + this.firstName = firstName; + } + + public String 
getLastName() { + return lastName; + } + + public void setLastName(String lastName) { + this.lastName = lastName; + } + } + + static class Cyclic { + + @Id String id; + String value; + Cyclic cycle; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public Cyclic getCycle() { + return cycle; + } + + public void setCycle(Cyclic cycle) { + this.cycle = cycle; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; } - if ("bar-enum-value".equals(source)) { - return FooBarEnum.BAR; + if (o == null || getClass() != o.getClass()) { + return false; } + Cyclic cyclic = (Cyclic) o; + return Objects.equals(id, cyclic.id) && Objects.equals(value, cyclic.value) + && Objects.equals(cycle, cyclic.cycle); + } - throw new ConversionNotSupportedException(source, String.class, null); + @Override + public int hashCode() { + return Objects.hash(id, value, cycle); } } - static class TypeWithPropertyInNestedField { - @Field("nested.sample") String sample; + static class WrapperForTypeWithPropertyHavingDotsInFieldName { + + WithPropertyHavingDotsInFieldName nested; } - static class TypeWithMapOfLongValues { - Map map; + static class WithPropertyHavingDotsInFieldName { + + @Field(name = "field.name.with.dots", nameType = Type.KEY) String value; } - @RequiredArgsConstructor - static class WithArrayInConstructor { + static class ComplexIdAndFieldAnnotation { - final String[] array; + @Field("_id") // + ComplexId id; + String value; + } + + static class ComplexIdAndMongoIdAnnotation { + + @MongoId // + ComplexId id; + String value; + } + + static class ComplexIdAndIdAnnotation { + + @Id // + ComplexId id; + String value; + } + + static class ComplexIdAndNoAnnotation { + + ComplexId id; + String value; + } + + @ReadingConverter + static class ByteBufferToDoubleHolderConverter implements 
Converter { + @Override + public DoubleHolder convert(ByteBuffer source) { + return new DoubleHolder(source.getDouble()); + } + } + + record DoubleHolder(double number) { + + } + + static class WithDoubleHolder { + DoubleHolder number; + } + + static class DoubleHolderDto { + DoubleHolder number; + + public DoubleHolderDto(DoubleHolder number) { + this.number = number; + } + } + + static class WithRenamedIdPropertyAndAnotherPropertyNamedId { + + @Id String abc; + String id; } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersIntegrationTests.java index 0f1f174651..a1c2fc0897 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,22 +19,26 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import lombok.Data; - +import java.util.Arrays; +import java.util.Objects; import java.util.UUID; +import org.bson.BinaryVector; import org.bson.types.Binary; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.annotation.Id; -import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.domain.Vector; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.MongoVector; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; +import org.springframework.util.ObjectUtils; /** * Integration tests for {@link MongoConverters}. 
@@ -43,17 +47,17 @@ * @author Christoph Strobl * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") +@ExtendWith(MongoTemplateExtension.class) public class MongoConvertersIntegrationTests { static final String COLLECTION = "converter-tests"; - @Autowired MongoOperations template; + @Template // + static MongoTestTemplate template; - @Before + @BeforeEach public void setUp() { - template.dropCollection(COLLECTION); + template.flush(COLLECTION); } @Test // DATAMONGO-422 @@ -102,6 +106,80 @@ public void shouldReadBinaryType() { assertThat(template.findOne(query(where("id").is(wbd.id)), WithBinaryDataType.class)).isEqualTo(wbd); } + @Test // GH-4706 + public void shouldReadAndWriteVectors() { + + WithVectors source = new WithVectors(); + source.vector = Vector.of(1.1, 2.2, 3.3); + + template.save(source); + + WithVectors loaded = template.findOne(query(where("id").is(source.id)), WithVectors.class); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4706 + public void shouldReadAndWriteFloatVectors() { + + WithVectors source = new WithVectors(); + source.vector = Vector.of(1.1f, 2.2f, 3.3f); + + template.save(source); + + WithVectors loaded = template.findOne(query(where("id").is(source.id)), WithVectors.class); + + // top-level arrays are converted into doubles by MongoDB with all their conversion imprecisions + assertThat(loaded.vector.getClass().getName()).contains("DoubleVector"); + assertThat(loaded.vector).isNotEqualTo(source.vector); + } + + @Test // GH-4706 + public void shouldReadAndWriteBinFloat32Vectors() { + + WithVectors source = new WithVectors(); + source.binVector = BinaryVector.floatVector(new float[] { 1.1f, 2.2f, 3.3f }); + source.vector = MongoVector.ofFloat(new float[] { 1.1f, 2.2f, 3.3f }); + + template.save(source); + + WithVectors loaded = template.findOne(query(where("id").is(source.id)), WithVectors.class); + + assertThat(loaded.vector).isEqualTo(source.vector); + 
assertThat(loaded.binVector).isEqualTo(source.binVector); + assertThat(loaded.binVector).isEqualTo(source.vector.getSource()); + } + + @Test // GH-4706 + public void shouldReadAndWriteBinInt8Vectors() { + + WithVectors source = new WithVectors(); + source.binVector = BinaryVector.int8Vector(new byte[] { 1, 2, 3 }); + source.vector = MongoVector.ofInt8(new byte[] { 1, 2, 3 }); + + template.save(source); + + WithVectors loaded = template.findOne(query(where("id").is(source.id)), WithVectors.class); + + assertThat(loaded.vector).isEqualTo(source.vector); + assertThat(loaded.binVector).isEqualTo(source.binVector); + assertThat(loaded.binVector).isEqualTo(source.vector.getSource()); + } + + @Test // GH-4706 + public void shouldReadAndWriteBinPackedVectors() { + + WithVectors source = new WithVectors(); + source.binVector = BinaryVector.packedBitVector(new byte[] { 1, 2, 3 }, (byte) 1); + source.vector = MongoVector.of(source.binVector); + + template.save(source); + + WithVectors loaded = template.findOne(query(where("id").is(source.id)), WithVectors.class); + + assertThat(loaded.vector).isEqualTo(source.vector); + assertThat(loaded.binVector).isEqualTo(source.binVector); + } + @Document(COLLECTION) static class Wrapper { @@ -109,19 +187,123 @@ static class Wrapper { UUID uuid; } - @Data + @Document(COLLECTION) + static class WithVectors { + + ObjectId id; + Vector vector; + BinaryVector binVector; + + @Override + public boolean equals(Object o) { + if (!(o instanceof WithVectors that)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(id, that.id)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(vector, that.vector)) { + return false; + } + return ObjectUtils.nullSafeEquals(binVector, that.binVector); + } + + @Override + public int hashCode() { + return ObjectUtils.nullSafeHash(id, vector, binVector); + } + } + @Document(COLLECTION) static class WithBinaryDataInArray { @Id String id; byte[] data; + + public String getId() { + return this.id; + } + + public 
byte[] getData() { + return this.data; + } + + public void setId(String id) { + this.id = id; + } + + public void setData(byte[] data) { + this.data = data; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithBinaryDataInArray that = (WithBinaryDataInArray) o; + return Objects.equals(id, that.id) && Arrays.equals(data, that.data); + } + + @Override + public int hashCode() { + int result = Objects.hash(id); + result = 31 * result + Arrays.hashCode(data); + return result; + } + + public String toString() { + return "MongoConvertersIntegrationTests.WithBinaryDataInArray(id=" + this.getId() + ", data=" + + java.util.Arrays.toString(this.getData()) + ")"; + } } - @Data @Document(COLLECTION) static class WithBinaryDataType { @Id String id; Binary data; + + public WithBinaryDataType() {} + + public String getId() { + return this.id; + } + + public Binary getData() { + return this.data; + } + + public void setId(String id) { + this.id = id; + } + + public void setData(Binary data) { + this.data = data; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithBinaryDataType that = (WithBinaryDataType) o; + return Objects.equals(id, that.id) && Objects.equals(data, that.data); + } + + @Override + public int hashCode() { + return Objects.hash(id, data); + } + + public String toString() { + return "MongoConvertersIntegrationTests.WithBinaryDataType(id=" + this.getId() + ", data=" + this.getData() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersUnitTests.java index 753f033caa..72cd807d3b 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoConvertersUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,23 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.math.BigDecimal; +import java.net.URI; +import java.time.Instant; +import java.time.temporal.ChronoUnit; import java.util.Currency; +import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; -import org.junit.Test; +import org.assertj.core.data.TemporalUnitLessThanOffset; +import org.bson.BsonTimestamp; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.core.convert.support.ConfigurableConversionService; +import org.springframework.core.convert.support.DefaultConversionService; import org.springframework.data.geo.Box; import org.springframework.data.geo.Circle; import org.springframework.data.geo.Point; @@ -32,15 +40,15 @@ import org.springframework.data.mongodb.core.convert.MongoConverters.AtomicIntegerToIntegerConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.AtomicLongToLongConverter; import 
org.springframework.data.mongodb.core.convert.MongoConverters.BigDecimalToStringConverter; +import org.springframework.data.mongodb.core.convert.MongoConverters.BsonTimestampToInstantConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.CurrencyToStringConverter; +import org.springframework.data.mongodb.core.convert.MongoConverters.DocumentToStringConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.IntegerToAtomicIntegerConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.LongToAtomicLongConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.StringToBigDecimalConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.StringToCurrencyConverter; import org.springframework.data.mongodb.core.geo.Sphere; -import org.bson.Document; - /** * Unit tests for {@link MongoConverters}. * @@ -48,101 +56,130 @@ * @author Thomas Darimont * @author Christoph Strobl */ -public class MongoConvertersUnitTests { +class MongoConvertersUnitTests { @Test - public void convertsBigDecimalToStringAndBackCorrectly() { + void convertsBigDecimalToStringAndBackCorrectly() { BigDecimal bigDecimal = BigDecimal.valueOf(254, 1); String value = BigDecimalToStringConverter.INSTANCE.convert(bigDecimal); - assertThat(value, is("25.4")); + assertThat(value).isEqualTo("25.4"); BigDecimal reference = StringToBigDecimalConverter.INSTANCE.convert(value); - assertThat(reference, is(bigDecimal)); + assertThat(reference).isEqualTo(bigDecimal); } @Test // DATAMONGO-858 - public void convertsBoxToDocumentAndBackCorrectly() { + void convertsBoxToDocumentAndBackCorrectly() { Box box = new Box(new Point(1, 2), new Point(3, 4)); Document document = GeoConverters.BoxToDocumentConverter.INSTANCE.convert(box); Shape shape = GeoConverters.DocumentToBoxConverter.INSTANCE.convert(document); - assertThat(shape, is((org.springframework.data.geo.Shape) box)); + assertThat(shape).isEqualTo(box); } 
@Test // DATAMONGO-858 - public void convertsCircleToDocumentAndBackCorrectly() { + void convertsCircleToDocumentAndBackCorrectly() { Circle circle = new Circle(new Point(1, 2), 3); Document document = GeoConverters.CircleToDocumentConverter.INSTANCE.convert(circle); Shape shape = GeoConverters.DocumentToCircleConverter.INSTANCE.convert(document); - assertThat(shape, is((org.springframework.data.geo.Shape) circle)); + assertThat(shape).isEqualTo(circle); } @Test // DATAMONGO-858 - public void convertsPolygonToDocumentAndBackCorrectly() { + void convertsPolygonToDocumentAndBackCorrectly() { Polygon polygon = new Polygon(new Point(1, 2), new Point(2, 3), new Point(3, 4), new Point(5, 6)); Document document = GeoConverters.PolygonToDocumentConverter.INSTANCE.convert(polygon); Shape shape = GeoConverters.DocumentToPolygonConverter.INSTANCE.convert(document); - assertThat(shape, is((org.springframework.data.geo.Shape) polygon)); + assertThat(shape).isEqualTo(polygon); } @Test // DATAMONGO-858 - public void convertsSphereToDocumentAndBackCorrectly() { + void convertsSphereToDocumentAndBackCorrectly() { Sphere sphere = new Sphere(new Point(1, 2), 3); Document document = GeoConverters.SphereToDocumentConverter.INSTANCE.convert(sphere); org.springframework.data.geo.Shape shape = GeoConverters.DocumentToSphereConverter.INSTANCE.convert(document); - assertThat(shape, is((org.springframework.data.geo.Shape) sphere)); + assertThat(shape).isEqualTo(sphere); } @Test // DATAMONGO-858 - public void convertsPointToListAndBackCorrectly() { + void convertsPointToListAndBackCorrectly() { Point point = new Point(1, 2); Document document = GeoConverters.PointToDocumentConverter.INSTANCE.convert(point); org.springframework.data.geo.Point converted = GeoConverters.DocumentToPointConverter.INSTANCE.convert(document); - assertThat(converted, is((org.springframework.data.geo.Point) point)); + assertThat(converted).isEqualTo(point); } @Test // DATAMONGO-1372 - public void 
convertsCurrencyToStringCorrectly() { - assertThat(CurrencyToStringConverter.INSTANCE.convert(Currency.getInstance("USD")), is("USD")); + void convertsCurrencyToStringCorrectly() { + assertThat(CurrencyToStringConverter.INSTANCE.convert(Currency.getInstance("USD"))).isEqualTo("USD"); } @Test // DATAMONGO-1372 - public void convertsStringToCurrencyCorrectly() { - assertThat(StringToCurrencyConverter.INSTANCE.convert("USD"), is(Currency.getInstance("USD"))); + void convertsStringToCurrencyCorrectly() { + assertThat(StringToCurrencyConverter.INSTANCE.convert("USD")).isEqualTo(Currency.getInstance("USD")); } @Test // DATAMONGO-1416 - public void convertsAtomicLongToLongCorrectly() { - assertThat(AtomicLongToLongConverter.INSTANCE.convert(new AtomicLong(100L)), is(100L)); + void convertsAtomicLongToLongCorrectly() { + assertThat(AtomicLongToLongConverter.INSTANCE.convert(new AtomicLong(100L))).isEqualTo(100L); } @Test // DATAMONGO-1416 - public void convertsAtomicIntegerToIntegerCorrectly() { - assertThat(AtomicIntegerToIntegerConverter.INSTANCE.convert(new AtomicInteger(100)), is(100)); + void convertsAtomicIntegerToIntegerCorrectly() { + assertThat(AtomicIntegerToIntegerConverter.INSTANCE.convert(new AtomicInteger(100))).isEqualTo(100); } @Test // DATAMONGO-1416 - public void convertsLongToAtomicLongCorrectly() { - assertThat(LongToAtomicLongConverter.INSTANCE.convert(100L), is(instanceOf(AtomicLong.class))); + void convertsLongToAtomicLongCorrectly() { + assertThat(LongToAtomicLongConverter.INSTANCE.convert(100L)).isInstanceOf(AtomicLong.class); } @Test // DATAMONGO-1416 - public void convertsIntegerToAtomicIntegerCorrectly() { - assertThat(IntegerToAtomicIntegerConverter.INSTANCE.convert(100), is(instanceOf(AtomicInteger.class))); + void convertsIntegerToAtomicIntegerCorrectly() { + assertThat(IntegerToAtomicIntegerConverter.INSTANCE.convert(100)).isInstanceOf(AtomicInteger.class); + } + + @Test // DATAMONGO-2113 + void convertsBsonTimestampToInstantCorrectly() { + 
+ assertThat(BsonTimestampToInstantConverter.INSTANCE.convert(new BsonTimestamp(6615900307735969796L))) + .isCloseTo(Instant.ofEpochSecond(1540384327), new TemporalUnitLessThanOffset(100, ChronoUnit.MILLIS)); + } + + @Test // DATAMONGO-2210 + void convertsUrisToString() { + + MongoCustomConversions conversions = new MongoCustomConversions(); + + assertThat(conversions.getSimpleTypeHolder().isSimpleType(URI.class)).isTrue(); + + ConfigurableConversionService conversionService = new DefaultConversionService(); + conversions.registerConvertersIn(conversionService); + + assertThat(conversionService.convert(URI.create("/segment"), String.class)).isEqualTo("/segment"); + assertThat(conversionService.convert("/segment", URI.class)).isEqualTo(URI.create("/segment")); + } + + @Test // GH-3546 + void convertsDocumentWithUUidToString() { + + UUID uuid = UUID.randomUUID(); + assertThat(DocumentToStringConverter.INSTANCE.convert(new Document("_id", uuid))) + .isEqualTo("{\"_id\": \"" + uuid.toString() + "\"}"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoCustomConversionsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoCustomConversionsUnitTests.java new file mode 100644 index 0000000000..9382b835ea --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoCustomConversionsUnitTests.java @@ -0,0 +1,84 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.time.ZonedDateTime; +import java.util.Collections; +import java.util.Date; + +import org.junit.jupiter.api.Test; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.PropertyValueConverter; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mongodb.core.convert.QueryMapperUnitTests.Foo; + +/** + * Unit tests for {@link MongoCustomConversions}. + * + * @author Christoph Strobl + */ +class MongoCustomConversionsUnitTests { + + @Test // DATAMONGO-2349 + void nonAnnotatedConverterForJavaTimeTypeShouldOnlyBeRegisteredAsReadingConverter() { + + MongoCustomConversions conversions = new MongoCustomConversions( + Collections.singletonList(new DateToZonedDateTimeConverter())); + + assertThat(conversions.hasCustomReadTarget(Date.class, ZonedDateTime.class)).isTrue(); + assertThat(conversions.hasCustomWriteTarget(Date.class)).isFalse(); + } + + @Test // GH-3596 + void propertyValueConverterRegistrationWorksAsExpected() { + + PersistentProperty persistentProperty = mock(PersistentProperty.class); + PersistentEntity owner = mock(PersistentEntity.class); + when(persistentProperty.getName()).thenReturn("name"); + when(persistentProperty.getOwner()).thenReturn(owner); + when(owner.getType()).thenReturn(Foo.class); + + MongoCustomConversions conversions = MongoCustomConversions.create(config -> { + + config.configurePropertyConversions( + registry -> registry.registerConverter(Foo.class, "name", mock(PropertyValueConverter.class))); + }); + + assertThat(conversions.getPropertyValueConversions().hasValueConverter(persistentProperty)).isTrue(); + } + + @Test // 
GH-4390 + void doesNotReturnConverterForNativeTimeTimeIfUsingDriverCodec() { + + MongoCustomConversions conversions = MongoCustomConversions.create(config -> { + config.useNativeDriverJavaTimeCodecs(); + }); + + assertThat(conversions.getCustomWriteTarget(Date.class)).isEmpty(); + } + + static class DateToZonedDateTimeConverter implements Converter { + + @Override + public ZonedDateTime convert(Date source) { + return ZonedDateTime.now(); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoExampleMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoExampleMapperUnitTests.java index 06cf669fb0..16b198004c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoExampleMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoExampleMapperUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,138 +15,119 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; import static org.springframework.data.domain.Example.*; import static org.springframework.data.domain.ExampleMatcher.*; import static org.springframework.data.mongodb.core.DocumentTestUtils.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Set; import java.util.regex.Pattern; import org.bson.conversions.Bson; -import org.hamcrest.core.Is; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.data.annotation.Id; import org.springframework.data.domain.Example; import org.springframework.data.domain.ExampleMatcher; -import org.springframework.data.domain.ExampleMatcher.*; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.convert.QueryMapperUnitTests.ClassWithGeoTypes; import org.springframework.data.mongodb.core.convert.QueryMapperUnitTests.WithDBRef; +import org.springframework.data.mongodb.core.index.Indexed; import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.mapping.Document; import 
org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.mongodb.core.query.UntypedExampleMatcher; -import org.springframework.data.mongodb.test.util.IsBsonObject; import org.springframework.data.util.TypeInformation; /** * @author Christoph Strobl * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) -public class MongoExampleMapperUnitTests { +@ExtendWith(MockitoExtension.class) +class MongoExampleMapperUnitTests { - MongoExampleMapper mapper; - MongoMappingContext context; - MappingMongoConverter converter; + private MongoExampleMapper mapper; + private MongoMappingContext context; + private MappingMongoConverter converter; - @Mock MongoDbFactory factory; - - @Before - public void setUp() { + @BeforeEach + void setUp() { this.context = new MongoMappingContext(); - this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); this.converter.afterPropertiesSet(); this.mapper = new MongoExampleMapper(converter); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsSet() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsSet() { FlatDocument probe = new FlatDocument(); probe.id = "steelheart"; - IsBsonObject expected = isBsonObject().containing("_id", "steelheart"); - - assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class)), - is(expected)); + assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("_id", "steelheart"); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenMultipleValuesSet() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenMultipleValuesSet() { FlatDocument probe = new 
FlatDocument(); probe.id = "steelheart"; probe.stringValue = "firefight"; probe.intValue = 100; - IsBsonObject expected = isBsonObject().// - containing("_id", "steelheart").// - containing("stringValue", "firefight").// - containing("intValue", 100); - - assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class)), - is(expected)); + assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("_id", "steelheart") // + .containsEntry("stringValue", "firefight") // + .containsEntry("intValue", 100); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsNotSet() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsNotSet() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; probe.intValue = 100; - IsBsonObject expected = isBsonObject().// - containing("stringValue", "firefight").// - containing("intValue", 100); - - assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class)), - is(expected)); + assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class))) // + .containsEntry("stringValue", "firefight") // + .containsEntry("intValue", 100); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenListHasValues() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenListHasValues() { FlatDocument probe = new FlatDocument(); probe.listOfString = Arrays.asList("Prof", "Tia", "David"); List list = (Arrays.asList("Prof", "Tia", "David")); - IsBsonObject expected = isBsonObject().// - containing("listOfString", list); - - assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class)), - is(expected)); + assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("listOfString", list); } @Test // 
DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenFieldNameIsCustomized() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenFieldNameIsCustomized() { FlatDocument probe = new FlatDocument(); probe.customNamedField = "Mitosis"; - IsBsonObject expected = isBsonObject().containing("custom_field_name", "Mitosis"); - - assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class)), - is(expected)); + assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("custom_field_name", "Mitosis"); } @Test // DATAMONGO-1245 - public void typedExampleShouldContainTypeRestriction() { + void typedExampleShouldContainTypeRestriction() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -155,25 +136,23 @@ public void typedExampleShouldContainTypeRestriction() { org.bson.Document document = mapper.getMappedExample(Example.of(probe), context.getRequiredPersistentEntity(WrapperDocument.class)); - assertThat(document, - isBsonObject().containing("_class", new org.bson.Document("$in", new String[] { probe.getClass().getName() }))); + assertThat(document).containsEntry("_class", + new org.bson.Document("$in", Collections.singletonList(probe.getClass().getName()))); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedAsFlatMapWhenGivenNestedElementsWithLenientMatchMode() { + void exampleShouldBeMappedAsFlatMapWhenGivenNestedElementsWithLenientMatchMode() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); probe.flatDoc.stringValue = "conflux"; - IsBsonObject expected = isBsonObject().containing("flatDoc\\.stringValue", "conflux"); - - assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(WrapperDocument.class)), - is(expected)); + assertThat(mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(WrapperDocument.class))) + 
.containsEntry("flatDoc\\.stringValue", "conflux"); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedAsExactObjectWhenGivenNestedElementsWithStrictMatchMode() { + void exampleShouldBeMappedAsExactObjectWhenGivenNestedElementsWithStrictMatchMode() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -181,12 +160,12 @@ public void exampleShouldBeMappedAsExactObjectWhenGivenNestedElementsWithStrictM Example example = Example.of(probe, matching().withIncludeNullValues()); - assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(WrapperDocument.class)), // - isBsonObject().containing("flatDoc.stringValue", "conflux")); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(WrapperDocument.class))) + .containsEntry("flatDoc.stringValue", "conflux"); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsStarting() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsStarting() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -194,15 +173,13 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsStarti Example example = Example.of(probe, matching().withStringMatcher(StringMatcher.STARTING)); - IsBsonObject expected = isBsonObject().// - containing("stringValue.$regex", "^firefight").// - containing("intValue", 100); - - assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue.$regex", "^firefight")// + .containsEntry("intValue", 100); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeContainingDotsWhenStringMatchModeIsStarting() { + void exampleShouldBeMappedCorrectlyForFlatTypeContainingDotsWhenStringMatchModeIsStarting() { FlatDocument 
probe = new FlatDocument(); probe.stringValue = "fire.ight"; @@ -210,15 +187,13 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeContainingDotsWhenStringMat Example example = Example.of(probe, matching().withStringMatcher(StringMatcher.STARTING)); - IsBsonObject expected = isBsonObject().// - containing("stringValue.$regex", "^" + Pattern.quote("fire.ight")).// - containing("intValue", 100); - - assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue.$regex", "^" + Pattern.quote("fire.ight"))// + .containsEntry("intValue", 100); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsEnding() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsEnding() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -226,15 +201,13 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsEnding Example example = Example.of(probe, matching().withStringMatcher(StringMatcher.ENDING)); - IsBsonObject expected = isBsonObject().// - containing("stringValue.$regex", "firefight$").// - containing("intValue", 100); - - assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue.$regex", "firefight$") // + .containsEntry("intValue", 100); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeRegex() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeRegex() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -242,15 +215,13 @@ public void 
exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeRegex() Example example = Example.of(probe, matching().withStringMatcher(StringMatcher.REGEX)); - IsBsonObject expected = isBsonObject().// - containing("stringValue.$regex", "firefight").// - containing("custom_field_name.$regex", "^(cat|dog).*shelter\\d?"); - - assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue.$regex", "firefight") // + .containsEntry("custom_field_name.$regex", "^(cat|dog).*shelter\\d?"); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabledAndMatchModeSet() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabledAndMatchModeSet() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -258,15 +229,13 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabledAndMat Example example = Example.of(probe, matching().withStringMatcher(StringMatcher.ENDING).withIgnoreCase()); - IsBsonObject expected = isBsonObject().// - containing("stringValue", new org.bson.Document("$regex", "firefight$").append("$options", "i")).// - containing("intValue", 100); - - assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue", new org.bson.Document("$regex", "firefight$").append("$options", "i")) // + .containsEntry("intValue", 100); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabled() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabled() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -274,15 +243,14 @@ public void 
exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabled() { Example example = Example.of(probe, matching().withIgnoreCase()); - IsBsonObject expected = isBsonObject().// - containing("stringValue", new org.bson.Document("$regex", Pattern.quote("firefight")).append("$options", "i")).// - containing("intValue", 100); - - assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue", + new org.bson.Document("$regex", Pattern.quote("firefight")).append("$options", "i")) // + .containsEntry("intValue", 100); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedWhenContainingDBRef() { + void exampleShouldBeMappedWhenContainingDBRef() { FlatDocument probe = new FlatDocument(); probe.stringValue = "steelheart"; @@ -293,12 +261,12 @@ public void exampleShouldBeMappedWhenContainingDBRef() { context.getRequiredPersistentEntity(WithDBRef.class)); com.mongodb.DBRef reference = getTypedValue(document, "referenceDocument", com.mongodb.DBRef.class); - assertThat(reference.getId(), Is. 
is("200")); - assertThat(reference.getCollectionName(), is("refDoc")); + assertThat(reference.getId()).isEqualTo("200"); + assertThat(reference.getCollectionName()).isEqualTo("refDoc"); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedWhenDBRefIsNull() { + void exampleShouldBeMappedWhenDBRefIsNull() { FlatDocument probe = new FlatDocument(); probe.stringValue = "steelheart"; @@ -306,11 +274,11 @@ public void exampleShouldBeMappedWhenDBRefIsNull() { org.bson.Document document = mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class)); - assertThat(document, isBsonObject().containing("stringValue", "steelheart")); + assertThat(document).containsEntry("stringValue", "steelheart"); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyWhenContainingLegacyPoint() { + void exampleShouldBeMappedCorrectlyWhenContainingLegacyPoint() { ClassWithGeoTypes probe = new ClassWithGeoTypes(); probe.legacyPoint = new Point(10D, 20D); @@ -318,12 +286,12 @@ public void exampleShouldBeMappedCorrectlyWhenContainingLegacyPoint() { org.bson.Document document = mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(WithDBRef.class)); - assertThat(document.get("legacyPoint.x"), Is. is(10D)); - assertThat(document.get("legacyPoint.y"), Is. 
is(20D)); + assertThat(document.get("legacyPoint.x")).isEqualTo(10D); + assertThat(document.get("legacyPoint.y")).isEqualTo(20D); } @Test // DATAMONGO-1245 - public void mappingShouldExcludeFieldWithCustomNameCorrectly() { + void mappingShouldExcludeFieldWithCustomNameCorrectly() { FlatDocument probe = new FlatDocument(); probe.customNamedField = "foo"; @@ -332,15 +300,13 @@ public void mappingShouldExcludeFieldWithCustomNameCorrectly() { Example example = Example.of(probe, matching().withIgnorePaths("customNamedField")); - IsBsonObject expected = isBsonObject().// - containing("stringValue", "string").// - containing("intValue", 10); - - assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue", "string") // + .containsEntry("intValue", 10); } @Test // DATAMONGO-1245 - public void mappingShouldExcludeFieldCorrectly() { + void mappingShouldExcludeFieldCorrectly() { FlatDocument probe = new FlatDocument(); probe.customNamedField = "foo"; @@ -349,15 +315,13 @@ public void mappingShouldExcludeFieldCorrectly() { Example example = Example.of(probe, matching().withIgnorePaths("stringValue")); - IsBsonObject expected = isBsonObject().// - containing("custom_field_name", "foo").// - containing("intValue", 10); - - assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("custom_field_name", "foo") // + .containsEntry("intValue", 10); } @Test // DATAMONGO-1245 - public void mappingShouldExcludeNestedFieldCorrectly() { + void mappingShouldExcludeNestedFieldCorrectly() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -367,16 +331,13 @@ public void 
mappingShouldExcludeNestedFieldCorrectly() { Example example = Example.of(probe, matching().withIgnorePaths("flatDoc.stringValue")); - IsBsonObject expected = isBsonObject().// - containing("flatDoc\\.custom_field_name", "foo").// - containing("flatDoc\\.intValue", 10); - - assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(WrapperDocument.class)), - is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(WrapperDocument.class))) + .containsEntry("flatDoc\\.custom_field_name", "foo")// + .containsEntry("flatDoc\\.intValue", 10); } @Test // DATAMONGO-1245 - public void mappingShouldExcludeNestedFieldWithCustomNameCorrectly() { + void mappingShouldExcludeNestedFieldWithCustomNameCorrectly() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -386,16 +347,13 @@ public void mappingShouldExcludeNestedFieldWithCustomNameCorrectly() { Example example = Example.of(probe, matching().withIgnorePaths("flatDoc.customNamedField")); - IsBsonObject expected = isBsonObject().// - containing("flatDoc\\.stringValue", "string").// - containing("flatDoc\\.intValue", 10); - - assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(WrapperDocument.class)), - is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(WrapperDocument.class))) + .containsEntry("flatDoc\\.stringValue", "string") // + .containsEntry("flatDoc\\.intValue", 10); } @Test // DATAMONGO-1245 - public void mappingShouldFavorFieldSpecificationStringMatcherOverDefaultStringMatcher() { + void mappingShouldFavorFieldSpecificationStringMatcherOverDefaultStringMatcher() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -403,15 +361,13 @@ public void mappingShouldFavorFieldSpecificationStringMatcherOverDefaultStringMa Example example = Example.of(probe, matching().withMatcher("stringValue", GenericPropertyMatchers.contains())); - 
IsBsonObject expected = isBsonObject().// - containing("stringValue.$regex", ".*firefight.*").// - containing("custom_field_name", "steelheart"); - - assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class)), is(expected)); + assertThat(mapper.getMappedExample(example, context.getRequiredPersistentEntity(FlatDocument.class))) + .containsEntry("stringValue.$regex", ".*firefight.*") // + .containsEntry("custom_field_name", "steelheart"); } @Test // DATAMONGO-1245 - public void mappingShouldIncludePropertiesFromHierarchicalDocument() { + void mappingShouldIncludePropertiesFromHierarchicalDocument() { HierachicalDocument probe = new HierachicalDocument(); probe.stringValue = "firefight"; @@ -421,11 +377,11 @@ public void mappingShouldIncludePropertiesFromHierarchicalDocument() { org.bson.Document document = mapper.getMappedExample(of(probe), context.getRequiredPersistentEntity(FlatDocument.class)); - assertThat(document, isBsonObject().containing("anotherStringValue", "calamity")); + assertThat(document).containsEntry("anotherStringValue", "calamity"); } @Test // DATAMONGO-1459 - public void mapsAnyMatchingExampleCorrectly() { + void mapsAnyMatchingExampleCorrectly() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -433,11 +389,11 @@ public void mapsAnyMatchingExampleCorrectly() { Example example = Example.of(probe, ExampleMatcher.matchingAny()); - assertThat(mapper.getMappedExample(example), isBsonObject().containing("$or").containing("_class")); + assertThat(mapper.getMappedExample(example)).containsKeys("$or", "_class"); } @Test // DATAMONGO-1768 - public void allowIgnoringTypeRestrictionBySettingUpTypeKeyAsAnIgnoredPath() { + void allowIgnoringTypeRestrictionBySettingUpTypeKeyAsAnIgnoredPath() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -446,17 +402,17 @@ public void allowIgnoringTypeRestrictionBySettingUpTypeKeyAsAnIgnoredPath() { org.bson.Document 
document = mapper .getMappedExample(Example.of(probe, ExampleMatcher.matching().withIgnorePaths("_class"))); - assertThat(document, isBsonObject().notContaining("_class")); + assertThat(document).doesNotContainKey("_class"); } @Test // DATAMONGO-1768 - public void allowIgnoringTypeRestrictionBySettingUpTypeKeyAsAnIgnoredPathWhenUsingCustomTypeMapper() { + void allowIgnoringTypeRestrictionBySettingUpTypeKeyAsAnIgnoredPathWhenUsingCustomTypeMapper() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); probe.flatDoc.stringValue = "conflux"; - MappingMongoConverter mappingMongoConverter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context); + MappingMongoConverter mappingMongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); mappingMongoConverter.setTypeMapper(new DefaultMongoTypeMapper() { @Override @@ -480,18 +436,72 @@ public void writeType(TypeInformation info, Bson sink) { org.bson.Document document = new MongoExampleMapper(mappingMongoConverter) .getMappedExample(Example.of(probe, ExampleMatcher.matching().withIgnorePaths("_foo"))); - assertThat(document, isBsonObject().notContaining("_class").notContaining("_foo")); + assertThat(document).doesNotContainKeys("_class", "_foo"); } @Test // DATAMONGO-1768 - public void untypedExampleShouldNotInferTypeRestriction() { + void untypedExampleShouldNotInferTypeRestriction() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); probe.flatDoc.stringValue = "conflux"; org.bson.Document document = mapper.getMappedExample(Example.of(probe, UntypedExampleMatcher.matching())); - assertThat(document, isBsonObject().notContaining("_class")); + assertThat(document).doesNotContainKey("_class"); + } + + @Test // DATAMONGO-1902 + void mapsUnwrappedType() { + + WithUnwrapped probe = new WithUnwrapped(); + probe.unwrappedValue = new UnwrappableType(); + probe.unwrappedValue.atFieldAnnotatedValue = "@Field"; + 
probe.unwrappedValue.stringValue = "string-value"; + + org.bson.Document document = mapper.getMappedExample(Example.of(probe, UntypedExampleMatcher.matching())); + assertThat(document).containsEntry("stringValue", "string-value").containsEntry("with-at-field-annotation", + "@Field"); + } + + @Test // DATAMONGO-1902 + void mapsPrefixedUnwrappedType() { + + WithUnwrapped probe = new WithUnwrapped(); + probe.prefixedUnwrappedValue = new UnwrappableType(); + probe.prefixedUnwrappedValue.atFieldAnnotatedValue = "@Field"; + probe.prefixedUnwrappedValue.stringValue = "string-value"; + + org.bson.Document document = mapper.getMappedExample(Example.of(probe, UntypedExampleMatcher.matching())); + assertThat(document).containsEntry("prefix-stringValue", "string-value") + .containsEntry("prefix-with-at-field-annotation", "@Field"); + } + + @Test // DATAMONGO-1902 + void mapsNestedUnwrappedType() { + + WrapperAroundWithUnwrapped probe = new WrapperAroundWithUnwrapped(); + probe.withUnwrapped = new WithUnwrapped(); + probe.withUnwrapped.unwrappedValue = new UnwrappableType(); + probe.withUnwrapped.unwrappedValue.atFieldAnnotatedValue = "@Field"; + probe.withUnwrapped.unwrappedValue.stringValue = "string-value"; + + org.bson.Document document = mapper.getMappedExample(Example.of(probe, UntypedExampleMatcher.matching())); + assertThat(document).containsEntry("withUnwrapped.stringValue", "string-value") + .containsEntry("withUnwrapped.with-at-field-annotation", "@Field"); + } + + @Test // DATAMONGO-1902 + void mapsNestedPrefixedUnwrappedType() { + + WrapperAroundWithUnwrapped probe = new WrapperAroundWithUnwrapped(); + probe.withUnwrapped = new WithUnwrapped(); + probe.withUnwrapped.prefixedUnwrappedValue = new UnwrappableType(); + probe.withUnwrapped.prefixedUnwrappedValue.atFieldAnnotatedValue = "@Field"; + probe.withUnwrapped.prefixedUnwrappedValue.stringValue = "string-value"; + + org.bson.Document document = mapper.getMappedExample(Example.of(probe, 
UntypedExampleMatcher.matching())); + assertThat(document).containsEntry("withUnwrapped.prefix-stringValue", "string-value") + .containsEntry("withUnwrapped.prefix-with-at-field-annotation", "@Field"); } static class FlatDocument { @@ -521,4 +531,29 @@ static class ReferenceDocument { @Id String id; String value; } + + @Document + static class WrapperAroundWithUnwrapped { + + String id; + WithUnwrapped withUnwrapped; + } + + @Document + static class WithUnwrapped { + + String id; + + @Unwrapped.Nullable UnwrappableType unwrappedValue; + @Unwrapped.Nullable("prefix-") UnwrappableType prefixedUnwrappedValue; + } + + static class UnwrappableType { + + @Indexed String stringValue; + + @Indexed // + @Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapperUnitTests.java index 492d241891..558c0d65ec 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoJsonSchemaMapperUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,10 +23,9 @@ import java.util.List; import org.bson.Document; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; @@ -38,8 +37,6 @@ */ public class MongoJsonSchemaMapperUnitTests { - public @Rule ExpectedException exception = ExpectedException.none(); - MongoJsonSchemaMapper mapper; Document addressProperty = new Document("type", "object").append("required", Arrays.asList("street", "postCode")) @@ -108,7 +105,7 @@ public class MongoJsonSchemaMapperUnitTests { " }" + // " } }"; - @Before + @BeforeEach public void setUp() { mapper = new MongoJsonSchemaMapper(new MappingMongoConverter(mock(DbRefResolver.class), new MongoMappingContext())); } @@ -116,26 +113,20 @@ public void setUp() { @Test // DATAMONGO-1835 public void noNullSchemaAllowed() { - exception.expect(IllegalArgumentException.class); - - mapper.mapSchema(null, Object.class); + assertThatIllegalArgumentException().isThrownBy(() -> mapper.mapSchema(null, Object.class)); } @Test // DATAMONGO-1835 public void noNullDomainTypeAllowed() { - exception.expect(IllegalArgumentException.class); - - mapper.mapSchema(new Document("$jsonSchema", new Document()), null); + assertThatIllegalArgumentException() + .isThrownBy(() -> mapper.mapSchema(new Document("$jsonSchema", new Document()), null)); } @Test // DATAMONGO-1835 public void schemaDocumentMustContain$jsonSchemaField() { - - 
exception.expect(IllegalArgumentException.class); - exception.expectMessage("contain $jsonSchema"); - - mapper.mapSchema(new Document("foo", new Document()), Object.class); + assertThatIllegalArgumentException() + .isThrownBy(() -> mapper.mapSchema(new Document("foo", new Document()), Object.class)); } @Test // DATAMONGO-1835 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NamedMongoScriptConvertsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NamedMongoScriptConvertsUnitTests.java index b6e54085b6..fb19ecf3b6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NamedMongoScriptConvertsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NamedMongoScriptConvertsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,15 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import org.bson.Document; import org.bson.types.Code; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses; + import org.springframework.core.convert.converter.Converter; import org.springframework.data.mongodb.core.convert.MongoConverters.DocumentToNamedMongoScriptConverter; import org.springframework.data.mongodb.core.convert.MongoConverters.NamedMongoScriptToDocumentConverter; @@ -39,7 +39,8 @@ * @since 1.7 */ @RunWith(Suite.class) -@SuiteClasses({ NamedMongoScriptToDocumentConverterUnitTests.class, DocumentToNamedMongoScriptConverterUnitTests.class }) +@SuiteClasses({ NamedMongoScriptToDocumentConverterUnitTests.class, + DocumentToNamedMongoScriptConverterUnitTests.class }) public class NamedMongoScriptConvertsUnitTests { static final String FUNCTION_NAME = "echo"; @@ -55,19 +56,13 @@ public static class NamedMongoScriptToDocumentConverterUnitTests { NamedMongoScriptToDocumentConverter converter = NamedMongoScriptToDocumentConverter.INSTANCE; - @Test // DATAMONGO-479 - public void convertShouldReturnEmptyDocWhenScriptIsNull() { - assertThat(converter.convert(null), is((Document) new Document())); - } - @Test // DATAMONGO-479 public void convertShouldConvertScriptNameCorreclty() { Document document = converter.convert(ECHO_SCRIPT); Object id = document.get("_id"); - assertThat(id, is(instanceOf(String.class))); - assertThat(id, is((Object) 
FUNCTION_NAME)); + assertThat(id).isInstanceOf(String.class).isEqualTo(FUNCTION_NAME); } @Test // DATAMONGO-479 @@ -76,8 +71,7 @@ public void convertShouldConvertScriptCodeCorreclty() { Document document = converter.convert(ECHO_SCRIPT); Object code = document.get("value"); - assertThat(code, is(instanceOf(Code.class))); - assertThat(code, is((Object) new Code(JS_FUNCTION))); + assertThat(code).isInstanceOf(Code.class).isEqualTo(new Code(JS_FUNCTION)); } } @@ -88,9 +82,9 @@ public static class DocumentToNamedMongoScriptConverterUnitTests { DocumentToNamedMongoScriptConverter converter = DocumentToNamedMongoScriptConverter.INSTANCE; - @Test // DATAMONGO-479 - public void convertShouldReturnNullIfSourceIsNull() { - assertThat(converter.convert(null), is(nullValue())); + @Test // DATAMONGO-479, DATAMONGO-2385 + public void convertShouldReturnNullIfSourceIsEmpty() { + assertThat(converter.convert(new Document())).isNull(); } @Test // DATAMONGO-479 @@ -98,7 +92,7 @@ public void convertShouldConvertIdCorreclty() { NamedMongoScript script = converter.convert(FUNCTION); - assertThat(script.getName(), is(FUNCTION_NAME)); + assertThat(script.getName()).isEqualTo(FUNCTION_NAME); } @Test // DATAMONGO-479 @@ -106,8 +100,7 @@ public void convertShouldConvertScriptValueCorreclty() { NamedMongoScript script = converter.convert(FUNCTION); - assertThat(script.getCode(), is(notNullValue())); - assertThat(script.getCode(), is(JS_FUNCTION)); + assertThat(script.getCode()).isEqualTo(JS_FUNCTION); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NumberToNumberConverterFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NumberToNumberConverterFactoryUnitTests.java index 595fea72de..9df15a674a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NumberToNumberConverterFactoryUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/NumberToNumberConverterFactoryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,7 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.core.Is.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Arrays; import java.util.Collection; @@ -28,6 +27,7 @@ import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; + import org.springframework.data.mongodb.core.convert.MongoConverters.NumberToNumberConverterFactory; /** @@ -54,6 +54,7 @@ public static Collection parameters() { @Test // DATAMONGO-1288 public void convertsToTargetTypeCorrectly() { - assertThat(NumberToNumberConverterFactory.INSTANCE.getConverter(expected.getClass()).convert(source), is(expected)); + assertThat(NumberToNumberConverterFactory.INSTANCE.getConverter(expected.getClass()).convert(source)) + .isEqualTo(expected); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ObjectPathUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ObjectPathUnitTests.java index 7f5a2068d7..b772772444 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ObjectPathUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ObjectPathUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,15 +16,18 @@ package org.springframework.data.mongodb.core.convert; import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.util.ClassTypeInformation; /** + * Unit tests for {@link ObjectPath}. 
+ * * @author Christoph Strobl */ public class ObjectPathUnitTests { @@ -33,12 +36,12 @@ public class ObjectPathUnitTests { MongoPersistentEntity two; MongoPersistentEntity three; - @Before + @BeforeEach public void setUp() { - one = new BasicMongoPersistentEntity(ClassTypeInformation.from(EntityOne.class)); - two = new BasicMongoPersistentEntity(ClassTypeInformation.from(EntityTwo.class)); - three = new BasicMongoPersistentEntity(ClassTypeInformation.from(EntityThree.class)); + one = new BasicMongoPersistentEntity<>(ClassTypeInformation.from(EntityOne.class)); + two = new BasicMongoPersistentEntity<>(ClassTypeInformation.from(EntityTwo.class)); + three = new BasicMongoPersistentEntity<>(ClassTypeInformation.from(EntityThree.class)); } @Test // DATAMONGO-1703 @@ -81,21 +84,26 @@ public void getPathItemShouldReturnNullWhenIdAndCollectionMatchAndAssignableToIn assertThat(path.getPathItem("id-1", "one", ValueInterface.class)).isNotNull(); } - @Document("one") - static class EntityOne { + @Test // DATAMONGO-2267 + public void collectionLookupShouldBeLazy/* because we may need to resolve SpEL which can be pretty expensive */() { - } + MongoPersistentEntity spied = spy(one); + ObjectPath path = ObjectPath.ROOT.push(new EntityThree(), spied, "id-1"); - static class EntityTwo extends EntityOne { + verify(spied, never()).getCollection(); + path.getPathItem("id-1", "foo", EntityTwo.class); + + verify(spied).getCollection(); } - interface ValueInterface { + @Document("one") + static class EntityOne {} - } + static class EntityTwo extends EntityOne {} - @Document("three") - static class EntityThree implements ValueInterface { + interface ValueInterface {} - } + @Document("three") + static class EntityThree implements ValueInterface {} } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java old mode 100644 
new mode 100755 index 740537cadc..72d0055389 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,51 +15,65 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; import static org.springframework.data.mongodb.core.DocumentTestUtils.*; +import static org.springframework.data.mongodb.core.aggregation.AggregationExpressionCriteria.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.TreeMap; +import java.util.regex.Pattern; +import org.bson.BsonRegularExpression; +import org.bson.conversions.Bson; +import org.bson.types.Code; import org.bson.types.ObjectId; -import org.hamcrest.core.Is; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import 
org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Transient; +import org.springframework.data.convert.ValueConverter; +import org.springframework.data.convert.WritingConverter; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.mongodb.core.Person; +import org.springframework.data.mongodb.core.aggregation.ComparisonOperators; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators; +import org.springframework.data.mongodb.core.aggregation.EvaluationOperators; +import org.springframework.data.mongodb.core.aggregation.EvaluationOperators.Expr; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.geo.GeoJsonPolygon; -import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.FieldName.Type; +import org.springframework.data.mongodb.core.mapping.FieldType; +import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.TextScore; +import 
org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.test.util.BasicDbListBuilder; +import org.springframework.data.mongodb.core.query.TextQuery; import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; -import com.mongodb.QueryBuilder; +import com.mongodb.MongoClientSettings; +import com.mongodb.client.model.Filters; /** * Unit tests for {@link QueryMapper}. @@ -69,145 +83,220 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author David Julia + * @author Gyungrai Wang */ -@RunWith(MockitoJUnitRunner.class) public class QueryMapperUnitTests { - QueryMapper mapper; - MongoMappingContext context; - MappingMongoConverter converter; - - @Mock MongoDbFactory factory; + private QueryMapper mapper; + private MongoMappingContext context; + private MappingMongoConverter converter; - @Before - public void setUp() { + @BeforeEach + void beforeEach() { + MongoCustomConversions conversions = new MongoCustomConversions(); this.context = new MongoMappingContext(); + this.context.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); - this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); + this.converter.setCustomConversions(conversions); this.converter.afterPropertiesSet(); this.mapper = new QueryMapper(converter); } @Test - public void translatesIdPropertyIntoIdKey() { + void translatesIdPropertyIntoIdKey() { org.bson.Document query = new org.bson.Document("foo", "value"); MongoPersistentEntity entity = context.getRequiredPersistentEntity(Sample.class); org.bson.Document result = mapper.getMappedObject(query, entity); - assertThat(result.get("_id"), is(notNullValue())); - 
assertThat(result.get("foo"), is(nullValue())); + assertThat(result).containsKey("_id"); + assertThat(result).doesNotContainKey("foo"); } @Test - public void convertsStringIntoObjectId() { + void convertsStringIntoObjectId() { org.bson.Document query = new org.bson.Document("_id", new ObjectId().toString()); org.bson.Document result = mapper.getMappedObject(query, context.getPersistentEntity(IdWrapper.class)); - assertThat(result.get("_id"), is(instanceOf(ObjectId.class))); + assertThat(result.get("_id")).isInstanceOf(ObjectId.class); } @Test - public void handlesBigIntegerIdsCorrectly() { + void handlesBigIntegerIdsCorrectly() { org.bson.Document document = new org.bson.Document("id", new BigInteger("1")); org.bson.Document result = mapper.getMappedObject(document, context.getPersistentEntity(IdWrapper.class)); - assertThat(result.get("_id"), is((Object) "1")); + assertThat(result).containsEntry("_id", "1"); } @Test - public void handlesObjectIdCapableBigIntegerIdsCorrectly() { + void handlesObjectIdCapableBigIntegerIdsCorrectly() { ObjectId id = new ObjectId(); org.bson.Document document = new org.bson.Document("id", new BigInteger(id.toString(), 16)); org.bson.Document result = mapper.getMappedObject(document, context.getPersistentEntity(IdWrapper.class)); - assertThat(result.get("_id"), is((Object) id)); + assertThat(result).containsEntry("_id", id); + } + + @Test // GH-4490 + void translates$GtCorrectly() { + + Criteria criteria = where("id").gt(new ObjectId().toString()); + + org.bson.Document query = new org.bson.Document("id", new ObjectId().toString()); + org.bson.Document result = mapper.getMappedObject(criteria.getCriteriaObject(), + context.getPersistentEntity(IdWrapper.class)); + Object object = result.get("_id"); + assertThat(object).isInstanceOf(org.bson.Document.class); + org.bson.Document document = (org.bson.Document) object; + assertThat(document.get("$gt")).isInstanceOf(ObjectId.class); + } + + @Test // GH-4490 + void translates$GteCorrectly() { 
+ + Criteria criteria = where("id").gte(new ObjectId().toString()); + + org.bson.Document query = new org.bson.Document("id", new ObjectId().toString()); + org.bson.Document result = mapper.getMappedObject(criteria.getCriteriaObject(), + context.getPersistentEntity(IdWrapper.class)); + Object object = result.get("_id"); + assertThat(object).isInstanceOf(org.bson.Document.class); + org.bson.Document document = (org.bson.Document) object; + assertThat(document.get("$gte")).isInstanceOf(ObjectId.class); + } + + @Test // GH-4490 + void translates$LteCorrectly() { + + Criteria criteria = where("id").lte(new ObjectId().toString()); + + org.bson.Document query = new org.bson.Document("id", new ObjectId().toString()); + org.bson.Document result = mapper.getMappedObject(criteria.getCriteriaObject(), + context.getPersistentEntity(IdWrapper.class)); + Object object = result.get("_id"); + assertThat(object).isInstanceOf(org.bson.Document.class); + org.bson.Document document = (org.bson.Document) object; + assertThat(document.get("$lte")).isInstanceOf(ObjectId.class); + } + + @Test // GH-4490 + void translates$LtCorrectly() { + + Criteria criteria = where("id").lt(new ObjectId().toString()); + + org.bson.Document query = new org.bson.Document("id", new ObjectId().toString()); + org.bson.Document result = mapper.getMappedObject(criteria.getCriteriaObject(), + context.getPersistentEntity(IdWrapper.class)); + Object object = result.get("_id"); + assertThat(object).isInstanceOf(org.bson.Document.class); + org.bson.Document document = (org.bson.Document) object; + assertThat(document.get("$lt")).isInstanceOf(ObjectId.class); + } + + @Test // GH-4490 + void translatesMultipleCompareOperatorsCorrectly() { + + Criteria criteria = where("id").lt(new ObjectId().toString()).lte(new ObjectId().toString()) + .gt(new ObjectId().toString()).gte(new ObjectId().toString()); + + org.bson.Document query = new org.bson.Document("id", new ObjectId().toString()); + org.bson.Document result = 
mapper.getMappedObject(criteria.getCriteriaObject(), + context.getPersistentEntity(IdWrapper.class)); + Object object = result.get("_id"); + assertThat(object).isInstanceOf(org.bson.Document.class); + org.bson.Document document = (org.bson.Document) object; + assertThat(document.get("$lt")).isInstanceOf(ObjectId.class); + assertThat(document.get("$lte")).isInstanceOf(ObjectId.class); + assertThat(document.get("$gt")).isInstanceOf(ObjectId.class); + assertThat(document.get("$gte")).isInstanceOf(ObjectId.class); } @Test // DATAMONGO-278 - public void translates$NeCorrectly() { + void translates$NeCorrectly() { Criteria criteria = where("foo").ne(new ObjectId().toString()); org.bson.Document result = mapper.getMappedObject(criteria.getCriteriaObject(), context.getPersistentEntity(Sample.class)); Object object = result.get("_id"); - assertThat(object, is(instanceOf(org.bson.Document.class))); + assertThat(object).isInstanceOf(org.bson.Document.class); org.bson.Document document = (org.bson.Document) object; - assertThat(document.get("$ne"), is(instanceOf(ObjectId.class))); + assertThat(document.get("$ne")).isInstanceOf(ObjectId.class); } @Test // DATAMONGO-326 - public void handlesEnumsCorrectly() { + void handlesEnumsCorrectly() { Query query = query(where("foo").is(Enum.INSTANCE)); org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), Optional.empty()); Object object = result.get("foo"); - assertThat(object, is(instanceOf(String.class))); + assertThat(object).isInstanceOf(String.class); } @Test - public void handlesEnumsInNotEqualCorrectly() { + void handlesEnumsInNotEqualCorrectly() { Query query = query(where("foo").ne(Enum.INSTANCE)); org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), Optional.empty()); Object object = result.get("foo"); - assertThat(object, is(instanceOf(org.bson.Document.class))); + assertThat(object).isInstanceOf(org.bson.Document.class); Object ne = ((org.bson.Document) object).get("$ne"); - 
assertThat(ne, is(instanceOf(String.class))); - assertThat(ne.toString(), is(Enum.INSTANCE.name())); + assertThat(ne).isInstanceOf(String.class).hasToString(Enum.INSTANCE.name()); } @Test - public void handlesEnumsIn$InCorrectly() { + void handlesEnumsIn$InCorrectly() { Query query = query(where("foo").in(Enum.INSTANCE)); org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), Optional.empty()); Object object = result.get("foo"); - assertThat(object, is(instanceOf(org.bson.Document.class))); + assertThat(object).isInstanceOf(org.bson.Document.class); Object in = ((org.bson.Document) object).get("$in"); - assertThat(in, is(instanceOf(List.class))); + assertThat(in).isInstanceOf(List.class); List list = (List) in; - assertThat(list.size(), is(1)); - assertThat(list.get(0), is(instanceOf(String.class))); - assertThat(list.get(0).toString(), is(Enum.INSTANCE.name())); + assertThat(list).hasSize(1); + assertThat(list.get(0)).isInstanceOf(String.class).hasToString(Enum.INSTANCE.name()); } @Test // DATAMONGO-373 - public void handlesNativelyBuiltQueryCorrectly() { + void handlesNativelyBuiltQueryCorrectly() { - DBObject query = new QueryBuilder().or(new BasicDBObject("foo", "bar")).get(); - mapper.getMappedObject(new org.bson.Document(query.toMap()), Optional.empty()); + Bson query = new BasicDBObject(Filters.or(new BasicDBObject("foo", "bar")).toBsonDocument(org.bson.Document.class, + MongoClientSettings.getDefaultCodecRegistry())); + mapper.getMappedObject(query, Optional.empty()); } @Test // DATAMONGO-369 - public void handlesAllPropertiesIfDocument() { + void handlesAllPropertiesIfDocument() { org.bson.Document query = new org.bson.Document(); query.put("foo", new org.bson.Document("$in", Arrays.asList(1, 2))); query.put("bar", new Person()); org.bson.Document result = mapper.getMappedObject(query, Optional.empty()); - assertThat(result.get("bar"), is(notNullValue())); + assertThat(result).containsKey("bar"); } @Test // DATAMONGO-429 - public void 
transformsArraysCorrectly() { + void transformsArraysCorrectly() { Query query = new BasicQuery("{ 'tags' : { '$all' : [ 'green', 'orange']}}"); org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), Optional.empty()); - assertThat(result.toJson(), is(query.getQueryObject().toJson())); + assertThat(result.toJson()).isEqualTo(query.getQueryObject().toJson()); } @Test - public void doesHandleNestedFieldsWithDefaultIdNames() { + void doesHandleNestedFieldsWithDefaultIdNames() { org.bson.Document document = new org.bson.Document("id", new ObjectId().toString()); document.put("nested", new org.bson.Document("id", new ObjectId().toString())); @@ -215,12 +304,12 @@ public void doesHandleNestedFieldsWithDefaultIdNames() { MongoPersistentEntity entity = context.getRequiredPersistentEntity(ClassWithDefaultId.class); org.bson.Document result = mapper.getMappedObject(document, entity); - assertThat(result.get("_id"), is(instanceOf(ObjectId.class))); - assertThat(((org.bson.Document) result.get("nested")).get("_id"), is(instanceOf(ObjectId.class))); + assertThat(result.get("_id")).isInstanceOf(ObjectId.class); + assertThat(((org.bson.Document) result.get("nested")).get("_id")).isInstanceOf(ObjectId.class); } @Test // DATAMONGO-493 - public void doesNotTranslateNonIdPropertiesFor$NeCriteria() { + void doesNotTranslateNonIdPropertiesFor$NeCriteria() { ObjectId accidentallyAnObjectId = new ObjectId(); @@ -229,66 +318,63 @@ public void doesHandleNestedFieldsWithDefaultIdNames() { org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(UserEntity.class)); - assertThat(document.get("publishers"), is(instanceOf(org.bson.Document.class))); + assertThat(document.get("publishers")).isInstanceOf(org.bson.Document.class); org.bson.Document publishers = (org.bson.Document) document.get("publishers"); - assertThat(publishers.containsKey("$ne"), is(true)); - assertThat(publishers.get("$ne"), is(instanceOf(String.class))); + 
assertThat(publishers).containsKey("$ne"); + assertThat(publishers.get("$ne")).isInstanceOf(String.class); } @Test // DATAMONGO-494 - public void usesEntityMetadataInOr() { + void usesEntityMetadataInOr() { Query query = query(new Criteria().orOperator(where("foo").is("bar"))); org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Sample.class)); - assertThat(result.keySet(), hasSize(1)); - assertThat(result.keySet(), hasItem("$or")); + assertThat(result.keySet()).hasSize(1).containsOnly("$or"); List ors = getAsDBList(result, "$or"); - assertThat(ors, hasSize(1)); + assertThat(ors).hasSize(1); org.bson.Document criterias = getAsDocument(ors, 0); - assertThat(criterias.keySet(), hasSize(1)); - assertThat(criterias.get("_id"), is(notNullValue())); - assertThat(criterias.get("foo"), is(nullValue())); + assertThat(criterias.keySet()).hasSize(1).doesNotContain("foo"); + assertThat(criterias).containsKey("_id"); } @Test - public void translatesPropertyReferenceCorrectly() { + void translatesPropertyReferenceCorrectly() { Query query = query(where("field").is(new CustomizedField())); org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(CustomizedField.class)); - assertThat(result.containsKey("foo"), is(true)); - assertThat(result.keySet().size(), is(1)); + assertThat(result).containsKey("foo").hasSize(1); } @Test - public void translatesNestedPropertyReferenceCorrectly() { + void translatesNestedPropertyReferenceCorrectly() { Query query = query(where("field.field").is(new CustomizedField())); org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(CustomizedField.class)); - assertThat(result.containsKey("foo.foo"), is(true)); - assertThat(result.keySet().size(), is(1)); + assertThat(result).containsKey("foo.foo"); + assertThat(result.keySet()).hasSize(1); } @Test - public void returnsOriginalKeyIfNoPropertyReference() { + void 
returnsOriginalKeyIfNoPropertyReference() { Query query = query(where("bar").is(new CustomizedField())); org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(CustomizedField.class)); - assertThat(result.containsKey("bar"), is(true)); - assertThat(result.keySet().size(), is(1)); + assertThat(result).containsKey("bar"); + assertThat(result.keySet()).hasSize(1); } @Test - public void convertsAssociationCorrectly() { + void convertsAssociationCorrectly() { Reference reference = new Reference(); reference.id = 5L; @@ -299,11 +385,11 @@ public void convertsAssociationCorrectly() { Object referenceObject = object.get("reference"); - assertThat(referenceObject, is(instanceOf(com.mongodb.DBRef.class))); + assertThat(referenceObject).isInstanceOf(com.mongodb.DBRef.class); } @Test - public void convertsNestedAssociationCorrectly() { + void convertsNestedAssociationCorrectly() { Reference reference = new Reference(); reference.id = 5L; @@ -314,11 +400,11 @@ public void convertsNestedAssociationCorrectly() { Object referenceObject = object.get("withDbRef.reference"); - assertThat(referenceObject, is(instanceOf(com.mongodb.DBRef.class))); + assertThat(referenceObject).isInstanceOf(com.mongodb.DBRef.class); } @Test - public void convertsInKeywordCorrectly() { + void convertsInKeywordCorrectly() { Reference first = new Reference(); first.id = 5L; @@ -333,36 +419,36 @@ public void convertsInKeywordCorrectly() { org.bson.Document reference = DocumentTestUtils.getAsDocument(result, "reference"); List inClause = getAsDBList(reference, "$in"); - assertThat(inClause, hasSize(2)); - assertThat(inClause.get(0), is(instanceOf(com.mongodb.DBRef.class))); - assertThat(inClause.get(1), is(instanceOf(com.mongodb.DBRef.class))); + assertThat(inClause).hasSize(2); + assertThat(inClause.get(0)).isInstanceOf(com.mongodb.DBRef.class); + assertThat(inClause.get(1)).isInstanceOf(com.mongodb.DBRef.class); } @Test // DATAMONGO-570 - public void 
correctlyConvertsNullReference() { + void correctlyConvertsNullReference() { Query query = query(where("reference").is(null)); org.bson.Document object = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(WithDBRef.class)); - assertThat(object.get("reference"), is(nullValue())); + assertThat(object.get("reference")).isNull(); } @Test // DATAMONGO-629 - public void doesNotMapIdIfNoEntityMetadataAvailable() { + void doesNotMapIdIfNoEntityMetadataAvailable() { String id = new ObjectId().toString(); Query query = query(where("id").is(id)); org.bson.Document object = mapper.getMappedObject(query.getQueryObject(), Optional.empty()); - assertThat(object.containsKey("id"), is(true)); - assertThat(object.get("id"), is((Object) id)); - assertThat(object.containsKey("_id"), is(false)); + assertThat(object).containsKey("id"); + assertThat(object).containsEntry("id", id); + assertThat(object).doesNotContainKey("_id"); } @Test // DATAMONGO-677 - public void handleMapWithDBRefCorrectly() { + void handleMapWithDBRefCorrectly() { org.bson.Document mapDocument = new org.bson.Document(); mapDocument.put("test", new com.mongodb.DBRef("test", "test")); @@ -371,96 +457,175 @@ public void handleMapWithDBRefCorrectly() { org.bson.Document mapped = mapper.getMappedObject(document, context.getPersistentEntity(WithMapDBRef.class)); - assertThat(mapped.containsKey("mapWithDBRef"), is(true)); - assertThat(mapped.get("mapWithDBRef"), instanceOf(org.bson.Document.class)); - assertThat(((org.bson.Document) mapped.get("mapWithDBRef")).containsKey("test"), is(true)); - assertThat(((org.bson.Document) mapped.get("mapWithDBRef")).get("test"), instanceOf(com.mongodb.DBRef.class)); + assertThat(mapped).containsKey("mapWithDBRef"); + assertThat(mapped.get("mapWithDBRef")).isInstanceOf(org.bson.Document.class); + assertThat(((org.bson.Document) mapped.get("mapWithDBRef"))).containsKey("test"); + assertThat(((org.bson.Document) 
mapped.get("mapWithDBRef")).get("test")).isInstanceOf(com.mongodb.DBRef.class); } @Test - public void convertsUnderscoreIdValueWithoutMetadata() { + void convertsUnderscoreIdValueWithoutMetadata() { org.bson.Document document = new org.bson.Document().append("_id", new ObjectId().toString()); org.bson.Document mapped = mapper.getMappedObject(document, Optional.empty()); - assertThat(mapped.containsKey("_id"), is(true)); - assertThat(mapped.get("_id"), is(instanceOf(ObjectId.class))); + assertThat(mapped).containsKey("_id"); + assertThat(mapped.get("_id")).isInstanceOf(ObjectId.class); } @Test // DATAMONGO-705 - public void convertsDBRefWithExistsQuery() { + void convertsDBRefWithExistsQuery() { Query query = query(where("reference").exists(false)); - BasicMongoPersistentEntity entity = context.getRequiredPersistentEntity(WithDBRef.class); + MongoPersistentEntity entity = context.getRequiredPersistentEntity(WithDBRef.class); org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), entity); org.bson.Document reference = getAsDocument(mappedObject, "reference"); - assertThat(reference.containsKey("$exists"), is(true)); - assertThat(reference.get("$exists"), is((Object) false)); + assertThat(reference).containsKey("$exists"); + assertThat(reference).containsEntry("$exists", false); } @Test // DATAMONGO-706 - public void convertsNestedDBRefsCorrectly() { + void convertsNestedDBRefsCorrectly() { Reference reference = new Reference(); reference.id = 5L; Query query = query(where("someString").is("foo").andOperator(where("reference").in(reference))); - BasicMongoPersistentEntity entity = context.getRequiredPersistentEntity(WithDBRef.class); + MongoPersistentEntity entity = context.getRequiredPersistentEntity(WithDBRef.class); org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), entity); - assertThat(mappedObject.get("someString"), is((Object) "foo")); + assertThat(mappedObject).containsEntry("someString", "foo"); List 
andClause = getAsDBList(mappedObject, "$and"); - assertThat(andClause, hasSize(1)); + assertThat(andClause).hasSize(1); List inClause = getAsDBList(getAsDocument(getAsDocument(andClause, 0), "reference"), "$in"); - assertThat(inClause, hasSize(1)); - assertThat(inClause.get(0), is(instanceOf(com.mongodb.DBRef.class))); + assertThat(inClause).hasSize(1); + assertThat(inClause.get(0)).isInstanceOf(com.mongodb.DBRef.class); + } + + @Test // GH-3853 + void convertsDocumentReferenceOnIdPropertyCorrectly() { + + Sample reference = new Sample(); + reference.foo = "s1"; + + Query query = query(where("sample").is(reference)); + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedQuery).containsEntry("sample", "s1"); + } + + @Test // GH-4033 + void convertsNestedPathToIdPropertyOfDocumentReferenceCorrectly() { + + Query query = query(where("sample.foo").is("s1")); + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedQuery).containsEntry("sample", "s1"); + } + + @Test // GH-4033 + void convertsNestedPathToIdPropertyOfDocumentReferenceCorrectlyWhenItShouldBeConvertedToObjectId() { + + ObjectId id = new ObjectId(); + Query query = query(where("sample.foo").is(id.toHexString())); + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedQuery.get("sample")).satisfies(it -> { + + assertThat(it).isInstanceOf(ObjectId.class); + assertThat(((ObjectId) it).toHexString()).isEqualTo(id.toHexString()); + }); + } + + @Test // GH-3853 + void convertsListDocumentReferenceOnIdPropertyCorrectly() { + + Sample reference = new Sample(); + reference.foo = "s1"; + + Query query = query(where("samples").is(Arrays.asList(reference))); + org.bson.Document mappedQuery = 
mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedQuery).containsEntry("samples", Arrays.asList("s1")); + } + + @Test // GH-3853 + void convertsDocumentReferenceOnNonIdPropertyCorrectly() { + + Customer reference = new Customer(); + reference.id = new ObjectId(); + reference.name = "c1"; + + Query query = query(where("customer").is(reference)); + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedQuery).containsEntry("customer", "c1"); + } + + @Test // GH-3853 + void convertsListDocumentReferenceOnNonIdPropertyCorrectly() { + + Customer reference = new Customer(); + reference.id = new ObjectId(); + reference.name = "c1"; + + Query query = query(where("customers").is(Arrays.asList(reference))); + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedQuery).containsEntry("customers", Arrays.asList("c1")); } @Test // DATAMONGO-752 - public void mapsSimpleValuesStartingWith$Correctly() { + void mapsSimpleValuesStartingWith$Correctly() { Query query = query(where("myvalue").is("$334")); org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), Optional.empty()); - assertThat(result.keySet(), hasSize(1)); - assertThat(result.get("myvalue"), is((Object) "$334")); + assertThat(result.keySet()).hasSize(1); + assertThat(result).containsEntry("myvalue", "$334"); } @Test // DATAMONGO-752 - public void mapsKeywordAsSimpleValuesCorrectly() { + void mapsKeywordAsSimpleValuesCorrectly() { Query query = query(where("myvalue").is("$center")); org.bson.Document result = mapper.getMappedObject(query.getQueryObject(), Optional.empty()); - assertThat(result.keySet(), hasSize(1)); - assertThat(result.get("myvalue"), is((Object) "$center")); + assertThat(result.keySet()).hasSize(1); 
+ assertThat(result).containsEntry("myvalue", "$center"); } @Test // DATAMONGO-805 - public void shouldExcludeDBRefAssociation() { + void shouldExcludeDBRefAssociation() { Query query = query(where("someString").is("foo")); query.fields().exclude("reference"); - BasicMongoPersistentEntity entity = context.getRequiredPersistentEntity(WithDBRef.class); + MongoPersistentEntity entity = context.getRequiredPersistentEntity(WithDBRef.class); org.bson.Document queryResult = mapper.getMappedObject(query.getQueryObject(), entity); org.bson.Document fieldsResult = mapper.getMappedObject(query.getFieldsObject(), entity); - assertThat(queryResult.get("someString"), is((Object) "foo")); - assertThat(fieldsResult.get("reference"), is((Object) 0)); + assertThat(queryResult).containsEntry("someString", "foo"); + assertThat(fieldsResult).containsEntry("reference", 0); } @Test // DATAMONGO-686 - public void queryMapperShouldNotChangeStateInGivenQueryObjectWhenIdConstrainedByInList() { + void queryMapperShouldNotChangeStateInGivenQueryObjectWhenIdConstrainedByInList() { - BasicMongoPersistentEntity persistentEntity = context.getRequiredPersistentEntity(Sample.class); + MongoPersistentEntity persistentEntity = context.getRequiredPersistentEntity(Sample.class); String idPropertyName = persistentEntity.getIdProperty().getName(); org.bson.Document queryObject = query(where(idPropertyName).in("42")).getQueryObject(); @@ -468,26 +633,26 @@ public void queryMapperShouldNotChangeStateInGivenQueryObjectWhenIdConstrainedBy mapper.getMappedObject(queryObject, persistentEntity); Object idValuesAfter = getAsDocument(queryObject, idPropertyName).get("$in"); - assertThat(idValuesAfter, is(idValuesBefore)); + assertThat(idValuesAfter).isEqualTo(idValuesBefore); } @Test // DATAMONGO-821 - public void queryMapperShouldNotTryToMapDBRefListPropertyIfNestedInsideDocumentWithinDocument() { + void queryMapperShouldNotTryToMapDBRefListPropertyIfNestedInsideDocumentWithinDocument() { org.bson.Document 
queryObject = query( where("referenceList").is(new org.bson.Document("$nested", new org.bson.Document("$keys", 0L)))) - .getQueryObject(); + .getQueryObject(); org.bson.Document mappedObject = mapper.getMappedObject(queryObject, context.getPersistentEntity(WithDBRefList.class)); org.bson.Document referenceObject = getAsDocument(mappedObject, "referenceList"); org.bson.Document nestedObject = getAsDocument(referenceObject, "$nested"); - assertThat(nestedObject, is((org.bson.Document) new org.bson.Document("$keys", 0L))); + assertThat(nestedObject).isEqualTo(new org.bson.Document("$keys", 0L)); } @Test // DATAMONGO-821 - public void queryMapperShouldNotTryToMapDBRefPropertyIfNestedInsideDocumentWithinDocument() { + void queryMapperShouldNotTryToMapDBRefPropertyIfNestedInsideDocumentWithinDocument() { org.bson.Document queryObject = query( where("reference").is(new org.bson.Document("$nested", new org.bson.Document("$keys", 0L)))).getQueryObject(); @@ -496,39 +661,39 @@ public void queryMapperShouldNotTryToMapDBRefPropertyIfNestedInsideDocumentWithi org.bson.Document referenceObject = getAsDocument(mappedObject, "reference"); org.bson.Document nestedObject = getAsDocument(referenceObject, "$nested"); - assertThat(nestedObject, is((org.bson.Document) new org.bson.Document("$keys", 0L))); + assertThat(nestedObject).isEqualTo(new org.bson.Document("$keys", 0L)); } @Test // DATAMONGO-821 - public void queryMapperShouldMapDBRefPropertyIfNestedInDocument() { + void queryMapperShouldMapDBRefPropertyIfNestedInDocument() { Reference sample = new Reference(); sample.id = 321L; - org.bson.Document queryObject = query(where("reference").is(new org.bson.Document("$in", Arrays.asList(sample)))) - .getQueryObject(); + org.bson.Document queryObject = query( + where("reference").is(new org.bson.Document("$in", Collections.singletonList(sample)))).getQueryObject(); org.bson.Document mappedObject = mapper.getMappedObject(queryObject, context.getPersistentEntity(WithDBRef.class)); 
org.bson.Document referenceObject = getAsDocument(mappedObject, "reference"); List inObject = getAsDBList(referenceObject, "$in"); - assertThat(inObject.get(0), is(instanceOf(com.mongodb.DBRef.class))); + assertThat(inObject.get(0)).isInstanceOf(com.mongodb.DBRef.class); } @Test // DATAMONGO-773 - public void queryMapperShouldBeAbleToProcessQueriesThatIncludeDbRefFields() { + void queryMapperShouldBeAbleToProcessQueriesThatIncludeDbRefFields() { - BasicMongoPersistentEntity persistentEntity = context.getRequiredPersistentEntity(WithDBRef.class); + MongoPersistentEntity persistentEntity = context.getRequiredPersistentEntity(WithDBRef.class); Query qry = query(where("someString").is("abc")); qry.fields().include("reference"); org.bson.Document mappedFields = mapper.getMappedObject(qry.getFieldsObject(), persistentEntity); - assertThat(mappedFields, is(notNullValue())); + assertThat(mappedFields).isNotNull(); } @Test // DATAMONGO-893 - public void classInformationShouldNotBePresentInDocumentUsedInFinderMethods() { + void classInformationShouldNotBePresentInDocumentUsedInFinderMethods() { EmbeddedClass embedded = new EmbeddedClass(); embedded.id = "1"; @@ -538,18 +703,17 @@ public void classInformationShouldNotBePresentInDocumentUsedInFinderMethods() { Query query = query(where("embedded").in(Arrays.asList(embedded, embedded2))); org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class)); - assertThat(document, - equalTo(org.bson.Document.parse("{ \"embedded\" : { \"$in\" : [ { \"_id\" : \"1\"} , { \"_id\" : \"2\"}]}}"))); + assertThat(document).isEqualTo("{ \"embedded\" : { \"$in\" : [ { \"_id\" : \"1\"} , { \"_id\" : \"2\"}]}}"); } @Test // DATAMONGO-1406 - public void shouldMapQueryForNestedCustomizedPropertiesUsingConfiguredFieldNames() { + void shouldMapQueryForNestedCustomizedPropertiesUsingConfiguredFieldNames() { EmbeddedClass embeddedClass = new EmbeddedClass(); embeddedClass.customizedField = "hello"; 
Foo foo = new Foo(); - foo.listOfItems = Arrays.asList(embeddedClass); + foo.listOfItems = Collections.singletonList(embeddedClass); Query query = new Query(Criteria.where("listOfItems") // .elemMatch(new Criteria(). // @@ -557,32 +721,33 @@ public void shouldMapQueryForNestedCustomizedPropertiesUsingConfiguredFieldNames org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class)); - assertThat(document, isBsonObject().containing("my_items.$elemMatch.$and", - new BasicDbListBuilder().add(new BasicDBObject("fancy_custom_name", embeddedClass.customizedField)).get())); + assertThat(document).containsEntry("my_items.$elemMatch.$and", + Collections.singletonList(new org.bson.Document("fancy_custom_name", embeddedClass.customizedField))); } @Test // DATAMONGO-647 - public void customizedFieldNameShouldBeMappedCorrectlyWhenApplyingSort() { + void customizedFieldNameShouldBeMappedCorrectlyWhenApplyingSort() { Query query = query(where("field").is("bar")).with(Sort.by(Direction.DESC, "field")); org.bson.Document document = mapper.getMappedObject(query.getSortObject(), context.getPersistentEntity(CustomizedField.class)); - assertThat(document, equalTo(new org.bson.Document().append("foo", -1))); + assertThat(document).isEqualTo(new org.bson.Document().append("foo", -1)); } @Test // DATAMONGO-973 - public void getMappedFieldsAppendsTextScoreFieldProperlyCorrectlyWhenNotPresent() { + void getMappedFieldsAppendsTextScoreFieldProperlyCorrectlyWhenNotPresent() { Query query = new Query(); org.bson.Document document = mapper.getMappedFields(query.getFieldsObject(), context.getPersistentEntity(WithTextScoreProperty.class)); - assertThat(document, equalTo(new org.bson.Document().append("score", new org.bson.Document("$meta", "textScore")))); + assertThat(document) + .isEqualTo(new org.bson.Document().append("score", new org.bson.Document("$meta", "textScore"))); } @Test // DATAMONGO-973 - public void 
getMappedFieldsReplacesTextScoreFieldProperlyCorrectlyWhenPresent() { + void getMappedFieldsReplacesTextScoreFieldProperlyCorrectlyWhenPresent() { Query query = new Query(); query.fields().include("textScore"); @@ -590,115 +755,116 @@ public void getMappedFieldsReplacesTextScoreFieldProperlyCorrectlyWhenPresent() org.bson.Document document = mapper.getMappedFields(query.getFieldsObject(), context.getPersistentEntity(WithTextScoreProperty.class)); - assertThat(document, equalTo(new org.bson.Document().append("score", new org.bson.Document("$meta", "textScore")))); + assertThat(document) + .isEqualTo(new org.bson.Document().append("score", new org.bson.Document("$meta", "textScore"))); } @Test // DATAMONGO-973 - public void getMappedSortAppendsTextScoreProperlyWhenSortedByScore() { + void getMappedSortAppendsTextScoreProperlyWhenSortedByScore() { Query query = new Query().with(Sort.by("textScore")); org.bson.Document document = mapper.getMappedSort(query.getSortObject(), context.getPersistentEntity(WithTextScoreProperty.class)); - assertThat(document, equalTo(new org.bson.Document().append("score", new org.bson.Document("$meta", "textScore")))); + assertThat(document) + .isEqualTo(new org.bson.Document().append("score", new org.bson.Document("$meta", "textScore"))); } @Test // DATAMONGO-973 - public void getMappedSortIgnoresTextScoreWhenNotSortedByScore() { + void getMappedSortIgnoresTextScoreWhenNotSortedByScore() { Query query = new Query().with(Sort.by("id")); org.bson.Document document = mapper.getMappedSort(query.getSortObject(), context.getPersistentEntity(WithTextScoreProperty.class)); - assertThat(document, equalTo(new org.bson.Document().append("_id", 1))); + assertThat(document).isEqualTo(new org.bson.Document().append("_id", 1)); } - @Test // DATAMONGO-1070 - public void mapsIdReferenceToDBRefCorrectly() { + @Test // DATAMONGO-1070, DATAMONGO-1798 + void mapsIdReferenceToDBRefCorrectly() { ObjectId id = new ObjectId(); - org.bson.Document query = new 
org.bson.Document("reference.id", new com.mongodb.DBRef("reference", id.toString())); + org.bson.Document query = new org.bson.Document("reference.id", new com.mongodb.DBRef("reference", id)); org.bson.Document result = mapper.getMappedObject(query, context.getPersistentEntity(WithDBRef.class)); - assertThat(result.containsKey("reference"), is(true)); + assertThat(result).containsKey("reference"); com.mongodb.DBRef reference = getTypedValue(result, "reference", com.mongodb.DBRef.class); - assertThat(reference.getId(), is(instanceOf(ObjectId.class))); + assertThat(reference.getId()).isInstanceOf(ObjectId.class); } @Test // DATAMONGO-1050 - public void shouldUseExplicitlySetFieldnameForIdPropertyCandidates() { + void shouldUseExplicitlySetFieldnameForIdPropertyCandidates() { Query query = query(where("nested.id").is("bar")); org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(RootForClassWithExplicitlyRenamedIdField.class)); - assertThat(document, equalTo(new org.bson.Document().append("nested.id", "bar"))); + assertThat(document).isEqualTo(new org.bson.Document().append("nested.id", "bar")); } @Test // DATAMONGO-1050 - public void shouldUseExplicitlySetFieldnameForIdPropertyCandidatesUsedInSortClause() { + void shouldUseExplicitlySetFieldnameForIdPropertyCandidatesUsedInSortClause() { Query query = new Query().with(Sort.by("nested.id")); org.bson.Document document = mapper.getMappedSort(query.getSortObject(), context.getPersistentEntity(RootForClassWithExplicitlyRenamedIdField.class)); - assertThat(document, equalTo(new org.bson.Document().append("nested.id", 1))); + assertThat(document).isEqualTo(new org.bson.Document().append("nested.id", 1)); } @Test // DATAMONGO-1135 - public void nearShouldUseGeoJsonRepresentationOnUnmappedProperty() { + void nearShouldUseGeoJsonRepresentationOnUnmappedProperty() { Query query = query(where("foo").near(new GeoJsonPoint(100, 50))); org.bson.Document document = 
mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(ClassWithGeoTypes.class)); - assertThat(document, isBsonObject().containing("foo.$near.$geometry.type", "Point")); - assertThat(document, isBsonObject().containing("foo.$near.$geometry.coordinates.[0]", 100D)); - assertThat(document, isBsonObject().containing("foo.$near.$geometry.coordinates.[1]", 50D)); + assertThat(document).containsEntry("foo.$near.$geometry.type", "Point"); + assertThat(document).containsEntry("foo.$near.$geometry.coordinates.[0]", 100D); + assertThat(document).containsEntry("foo.$near.$geometry.coordinates.[1]", 50D); } @Test // DATAMONGO-1135 - public void nearShouldUseGeoJsonRepresentationWhenMappingToGoJsonType() { + void nearShouldUseGeoJsonRepresentationWhenMappingToGoJsonType() { Query query = query(where("geoJsonPoint").near(new GeoJsonPoint(100, 50))); org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(ClassWithGeoTypes.class)); - assertThat(document, isBsonObject().containing("geoJsonPoint.$near.$geometry.type", "Point")); + assertThat(document).containsEntry("geoJsonPoint.$near.$geometry.type", "Point"); } @Test // DATAMONGO-1135 - public void nearSphereShouldUseGeoJsonRepresentationWhenMappingToGoJsonType() { + void nearSphereShouldUseGeoJsonRepresentationWhenMappingToGoJsonType() { Query query = query(where("geoJsonPoint").nearSphere(new GeoJsonPoint(100, 50))); org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(ClassWithGeoTypes.class)); - assertThat(document, isBsonObject().containing("geoJsonPoint.$nearSphere.$geometry.type", "Point")); + assertThat(document).containsEntry("geoJsonPoint.$nearSphere.$geometry.type", "Point"); } @Test // DATAMONGO-1135 - public void shouldMapNameCorrectlyForGeoJsonType() { + void shouldMapNameCorrectlyForGeoJsonType() { Query query = query(where("namedGeoJsonPoint").nearSphere(new GeoJsonPoint(100, 50))); 
org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(ClassWithGeoTypes.class)); - assertThat(document, - isBsonObject().containing("geoJsonPointWithNameViaFieldAnnotation.$nearSphere.$geometry.type", "Point")); + assertThat(document).containsEntry("geoJsonPointWithNameViaFieldAnnotation.$nearSphere.$geometry.type", "Point"); } @Test // DATAMONGO-1135 - public void withinShouldUseGeoJsonPolygonWhenMappingPolygonOn2DSphereIndex() { + void withinShouldUseGeoJsonPolygonWhenMappingPolygonOn2DSphereIndex() { Query query = query(where("geoJsonPoint") .within(new GeoJsonPolygon(new Point(0, 0), new Point(100, 100), new Point(100, 0), new Point(0, 0)))); @@ -706,11 +872,11 @@ public void withinShouldUseGeoJsonPolygonWhenMappingPolygonOn2DSphereIndex() { org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(ClassWithGeoTypes.class)); - assertThat(document, isBsonObject().containing("geoJsonPoint.$geoWithin.$geometry.type", "Polygon")); + assertThat(document).containsEntry("geoJsonPoint.$geoWithin.$geometry.type", "Polygon"); } @Test // DATAMONGO-1134 - public void intersectsShouldUseGeoJsonRepresentationCorrectly() { + void intersectsShouldUseGeoJsonRepresentationCorrectly() { Query query = query(where("geoJsonPoint") .intersects(new GeoJsonPolygon(new Point(0, 0), new Point(100, 100), new Point(100, 0), new Point(0, 0)))); @@ -718,34 +884,56 @@ public void intersectsShouldUseGeoJsonRepresentationCorrectly() { org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(ClassWithGeoTypes.class)); - assertThat(document, isBsonObject().containing("geoJsonPoint.$geoIntersects.$geometry.type", "Polygon")); - assertThat(document, isBsonObject().containing("geoJsonPoint.$geoIntersects.$geometry.coordinates")); + assertThat(document).containsEntry("geoJsonPoint.$geoIntersects.$geometry.type", "Polygon"); + 
assertThat(document).containsKey("geoJsonPoint.$geoIntersects.$geometry.coordinates"); } @Test // DATAMONGO-1269 - public void mappingShouldRetainNumericMapKey() { + void mappingShouldRetainNumericMapKey() { Query query = query(where("map.1.stringProperty").is("ba'alzamon")); org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(EntityWithComplexValueTypeMap.class)); - assertThat(document.containsKey("map.1.stringProperty"), is(true)); + assertThat(document).containsKey("map.1.stringProperty"); + } + + @Test // GH-3688 + void mappingShouldRetainNestedNumericMapKeys() { + + Query query = query(where("outerMap.1.map.2.stringProperty").is("ba'alzamon")); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(EntityWithIntKeyedMapOfMap.class)); + + assertThat(document).containsKey("outerMap.1.map.2.stringProperty"); + } + + @Test // GH-3688 + void mappingShouldAllowSettingEntireNestedNumericKeyedMapValue() { + + Query query = query(where("outerMap.1.map").is(null)); // newEntityWithComplexValueTypeMap() + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(EntityWithIntKeyedMapOfMap.class)); + + assertThat(document).containsKey("outerMap.1.map"); } @Test // DATAMONGO-1269 - public void mappingShouldRetainNumericPositionInList() { + void mappingShouldRetainNumericPositionInList() { Query query = query(where("list.1.stringProperty").is("ba'alzamon")); org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(EntityWithComplexValueTypeList.class)); - assertThat(document.containsKey("list.1.stringProperty"), is(true)); + assertThat(document).containsKey("list.1.stringProperty"); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectly() { + void exampleShouldBeMappedCorrectly() { Foo probe = new Foo(); probe.embedded = new EmbeddedClass(); @@ -755,11 
+943,11 @@ public void exampleShouldBeMappedCorrectly() { org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class)); - assertThat(document, isBsonObject().containing("embedded\\._id", "conflux")); + assertThat(document).containsEntry("embedded\\._id", "conflux"); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyWhenContainingLegacyPoint() { + void exampleShouldBeMappedCorrectlyWhenContainingLegacyPoint() { ClassWithGeoTypes probe = new ClassWithGeoTypes(); probe.legacyPoint = new Point(10D, 20D); @@ -769,127 +957,1045 @@ public void exampleShouldBeMappedCorrectlyWhenContainingLegacyPoint() { org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(WithDBRef.class)); - assertThat(document.get("legacyPoint.x"), Is. is(10D)); - assertThat(document.get("legacyPoint.y"), Is. is(20D)); + assertThat(document).containsEntry("legacyPoint.x", 10D); + assertThat(document).containsEntry("legacyPoint.y", 20D); } - @Document - public class Foo { - @Id private ObjectId id; - EmbeddedClass embedded; + @Test // GH-3544 + void exampleWithCombinedCriteriaShouldBeMappedCorrectly() { + + Foo probe = new Foo(); + probe.embedded = new EmbeddedClass(); + probe.embedded.id = "conflux"; + + Query query = query(byExample(probe).and("listOfItems").exists(true)); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class)); - @Field("my_items") - List listOfItems; + assertThat(document).containsEntry("embedded\\._id", "conflux").containsEntry("my_items", + new org.bson.Document("$exists", true)); } - public class EmbeddedClass { - public String id; + @Test // DATAMONGO-1988 + void mapsStringObjectIdRepresentationToObjectIdWhenReferencingIdProperty() { + + Query query = query(where("sample.foo").is(new ObjectId().toHexString())); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + 
context.getPersistentEntity(ClassWithEmbedded.class)); - @Field("fancy_custom_name") public String customizedField; + assertThat(document.get("sample._id")).isInstanceOf(ObjectId.class); } - class IdWrapper { - Object id; + @Test // DATAMONGO-1988 + void matchesExactFieldNameToIdProperty() { + + Query query = query(where("sample.iid").is(new ObjectId().toHexString())); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(ClassWithEmbedded.class)); + + assertThat(document.get("sample.iid")).isInstanceOf(String.class); } - class ClassWithEmbedded { - @Id String id; - Sample sample; + @Test // DATAMONGO-1988 + void leavesNonObjectIdStringIdRepresentationUntouchedWhenReferencingIdProperty() { + + Query query = query(where("sample.foo").is("id-1")); + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(ClassWithEmbedded.class)); + + assertThat(document.get("sample._id")).isInstanceOf(String.class); } - class ClassWithDefaultId { + @Test // DATAMONGO-2168 + void getMappedObjectShouldNotMapTypeHint() { - String id; - ClassWithDefaultId nested; + converter.setTypeMapper(new DefaultMongoTypeMapper("className")); + + org.bson.Document update = new org.bson.Document("className", "foo"); + org.bson.Document mappedObject = mapper.getMappedObject(update, context.getPersistentEntity(UserEntity.class)); + + assertThat(mappedObject).containsEntry("className", "foo"); } - class Sample { + @Test // DATAMONGO-2168 + void getMappedObjectShouldIgnorePathsLeadingToJavaLangClassProperties/* like Class#getName() */() { - @Id private String foo; + org.bson.Document update = new org.bson.Document("className", "foo"); + org.bson.Document mappedObject = mapper.getMappedObject(update, context.getPersistentEntity(UserEntity.class)); + + assertThat(mappedObject).containsEntry("className", "foo"); } - class BigIntegerId { + @Test // DATAMONGO-2193 + void 
shouldNotConvertHexStringToObjectIdForRenamedNestedIdField() { - @Id private BigInteger id; + String idHex = new ObjectId().toHexString(); + Query query = new Query(where("nested.id").is(idHex)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(RootForClassWithExplicitlyRenamedIdField.class)); + + assertThat(document).isEqualTo(new org.bson.Document("nested.id", idHex)); } - enum Enum { - INSTANCE; + @Test // DATAMONGO-2221 + void shouldNotConvertHexStringToObjectIdForRenamedDeeplyNestedIdField() { + + String idHex = new ObjectId().toHexString(); + Query query = new Query(where("nested.deeplyNested.id").is(idHex)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(RootForClassWithExplicitlyRenamedIdField.class)); + + assertThat(document).isEqualTo(new org.bson.Document("nested.deeplyNested.id", idHex)); } - class UserEntity { - String id; - List publishers = new ArrayList(); + @Test // DATAMONGO-2221 + void shouldNotConvertHexStringToObjectIdForUnresolvablePath() { + + String idHex = new ObjectId().toHexString(); + Query query = new Query(where("nested.unresolvablePath.id").is(idHex)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(RootForClassWithExplicitlyRenamedIdField.class)); + + assertThat(document).isEqualTo(new org.bson.Document("nested.unresolvablePath.id", idHex)); } - class CustomizedField { + @Test // DATAMONGO-1849 + void shouldConvertPropertyWithExplicitTargetType() { - @Field("foo") CustomizedField field; + String script = "if (a > b) a else b"; + Query query = new Query(where("script").is(script)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithExplicitTargetTypes.class)); + + assertThat(document).isEqualTo(new org.bson.Document("script", new Code(script))); } - class WithDBRef { + @Test // 
DATAMONGO-1849 + void shouldConvertCollectionPropertyWithExplicitTargetType() { - String someString; - @DBRef Reference reference; + String script = "if (a > b) a else b"; + Query query = new Query(where("scripts").is(script)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithExplicitTargetTypes.class)); + + assertThat(document).isEqualTo(new org.bson.Document("scripts", new Code(script))); } - class WithDBRefList { + @Test // GH-4649 + void shouldRetainRegexPattern() { - String someString; - @DBRef List referenceList; + Query query = new Query(where("text").regex("foo")); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithExplicitTargetTypes.class)); + + assertThat(document.get("text")).isInstanceOf(Pattern.class); + + query = new Query(where("text").regex(new BsonRegularExpression("foo"))); + document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithExplicitTargetTypes.class)); + assertThat(document.get("text")).isInstanceOf(BsonRegularExpression.class); } - class Reference { + @Test // GH-4674 + void shouldRetainRegexPatternForIdProperty() { - Long id; + org.bson.Document javaRegex = mapper.getMappedObject(query(where("id").regex("^1234$")).getQueryObject(), + context.getPersistentEntity(WithStringId.class)); + + assertThat(javaRegex.get("_id")).isInstanceOf(Pattern.class); + + org.bson.Document bsonRegex = mapper.getMappedObject( + query(where("id").regex(new BsonRegularExpression("^1234$"))).getQueryObject(), + context.getPersistentEntity(WithStringId.class)); + + assertThat(bsonRegex.get("_id")).isInstanceOf(BsonRegularExpression.class); } - class WithDBRefWrapper { + @Test // DATAMONGO-2339 + void findByIdUsesMappedIdFieldNameWithUnderscoreCorrectly() { - WithDBRef withDbRef; + org.bson.Document target = mapper.getMappedObject(new org.bson.Document("with_underscore", "id-1"), + 
context.getPersistentEntity(WithIdPropertyContainingUnderscore.class)); + + assertThat(target).isEqualTo(new org.bson.Document("_id", "id-1")); } - class WithMapDBRef { + @Test // DATAMONGO-2394 + void leavesDistanceUntouchedWhenUsingGeoJson() { - @DBRef Map mapWithDBRef; + Query query = query(where("geoJsonPoint").near(new GeoJsonPoint(27.987901, 86.9165379)).maxDistance(1000)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(ClassWithGeoTypes.class)); + assertThat(document).containsEntry("geoJsonPoint.$near.$geometry.type", "Point"); + assertThat(document).containsEntry("geoJsonPoint.$near.$maxDistance", 1000.0D); } - class WithTextScoreProperty { + @Test // DATAMONGO-2440 + void convertsInWithNonIdFieldAndObjectIdTypeHintCorrectly() { - @Id String id; - @TextScore @Field("score") Float textScore; + String id = new ObjectId().toHexString(); + NonIdFieldWithObjectIdTargetType source = new NonIdFieldWithObjectIdTargetType(); + + source.stringAsOid = id; + + org.bson.Document target = mapper.getMappedObject(query(where("stringAsOid").in(id)).getQueryObject(), + context.getPersistentEntity(NonIdFieldWithObjectIdTargetType.class)); + assertThat(target).isEqualTo(org.bson.Document.parse("{\"stringAsOid\": {\"$in\": [{\"$oid\": \"" + id + "\"}]}}")); } - static class RootForClassWithExplicitlyRenamedIdField { + @Test // DATAMONGO-2440 + void convertsInWithIdFieldAndObjectIdTypeHintCorrectly() { - @Id String id; - ClassWithExplicitlyRenamedField nested; + String id = new ObjectId().toHexString(); + NonIdFieldWithObjectIdTargetType source = new NonIdFieldWithObjectIdTargetType(); + + source.id = id; + + org.bson.Document target = mapper.getMappedObject(query(where("id").in(id)).getQueryObject(), + context.getPersistentEntity(NonIdFieldWithObjectIdTargetType.class)); + assertThat(target).isEqualTo(org.bson.Document.parse("{\"_id\": {\"$in\": [{\"$oid\": \"" + id + "\"}]}}")); } - static class 
ClassWithExplicitlyRenamedField { + @Test // DATAMONGO-2488 + void mapsNestedArrayPathCorrectlyForNonMatchingPath() { - @Field("id") String id; + org.bson.Document target = mapper.getMappedObject( + query(where("array.$[some_item].nested.$[other_item]").is("value")).getQueryObject(), + context.getPersistentEntity(Foo.class)); + + assertThat(target).isEqualTo(new org.bson.Document("array.$[some_item].nested.$[other_item]", "value")); } - static class ClassWithGeoTypes { + @Test // DATAMONGO-2488 + void mapsNestedArrayPathCorrectlyForObjectTargetArray() { - double[] justAnArray; - Point legacyPoint; - GeoJsonPoint geoJsonPoint; - @Field("geoJsonPointWithNameViaFieldAnnotation") GeoJsonPoint namedGeoJsonPoint; + org.bson.Document target = mapper.getMappedObject( + query(where("arrayObj.$[some_item].nested.$[other_item]").is("value")).getQueryObject(), + context.getPersistentEntity(WithNestedArray.class)); + + assertThat(target).isEqualTo(new org.bson.Document("arrayObj.$[some_item].nested.$[other_item]", "value")); } - static class SimpeEntityWithoutId { + @Test // DATAMONGO-2488 + void mapsNestedArrayPathCorrectlyForStringTargetArray() { - String stringProperty; - Integer integerProperty; + org.bson.Document target = mapper.getMappedObject( + query(where("arrayString.$[some_item].nested.$[other_item]").is("value")).getQueryObject(), + context.getPersistentEntity(WithNestedArray.class)); + + assertThat(target).isEqualTo(new org.bson.Document("arrayString.$[some_item].nested.$[other_item]", "value")); } - static class EntityWithComplexValueTypeMap { - Map map; + @Test // DATAMONGO-2488 + void mapsCustomFieldNamesForNestedArrayPathCorrectly() { + + org.bson.Document target = mapper.getMappedObject( + query(where("arrayCustomName.$[some_item].nested.$[other_item]").is("value")).getQueryObject(), + context.getPersistentEntity(WithNestedArray.class)); + + assertThat(target).isEqualTo(new org.bson.Document("arrayCustomName.$[some_item].nes-ted.$[other_item]", "value")); } - 
static class EntityWithComplexValueTypeList { - List list; + @Test // DATAMONGO-2502 + void shouldAllowDeeplyNestedPlaceholders() { + + org.bson.Document target = mapper.getMappedObject( + query(where("level0.$[some_item].arrayObj.$[other_item].nested").is("value")).getQueryObject(), + context.getPersistentEntity(WithDeepArrayNesting.class)); + + assertThat(target).isEqualTo(new org.bson.Document("level0.$[some_item].arrayObj.$[other_item].nested", "value")); + } + + @Test // DATAMONGO-2502 + void shouldAllowDeeplyNestedPlaceholdersWithCustomName() { + + org.bson.Document target = mapper.getMappedObject( + query(where("level0.$[some_item].arrayCustomName.$[other_item].nested").is("value")).getQueryObject(), + context.getPersistentEntity(WithDeepArrayNesting.class)); + + assertThat(target) + .isEqualTo(new org.bson.Document("level0.$[some_item].arrayCustomName.$[other_item].nes-ted", "value")); + } + + @Test // DATAMONGO-2517 + void shouldParseNestedKeywordWithArgumentMatchingTheSourceEntitiesConstructorCorrectly() { + + TextQuery source = new TextQuery("test"); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WithSingleStringArgConstructor.class)); + assertThat(target).isEqualTo(org.bson.Document.parse("{\"$text\" : { \"$search\" : \"test\" }}")); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnUnwrappedObjectCorrectly() { + + UnwrappableType unwrappableType = new UnwrappableType(); + unwrappableType.stringValue = "test"; + + Query source = query(Criteria.where("unwrappedValue").is(unwrappableType)); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(target).isEqualTo(new org.bson.Document("stringValue", "test")); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnUnwrappedCorrectly() { + + Query source = query(Criteria.where("unwrappedValue.stringValue").is("test")); + + org.bson.Document target = 
mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(target).isEqualTo(new org.bson.Document("stringValue", "test")); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnPrefixedUnwrappedCorrectly() { + + Query source = query(Criteria.where("unwrappedValue.stringValue").is("test")); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WithPrefixedUnwrapped.class)); + + assertThat(target).isEqualTo(new org.bson.Document("prefix-stringValue", "test")); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnNestedUnwrappedObjectCorrectly() { + + UnwrappableType unwrappableType = new UnwrappableType(); + unwrappableType.stringValue = "test"; + Query source = query(Criteria.where("withUnwrapped.unwrappedValue").is(unwrappableType)); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(target).isEqualTo(new org.bson.Document("withUnwrapped", new org.bson.Document("stringValue", "test"))); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnNestedPrefixedUnwrappedObjectCorrectly() { + + UnwrappableType unwrappableType = new UnwrappableType(); + unwrappableType.stringValue = "test"; + Query source = query(Criteria.where("withPrefixedUnwrapped.unwrappedValue").is(unwrappableType)); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(target) + .isEqualTo(new org.bson.Document("withPrefixedUnwrapped", new org.bson.Document("prefix-stringValue", "test"))); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnNestedUnwrappedCorrectly() { + + Query source = query(Criteria.where("withUnwrapped.unwrappedValue.stringValue").is("test")); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + 
context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(target).isEqualTo(new org.bson.Document("withUnwrapped.stringValue", "test")); + } + + @Test // DATAMONGO-1902 + void rendersQueryOnNestedPrefixedUnwrappedCorrectly() { + + Query source = query(Criteria.where("withPrefixedUnwrapped.unwrappedValue.stringValue").is("test")); + + org.bson.Document target = mapper.getMappedObject(source.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(target).isEqualTo(new org.bson.Document("withPrefixedUnwrapped.prefix-stringValue", "test")); + } + + @Test // DATAMONGO-1902 + void sortByUnwrappedIsEmpty() { + + Query query = new Query().with(Sort.by("unwrappedValue")); + + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(document).isEqualTo( + new org.bson.Document("stringValue", 1).append("listValue", 1).append("with-at-field-annotation", 1)); + } + + @Test // DATAMONGO-1902 + void sortByUnwrappedValue() { + + // atFieldAnnotatedValue + Query query = new Query().with(Sort.by("unwrappedValue.stringValue")); + + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(document).isEqualTo(new org.bson.Document("stringValue", 1)); + } + + @Test // DATAMONGO-1902 + void sortByUnwrappedValueWithFieldAnnotation() { + + Query query = new Query().with(Sort.by("unwrappedValue.atFieldAnnotatedValue")); + + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(document).isEqualTo(new org.bson.Document("with-at-field-annotation", 1)); + } + + @Test // DATAMONGO-1902 + void sortByPrefixedUnwrappedValueWithFieldAnnotation() { + + Query query = new Query().with(Sort.by("unwrappedValue.atFieldAnnotatedValue")); + + org.bson.Document document = 
mapper.getMappedSort(query.getSortObject(), + context.getPersistentEntity(WithPrefixedUnwrapped.class)); + + assertThat(document).isEqualTo(new org.bson.Document("prefix-with-at-field-annotation", 1)); + } + + @Test // DATAMONGO-1902 + void sortByNestedUnwrappedValueWithFieldAnnotation() { + + Query query = new Query().with(Sort.by("withUnwrapped.unwrappedValue.atFieldAnnotatedValue")); + + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), + context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(document).isEqualTo(new org.bson.Document("withUnwrapped.with-at-field-annotation", 1)); + } + + @Test // DATAMONGO-1902 + void sortByNestedPrefixedUnwrappedValueWithFieldAnnotation() { + + Query query = new Query().with(Sort.by("withPrefixedUnwrapped.unwrappedValue.atFieldAnnotatedValue")); + + org.bson.Document document = mapper.getMappedSort(query.getSortObject(), + context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(document).isEqualTo(new org.bson.Document("withPrefixedUnwrapped.prefix-with-at-field-annotation", 1)); + } + + @Test // DATAMONGO-1902 + void projectOnUnwrappedUsesFields() { + + Query query = new Query(); + query.fields().include("unwrappedValue"); + + org.bson.Document document = mapper.getMappedFields(query.getFieldsObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(document).isEqualTo( + new org.bson.Document("stringValue", 1).append("listValue", 1).append("with-at-field-annotation", 1)); + } + + @Test // DATAMONGO-1902 + void projectOnUnwrappedValue() { + + Query query = new Query(); + query.fields().include("unwrappedValue.stringValue"); + + org.bson.Document document = mapper.getMappedFields(query.getFieldsObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(document).isEqualTo(new org.bson.Document("stringValue", 1)); + } + + @Test // GH-3601 + void resolvesFieldnameWithUnderscoresCorrectly() { + + Query query = 
query(where("fieldname_with_underscores").exists(true)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithPropertyUsingUnderscoreInName.class)); + + assertThat(document) + .isEqualTo(new org.bson.Document("fieldname_with_underscores", new org.bson.Document("$exists", true))); + } + + @Test // GH-3601 + void resolvesMappedFieldnameWithUnderscoresCorrectly() { + + Query query = query(where("renamed_fieldname_with_underscores").exists(true)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithPropertyUsingUnderscoreInName.class)); + + assertThat(document).isEqualTo(new org.bson.Document("renamed", new org.bson.Document("$exists", true))); + } + + @Test // GH-3601 + void resolvesSimpleNestedFieldnameWithUnderscoresCorrectly() { + + Query query = query(where("simple.fieldname_with_underscores").exists(true)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithPropertyUsingUnderscoreInName.class)); + + assertThat(document) + .isEqualTo(new org.bson.Document("simple.fieldname_with_underscores", new org.bson.Document("$exists", true))); + } + + @Test // GH-3601 + void resolvesSimpleNestedMappedFieldnameWithUnderscoresCorrectly() { + + Query query = query(where("simple.renamed_fieldname_with_underscores").exists(true)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithPropertyUsingUnderscoreInName.class)); + + assertThat(document).isEqualTo(new org.bson.Document("simple.renamed", new org.bson.Document("$exists", true))); + } + + @Test // GH-3601 + void resolvesFieldNameWithUnderscoreOnNestedFieldnameWithUnderscoresCorrectly() { + + Query query = query(where("double_underscore.fieldname_with_underscores").exists(true)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), 
+ context.getPersistentEntity(WrapperAroundWithPropertyUsingUnderscoreInName.class)); + + assertThat(document).isEqualTo( + new org.bson.Document("double_underscore.fieldname_with_underscores", new org.bson.Document("$exists", true))); + } + + @Test // GH-3601 + void resolvesFieldNameWithUnderscoreOnNestedMappedFieldnameWithUnderscoresCorrectly() { + + Query query = query(where("double_underscore.renamed_fieldname_with_underscores").exists(true)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WrapperAroundWithPropertyUsingUnderscoreInName.class)); + + assertThat(document) + .isEqualTo(new org.bson.Document("double_underscore.renamed", new org.bson.Document("$exists", true))); + } + + @Test // GH-3633 + void mapsNullValueForFieldWithCustomTargetType() { + + Query query = query(where("stringAsOid").isNull()); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(NonIdFieldWithObjectIdTargetType.class)); + + assertThat(document).isEqualTo(new org.bson.Document("stringAsOid", null)); + } + + @Test // GH-3633 + void mapsNullBsonTypeForFieldWithCustomTargetType() { + + Query query = query(where("stringAsOid").isNullValue()); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(NonIdFieldWithObjectIdTargetType.class)); + + assertThat(document).isEqualTo(new org.bson.Document("stringAsOid", new org.bson.Document("$type", 10))); + } + + @Test // GH-3635 + void $floorKeywordDoesNotMatch$or$norPattern() { + + Query query = new BasicQuery(" { $expr: { $gt: [ \"$spent\" , { $floor : \"$budget\" } ] } }"); + assertThatNoException() + .isThrownBy(() -> mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class))); + } + + @Test // GH-3659 + void allowsUsingFieldPathsForPropertiesHavingCustomConversionRegistered() { + + Query query = query(where("address.street").is("1007 Mountain 
Drive")); + + MongoCustomConversions mongoCustomConversions = new MongoCustomConversions( + Collections.singletonList(new MyAddressToDocumentConverter())); + + this.context = new MongoMappingContext(); + this.context.setSimpleTypeHolder(mongoCustomConversions.getSimpleTypeHolder()); + this.context.afterPropertiesSet(); + + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); + this.converter.setCustomConversions(mongoCustomConversions); + this.converter.afterPropertiesSet(); + + this.mapper = new QueryMapper(converter); + + assertThat(mapper.getMappedSort(query.getQueryObject(), context.getPersistentEntity(Customer.class))) + .isEqualTo(new org.bson.Document("address.street", "1007 Mountain Drive")); + } + + @Test // GH-3790 + void shouldAcceptExprAsCriteriaDefinition() { + + EvaluationOperators.Expr expr = EvaluationOperators + .valueOf(ConditionalOperators.ifNull("customizedField").then(true)).expr(); + + Query query = query( + expr.toCriteriaDefinition(new TypeBasedAggregationOperationContext(EmbeddedClass.class, context, mapper))); + + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getRequiredPersistentEntity(EmbeddedClass.class)); + + assertThat(mappedQuery).isEqualTo("{ $expr : { $ifNull : [\"$fancy_custom_name\", true] } }"); + } + + @Test // GH-3668 + void mapStringIdFieldProjection() { + + org.bson.Document mappedFields = mapper.getMappedFields(new org.bson.Document("id", 1), + context.getPersistentEntity(WithStringId.class)); + assertThat(mappedFields).containsEntry("_id", 1); + } + + @Test // GH-3783 + void retainsId$InWithStringArray() { + + org.bson.Document mappedQuery = mapper.getMappedObject( + org.bson.Document.parse("{ _id : { $in: [\"5b8bedceb1e0bfc07b008828\"]}}"), + context.getPersistentEntity(WithExplicitStringId.class)); + assertThat(mappedQuery.get("_id")).isEqualTo(org.bson.Document.parse("{ $in: [\"5b8bedceb1e0bfc07b008828\"]}")); + } + + @Test // GH-3783 + void 
mapsId$InInToObjectIds() { + + org.bson.Document mappedQuery = mapper.getMappedObject( + org.bson.Document.parse("{ _id : { $in: [\"5b8bedceb1e0bfc07b008828\"]}}"), + context.getPersistentEntity(ClassWithDefaultId.class)); + assertThat(mappedQuery.get("_id")) + .isEqualTo(org.bson.Document.parse("{ $in: [ {$oid: \"5b8bedceb1e0bfc07b008828\" } ]}")); + } + + @Test // GH-3596 + void considersValueConverterWhenPresent() { + + org.bson.Document mappedObject = mapper.getMappedObject(new org.bson.Document("text", "value"), + context.getPersistentEntity(WithPropertyValueConverter.class)); + assertThat(mappedObject).isEqualTo(new org.bson.Document("text", "eulav")); + } + + @Test // GH-2750 + void mapsAggregationExpression() { + + Query query = query(whereExpr(ComparisonOperators.valueOf("field").greaterThan("budget"))); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(CustomizedField.class)); + assertThat(mappedObject).isEqualTo("{ $expr : { $gt : [ '$foo', '$budget'] } }"); + } + + @Test // GH-2750 + void unwrapsAggregationExpressionExprObjectWrappedInExpressionCriteria() { + + Query query = query(whereExpr(Expr.valueOf(ComparisonOperators.valueOf("field").greaterThan("budget")))); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(CustomizedField.class)); + assertThat(mappedObject).isEqualTo("{ $expr : { $gt : [ '$foo', '$budget'] } }"); + } + + @Test // GH-2750 + void mapsMongoExpressionToFieldsIfItsAnAggregationExpression() { + + Query query = query(expr(ComparisonOperators.valueOf("field").greaterThan("budget"))); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(CustomizedField.class)); + assertThat(mappedObject).isEqualTo("{ $expr : { $gt : [ '$foo', '$budget'] } }"); + } + + @Test // GH-2750 + void usageOfMongoExpressionOnCriteriaDoesNotUnwrapAnExprAggregationExpression() { + + 
Query query = query(expr(Expr.valueOf(ComparisonOperators.valueOf("field").greaterThan("budget")))); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(CustomizedField.class)); + assertThat(mappedObject).isEqualTo("{ $expr : { $expr : { $gt : [ '$foo', '$budget'] } } }"); + } + + @Test // GH-4687 + void usageOfUntypedAggregationShouldRenderOperationsAsIs() { + + Query query = query(expr(Expr.valueOf(ComparisonOperators.valueOf("field").greaterThan("budget")))); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(Object.class)); + assertThat(mappedObject).isEqualTo("{ $expr : { $expr : { $gt : [ '$field', '$budget'] } } }"); + } + + @Test // GH-2750 + void usesMongoExpressionDocumentAsIsIfItIsNotAnAggregationExpression() { + + Query query = query(expr(() -> org.bson.Document.parse("{ $gt : [ '$field', '$budget'] }"))); + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(CustomizedField.class)); + assertThat(mappedObject).isEqualTo("{ $expr : { $gt : [ '$field', '$budget'] } }"); + } + + @Test // GH-4080 + void convertsListOfValuesForPropertyThatHasValueConverterButIsNotCollectionLikeOneByOne() { + + org.bson.Document mappedObject = mapper.getMappedObject(query(where("text").in("spring", "data")).getQueryObject(), + context.getPersistentEntity(WithPropertyValueConverter.class)); + + assertThat(mappedObject).isEqualTo("{ 'text' : { $in : ['gnirps', 'atad'] } }"); + } + + @Test // GH-4464 + void usesKeyNameWithDotsIfFieldNameTypeIsKey() { + + org.bson.Document mappedObject = mapper.getMappedObject(query(where("value").is("A")).getQueryObject(), + context.getPersistentEntity(WithPropertyHavingDotsInFieldName.class)); + assertThat(mappedObject).isEqualTo("{ 'field.name.with.dots' : 'A' }"); + } + + @Test // GH-4577 + void mappingShouldRetainMapKeyOrder() { + + TreeMap sourceMap = new 
TreeMap<>(Map.of("test1", "123", "test2", "456")); + + org.bson.Document target = mapper.getMappedObject(query(where("simpleMap").is(sourceMap)).getQueryObject(), + context.getPersistentEntity(WithSimpleMap.class)); + assertThat(target.get("simpleMap", Map.class)).containsExactlyEntriesOf(sourceMap); + } + + @Test // GH-4510 + void convertsNestedOperatorValueForPropertyThatHasValueConverter() { + + org.bson.Document mappedObject = mapper.getMappedObject( + query(where("text").gt("spring").lt("data")).getQueryObject(), + context.getPersistentEntity(WithPropertyValueConverter.class)); + + assertThat(mappedObject).isEqualTo("{ 'text' : { $gt : 'gnirps', $lt : 'atad' } }"); + } + + @Test // GH-4510 + void convertsNestedOperatorValueForPropertyContainingListThatHasValueConverter() { + + org.bson.Document mappedObject = mapper.getMappedObject( + query(where("text").gt("spring").in("data")).getQueryObject(), + context.getPersistentEntity(WithPropertyValueConverter.class)); + + assertThat(mappedObject).isEqualTo("{ 'text' : { $gt : 'gnirps', $in : [ 'atad' ] } }"); + } + + @Test // GH-4736 + void allOperatorShouldConvertIdCollection() { + + ObjectId oid = ObjectId.get(); + Criteria criteria = new Criteria().andOperator(where("name").isNull().and("id").all(List.of(oid.toString()))); + + org.bson.Document mappedObject = mapper.getMappedObject(criteria.getCriteriaObject(), + context.getPersistentEntity(Customer.class)); + + assertThat(mappedObject).containsEntry("$and.[0]._id.$all", List.of(oid)); + } + + class WithSimpleMap { + Map simpleMap; + } + + class WithDeepArrayNesting { + + List level0; + } + + class WithNestedArray { + + List arrayObj; + List arrayString; + List arrayCustomName; + } + + class NestedArrayOfObj { + List nested; + } + + class NestedArrayOfObjCustomFieldName { + + @Field("nes-ted") List nested; + } + + class NestedArrayOfString { + List nested; + } + + class ArrayObj { + String foo; + } + + @Document + class Foo { + @Id private ObjectId id; + 
EmbeddedClass embedded; + + @Field("my_items") List listOfItems; + } + + class EmbeddedClass { + String id; + + @Field("fancy_custom_name") String customizedField; + } + + class IdWrapper { + Object id; + } + + class ClassWithEmbedded { + @Id String id; + Sample sample; + } + + class ClassWithDefaultId { + + String id; + ClassWithDefaultId nested; + } + + class Sample { + + @Id private String foo; + } + + class WithStringId { + + @MongoId String id; + String name; + } + + class WithExplicitStringId { + + @MongoId(FieldType.STRING) String id; + String name; + } + + class BigIntegerId { + + @Id private BigInteger id; + } + + enum Enum { + INSTANCE; + } + + class UserEntity { + String id; + List publishers = new ArrayList<>(); + } + + class CustomizedField { + + @Field("foo") CustomizedField field; + } + + class WithDBRef { + + String someString; + @DBRef Reference reference; + } + + class WithDBRefList { + + String someString; + @DBRef List referenceList; + } + + class Reference { + + Long id; + } + + class WithDBRefWrapper { + + WithDBRef withDbRef; + } + + class WithMapDBRef { + + @DBRef Map mapWithDBRef; + } + + static class WithDocumentReference { + + private ObjectId id; + + private String name; + + @DocumentReference(lookup = "{ 'name' : ?#{#target} }") private Customer customer; + + @DocumentReference(lookup = "{ 'name' : ?#{#target} }") private List customers; + + @DocumentReference private Sample sample; + + @DocumentReference private List samples; + } + + class WithTextScoreProperty { + + @Id String id; + @TextScore + @Field("score") Float textScore; + } + + static class RootForClassWithExplicitlyRenamedIdField { + + @Id String id; + ClassWithExplicitlyRenamedField nested; + } + + static class ClassWithExplicitlyRenamedField { + + @Field("id") String id; + DeeplyNestedClassWithExplicitlyRenamedField deeplyNested; + } + + static class DeeplyNestedClassWithExplicitlyRenamedField { + @Field("id") String id; + } + + static class ClassWithGeoTypes { + + double[] 
justAnArray; + Point legacyPoint; + GeoJsonPoint geoJsonPoint; + @Field("geoJsonPointWithNameViaFieldAnnotation") GeoJsonPoint namedGeoJsonPoint; + } + + static class SimpleEntityWithoutId { + + String stringProperty; + Integer integerProperty; + } + + static class EntityWithComplexValueTypeMap { + Map map; + } + + static class EntityWithIntKeyedMapOfMap { + Map outerMap; + } + + static class EntityWithComplexValueTypeList { + List list; + } + + static class WithExplicitTargetTypes { + + @Field(targetType = FieldType.SCRIPT) // + String script; + + @Field(targetType = FieldType.STRING) // + String text; + + @Field(targetType = FieldType.SCRIPT) // + List scripts; + } + + static class WithIdPropertyContainingUnderscore { + @Id String with_underscore; + } + + static class NonIdFieldWithObjectIdTargetType { + + String id; + @Field(targetType = FieldType.OBJECT_ID) String stringAsOid; + } + + @Document + static class WithSingleStringArgConstructor { + + String value; + + public WithSingleStringArgConstructor() {} + + public WithSingleStringArgConstructor(String value) { + this.value = value; + } + } + + static class WrapperAroundWithUnwrapped { + + String someValue; + WithUnwrapped withUnwrapped; + WithPrefixedUnwrapped withPrefixedUnwrapped; + } + + static class WithUnwrapped { + + String id; + + @Unwrapped.Nullable UnwrappableType unwrappedValue; + } + + static class WithPrefixedUnwrapped { + + String id; + + @Unwrapped.Nullable("prefix-") UnwrappableType unwrappedValue; + } + + static class UnwrappableType { + + String stringValue; + List listValue; + + @Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + + @Transient // + String transientValue; + } + + static class WrapperAroundWithPropertyUsingUnderscoreInName { + + WithPropertyUsingUnderscoreInName simple; + WithPropertyUsingUnderscoreInName double_underscore; + } + + static class WithPropertyUsingUnderscoreInName { + + String fieldname_with_underscores; + + @Field("renamed") String 
renamed_fieldname_with_underscores; + } + + @Document + static class Customer { + + @Id private ObjectId id; + private String name; + private MyAddress address; + } + + static class MyAddress { + private String street; + } + + static class WithPropertyValueConverter { + + @ValueConverter(ReversingValueConverter.class) String text; + } + + @WritingConverter + public static class MyAddressToDocumentConverter implements Converter { + + @Override + public org.bson.Document convert(MyAddress address) { + org.bson.Document doc = new org.bson.Document(); + doc.put("street", address.street); + return doc; + } + } + + static class WithPropertyHavingDotsInFieldName { + + @Field(name = "field.name.with.dots", nameType = Type.KEY) String value; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegateUnitTests.java new file mode 100644 index 0000000000..384cffaad4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegateUnitTests.java @@ -0,0 +1,97 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.util.Collections; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.SpELContext; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.MongoEntityReader; +import org.springframework.data.mongodb.core.mapping.DocumentReference; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.expression.EvaluationContext; + +/** + * Unit tests for {@link ReferenceLookupDelegate}. + * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +class ReferenceLookupDelegateUnitTests { + + @Mock MappingContext, MongoPersistentProperty> mappingContext; + @Mock SpELContext spELContext; + @Mock EvaluationContext evaluationContext; + @Mock MongoEntityReader entityReader; + + private ReferenceLookupDelegate lookupDelegate; + + @BeforeEach + void beforeEach() { + lookupDelegate = new ReferenceLookupDelegate(mappingContext, spELContext); + } + + @Test // GH-3842 + void shouldComputePlainStringTargetCollection() { + + DocumentReference documentReference = mock(DocumentReference.class); + MongoPersistentEntity entity = mock(MongoPersistentEntity.class); + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + + doReturn(entity).when(mappingContext).getRequiredPersistentEntity((Class) any()); + + when(property.isDocumentReference()).thenReturn(true); + when(property.getDocumentReference()).thenReturn(documentReference); + when(documentReference.collection()).thenReturn("collection1"); 
+ + lookupDelegate.readReference(property, Collections.singletonList("one"), (referenceQuery, referenceCollection) -> { + + assertThat(referenceCollection.getCollection()).isEqualTo("collection1"); + return Collections.emptyList(); + }, entityReader); + } + + @Test // GH-4612 + void shouldResolveEmptyListOnEmptyTargetCollection() { + + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + ReferenceLookupDelegate.LookupFunction lookupFunction = mock(ReferenceLookupDelegate.LookupFunction.class); + + when(property.isCollectionLike()).thenReturn(true); + lookupDelegate.readReference(property, Collections.emptyList(), lookupFunction, entityReader); + verify(lookupFunction, never()).apply(any(), any()); + } + + @Test // GH-4612 + void shouldResolveEmptyMapOnEmptyTargetCollection() { + + MongoPersistentProperty property = mock(MongoPersistentProperty.class); + ReferenceLookupDelegate.LookupFunction lookupFunction = mock(ReferenceLookupDelegate.LookupFunction.class); + + when(property.isMap()).thenReturn(true); + lookupDelegate.readReference(property, Collections.emptyMap(), lookupFunction, entityReader); + verify(lookupFunction, never()).apply(any(), any()); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReversingValueConverter.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReversingValueConverter.java new file mode 100644 index 0000000000..eb3b1aba1a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/ReversingValueConverter.java @@ -0,0 +1,45 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.springframework.lang.Nullable; + +/** + * @author Christoph Strobl + */ +class ReversingValueConverter implements MongoValueConverter { + + @Nullable + @Override + public String read(@Nullable String value, MongoConversionContext context) { + return reverse(value); + } + + @Nullable + @Override + public String write(@Nullable String value, MongoConversionContext context) { + return reverse(value); + } + + private String reverse(String source) { + + if (source == null) { + return null; + } + + return new StringBuilder(source).reverse().toString(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/TermToStringConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/TermToStringConverterUnitTests.java index f3202ec8f4..873a49232c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/TermToStringConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/TermToStringConverterUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,9 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.hamcrest.core.IsNull.*; -import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.convert.MongoConverters.TermToStringConverter; import org.springframework.data.mongodb.core.query.Term; import org.springframework.data.mongodb.core.query.Term.Type; @@ -29,11 +27,6 @@ */ public class TermToStringConverterUnitTests { - @Test // DATAMONGO-973 - public void shouldNotConvertNull() { - assertThat(TermToStringConverter.INSTANCE.convert(null), nullValue()); - } - @Test // DATAMONGO-973 public void shouldUseFormattedRepresentationForConversion() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java index 65f2820f56..d8e36c8f67 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,9 +19,6 @@ import static org.springframework.data.mongodb.core.DocumentTestUtils.*; import static org.springframework.data.mongodb.test.util.Assertions.*; -import lombok.AllArgsConstructor; -import lombok.NoArgsConstructor; - import java.time.LocalDate; import java.util.Arrays; import java.util.Collections; @@ -30,24 +27,29 @@ import java.util.concurrent.atomic.AtomicInteger; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Transient; import org.springframework.data.convert.CustomConversions; +import org.springframework.data.convert.ValueConverter; import org.springframework.data.convert.WritingConverter; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.domain.Sort.Order; import org.springframework.data.mapping.MappingException; -import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.Field; 
import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; @@ -63,20 +65,21 @@ * @author Thomas Darimont * @author Mark Paluch * @author Pavel Vodrazka + * @author David Julia + * @author Divya Srivastava */ -@RunWith(MockitoJUnitRunner.class) -public class UpdateMapperUnitTests { +@ExtendWith(MockitoExtension.class) +class UpdateMapperUnitTests { - @Mock MongoDbFactory factory; - MappingMongoConverter converter; - MongoMappingContext context; - UpdateMapper mapper; + private MappingMongoConverter converter; + private MongoMappingContext context; + private UpdateMapper mapper; private Converter writingConverterSpy; - @Before + @BeforeEach @SuppressWarnings("unchecked") - public void setUp() { + void setUp() { this.writingConverterSpy = Mockito.spy(new NestedEntityWriteConverter()); CustomConversions conversions = new MongoCustomConversions(Collections.singletonList(writingConverterSpy)); @@ -85,7 +88,7 @@ public void setUp() { this.context.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); this.context.initialize(); - this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); this.converter.setCustomConversions(conversions); this.converter.afterPropertiesSet(); @@ -93,7 +96,7 @@ public void setUp() { } @Test // DATAMONGO-721 - public void updateMapperRetainsTypeInformationForCollectionField() { + void updateMapperRetainsTypeInformationForCollectionField() { Update update = new Update().push("list", new ConcreteChildClass("2", "BAR")); @@ -107,7 +110,7 @@ public void updateMapperRetainsTypeInformationForCollectionField() { } @Test // DATAMONGO-807 - public void 
updateMapperShouldRetainTypeInformationForNestedEntities() { + void updateMapperShouldRetainTypeInformationForNestedEntities() { Update update = Update.update("model", new ModelImpl(1)); UpdateMapper mapper = new UpdateMapper(converter); @@ -121,7 +124,7 @@ public void updateMapperShouldRetainTypeInformationForNestedEntities() { } @Test // DATAMONGO-807 - public void updateMapperShouldNotPersistTypeInformationForKnownSimpleTypes() { + void updateMapperShouldNotPersistTypeInformationForKnownSimpleTypes() { Update update = Update.update("model.value", 1); UpdateMapper mapper = new UpdateMapper(converter); @@ -134,7 +137,7 @@ public void updateMapperShouldNotPersistTypeInformationForKnownSimpleTypes() { } @Test // DATAMONGO-807 - public void updateMapperShouldNotPersistTypeInformationForNullValues() { + void updateMapperShouldNotPersistTypeInformationForNullValues() { Update update = Update.update("model", null); UpdateMapper mapper = new UpdateMapper(converter); @@ -147,7 +150,7 @@ public void updateMapperShouldNotPersistTypeInformationForNullValues() { } @Test // DATAMONGO-407 - public void updateMapperShouldRetainTypeInformationForNestedCollectionElements() { + void updateMapperShouldRetainTypeInformationForNestedCollectionElements() { Update update = Update.update("list.$", new ConcreteChildClass("42", "bubu")); @@ -161,7 +164,7 @@ public void updateMapperShouldRetainTypeInformationForNestedCollectionElements() } @Test // DATAMONGO-407 - public void updateMapperShouldSupportNestedCollectionElementUpdates() { + void updateMapperShouldSupportNestedCollectionElementUpdates() { Update update = Update.update("list.$.value", "foo").set("list.$.otherValue", "bar"); @@ -175,7 +178,7 @@ public void updateMapperShouldSupportNestedCollectionElementUpdates() { } @Test // DATAMONGO-407 - public void updateMapperShouldWriteTypeInformationForComplexNestedCollectionElementUpdates() { + void updateMapperShouldWriteTypeInformationForComplexNestedCollectionElementUpdates() { Update 
update = Update.update("list.$.value", "foo").set("list.$.someObject", new ConcreteChildClass("42", "bubu")); @@ -193,7 +196,7 @@ public void updateMapperShouldWriteTypeInformationForComplexNestedCollectionElem @SuppressWarnings({ "unchecked", "rawtypes" }) @Test // DATAMONGO-812 - public void updateMapperShouldConvertPushCorrectlyWhenCalledWithEachUsingSimpleTypes() { + void updateMapperShouldConvertPushCorrectlyWhenCalledWithEachUsingSimpleTypes() { Update update = new Update().push("values").each("spring", "data", "mongodb"); Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Model.class)); @@ -209,7 +212,7 @@ public void updateMapperShouldConvertPushCorrectlyWhenCalledWithEachUsingSimpleT } @Test // DATAMONGO-812 - public void updateMapperShouldConvertPushWhithoutAddingClassInformationWhenUsedWithEvery() { + void updateMapperShouldConvertPushWhithoutAddingClassInformationWhenUsedWithEvery() { Update update = new Update().push("values").each("spring", "data", "mongodb"); @@ -223,7 +226,7 @@ public void updateMapperShouldConvertPushWhithoutAddingClassInformationWhenUsedW @SuppressWarnings({ "unchecked", "rawtypes" }) @Test // DATAMONGO-812 - public void updateMapperShouldConvertPushCorrectlyWhenCalledWithEachUsingCustomTypes() { + void updateMapperShouldConvertPushCorrectlyWhenCalledWithEachUsingCustomTypes() { Update update = new Update().push("models").each(new ListModel("spring", "data", "mongodb")); Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), @@ -238,7 +241,7 @@ public void updateMapperShouldConvertPushCorrectlyWhenCalledWithEachUsingCustomT } @Test // DATAMONGO-812 - public void updateMapperShouldRetainClassInformationForPushCorrectlyWhenCalledWithEachUsingCustomTypes() { + void updateMapperShouldRetainClassInformationForPushCorrectlyWhenCalledWithEachUsingCustomTypes() { Update update = new Update().push("models").each(new ListModel("spring", "data", "mongodb")); Document 
mappedObject = mapper.getMappedObject(update.getUpdateObject(), @@ -252,7 +255,7 @@ public void updateMapperShouldRetainClassInformationForPushCorrectlyWhenCalledWi } @Test // DATAMONGO-812 - public void testUpdateShouldAllowMultiplePushEachForDifferentFields() { + void testUpdateShouldAllowMultiplePushEachForDifferentFields() { Update update = new Update().push("category").each("spring", "data").push("type").each("mongodb"); Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); @@ -263,7 +266,7 @@ public void testUpdateShouldAllowMultiplePushEachForDifferentFields() { } @Test // DATAMONGO-943 - public void updatePushEachAtPositionWorksCorrectlyWhenGivenPositiveIndexParameter() { + void updatePushEachAtPositionWorksCorrectlyWhenGivenPositiveIndexParameter() { Update update = new Update().push("key").atPosition(2).each(Arrays.asList("Arya", "Arry", "Weasel")); @@ -277,8 +280,22 @@ public void updatePushEachAtPositionWorksCorrectlyWhenGivenPositiveIndexParamete assertThat(getAsDocument(push, "key")).containsKey("$each"); } + @Test // DATAMONGO-943, DATAMONGO-2055 + void updatePushEachAtNegativePositionWorksCorrectly() { + + Update update = new Update().push("key").atPosition(-2).each(Arrays.asList("Arya", "Arry", "Weasel")); + + Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(Object.class)); + + Document push = getAsDocument(mappedObject, "$push"); + Document key = getAsDocument(push, "key"); + + assertThat(key.containsKey("$position")).isTrue(); + assertThat(key.get("$position")).isEqualTo(-2); + } + @Test // DATAMONGO-943 - public void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionFirst() { + void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionFirst() { Update update = new Update().push("key").atPosition(Position.FIRST).each(Arrays.asList("Arya", "Arry", "Weasel")); @@ -293,7 +310,7 @@ public void 
updatePushEachAtPositionWorksCorrectlyWhenGivenPositionFirst() { } @Test // DATAMONGO-943 - public void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionLast() { + void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionLast() { Update update = new Update().push("key").atPosition(Position.LAST).each(Arrays.asList("Arya", "Arry", "Weasel")); @@ -307,7 +324,7 @@ public void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionLast() { } @Test // DATAMONGO-943 - public void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionNull() { + void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionNull() { Update update = new Update().push("key").atPosition(null).each(Arrays.asList("Arya", "Arry", "Weasel")); @@ -321,7 +338,7 @@ public void updatePushEachAtPositionWorksCorrectlyWhenGivenPositionNull() { } @Test // DATAMONGO-832 - public void updatePushEachWithSliceShouldRenderCorrectly() { + void updatePushEachWithSliceShouldRenderCorrectly() { Update update = new Update().push("key").slice(5).each(Arrays.asList("Arya", "Arry", "Weasel")); @@ -335,7 +352,7 @@ public void updatePushEachWithSliceShouldRenderCorrectly() { } @Test // DATAMONGO-832 - public void updatePushEachWithSliceShouldRenderWhenUsingMultiplePushCorrectly() { + void updatePushEachWithSliceShouldRenderWhenUsingMultiplePushCorrectly() { Update update = new Update().push("key").slice(5).each(Arrays.asList("Arya", "Arry", "Weasel")).push("key-2") .slice(-2).each("The Beggar King", "Viserys III Targaryen"); @@ -355,7 +372,7 @@ public void updatePushEachWithSliceShouldRenderWhenUsingMultiplePushCorrectly() } @Test // DATAMONGO-1141 - public void updatePushEachWithValueSortShouldRenderCorrectly() { + void updatePushEachWithValueSortShouldRenderCorrectly() { Update update = new Update().push("scores").sort(Direction.DESC).each(42, 23, 68); @@ -371,7 +388,7 @@ public void updatePushEachWithValueSortShouldRenderCorrectly() { } @Test // DATAMONGO-1141 - public void 
updatePushEachWithDocumentSortShouldRenderCorrectly() { + void updatePushEachWithDocumentSortShouldRenderCorrectly() { Update update = new Update().push("list") .sort(Sort.by(new Order(Direction.ASC, "value"), new Order(Direction.ASC, "field"))) @@ -389,7 +406,7 @@ public void updatePushEachWithDocumentSortShouldRenderCorrectly() { } @Test // DATAMONGO-1141 - public void updatePushEachWithSortShouldRenderCorrectlyWhenUsingMultiplePush() { + void updatePushEachWithSortShouldRenderCorrectlyWhenUsingMultiplePush() { Update update = new Update().push("authors").sort(Direction.ASC).each("Harry").push("chapters") .sort(Sort.by(Direction.ASC, "order")).each(Collections.emptyList()); @@ -411,20 +428,20 @@ public void updatePushEachWithSortShouldRenderCorrectlyWhenUsingMultiplePush() { } @Test // DATAMONGO-410 - public void testUpdateMapperShouldConsiderCustomWriteTarget() { + void testUpdateMapperShouldConsiderCustomWriteTarget() { List someValues = Arrays.asList(new NestedEntity("spring"), new NestedEntity("data"), new NestedEntity("mongodb")); NestedEntity[] array = new NestedEntity[someValues.size()]; - Update update = new Update().pushAll("collectionOfNestedEntities", someValues.toArray(array)); + Update update = new Update().push("collectionOfNestedEntities").each(someValues.toArray(array)); mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DomainEntity.class)); verify(writingConverterSpy, times(3)).convert(Mockito.any(NestedEntity.class)); } @Test // DATAMONGO-404 - public void createsDbRefForEntityIdOnPulls() { + void createsDbRefForEntityIdOnPulls() { Update update = new Update().pull("dbRefAnnotatedList.id", "2"); @@ -436,7 +453,7 @@ public void createsDbRefForEntityIdOnPulls() { } @Test // DATAMONGO-404 - public void createsDbRefForEntityOnPulls() { + void createsDbRefForEntityOnPulls() { Entity entity = new Entity(); entity.id = "5"; @@ -449,15 +466,16 @@ public void createsDbRefForEntityOnPulls() { 
assertThat(pullClause.get("dbRefAnnotatedList")).isEqualTo(new DBRef("entity", entity.id)); } - @Test(expected = MappingException.class) // DATAMONGO-404 - public void rejectsInvalidFieldReferenceForDbRef() { + @Test // DATAMONGO-404 + void rejectsInvalidFieldReferenceForDbRef() { Update update = new Update().pull("dbRefAnnotatedList.name", "NAME"); - mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(DocumentWithDBRefCollection.class)); + assertThatThrownBy(() -> mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(DocumentWithDBRefCollection.class))).isInstanceOf(MappingException.class); } @Test // DATAMONGO-404 - public void rendersNestedDbRefCorrectly() { + void rendersNestedDbRefCorrectly() { Update update = new Update().pull("nested.dbRefAnnotatedList.id", "2"); Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), @@ -468,7 +486,7 @@ public void rendersNestedDbRefCorrectly() { } @Test // DATAMONGO-468 - public void rendersUpdateOfDbRefPropertyWithDomainObjectCorrectly() { + void rendersUpdateOfDbRefPropertyWithDomainObjectCorrectly() { Entity entity = new Entity(); entity.id = "5"; @@ -482,7 +500,7 @@ public void rendersUpdateOfDbRefPropertyWithDomainObjectCorrectly() { } @Test // DATAMONGO-862 - public void rendersUpdateAndPreservesKeyForPathsNotPointingToProperty() { + void rendersUpdateAndPreservesKeyForPathsNotPointingToProperty() { Update update = new Update().set("listOfInterface.$.value", "expected-value"); Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), @@ -493,7 +511,7 @@ public void rendersUpdateAndPreservesKeyForPathsNotPointingToProperty() { } @Test // DATAMONGO-863 - public void doesNotConvertRawDocuments() { + void doesNotConvertRawDocuments() { Update update = new Update(); update.pull("options", @@ -512,7 +530,7 @@ public void doesNotConvertRawDocuments() { @SuppressWarnings({ "unchecked", "rawtypes" }) @Test // DATAMONG0-471 - public void 
testUpdateShouldApply$addToSetCorrectlyWhenUsedWith$each() { + void testUpdateShouldApply$addToSetCorrectlyWhenUsedWith$each() { Update update = new Update().addToSet("values").each("spring", "data", "mongodb"); Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), @@ -526,7 +544,7 @@ public void doesNotConvertRawDocuments() { } @Test // DATAMONG0-471 - public void testUpdateShouldRetainClassTypeInformationWhenUsing$addToSetWith$eachForCustomTypes() { + void testUpdateShouldRetainClassTypeInformationWhenUsing$addToSetWith$eachForCustomTypes() { Update update = new Update().addToSet("models").each(new ModelImpl(2014), new ModelImpl(1), new ModelImpl(28)); Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), @@ -543,7 +561,7 @@ public void doesNotConvertRawDocuments() { } @Test // DATAMONGO-897 - public void updateOnDbrefPropertyOfInterfaceTypeWithoutExplicitGetterForIdShouldBeMappedCorrectly() { + void updateOnDbrefPropertyOfInterfaceTypeWithoutExplicitGetterForIdShouldBeMappedCorrectly() { Update update = new Update().set("referencedDocument", new InterfaceDocumentDefinitionImpl("1", "Foo")); Document mappedObject = mapper.getMappedObject(update.getUpdateObject(), @@ -557,7 +575,7 @@ public void updateOnDbrefPropertyOfInterfaceTypeWithoutExplicitGetterForIdShould } @Test // DATAMONGO-847 - public void updateMapperConvertsNestedQueryCorrectly() { + void updateMapperConvertsNestedQueryCorrectly() { Update update = new Update().pull("list", Query.query(Criteria.where("value").in("foo", "bar"))); Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), @@ -572,7 +590,7 @@ public void updateMapperConvertsNestedQueryCorrectly() { } @Test // DATAMONGO-847 - public void updateMapperConvertsPullWithNestedQuerfyOnDBRefCorrectly() { + void updateMapperConvertsPullWithNestedQuerfyOnDBRefCorrectly() { Update update = new Update().pull("dbRefAnnotatedList", Query.query(Criteria.where("id").is("1"))); Document mappedUpdate = 
mapper.getMappedObject(update.getUpdateObject(), @@ -585,7 +603,7 @@ public void updateMapperConvertsPullWithNestedQuerfyOnDBRefCorrectly() { } @Test // DATAMONGO-1077 - public void shouldNotRemovePositionalParameter() { + void shouldNotRemovePositionalParameter() { Update update = new Update(); update.unset("dbRefAnnotatedList.$"); @@ -599,7 +617,7 @@ public void shouldNotRemovePositionalParameter() { } @Test // DATAMONGO-1210 - public void mappingEachOperatorShouldNotAddTypeInfoForNonInterfaceNonAbstractTypes() { + void mappingEachOperatorShouldNotAddTypeInfoForNonInterfaceNonAbstractTypes() { Update update = new Update().addToSet("nestedDocs").each(new NestedDocument("nested-1"), new NestedDocument("nested-2")); @@ -612,7 +630,7 @@ public void mappingEachOperatorShouldNotAddTypeInfoForNonInterfaceNonAbstractTyp } @Test // DATAMONGO-1210 - public void mappingEachOperatorShouldAddTypeHintForInterfaceTypes() { + void mappingEachOperatorShouldAddTypeHintForInterfaceTypes() { Update update = new Update().addToSet("models").each(new ModelImpl(1), new ModelImpl(2)); @@ -624,7 +642,7 @@ public void mappingEachOperatorShouldAddTypeHintForInterfaceTypes() { } @Test // DATAMONGO-1210 - public void mappingEachOperatorShouldAddTypeHintForAbstractTypes() { + void mappingEachOperatorShouldAddTypeHintForAbstractTypes() { Update update = new Update().addToSet("list").each(new ConcreteChildClass("foo", "one"), new ConcreteChildClass("bar", "two")); @@ -637,7 +655,7 @@ public void mappingEachOperatorShouldAddTypeHintForAbstractTypes() { } @Test // DATAMONGO-1210 - public void mappingShouldOnlyRemoveTypeHintFromTopLevelTypeInCaseOfNestedDocument() { + void mappingShouldOnlyRemoveTypeHintFromTopLevelTypeInCaseOfNestedDocument() { WrapperAroundInterfaceType wait = new WrapperAroundInterfaceType(); wait.interfaceType = new ModelImpl(1); @@ -654,7 +672,7 @@ public void mappingShouldOnlyRemoveTypeHintFromTopLevelTypeInCaseOfNestedDocumen } @Test // DATAMONGO-1210 - public void 
mappingShouldRetainTypeInformationOfNestedListWhenUpdatingConcreteyParentType() { + void mappingShouldRetainTypeInformationOfNestedListWhenUpdatingConcreteyParentType() { ListModelWrapper lmw = new ListModelWrapper(); lmw.models = Collections.singletonList(new ModelImpl(1)); @@ -669,7 +687,7 @@ public void mappingShouldRetainTypeInformationOfNestedListWhenUpdatingConcreteyP } @Test // DATAMONGO-1809 - public void pathShouldIdentifyPositionalParameterWithMoreThanOneDigit() { + void pathShouldIdentifyPositionalParameterWithMoreThanOneDigit() { Document at2digitPosition = mapper.getMappedObject(new Update() .addToSet("concreteInnerList.10.concreteTypeList", new SomeInterfaceImpl("szeth")).getUpdateObject(), @@ -686,7 +704,7 @@ public void pathShouldIdentifyPositionalParameterWithMoreThanOneDigit() { } @Test // DATAMONGO-1236 - public void mappingShouldRetainTypeInformationForObjectValues() { + void mappingShouldRetainTypeInformationForObjectValues() { Update update = new Update().set("value", new NestedDocument("kaladin")); Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), @@ -697,7 +715,7 @@ public void mappingShouldRetainTypeInformationForObjectValues() { } @Test // DATAMONGO-1236 - public void mappingShouldNotRetainTypeInformationForConcreteValues() { + void mappingShouldNotRetainTypeInformationForConcreteValues() { Update update = new Update().set("concreteValue", new NestedDocument("shallan")); Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), @@ -708,7 +726,7 @@ public void mappingShouldNotRetainTypeInformationForConcreteValues() { } @Test // DATAMONGO-1236 - public void mappingShouldRetainTypeInformationForObjectValuesWithAlias() { + void mappingShouldRetainTypeInformationForObjectValuesWithAlias() { Update update = new Update().set("value", new NestedDocument("adolin")); Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), @@ -719,7 +737,7 @@ public void 
mappingShouldRetainTypeInformationForObjectValuesWithAlias() { } @Test // DATAMONGO-1236 - public void mappingShouldRetrainTypeInformationWhenValueTypeOfMapDoesNotMatchItsDeclaration() { + void mappingShouldRetrainTypeInformationWhenValueTypeOfMapDoesNotMatchItsDeclaration() { Map map = Collections.singletonMap("szeth", new NestedDocument("son-son-vallano")); @@ -732,7 +750,7 @@ public void mappingShouldRetrainTypeInformationWhenValueTypeOfMapDoesNotMatchIts } @Test // DATAMONGO-1236 - public void mappingShouldNotContainTypeInformationWhenValueTypeOfMapMatchesDeclaration() { + void mappingShouldNotContainTypeInformationWhenValueTypeOfMapMatchesDeclaration() { Map map = Collections.singletonMap("jasnah", new NestedDocument("kholin")); @@ -744,9 +762,21 @@ public void mappingShouldNotContainTypeInformationWhenValueTypeOfMapMatchesDecla assertThat(mappedUpdate).doesNotContainKey("$set.concreteMap.jasnah._class"); } + @Test // GH-4567 + void updateShouldAllowNullValuesInMap() { + + Map map = Collections.singletonMap("jasnah", new NestedDocument("kholin")); + + Update update = new Update().set("concreteMap", Collections.singletonMap("jasnah", null)); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithObjectMap.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("concreteMap", Collections.singletonMap("jasnah", null)))); + } + @Test // DATAMONGO-1250 @SuppressWarnings("unchecked") - public void mapsUpdateWithBothReadingAndWritingConverterRegistered() { + void mapsUpdateWithBothReadingAndWritingConverterRegistered() { CustomConversions conversions = new MongoCustomConversions(Arrays.asList( ClassWithEnum.AllocationToStringConverter.INSTANCE, ClassWithEnum.StringToAllocationConverter.INSTANCE)); @@ -769,7 +799,7 @@ public void mapsUpdateWithBothReadingAndWritingConverterRegistered() { } @Test // DATAMONGO-1251 - public void mapsNullValueCorrectlyForSimpleTypes() { + void 
mapsNullValueCorrectlyForSimpleTypes() { Update update = new Update().set("value", null); @@ -781,7 +811,7 @@ public void mapsNullValueCorrectlyForSimpleTypes() { } @Test // DATAMONGO-1251 - public void mapsNullValueCorrectlyForJava8Date() { + void mapsNullValueCorrectlyForJava8Date() { Update update = new Update().set("date", null); @@ -793,7 +823,7 @@ public void mapsNullValueCorrectlyForJava8Date() { } @Test // DATAMONGO-1251 - public void mapsNullValueCorrectlyForCollectionTypes() { + void mapsNullValueCorrectlyForCollectionTypes() { Update update = new Update().set("values", null); @@ -805,7 +835,7 @@ public void mapsNullValueCorrectlyForCollectionTypes() { } @Test // DATAMONGO-1251 - public void mapsNullValueCorrectlyForPropertyOfNestedDocument() { + void mapsNullValueCorrectlyForPropertyOfNestedDocument() { Update update = new Update().set("concreteValue.name", null); @@ -818,7 +848,7 @@ public void mapsNullValueCorrectlyForPropertyOfNestedDocument() { } @Test // DATAMONGO-1288 - public void mapsAtomicIntegerToIntegerCorrectly() { + void mapsAtomicIntegerToIntegerCorrectly() { Update update = new Update().set("intValue", new AtomicInteger(10)); Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), @@ -829,7 +859,7 @@ public void mapsAtomicIntegerToIntegerCorrectly() { } @Test // DATAMONGO-1288 - public void mapsAtomicIntegerToPrimitiveIntegerCorrectly() { + void mapsAtomicIntegerToPrimitiveIntegerCorrectly() { Update update = new Update().set("primIntValue", new AtomicInteger(10)); Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), @@ -840,7 +870,7 @@ public void mapsAtomicIntegerToPrimitiveIntegerCorrectly() { } @Test // DATAMONGO-1404 - public void mapsMinCorrectly() { + void mapsMinCorrectly() { Update update = new Update().min("minfield", 10); Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), @@ -850,7 +880,7 @@ public void mapsMinCorrectly() { } @Test // DATAMONGO-1404 - public void 
mapsMaxCorrectly() { + void mapsMaxCorrectly() { Update update = new Update().max("maxfield", 999); Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), @@ -859,9 +889,9 @@ public void mapsMaxCorrectly() { assertThat(mappedUpdate).containsEntry("$max", new Document("maxfield", 999)); } - @Test // DATAMONGO-1423 + @Test // DATAMONGO-1423, DATAMONGO-2155 @SuppressWarnings("unchecked") - public void mappingShouldConsiderCustomConvertersForEnumMapKeys() { + void mappingShouldConsiderCustomConvertersForEnumMapKeys() { CustomConversions conversions = new MongoCustomConversions(Arrays.asList( ClassWithEnum.AllocationToStringConverter.INSTANCE, ClassWithEnum.StringToAllocationConverter.INSTANCE)); @@ -883,12 +913,12 @@ public void mappingShouldConsiderCustomConvertersForEnumMapKeys() { Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set"); assertThat($set.containsKey("enumAsMapKey")).isTrue(); - Document enumAsMapKey = $set.get("enumAsMapKey", Document.class); - assertThat(enumAsMapKey.get("AVAILABLE")).isEqualTo(100); + Map enumAsMapKey = $set.get("enumAsMapKey", Map.class); + assertThat(enumAsMapKey.get("V")).isEqualTo(100); } @Test // DATAMONGO-1176 - public void mappingShouldPrepareUpdateObjectForMixedOperatorsAndFields() { + void mappingShouldPrepareUpdateObjectForMixedOperatorsAndFields() { Document document = new Document("key", "value").append("$set", new Document("a", "b").append("x", "y")); @@ -899,7 +929,7 @@ public void mappingShouldPrepareUpdateObjectForMixedOperatorsAndFields() { } @Test // DATAMONGO-1176 - public void mappingShouldReturnReplaceObject() { + void mappingShouldReturnReplaceObject() { Document document = new Document("key", "value").append("a", "b").append("x", "y"); @@ -912,7 +942,7 @@ public void mappingShouldReturnReplaceObject() { } @Test // DATAMONGO-1176 - public void mappingShouldReturnUpdateObject() { + void mappingShouldReturnUpdateObject() { Document document = new Document("$push", new Document("x", 
"y")).append("$set", new Document("a", "b")); @@ -923,22 +953,24 @@ public void mappingShouldReturnUpdateObject() { assertThat(mappedObject).hasSize(2); } - @Test // DATAMONGO-1486 - public void mappingShouldConvertMapKeysToString() { + @Test // DATAMONGO-1486, DATAMONGO-2155 + void mappingShouldConvertMapKeysToString() { Update update = new Update().set("map", Collections.singletonMap(25, "#StarTrek50")); Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(EntityWithObjectMap.class)); - Document mapToSet = getAsDocument(getAsDocument(mappedUpdate, "$set"), "map"); + Document $set = DocumentTestUtils.getAsDocument(mappedUpdate, "$set"); + assertThat($set.containsKey("map")).isTrue(); + Map mapToSet = $set.get("map", Map.class); for (Object key : mapToSet.keySet()) { assertThat(key).isInstanceOf(String.class); } } @Test // DATAMONGO-1772 - public void mappingShouldAddTypeKeyInListOfInterfaceTypeContainedInConcreteObjectCorrectly() { + void mappingShouldAddTypeKeyInListOfInterfaceTypeContainedInConcreteObjectCorrectly() { ConcreteInner inner = new ConcreteInner(); inner.interfaceTypeList = Collections.singletonList(new SomeInterfaceImpl()); @@ -952,7 +984,7 @@ public void mappingShouldAddTypeKeyInListOfInterfaceTypeContainedInConcreteObjec } @Test // DATAMONGO-1772 - public void mappingShouldAddTypeKeyInListOfAbstractTypeContainedInConcreteObjectCorrectly() { + void mappingShouldAddTypeKeyInListOfAbstractTypeContainedInConcreteObjectCorrectly() { ConcreteInner inner = new ConcreteInner(); inner.abstractTypeList = Collections.singletonList(new SomeInterfaceImpl()); @@ -965,6 +997,371 @@ public void mappingShouldAddTypeKeyInListOfAbstractTypeContainedInConcreteObject .doesNotContainKey("$set.concreteInnerList.[0]._class"); } + @Test // DATAMONGO-2155 + void shouldPreserveFieldNamesOfMapProperties() { + + Update update = Update + .fromDocument(new Document("concreteMap", new Document("Name", new Document("name", 
"fooo")))); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithObjectMap.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("concreteMap", new Document("Name", new Document("name", "fooo")))); + } + + @Test // DATAMONGO-2155 + void shouldPreserveExplicitFieldNamesInsideMapProperties() { + + Update update = Update + .fromDocument(new Document("map", new Document("Value", new Document("renamed-value", "fooo")))); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithMapOfAliased.class)); + + assertThat(mappedUpdate) + .isEqualTo(new Document("map", new Document("Value", new Document("renamed-value", "fooo")))); + } + + @Test // DATAMONGO-2155 + void shouldMapAliasedFieldNamesInMapsCorrectly() { + + Update update = Update + .fromDocument(new Document("map", Collections.singletonMap("Value", new Document("value", "fooo")))); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithMapOfAliased.class)); + + assertThat(mappedUpdate) + .isEqualTo(new Document("map", new Document("Value", new Document("renamed-value", "fooo")))); + } + + @Test // DATAMONGO-2174 + void mappingUpdateDocumentWithExplicitFieldNameShouldBePossible() { + + Document mappedUpdate = mapper.getMappedObject(new Document("AValue", "a value"), + context.getPersistentEntity(TypeWithFieldNameThatCannotBeDecapitalized.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("AValue", "a value")); + } + + @Test // DATAMONGO-2054 + void mappingShouldAllowPositionAllParameter() { + + Update update = new Update().inc("grades.$[]", 10); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithListOfIntegers.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$inc", new Document("grades.$[]", 10))); + } + + @Test // DATAMONGO-2054 + void 
mappingShouldAllowPositionAllParameterWhenPropertyHasExplicitFieldName() { + + Update update = new Update().inc("list.$[]", 10); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(ParentClass.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$inc", new Document("aliased.$[]", 10))); + } + + @Test // DATAMONGO-2215 + void mappingShouldAllowPositionParameterWithIdentifier() { + + Update update = new Update().set("grades.$[element]", 10) // + .filterArray(Criteria.where("element").gte(100)); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithListOfIntegers.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("grades.$[element]", 10))); + } + + @Test // DATAMONGO-2215 + void mappingShouldAllowPositionParameterWithIdentifierWhenFieldHasExplicitFieldName() { + + Update update = new Update().set("list.$[element]", 10) // + .filterArray(Criteria.where("element").gte(100)); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(ParentClass.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("aliased.$[element]", 10))); + } + + @Test // DATAMONGO-2215 + void mappingShouldAllowNestedPositionParameterWithIdentifierWhenFieldHasExplicitFieldName() { + + Update update = new Update().set("list.$[element].value", 10) // + .filterArray(Criteria.where("element").gte(100)); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(ParentClass.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("aliased.$[element].value", 10))); + } + + @Test // DATAMONGO-1902 + void mappingShouldConsiderValueOfUnwrappedType() { + + Update update = new Update().set("unwrappedValue.stringValue", "updated"); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + 
context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("stringValue", "updated"))); + } + + @Test // DATAMONGO-1902 + void mappingShouldConsiderUnwrappedType() { + + UnwrappableType unwrappableType = new UnwrappableType(); + unwrappableType.stringValue = "updated"; + unwrappableType.listValue = Arrays.asList("val-1", "val-2"); + Update update = new Update().set("unwrappedValue", unwrappableType); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithUnwrapped.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", + new Document("stringValue", "updated").append("listValue", Arrays.asList("val-1", "val-2")))); + } + + @Test // DATAMONGO-1902 + void mappingShouldConsiderValueOfPrefixedUnwrappedType() { + + Update update = new Update().set("unwrappedValue.stringValue", "updated"); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithPrefixedUnwrapped.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("prefix-stringValue", "updated"))); + } + + @Test // DATAMONGO-1902 + void mappingShouldConsiderPrefixedUnwrappedType() { + + UnwrappableType unwrappableType = new UnwrappableType(); + unwrappableType.stringValue = "updated"; + unwrappableType.listValue = Arrays.asList("val-1", "val-2"); + + Update update = new Update().set("unwrappedValue", unwrappableType); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithPrefixedUnwrapped.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", + new Document("prefix-stringValue", "updated").append("prefix-listValue", Arrays.asList("val-1", "val-2")))); + } + + @Test // DATAMONGO-1902 + void mappingShouldConsiderNestedPrefixedUnwrappedType() { + + UnwrappableType unwrappableType = new UnwrappableType(); + unwrappableType.stringValue = 
"updated"; + unwrappableType.listValue = Arrays.asList("val-1", "val-2"); + + Update update = new Update().set("withPrefixedUnwrapped.unwrappedValue", unwrappableType); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WrapperAroundWithUnwrapped.class)); + + assertThat(mappedUpdate).isEqualTo(new Document("$set", new Document("withPrefixedUnwrapped", + new Document("prefix-stringValue", "updated").append("prefix-listValue", Arrays.asList("val-1", "val-2"))))); + } + + @Test // GH-3552 + void numericKeyForMap() { + + Update update = new Update().set("map.601218778970110001827396", "testing"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithObjectMap.class)); + + assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"map.601218778970110001827396\": \"testing\"}}"); + } + + @Test // GH-3552 + void numericKeyInMapOfNestedPath() { + + Update update = new Update().set("map.601218778970110001827396.value", "testing"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithObjectMap.class)); + + assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"map.601218778970110001827396.value\": \"testing\"}}"); + } + + @Test // GH-3688 + void multipleNumericKeysInNestedPath() { + + Update update = new Update().set("intKeyedMap.12345.map.0", "testing"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithIntKeyedMap.class)); + + assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"intKeyedMap.12345.map.0\": \"testing\"}}"); + } + + @Test // GH-3566 + void mapsObjectClassPropertyFieldInMapValueTypeAsKey() { + + Update update = new Update().set("map.class", "value"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithObjectMap.class)); + + assertThat(mappedUpdate).isEqualTo("{\"$set\": 
{\"map.class\": \"value\"}}"); + } + + @Test // GH-3775 + void mapNestedStringFieldCorrectly() { + + Update update = new Update().set("levelOne.a.b.d", "e"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.a.b.d", "e"))); + } + + @ParameterizedTest // GH-3775, GH-4426 + @ValueSource(strings = {"levelOne.0.1.3", "levelOne.0.1.32", "levelOne2.0.1.32", "levelOne2.0.1.320"}) + void mapNestedIntegerFieldCorrectly(String path) { + + Update update = new Update().set(path, "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document(path, "4"))); + } + + @ParameterizedTest // GH-3775, GH-4426 + @ValueSource(strings = {"levelOne.0.1.c", "levelOne.0.1.c.32", "levelOne2.0.1.32.c", "levelOne2.0.1.c.320"}) + void mapNestedMixedStringIntegerFieldCorrectly(String path) { + + Update update = new Update().set(path, "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document(path, "4"))); + } + + @Test // GH-3775 + void mapNestedMixedStringIntegerWithStartNumberFieldCorrectly() { + + Update update = new Update().set("levelOne.0a.1b.3c", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("levelOne.0a.1b.3c", "4"))); + } + + @Test // GH-3688 + void multipleKeysStartingWithANumberInNestedPath() { + + Update update = new Update().set("intKeyedMap.1a.map.0b", "testing"); + Document 
mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithIntKeyedMap.class)); + + assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"intKeyedMap.1a.map.0b\": \"testing\"}}"); + } + + @Test // GH-3853 + void updateWithDocuRefOnId() { + + Sample sample = new Sample(); + sample.foo = "s1"; + + Update update = new Update().set("sample", sample); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("sample", "s1"))); + } + + @Test // GH-3853 + void updateListWithDocuRefOnId() { + + Sample sample = new Sample(); + sample.foo = "s1"; + + Update update = new Update().set("samples", Arrays.asList(sample)); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedUpdate) + .isEqualTo(new org.bson.Document("$set", new org.bson.Document("samples", Arrays.asList("s1")))); + } + + @Test // GH-3853 + void updateWithDocuRefOnProperty() { + + Customer customer = new Customer(); + customer.id = new ObjectId(); + customer.name = "c-name"; + + Update update = new Update().set("customer", customer); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set", new org.bson.Document("customer", "c-name"))); + } + + @Test // GH-3853 + void updateListWithDocuRefOnProperty() { + + Customer customer = new Customer(); + customer.id = new ObjectId(); + customer.name = "c-name"; + + Update update = new Update().set("customers", Arrays.asList(customer)); + + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(WithDocumentReference.class)); + + assertThat(mappedUpdate) + .isEqualTo(new 
org.bson.Document("$set", new org.bson.Document("customers", Arrays.asList("c-name")))); + } + + @Test // GH-3921 + void mapNumericKeyInPathHavingComplexMapValyeTypes() { + + Update update = new Update().set("testInnerData.testMap.1.intValue", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(TestData.class)); + + assertThat(mappedUpdate).isEqualTo("{ $set: { 'testInnerData.testMap.1.intValue': '4' }}"); + } + + @Test // GH-3921 + void mapNumericKeyInPathNotMatchingExistingProperties() { + + Update update = new Update().set("testInnerData.imaginaryMap.1.nonExistingProperty", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(TestData.class)); + + assertThat(mappedUpdate).isEqualTo("{ $set: { 'testInnerData.imaginaryMap.1.nonExistingProperty': '4' }}"); + } + + @Test // GH-3921 + void mapNumericKeyInPathPartiallyMatchingExistingProperties() { + + Update update = new Update().set("testInnerData.testMap.1.nonExistingProperty.2.someValue", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(TestData.class)); + + assertThat(mappedUpdate).isEqualTo("{ $set: { 'testInnerData.testMap.1.nonExistingProperty.2.someValue': '4' }}"); + } + + @Test // GH-3596 + void updateConsidersValueConverterWhenPresent() { + + Update update = new Update().set("text", "value"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), context.getPersistentEntity(WithPropertyValueConverter.class)); + + assertThat(mappedUpdate).isEqualTo("{ $set : { 'text' : 'eulav' } }"); + } + static class DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes { ListModelWrapper concreteTypeWithListAttributeOfInterfaceType; } @@ -996,7 +1393,7 @@ static class InterfaceDocumentDefinitionImpl implements InterfaceDocumentDefinit @Id String id; String value; - public InterfaceDocumentDefinitionImpl(String 
id, String value) { + InterfaceDocumentDefinitionImpl(String id, String value) { this.id = id; this.value = value; @@ -1040,7 +1437,7 @@ interface Model {} static class ModelImpl implements Model { public int value; - public ModelImpl(int value) { + ModelImpl(int value) { this.value = value; } @@ -1065,7 +1462,7 @@ static class ListModel { List values; - public ListModel(String... values) { + ListModel(String... values) { this.values = Arrays.asList(values); } } @@ -1094,7 +1491,7 @@ static abstract class AbstractChildClass { String otherValue; AbstractChildClass someObject; - public AbstractChildClass(String id, String value) { + AbstractChildClass(String id, String value) { this.id = id; this.value = value; this.otherValue = "other_" + value; @@ -1103,7 +1500,7 @@ public AbstractChildClass(String id, String value) { static class ConcreteChildClass extends AbstractChildClass { - public ConcreteChildClass(String id, String value) { + ConcreteChildClass(String id, String value) { super(id, value); } } @@ -1119,7 +1516,7 @@ public List getCollectionOfNestedEntities() { static class NestedEntity { String name; - public NestedEntity(String name) { + NestedEntity(String name) { super(); this.name = name; } @@ -1165,7 +1562,7 @@ static class NestedDocument { String name; - public NestedDocument(String name) { + NestedDocument(String name) { super(); this.name = name; } @@ -1181,18 +1578,30 @@ static class EntityWithList { List list; } + static class EntityWithListOfIntegers { + List grades; + } + static class EntityWithAliasedObject { @Field("renamed-value") Object value; Object field; } + static class EntityWithMapOfAliased { + Map map; + } + static class EntityWithObjectMap { Map map; Map concreteMap; } + static class EntityWithIntKeyedMap { + Map intKeyedMap; + } + static class ClassWithEnum { Allocation allocation; @@ -1270,11 +1679,155 @@ static abstract class SomeAbstractType { } - @AllArgsConstructor - @NoArgsConstructor static class SomeInterfaceImpl extends 
SomeAbstractType implements SomeInterfaceType { String value; + + public SomeInterfaceImpl() {} + + public SomeInterfaceImpl(String value) { + this.value = value; + } } + static class TypeWithFieldNameThatCannotBeDecapitalized { + + @Id protected String id; + + @Field("AValue") private Long aValue = 0L; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public Long getaValue() { + return aValue; + } + + public void setaValue(Long aValue) { + this.aValue = aValue; + } + } + + static class WrapperAroundWithUnwrapped { + + String someValue; + WithUnwrapped withUnwrapped; + WithPrefixedUnwrapped withPrefixedUnwrapped; + } + + static class WithUnwrapped { + + String id; + + @Unwrapped.Nullable UnwrappableType unwrappedValue; + } + + static class WithPrefixedUnwrapped { + + String id; + + @Unwrapped.Nullable("prefix-") UnwrappableType unwrappedValue; + } + + static class UnwrappableType { + + String stringValue; + List listValue; + + @Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + + @Transient // + String transientValue; + } + + static class EntityWithNestedMap { + Map>> levelOne; + Map>> levelOne2; + } + + static class Customer { + + @Id private ObjectId id; + private String name; + } + + static class Sample { + + @Id private String foo; + } + + static class WithDocumentReference { + + private ObjectId id; + + private String name; + + @DocumentReference(lookup = "{ 'name' : ?#{#target} }") private Customer customer; + + @DocumentReference(lookup = "{ 'name' : ?#{#target} }") private List customers; + + @DocumentReference private Sample sample; + + @DocumentReference private List samples; + } + + private static class TestData { + + @Id private String id; + private TestInnerData testInnerData; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public TestInnerData getTestInnerData() { + return testInnerData; + } + + public void 
setTestInnerData(TestInnerData testInnerData) { + this.testInnerData = testInnerData; + } + } + + private static class TestInnerData { + + private Map testMap; + + public Map getTestMap() { + return testMap; + } + + public void setTestMap(Map testMap) { + this.testMap = testMap; + } + } + + private static class TestValue { + + private int intValue; + + public int getIntValue() { + return intValue; + } + + public void setIntValue(int intValue) { + this.intValue = intValue; + } + } + + static class WithPropertyValueConverter { + + @ValueConverter(ReversingValueConverter.class) + String text; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/AbstractEncryptionTestBase.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/AbstractEncryptionTestBase.java new file mode 100644 index 0000000000..083221053d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/AbstractEncryptionTestBase.java @@ -0,0 +1,756 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.encryption; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.EncryptionAlgorithms.*; +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import java.security.SecureRandom; +import java.time.LocalDate; +import java.time.Month; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; + +import org.assertj.core.api.Assertions; +import org.bson.BsonBinary; +import org.bson.Document; +import org.bson.types.Binary; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.DisposableBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.dao.PermissionDeniedDataAccessException; +import org.springframework.data.convert.PropertyValueConverterFactory; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter; +import org.springframework.data.mongodb.core.convert.encryption.MongoEncryptionConverter; +import org.springframework.data.mongodb.core.mapping.ExplicitEncrypted; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.util.Lazy; + +import com.mongodb.ClientEncryptionSettings; +import 
com.mongodb.ConnectionString; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoNamespace; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.client.model.Indexes; +import com.mongodb.client.model.vault.DataKeyOptions; +import com.mongodb.client.vault.ClientEncryption; +import com.mongodb.client.vault.ClientEncryptions; + +/** + * @author Christoph Strobl + * @author Julia Lee + */ +public abstract class AbstractEncryptionTestBase { + + @Autowired MongoTemplate template; + + @Test // GH-4284 + void encryptAndDecryptSimpleValue() { + + Person source = new Person(); + source.id = "id-1"; + source.ssn = "mySecretSSN"; + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("ssn")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4432 + void encryptAndDecryptJavaTime() { + + Person source = new Person(); + source.id = "id-1"; + source.today = LocalDate.of(1979, Month.SEPTEMBER, 18); + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("today")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void encryptAndDecryptComplexValue() { + + Person source = new Person(); + source.id = "id-1"; + source.address = new Address(); + source.address.city = "NYC"; + source.address.street = "4th Ave."; + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("address")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void encryptAndDecryptValueWithinComplexOne() { + + Person source = new Person(); + source.id = "id-1"; + source.encryptedZip = new 
AddressWithEncryptedZip(); + source.encryptedZip.city = "Boston"; + source.encryptedZip.street = "central square"; + source.encryptedZip.zip = "1234567890"; + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> { + assertThat(it.get("encryptedZip")).isInstanceOf(Document.class); + assertThat(it.get("encryptedZip", Document.class).get("city")).isInstanceOf(String.class); + assertThat(it.get("encryptedZip", Document.class).get("street")).isInstanceOf(String.class); + assertThat(it.get("encryptedZip", Document.class).get("zip")).isInstanceOf(Binary.class); + }) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void encryptAndDecryptListOfSimpleValue() { + + Person source = new Person(); + source.id = "id-1"; + source.listOfString = Arrays.asList("spring", "data", "mongodb"); + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("listOfString")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void encryptAndDecryptListOfComplexValue() { + + Person source = new Person(); + source.id = "id-1"; + + Address address = new Address(); + address.city = "SFO"; + address.street = "---"; + + source.listOfComplex = Collections.singletonList(address); + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("listOfComplex")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void encryptAndDecryptMapOfSimpleValues() { + + Person source = new Person(); + source.id = "id-1"; + source.mapOfString = Map.of("k1", "v1", "k2", "v2"); + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("mapOfString")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void 
encryptAndDecryptMapOfComplexValues() { + + Person source = new Person(); + source.id = "id-1"; + + Address address1 = new Address(); + address1.city = "SFO"; + address1.street = "---"; + + Address address2 = new Address(); + address2.city = "NYC"; + address2.street = "---"; + + source.mapOfComplex = Map.of("a1", address1, "a2", address2); + + template.save(source); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("mapOfComplex")).isInstanceOf(Binary.class)) // + .loadedIsEqualToSource(); + } + + @Test // GH-4284 + void canQueryDeterministicallyEncrypted() { + + Person source = new Person(); + source.id = "id-1"; + source.ssn = "mySecretSSN"; + + template.save(source); + + Person loaded = template.query(Person.class).matching(where("ssn").is(source.ssn)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4284 + void cannotQueryRandomlyEncrypted() { + + Person source = new Person(); + source.id = "id-1"; + source.wallet = "secret-wallet-id"; + + template.save(source); + + Person loaded = template.query(Person.class).matching(where("wallet").is(source.wallet)).firstValue(); + assertThat(loaded).isNull(); + } + + @Test // GH-4284 + void updateSimpleTypeEncryptedFieldWithNewValue() { + + Person source = new Person(); + source.id = "id-1"; + + template.save(source); + + template.update(Person.class).matching(where("id").is(source.id)).apply(Update.update("ssn", "secret-value")) + .first(); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("ssn")).isInstanceOf(Binary.class)) // + .loadedMatches(it -> assertThat(it.getSsn()).isEqualTo("secret-value")); + } + + @Test // GH-4284 + void updateComplexTypeEncryptedFieldWithNewValue() { + + Person source = new Person(); + source.id = "id-1"; + + template.save(source); + + Address address = new Address(); + address.city = "SFO"; + address.street = "---"; + + 
template.update(Person.class).matching(where("id").is(source.id)).apply(Update.update("address", address)).first(); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> assertThat(it.get("address")).isInstanceOf(Binary.class)) // + .loadedMatches(it -> assertThat(it.getAddress()).isEqualTo(address)); + } + + @Test // GH-4284 + void updateEncryptedFieldInNestedElementWithNewValue() { + + Person source = new Person(); + source.id = "id-1"; + source.encryptedZip = new AddressWithEncryptedZip(); + source.encryptedZip.city = "Boston"; + source.encryptedZip.street = "central square"; + + template.save(source); + + template.update(Person.class).matching(where("id").is(source.id)).apply(Update.update("encryptedZip.zip", "179")) + .first(); + + verifyThat(source) // + .identifiedBy(Person::getId) // + .wasSavedMatching(it -> { + assertThat(it.get("encryptedZip")).isInstanceOf(Document.class); + assertThat(it.get("encryptedZip", Document.class).get("city")).isInstanceOf(String.class); + assertThat(it.get("encryptedZip", Document.class).get("street")).isInstanceOf(String.class); + assertThat(it.get("encryptedZip", Document.class).get("zip")).isInstanceOf(Binary.class); + }) // + .loadedMatches(it -> assertThat(it.getEncryptedZip().getZip()).isEqualTo("179")); + } + + @Test + void aggregationWithMatch() { + + Person person = new Person(); + person.id = "id-1"; + person.name = "p1-name"; + person.ssn = "mySecretSSN"; + + template.save(person); + + AggregationResults aggregationResults = template.aggregateAndReturn(Person.class) + .by(newAggregation(Person.class, Aggregation.match(where("ssn").is(person.ssn)))).all(); + assertThat(aggregationResults.getMappedResults()).containsExactly(person); + } + + @Test + void altKeyDetection(@Autowired CachingMongoClientEncryption mongoClientEncryption) throws InterruptedException { + + BsonBinary user1key = mongoClientEncryption.getClientEncryption().createDataKey("local", + new 
DataKeyOptions().keyAltNames(Collections.singletonList("user-1"))); + + BsonBinary user2key = mongoClientEncryption.getClientEncryption().createDataKey("local", + new DataKeyOptions().keyAltNames(Collections.singletonList("user-2"))); + + Person p1 = new Person(); + p1.id = "id-1"; + p1.name = "user-1"; + p1.ssn = "ssn"; + p1.viaAltKeyNameField = "value-1"; + + Person p2 = new Person(); + p2.id = "id-2"; + p2.name = "user-2"; + p2.viaAltKeyNameField = "value-1"; + + Person p3 = new Person(); + p3.id = "id-3"; + p3.name = "user-1"; + p3.viaAltKeyNameField = "value-1"; + + template.save(p1); + template.save(p2); + template.save(p3); + + template.execute(Person.class, collection -> { + collection.find(new Document()); + return null; + }); + + // remove the key and invalidate encrypted data + mongoClientEncryption.getClientEncryption().deleteKey(user2key); + + // clear the 60 second key cache within the mongo client + mongoClientEncryption.destroy(); + + assertThat(template.query(Person.class).matching(where("id").is(p1.id)).firstValue()).isEqualTo(p1); + + assertThatExceptionOfType(PermissionDeniedDataAccessException.class) + .isThrownBy(() -> template.query(Person.class).matching(where("id").is(p2.id)).firstValue()); + } + + SaveAndLoadAssert verifyThat(T source) { + return new SaveAndLoadAssert<>(source); + } + + class SaveAndLoadAssert { + + T source; + Function idProvider; + + SaveAndLoadAssert(T source) { + this.source = source; + } + + SaveAndLoadAssert identifiedBy(Function idProvider) { + this.idProvider = idProvider; + return this; + } + + SaveAndLoadAssert wasSavedAs(Document expected) { + return wasSavedMatching(it -> Assertions.assertThat(it).isEqualTo(expected)); + } + + SaveAndLoadAssert wasSavedMatching(Consumer saved) { + AbstractEncryptionTestBase.this.assertSaved(source, idProvider, saved); + return this; + } + + SaveAndLoadAssert loadedMatches(Consumer expected) { + AbstractEncryptionTestBase.this.assertLoaded(source, idProvider, expected); + return 
this; + } + + SaveAndLoadAssert loadedIsEqualToSource() { + return loadedIsEqualTo(source); + } + + SaveAndLoadAssert loadedIsEqualTo(T expected) { + return loadedMatches(it -> Assertions.assertThat(it).isEqualTo(expected)); + } + + } + + void assertSaved(T source, Function idProvider, Consumer dbValue) { + + Document savedDocument = template.execute(Person.class, collection -> { + + MongoNamespace namespace = collection.getNamespace(); + + try (MongoClient rawClient = MongoClients.create()) { + return rawClient.getDatabase(namespace.getDatabaseName()).getCollection(namespace.getCollectionName()) + .find(new Document("_id", idProvider.apply(source))).first(); + } + }); + dbValue.accept(savedDocument); + } + + void assertLoaded(T source, Function idProvider, Consumer loadedValue) { + + T loaded = template.query((Class) source.getClass()).matching(where("id").is(idProvider.apply(source))) + .firstValue(); + + loadedValue.accept(loaded); + } + + protected static class EncryptionConfig extends AbstractMongoClientConfiguration { + + @Autowired ApplicationContext applicationContext; + + @Override + protected String getDatabaseName() { + return "fle-test"; + } + + @Bean + public MongoClient mongoClient() { + return super.mongoClient(); + } + + @Override + protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) { + + converterConfigurationAdapter + .registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext)) + .useNativeDriverJavaTimeCodecs(); + } + + @Bean + MongoEncryptionConverter encryptingConverter(MongoClientEncryption mongoClientEncryption) { + + Lazy dataKey = Lazy.of(() -> mongoClientEncryption.getClientEncryption().createDataKey("local", + new DataKeyOptions().keyAltNames(Collections.singletonList("mySuperSecretKey")))); + + return new MongoEncryptionConverter(mongoClientEncryption, + EncryptionKeyResolver.annotated((ctx) -> EncryptionKey.keyId(dataKey.get()))); + } + + 
@Bean + CachingMongoClientEncryption clientEncryption(ClientEncryptionSettings encryptionSettings) { + return new CachingMongoClientEncryption(() -> ClientEncryptions.create(encryptionSettings)); + } + + @Bean + ClientEncryptionSettings encryptionSettings(MongoClient mongoClient) { + + MongoNamespace keyVaultNamespace = new MongoNamespace("encryption.testKeyVault"); + MongoCollection keyVaultCollection = mongoClient.getDatabase(keyVaultNamespace.getDatabaseName()) + .getCollection(keyVaultNamespace.getCollectionName()); + keyVaultCollection.drop(); + // Ensure that two data keys cannot share the same keyAltName. + keyVaultCollection.createIndex(Indexes.ascending("keyAltNames"), + new IndexOptions().unique(true).partialFilterExpression(Filters.exists("keyAltNames"))); + + MongoCollection collection = mongoClient.getDatabase(getDatabaseName()).getCollection("test"); + collection.drop(); // Clear old data + + byte[] localMasterKey = new byte[96]; + new SecureRandom().nextBytes(localMasterKey); + Map> kmsProviders = Map.of("local", Map.of("key", localMasterKey)); + + // Create the ClientEncryption instance + return ClientEncryptionSettings.builder() // + .keyVaultMongoClientSettings( + MongoClientSettings.builder().applyConnectionString(new ConnectionString("mongodb://localhost")).build()) // + .keyVaultNamespace(keyVaultNamespace.getFullName()) // + .kmsProviders(kmsProviders) // + .build(); + } + } + + static class CachingMongoClientEncryption extends MongoClientEncryption implements DisposableBean { + + static final AtomicReference cache = new AtomicReference<>(); + + CachingMongoClientEncryption(Supplier source) { + super(() -> { + + if (cache.get() != null) { + return cache.get(); + } + + ClientEncryption clientEncryption = source.get(); + cache.set(clientEncryption); + + return clientEncryption; + }); + } + + @Override + public void destroy() { + + ClientEncryption clientEncryption = cache.get(); + if (clientEncryption != null) { + clientEncryption.close(); + 
cache.set(null); + } + } + } + + @org.springframework.data.mongodb.core.mapping.Document("test") + static class Person { + + String id; + String name; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic) // + String ssn; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "mySuperSecretKey") // + String wallet; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // full document must be random + Address address; + + AddressWithEncryptedZip encryptedZip; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // lists must be random + List listOfString; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // lists must be random + List
          listOfComplex; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "/name") // + String viaAltKeyNameField; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // + Map mapOfString; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // + Map mapOfComplex; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // + LocalDate today; + + public String getId() { + return this.id; + } + + public String getName() { + return this.name; + } + + public String getSsn() { + return this.ssn; + } + + public String getWallet() { + return this.wallet; + } + + public Address getAddress() { + return this.address; + } + + public AddressWithEncryptedZip getEncryptedZip() { + return this.encryptedZip; + } + + public List getListOfString() { + return this.listOfString; + } + + public List
          getListOfComplex() { + return this.listOfComplex; + } + + public String getViaAltKeyNameField() { + return this.viaAltKeyNameField; + } + + public Map getMapOfString() { + return this.mapOfString; + } + + public Map getMapOfComplex() { + return this.mapOfComplex; + } + + public LocalDate getToday() { + return today; + } + + public void setId(String id) { + this.id = id; + } + + public void setName(String name) { + this.name = name; + } + + public void setSsn(String ssn) { + this.ssn = ssn; + } + + public void setWallet(String wallet) { + this.wallet = wallet; + } + + public void setAddress(Address address) { + this.address = address; + } + + public void setEncryptedZip(AddressWithEncryptedZip encryptedZip) { + this.encryptedZip = encryptedZip; + } + + public void setListOfString(List listOfString) { + this.listOfString = listOfString; + } + + public void setListOfComplex(List
          listOfComplex) { + this.listOfComplex = listOfComplex; + } + + public void setViaAltKeyNameField(String viaAltKeyNameField) { + this.viaAltKeyNameField = viaAltKeyNameField; + } + + public void setMapOfString(Map mapOfString) { + this.mapOfString = mapOfString; + } + + public void setMapOfComplex(Map mapOfComplex) { + this.mapOfComplex = mapOfComplex; + } + + public void setToday(LocalDate today) { + this.today = today; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(name, person.name) && Objects.equals(ssn, person.ssn) + && Objects.equals(wallet, person.wallet) && Objects.equals(address, person.address) + && Objects.equals(encryptedZip, person.encryptedZip) && Objects.equals(listOfString, person.listOfString) + && Objects.equals(listOfComplex, person.listOfComplex) + && Objects.equals(viaAltKeyNameField, person.viaAltKeyNameField) + && Objects.equals(mapOfString, person.mapOfString) && Objects.equals(mapOfComplex, person.mapOfComplex) + && Objects.equals(today, person.today); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, ssn, wallet, address, encryptedZip, listOfString, listOfComplex, viaAltKeyNameField, + mapOfString, mapOfComplex, today); + } + + public String toString() { + return "EncryptionTests.Person(id=" + this.getId() + ", name=" + this.getName() + ", ssn=" + this.getSsn() + + ", wallet=" + this.getWallet() + ", address=" + this.getAddress() + ", encryptedZip=" + + this.getEncryptedZip() + ", listOfString=" + this.getListOfString() + ", listOfComplex=" + + this.getListOfComplex() + ", viaAltKeyNameField=" + this.getViaAltKeyNameField() + ", mapOfString=" + + this.getMapOfString() + ", mapOfComplex=" + this.getMapOfComplex() + ", today=" + this.getToday() + ")"; + } + } + + static class Address { + String city; + String 
street; + + public Address() {} + + public String getCity() { + return this.city; + } + + public String getStreet() { + return this.street; + } + + public void setCity(String city) { + this.city = city; + } + + public void setStreet(String street) { + this.street = street; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Address address = (Address) o; + return Objects.equals(city, address.city) && Objects.equals(street, address.street); + } + + @Override + public int hashCode() { + return Objects.hash(city, street); + } + + public String toString() { + return "EncryptionTests.Address(city=" + this.getCity() + ", street=" + this.getStreet() + ")"; + } + } + + static class AddressWithEncryptedZip extends Address { + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) String zip; + + @Override + public String toString() { + return "AddressWithEncryptedZip{" + "zip='" + zip + '\'' + ", city='" + getCity() + '\'' + ", street='" + + getStreet() + '\'' + '}'; + } + + public String getZip() { + return this.zip; + } + + public void setZip(String zip) { + this.zip = zip; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/BypassAutoEncryptionTest.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/BypassAutoEncryptionTest.java new file mode 100644 index 0000000000..3aab3a7485 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/BypassAutoEncryptionTest.java @@ -0,0 +1,64 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.core.encryption; + +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.AutoEncryptionSettings; +import com.mongodb.ClientEncryptionSettings; +import com.mongodb.MongoClientSettings.Builder; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; + +/** + * Encryption tests for client having {@link AutoEncryptionSettings#isBypassAutoEncryption()}. 
+ * + * @author Christoph Strobl + * @author Julia Lee + */ +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = BypassAutoEncryptionTest.Config.class) +public class BypassAutoEncryptionTest extends AbstractEncryptionTestBase { + + @Disabled + @Override + void altKeyDetection(@Autowired CachingMongoClientEncryption mongoClientEncryption) throws InterruptedException { + super.altKeyDetection(mongoClientEncryption); + } + + @Configuration + static class Config extends EncryptionConfig { + + @Override + protected void configureClientSettings(Builder builder) { + + MongoClient mongoClient = MongoClients.create(); + ClientEncryptionSettings clientEncryptionSettings = encryptionSettings(mongoClient); + mongoClient.close(); + + builder.autoEncryptionSettings(AutoEncryptionSettings.builder() // + .kmsProviders(clientEncryptionSettings.getKmsProviders()) // + .keyVaultNamespace(clientEncryptionSettings.getKeyVaultNamespace()) // + .bypassAutoEncryption(true).build()); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyResolverUnitTests.java new file mode 100644 index 0000000000..eeb4df2275 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyResolverUnitTests.java @@ -0,0 +1,248 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.EncryptionAlgorithms.*; + +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.function.Function; + +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.ExplicitEncrypted; +import org.springframework.data.mongodb.test.util.MongoTestMappingContext; +import org.springframework.expression.spel.support.StandardEvaluationContext; + +/** + * Unit tests for {@link EncryptionKeyResolver}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class EncryptionKeyResolverUnitTests { + + @Mock // + EncryptionKeyResolver fallbackKeyResolver; + + MongoTestMappingContext mappingContext = MongoTestMappingContext.newTestContext().init(); + + EncryptionKey defaultEncryptionKey = EncryptionKey + .keyId(new BsonBinary("super-secret".getBytes(StandardCharsets.UTF_8))); + + @BeforeEach + void beforeEach() { + when(fallbackKeyResolver.getKey(any())).thenReturn(defaultEncryptionKey); + } + + @Test // GH-4284 + void usesDefaultKeyIfNoAnnotationPresent() { + + EncryptionContext ctx = prepareEncryptionContext(AnnotatedWithExplicitlyEncrypted.class, + AnnotatedWithExplicitlyEncrypted::getNotAnnotated); + + EncryptionKey key = EncryptionKeyResolver.annotated(fallbackKeyResolver).getKey(ctx); + + assertThat(key).isSameAs(defaultEncryptionKey); + } + + @Test // GH-4284 + void usesDefaultKeyIfAnnotatedValueIsEmpty() { + + EncryptionContext ctx = prepareEncryptionContext(AnnotatedWithExplicitlyEncrypted.class, + AnnotatedWithExplicitlyEncrypted::getAlgorithm); + + EncryptionKey key = EncryptionKeyResolver.annotated(fallbackKeyResolver).getKey(ctx); + + assertThat(key).isSameAs(defaultEncryptionKey); + } + + @Test // GH-4284 + void usesDefaultAltKeyNameIfPresent() { + + EncryptionContext ctx = prepareEncryptionContext(AnnotatedWithExplicitlyEncrypted.class, + AnnotatedWithExplicitlyEncrypted::getAlgorithmAndAltKeyName); + + EncryptionKey key = EncryptionKeyResolver.annotated(fallbackKeyResolver).getKey(ctx); + + assertThat(key).isEqualTo(EncryptionKey.keyAltName("sec-key-name")); + } + + @Test // GH-4284 + void readsAltKeyNameFromContextIfReferencingPropertyValue() { + + EncryptionContext ctx = prepareEncryptionContext(AnnotatedWithExplicitlyEncrypted.class, + AnnotatedWithExplicitlyEncrypted::getAlgorithmAndAltKeyNameFromPropertyValue); + 
when(ctx.lookupValue(eq("notAnnotated"))).thenReturn("born-to-be-wild"); + + EncryptionKey key = EncryptionKeyResolver.annotated(fallbackKeyResolver).getKey(ctx); + + assertThat(key).isEqualTo(EncryptionKey.keyAltName("born-to-be-wild")); + } + + @Test // GH-4284 + void readsKeyIdFromEncryptedAnnotationIfNoBetterCandidateAvailable() { + + EncryptionContext ctx = prepareEncryptionContext( + AnnotatedWithExplicitlyEncryptedHavingDefaultAlgorithmServedViaAnnotationOnType.class, + AnnotatedWithExplicitlyEncryptedHavingDefaultAlgorithmServedViaAnnotationOnType::getKeyIdFromDomainType); + + EncryptionKey key = EncryptionKeyResolver.annotated(fallbackKeyResolver).getKey(ctx); + + assertThat(key).isEqualTo(EncryptionKey.keyId( + new BsonBinary(BsonBinarySubType.UUID_STANDARD, Base64.getDecoder().decode("xKVup8B1Q+CkHaVRx+qa+g==")))); + } + + @Test // GH-4284 + void ignoresKeyIdFromEncryptedAnnotationWhenBetterCandidateAvailable() { + + EncryptionContext ctx = prepareEncryptionContext(KeyIdFromSpel.class, KeyIdFromSpel::getKeyIdFromDomainType); + + StandardEvaluationContext evaluationContext = new StandardEvaluationContext(); + evaluationContext.setVariable("myKeyId", "xKVup8B1Q+CkHaVRx+qa+g=="); + + when(ctx.getEvaluationContext(any())).thenReturn(evaluationContext); + + EncryptionKey key = EncryptionKeyResolver.annotated(fallbackKeyResolver).getKey(ctx); + + assertThat(key).isEqualTo(EncryptionKey.keyId( + new BsonBinary(BsonBinarySubType.UUID_STANDARD, Base64.getDecoder().decode("xKVup8B1Q+CkHaVRx+qa+g==")))); + } + + private EncryptionContext prepareEncryptionContext(Class type, Function property) { + + EncryptionContext encryptionContext = mock(EncryptionContext.class); + when(encryptionContext.getProperty()).thenReturn(mappingContext.getPersistentPropertyFor(type, property)); + return encryptionContext; + } + + class AnnotatedWithExplicitlyEncrypted { + + String notAnnotated; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // + String 
algorithm; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "sec-key-name") // + String algorithmAndAltKeyName; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "/notAnnotated") // + String algorithmAndAltKeyNameFromPropertyValue; + + public String getNotAnnotated() { + return this.notAnnotated; + } + + public String getAlgorithm() { + return this.algorithm; + } + + public String getAlgorithmAndAltKeyName() { + return this.algorithmAndAltKeyName; + } + + public String getAlgorithmAndAltKeyNameFromPropertyValue() { + return this.algorithmAndAltKeyNameFromPropertyValue; + } + + public void setNotAnnotated(String notAnnotated) { + this.notAnnotated = notAnnotated; + } + + public void setAlgorithm(String algorithm) { + this.algorithm = algorithm; + } + + public void setAlgorithmAndAltKeyName(String algorithmAndAltKeyName) { + this.algorithmAndAltKeyName = algorithmAndAltKeyName; + } + + public void setAlgorithmAndAltKeyNameFromPropertyValue(String algorithmAndAltKeyNameFromPropertyValue) { + this.algorithmAndAltKeyNameFromPropertyValue = algorithmAndAltKeyNameFromPropertyValue; + } + + public String toString() { + return "EncryptionKeyResolverUnitTests.AnnotatedWithExplicitlyEncrypted(notAnnotated=" + this.getNotAnnotated() + + ", algorithm=" + this.getAlgorithm() + ", algorithmAndAltKeyName=" + this.getAlgorithmAndAltKeyName() + + ", algorithmAndAltKeyNameFromPropertyValue=" + this.getAlgorithmAndAltKeyNameFromPropertyValue() + ")"; + } + } + + @Encrypted(keyId = "xKVup8B1Q+CkHaVRx+qa+g==") + class AnnotatedWithExplicitlyEncryptedHavingDefaultAlgorithmServedViaAnnotationOnType { + + @ExplicitEncrypted // + String keyIdFromDomainType; + + @ExplicitEncrypted(keyAltName = "sec-key-name") // + String altKeyNameFromPropertyIgnoringKeyIdFromDomainType; + + public AnnotatedWithExplicitlyEncryptedHavingDefaultAlgorithmServedViaAnnotationOnType() {} + + public String getKeyIdFromDomainType() { + return 
this.keyIdFromDomainType; + } + + public String getAltKeyNameFromPropertyIgnoringKeyIdFromDomainType() { + return this.altKeyNameFromPropertyIgnoringKeyIdFromDomainType; + } + + public void setKeyIdFromDomainType(String keyIdFromDomainType) { + this.keyIdFromDomainType = keyIdFromDomainType; + } + + public void setAltKeyNameFromPropertyIgnoringKeyIdFromDomainType( + String altKeyNameFromPropertyIgnoringKeyIdFromDomainType) { + this.altKeyNameFromPropertyIgnoringKeyIdFromDomainType = altKeyNameFromPropertyIgnoringKeyIdFromDomainType; + } + + public String toString() { + return "EncryptionKeyResolverUnitTests.AnnotatedWithExplicitlyEncryptedHavingDefaultAlgorithmServedViaAnnotationOnType(keyIdFromDomainType=" + + this.getKeyIdFromDomainType() + ", altKeyNameFromPropertyIgnoringKeyIdFromDomainType=" + + this.getAltKeyNameFromPropertyIgnoringKeyIdFromDomainType() + ")"; + } + } + + @Encrypted(keyId = "#{#myKeyId}") + class KeyIdFromSpel { + + @ExplicitEncrypted // + String keyIdFromDomainType; + + public String getKeyIdFromDomainType() { + return this.keyIdFromDomainType; + } + + public void setKeyIdFromDomainType(String keyIdFromDomainType) { + this.keyIdFromDomainType = keyIdFromDomainType; + } + + public String toString() { + return "EncryptionKeyResolverUnitTests.KeyIdFromSpel(keyIdFromDomainType=" + this.getKeyIdFromDomainType() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyUnitTests.java new file mode 100644 index 0000000000..40b0753b80 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionKeyUnitTests.java @@ -0,0 +1,50 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import static org.assertj.core.api.Assertions.*; + +import java.util.UUID; + +import org.bson.BsonBinary; +import org.bson.UuidRepresentation; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link EncryptionKey}. + * + * @author Christoph Strobl + */ +class EncryptionKeyUnitTests { + + @Test // GH-4284 + void keyIdToStringDoesNotRevealEntireKey() { + + UUID uuid = UUID.randomUUID(); + + assertThat(EncryptionKey.keyId(new BsonBinary(uuid, UuidRepresentation.STANDARD)).toString()) + .contains(uuid.toString().substring(0, 6) + "***"); + } + + @Test // GH-4284 + void altKeyNameToStringDoesNotRevealEntireKey() { + + assertThat(EncryptionKey.keyAltName("s").toString()).contains("***"); + assertThat(EncryptionKey.keyAltName("su").toString()).contains("***"); + assertThat(EncryptionKey.keyAltName("sup").toString()).contains("***"); + assertThat(EncryptionKey.keyAltName("super-secret-key").toString()).contains("sup***"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionTests.java new file mode 100644 index 0000000000..3e840ed858 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/EncryptionTests.java @@ -0,0 +1,30 @@ +/* 
+ * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +/** + * @author Christoph Strobl + * @author Julia Lee + */ +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = AbstractEncryptionTestBase.EncryptionConfig.class) +public class EncryptionTests extends AbstractEncryptionTestBase { + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryptionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryptionUnitTests.java new file mode 100644 index 0000000000..825645d86c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryptionUnitTests.java @@ -0,0 +1,86 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.EncryptionAlgorithms.*; + +import java.util.function.Supplier; + +import org.bson.BsonBinary; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +import com.mongodb.client.model.vault.EncryptOptions; +import com.mongodb.client.vault.ClientEncryption; + +/** + * Unit tests for {@link MongoClientEncryption}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +class MongoClientEncryptionUnitTests { + + @Mock // + ClientEncryption clientEncryption; + + @Test // GH-4284 + void delegatesDecrypt() { + + MongoClientEncryption mce = MongoClientEncryption.just(clientEncryption); + mce.decrypt(new BsonBinary(new byte[0])); + + verify(clientEncryption).decrypt(Mockito.any()); + } + + @Test // GH-4284 + void delegatesEncrypt() { + + MongoClientEncryption mce = MongoClientEncryption.just(clientEncryption); + mce.encrypt(new BsonBinary(new byte[0]), + new EncryptionOptions(AEAD_AES_256_CBC_HMAC_SHA_512_Random, EncryptionKey.keyAltName("sec-key-name"))); + + ArgumentCaptor options = ArgumentCaptor.forClass(EncryptOptions.class); + verify(clientEncryption).encrypt(any(), options.capture()); + assertThat(options.getValue().getAlgorithm()).isEqualTo(AEAD_AES_256_CBC_HMAC_SHA_512_Random); + assertThat(options.getValue().getKeyAltName()).isEqualTo("sec-key-name"); + } + + @Test // GH-4284 + void refreshObtainsNextInstanceFromSupplier() { + + ClientEncryption next = mock(ClientEncryption.class); + + MongoClientEncryption mce = new MongoClientEncryption(new Supplier<>() { + + int counter = 0; + + @Override + public ClientEncryption get() { + return counter++ % 2 == 0 ? clientEncryption : next; + } + }); + + assertThat(mce.getClientEncryption()).isSameAs(clientEncryption); + assertThat(mce.getClientEncryption()).isSameAs(next); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoEncryptionConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoEncryptionConverterUnitTests.java new file mode 100644 index 0000000000..4e76346e56 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoEncryptionConverterUnitTests.java @@ -0,0 +1,373 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.core.EncryptionAlgorithms.*; + +import java.util.List; +import java.util.Map; + +import org.bson.BsonArray; +import org.bson.BsonBinary; +import org.bson.BsonString; +import org.bson.BsonValue; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.data.mongodb.core.convert.MongoConversionContext; +import org.springframework.data.mongodb.core.convert.encryption.MongoEncryptionConverter; +import org.springframework.data.mongodb.core.mapping.ExplicitEncrypted; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.test.util.MongoTestMappingContext; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class MongoEncryptionConverterUnitTests { + + @Mock // + Encryption 
encryption; + + @Mock // + EncryptionKeyResolver fallbackKeyResolver; + + @Mock // + MongoConversionContext conversionContext; + + MongoTestMappingContext mappingContext = MongoTestMappingContext.newTestContext(); + EncryptionKeyResolver keyResolver; + MongoEncryptionConverter converter; + + @Captor ArgumentCaptor encryptionOptions; + + @Captor ArgumentCaptor valueToBeEncrypted; + + @BeforeEach + void beforeEach() { + + when(fallbackKeyResolver.getKey(any())).thenReturn(EncryptionKey.keyAltName("default")); + when(encryption.encrypt(valueToBeEncrypted.capture(), encryptionOptions.capture())) + .thenReturn(new BsonBinary(new byte[0])); + keyResolver = EncryptionKeyResolver.annotated(fallbackKeyResolver); + converter = new MongoEncryptionConverter(encryption, keyResolver); + } + + @Test // GH-4284 + void delegatesConversionOfSimpleValueWithDefaultEncryptionKeyFromKeyResolver() { + + when(conversionContext.getProperty()) + .thenReturn(mappingContext.getPersistentPropertyFor(Type.class, Type::getStringValueWithAlgorithmOnly)); + + converter.write("foo", conversionContext); + + assertThat(valueToBeEncrypted.getValue()).isEqualTo(new BsonString("foo")); + assertThat(encryptionOptions.getValue()).isEqualTo( + new EncryptionOptions(AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic, EncryptionKey.keyAltName("default"))); + } + + @Test // GH-4284 + void favorsAltKeyNameIfPresent() { + + when(conversionContext.getProperty()).thenReturn( + mappingContext.getPersistentPropertyFor(Type.class, Type::getStringValueWithAlgorithmAndAltKeyName)); + + converter.write("foo", conversionContext); + + assertThat(encryptionOptions.getValue()).isEqualTo( + new EncryptionOptions(AEAD_AES_256_CBC_HMAC_SHA_512_Random, EncryptionKey.keyAltName("sec-key-name"))); + } + + @Test // GH-4284 + void readsAltKeyNameFromProperty() { + + when(conversionContext.getProperty()).thenReturn(mappingContext.getPersistentPropertyFor(Type.class, + Type::getStringValueWithAlgorithmAndAltKeyNameFromPropertyValue)); + + 
ArgumentCaptor path = ArgumentCaptor.forClass(String.class); + when(conversionContext.getValue(path.capture())).thenReturn("(ツ)"); + + converter.write("foo", conversionContext); + assertThat(path.getValue()).isEqualTo("notAnnotated"); + + assertThat(encryptionOptions.getValue()) + .isEqualTo(new EncryptionOptions(AEAD_AES_256_CBC_HMAC_SHA_512_Random, EncryptionKey.keyAltName("(ツ)"))); + } + + @Test // GH-4284 + void delegatesConversionOfEntityTypes() { + + Document convertedValue = new Document("unencryptedValue", "nested-unencrypted"); + MongoPersistentProperty property = mappingContext.getPersistentPropertyFor(Type.class, + Type::getNestedFullyEncrypted); + when(conversionContext.getProperty()).thenReturn(property); + doReturn(convertedValue).when(conversionContext).write(any(), eq(property.getTypeInformation())); + + ArgumentCaptor path = ArgumentCaptor.forClass(String.class); + when(conversionContext.getValue(path.capture())).thenReturn("(ツ)"); + + JustATypeWithAnUnencryptedField source = new JustATypeWithAnUnencryptedField(); + source.unencryptedValue = "nested-unencrypted"; + + converter.write(source, conversionContext); + + assertThat(valueToBeEncrypted.getValue()).isEqualTo(convertedValue.toBsonDocument()); + } + + @Test // GH-4284 + void listsOfSimpleTypesAreConvertedEntirely() { + + MongoPersistentProperty property = mappingContext.getPersistentPropertyFor(Type.class, Type::getListOfString); + when(conversionContext.getProperty()).thenReturn(property); + + converter.write(List.of("one", "two"), conversionContext); + + assertThat(valueToBeEncrypted.getValue()) + .isEqualTo(new BsonArray(List.of(new BsonString("one"), new BsonString("two")))); + } + + @Test // GH-4284 + void listsOfComplexTypesAreConvertedEntirely() { + + Document convertedValue1 = new Document("unencryptedValue", "nested-unencrypted-1"); + Document convertedValue2 = new Document("unencryptedValue", "nested-unencrypted-2"); + + MongoPersistentProperty property = 
mappingContext.getPersistentPropertyFor(Type.class, Type::getListOfComplex); + when(conversionContext.getProperty()).thenReturn(property); + doReturn(convertedValue1, convertedValue2).when(conversionContext).write(any(), eq(property.getTypeInformation())); + + JustATypeWithAnUnencryptedField source1 = new JustATypeWithAnUnencryptedField(); + source1.unencryptedValue = "nested-unencrypted-1"; + + JustATypeWithAnUnencryptedField source2 = new JustATypeWithAnUnencryptedField(); + source2.unencryptedValue = "nested-unencrypted-1"; + + converter.write(List.of(source1, source2), conversionContext); + + assertThat(valueToBeEncrypted.getValue()) + .isEqualTo(new BsonArray(List.of(convertedValue1.toBsonDocument(), convertedValue2.toBsonDocument()))); + } + + @Test // GH-4284 + void simpleMapsAreConvertedEntirely() { + + MongoPersistentProperty property = mappingContext.getPersistentPropertyFor(Type.class, Type::getMapOfString); + when(conversionContext.getProperty()).thenReturn(property); + doReturn(new Document("k1", "v1").append("k2", "v2")).when(conversionContext).write(any(), + eq(property.getTypeInformation())); + + converter.write(Map.of("k1", "v1", "k2", "v2"), conversionContext); + + assertThat(valueToBeEncrypted.getValue()) + .isEqualTo(new Document("k1", new BsonString("v1")).append("k2", new BsonString("v2")).toBsonDocument()); + } + + @Test // GH-4284 + void complexMapsAreConvertedEntirely() { + + Document convertedValue1 = new Document("unencryptedValue", "nested-unencrypted-1"); + Document convertedValue2 = new Document("unencryptedValue", "nested-unencrypted-2"); + + MongoPersistentProperty property = mappingContext.getPersistentPropertyFor(Type.class, Type::getMapOfComplex); + when(conversionContext.getProperty()).thenReturn(property); + doReturn(new Document("k1", convertedValue1).append("k2", convertedValue2)).when(conversionContext).write(any(), + eq(property.getTypeInformation())); + + JustATypeWithAnUnencryptedField source1 = new 
JustATypeWithAnUnencryptedField(); + source1.unencryptedValue = "nested-unencrypted-1"; + + JustATypeWithAnUnencryptedField source2 = new JustATypeWithAnUnencryptedField(); + source2.unencryptedValue = "nested-unencrypted-1"; + + converter.write(Map.of("k1", source1, "k2", source2), conversionContext); + + assertThat(valueToBeEncrypted.getValue()).isEqualTo(new Document("k1", convertedValue1.toBsonDocument()) + .append("k2", convertedValue2.toBsonDocument()).toBsonDocument()); + } + + static class Type { + + String notAnnotated; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic) // + String stringValueWithAlgorithmOnly; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "sec-key-name") // + String stringValueWithAlgorithmAndAltKeyName; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, keyAltName = "/notAnnotated") // + String stringValueWithAlgorithmAndAltKeyNameFromPropertyValue; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // full document must be random + JustATypeWithAnUnencryptedField nestedFullyEncrypted; + + NestedWithEncryptedField nestedWithEncryptedField; + + // Client-Side Field Level Encryption does not support encrypting individual array elements + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // + List listOfString; + + // Client-Side Field Level Encryption does not support encrypting individual array elements + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // lists must be random + List listOfComplex; + + // just as it was a domain type encrypt the entire thing here + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // + Map mapOfString; + + @ExplicitEncrypted(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // + Map mapOfComplex; + + RecordWithEncryptedValue recordWithEncryptedValue; + + List listOfRecordWithEncryptedValue; + + public String getNotAnnotated() { + return 
this.notAnnotated; + } + + public String getStringValueWithAlgorithmOnly() { + return this.stringValueWithAlgorithmOnly; + } + + public String getStringValueWithAlgorithmAndAltKeyName() { + return this.stringValueWithAlgorithmAndAltKeyName; + } + + public String getStringValueWithAlgorithmAndAltKeyNameFromPropertyValue() { + return this.stringValueWithAlgorithmAndAltKeyNameFromPropertyValue; + } + + public JustATypeWithAnUnencryptedField getNestedFullyEncrypted() { + return this.nestedFullyEncrypted; + } + + public NestedWithEncryptedField getNestedWithEncryptedField() { + return this.nestedWithEncryptedField; + } + + public List getListOfString() { + return this.listOfString; + } + + public List getListOfComplex() { + return this.listOfComplex; + } + + public Map getMapOfString() { + return this.mapOfString; + } + + public Map getMapOfComplex() { + return this.mapOfComplex; + } + + public RecordWithEncryptedValue getRecordWithEncryptedValue() { + return this.recordWithEncryptedValue; + } + + public List getListOfRecordWithEncryptedValue() { + return this.listOfRecordWithEncryptedValue; + } + + public void setNotAnnotated(String notAnnotated) { + this.notAnnotated = notAnnotated; + } + + public void setStringValueWithAlgorithmOnly(String stringValueWithAlgorithmOnly) { + this.stringValueWithAlgorithmOnly = stringValueWithAlgorithmOnly; + } + + public void setStringValueWithAlgorithmAndAltKeyName(String stringValueWithAlgorithmAndAltKeyName) { + this.stringValueWithAlgorithmAndAltKeyName = stringValueWithAlgorithmAndAltKeyName; + } + + public void setStringValueWithAlgorithmAndAltKeyNameFromPropertyValue( + String stringValueWithAlgorithmAndAltKeyNameFromPropertyValue) { + this.stringValueWithAlgorithmAndAltKeyNameFromPropertyValue = stringValueWithAlgorithmAndAltKeyNameFromPropertyValue; + } + + public void setNestedFullyEncrypted(JustATypeWithAnUnencryptedField nestedFullyEncrypted) { + this.nestedFullyEncrypted = nestedFullyEncrypted; + } + + public void 
setNestedWithEncryptedField(NestedWithEncryptedField nestedWithEncryptedField) { + this.nestedWithEncryptedField = nestedWithEncryptedField; + } + + public void setListOfString(List listOfString) { + this.listOfString = listOfString; + } + + public void setListOfComplex(List listOfComplex) { + this.listOfComplex = listOfComplex; + } + + public void setMapOfString(Map mapOfString) { + this.mapOfString = mapOfString; + } + + public void setMapOfComplex(Map mapOfComplex) { + this.mapOfComplex = mapOfComplex; + } + + public void setRecordWithEncryptedValue(RecordWithEncryptedValue recordWithEncryptedValue) { + this.recordWithEncryptedValue = recordWithEncryptedValue; + } + + public void setListOfRecordWithEncryptedValue(List listOfRecordWithEncryptedValue) { + this.listOfRecordWithEncryptedValue = listOfRecordWithEncryptedValue; + } + + public String toString() { + return "MongoEncryptionConverterUnitTests.Type(notAnnotated=" + this.getNotAnnotated() + + ", stringValueWithAlgorithmOnly=" + this.getStringValueWithAlgorithmOnly() + + ", stringValueWithAlgorithmAndAltKeyName=" + this.getStringValueWithAlgorithmAndAltKeyName() + + ", stringValueWithAlgorithmAndAltKeyNameFromPropertyValue=" + + this.getStringValueWithAlgorithmAndAltKeyNameFromPropertyValue() + ", nestedFullyEncrypted=" + + this.getNestedFullyEncrypted() + ", nestedWithEncryptedField=" + this.getNestedWithEncryptedField() + + ", listOfString=" + this.getListOfString() + ", listOfComplex=" + this.getListOfComplex() + ", mapOfString=" + + this.getMapOfString() + ", mapOfComplex=" + this.getMapOfComplex() + ", recordWithEncryptedValue=" + + this.getRecordWithEncryptedValue() + ", listOfRecordWithEncryptedValue=" + + this.getListOfRecordWithEncryptedValue() + ")"; + } + } + + static class JustATypeWithAnUnencryptedField { + + String unencryptedValue; + } + + static class NestedWithEncryptedField extends JustATypeWithAnUnencryptedField { + + @ExplicitEncrypted(algorithm = 
AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic) // + String encryptedValue; + } + + record RecordWithEncryptedValue(@ExplicitEncrypted String value) { + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoQueryableEncryptionCollectionCreationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoQueryableEncryptionCollectionCreationTests.java new file mode 100644 index 0000000000..dd9e459e78 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoQueryableEncryptionCollectionCreationTests.java @@ -0,0 +1,142 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.encryption; + +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.*; +import static org.springframework.data.mongodb.core.schema.QueryCharacteristics.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.List; +import java.util.UUID; +import java.util.stream.Stream; + +import org.bson.BsonBinary; +import org.bson.Document; +import org.bson.UuidRepresentation; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.CollectionOptions; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.client.MongoClient; + +/** + * Integration tests for creating collections with encrypted fields. 
+ * + * @author Christoph Strobl + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") +@ContextConfiguration +public class MongoQueryableEncryptionCollectionCreationTests { + + public static final String COLLECTION_NAME = "enc-collection"; + static @Client MongoClient mongoClient; + + @Configuration + static class Config extends AbstractMongoClientConfiguration { + + @Override + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return "encryption-schema-tests"; + } + + } + + @Autowired MongoTemplate template; + + @BeforeEach + void beforeEach() { + template.dropCollection(COLLECTION_NAME); + } + + @ParameterizedTest // GH-4185 + @MethodSource("collectionOptions") + public void createsCollectionWithEncryptedFieldsCorrectly(CollectionOptions collectionOptions) { + + template.createCollection(COLLECTION_NAME, collectionOptions); + + Document encryptedFields = readEncryptedFieldsFromDatabase(COLLECTION_NAME); + assertThat(encryptedFields).containsKey("fields"); + + List fields = encryptedFields.get("fields", List.of()); + assertThat(fields.get(0)).containsEntry("path", "encryptedInt") // + .containsEntry("bsonType", "int") // + .containsEntry("queries", List + .of(Document.parse("{'queryType': 'range', 'contention': { '$numberLong' : '1' }, 'min': 5, 'max': 100}"))); + + assertThat(fields.get(1)).containsEntry("path", "nested.encryptedLong") // + .containsEntry("bsonType", "long") // + .containsEntry("queries", List.of(Document.parse( + "{'queryType': 'range', 'contention': { '$numberLong' : '0' }, 'min': { '$numberLong' : '-1' }, 'max': { '$numberLong' : '1' }}"))); + } + + private static Stream collectionOptions() { + + BsonBinary key1 = new BsonBinary(UUID.randomUUID(), UuidRepresentation.STANDARD); + BsonBinary key2 = new BsonBinary(UUID.randomUUID(), UuidRepresentation.STANDARD); + + CollectionOptions manualOptions = 
CollectionOptions.encryptedCollection(options -> options // + .queryable(encrypted(int32("encryptedInt")).keys(key1), range().min(5).max(100).contention(1)) // + .queryable(encrypted(JsonSchemaProperty.int64("nested.encryptedLong")).keys(key2), + range().min(-1L).max(1L).contention(0))); + + CollectionOptions schemaOptions = CollectionOptions.encryptedCollection(MongoJsonSchema.builder() + .property( + queryable(encrypted(int32("encryptedInt")).keyId(key1), List.of(range().min(5).max(100).contention(1)))) + .property(queryable(encrypted(int64("nested.encryptedLong")).keyId(key2), + List.of(range().min(-1L).max(1L).contention(0)))) + .build()); + + return Stream.of(Arguments.of(manualOptions), Arguments.of(schemaOptions)); + } + + Document readEncryptedFieldsFromDatabase(String collectionName) { + + Document collectionInfo = template + .executeCommand(new Document("listCollections", 1).append("filter", new Document("name", collectionName))); + + if (collectionInfo.containsKey("cursor")) { + collectionInfo = (Document) collectionInfo.get("cursor", Document.class).get("firstBatch", List.class).iterator() + .next(); + } + + if (!collectionInfo.containsKey("options")) { + return new Document(); + } + + return collectionInfo.get("options", Document.class).get("encryptedFields", Document.class); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/RangeEncryptionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/RangeEncryptionTests.java new file mode 100644 index 0000000000..e4e760cc91 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/RangeEncryptionTests.java @@ -0,0 +1,573 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.encryption; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import java.security.SecureRandom; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; + +import org.assertj.core.api.Assumptions; +import org.bson.BsonBinary; +import org.bson.BsonDocument; +import org.bson.BsonInt32; +import org.bson.BsonString; +import org.bson.Document; +import org.junit.Before; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.beans.factory.DisposableBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.data.convert.PropertyValueConverterFactory; +import org.springframework.data.convert.ValueConverter; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.CollectionOptions; +import org.springframework.data.mongodb.core.CollectionOptions.EncryptedFieldsOptions; +import org.springframework.data.mongodb.core.MongoJsonSchemaCreator; +import 
org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter; +import org.springframework.data.mongodb.core.convert.encryption.MongoEncryptionConverter; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Queryable; +import org.springframework.data.mongodb.core.mapping.RangeEncrypted; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.data.util.Lazy; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.util.StringUtils; + +import com.mongodb.AutoEncryptionSettings; +import com.mongodb.ClientEncryptionSettings; +import com.mongodb.ConnectionString; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoNamespace; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.CreateEncryptedCollectionParams; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.client.model.Indexes; +import com.mongodb.client.model.vault.EncryptOptions; +import com.mongodb.client.model.vault.RangeOptions; +import 
com.mongodb.client.result.UpdateResult; +import com.mongodb.client.vault.ClientEncryption; +import com.mongodb.client.vault.ClientEncryptions; + +/** + * @author Ross Lawley + * @author Christoph Strobl + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") +@EnableIfReplicaSetAvailable +@ContextConfiguration(classes = RangeEncryptionTests.EncryptionConfig.class) +class RangeEncryptionTests { + + @Autowired MongoTemplate template; + @Autowired MongoClientEncryption clientEncryption; + @Autowired EncryptionKeyHolder keyHolder; + + @BeforeEach + void clientVersionCheck() { + Assumptions.assumeThat(MongoClientVersion.isVersion5orNewer()).isTrue(); + } + + @AfterEach + void tearDown() { + template.getDb().getCollection("test").deleteMany(new BsonDocument()); + } + + @Test // GH-4185 + void manuallyEncryptedValuesCanBeSavedAndRetrievedCorrectly() { + + EncryptOptions encryptOptions = new EncryptOptions("Range").contentionFactor(1L) + .keyId(keyHolder.getEncryptionKey("encryptedInt")) + .rangeOptions(new RangeOptions().min(new BsonInt32(0)).max(new BsonInt32(200)).sparsity(1L)); + + EncryptOptions encryptExpressionOptions = new EncryptOptions("Range").contentionFactor(1L) + .rangeOptions(new RangeOptions().min(new BsonInt32(0)).max(new BsonInt32(200))) + .keyId(keyHolder.getEncryptionKey("encryptedInt")).queryType("range"); + + EncryptOptions equalityEncOptions = new EncryptOptions("Indexed").contentionFactor(0L) + .keyId(keyHolder.getEncryptionKey("age")); + ; + + EncryptOptions equalityEncOptionsString = new EncryptOptions("Indexed").contentionFactor(0L) + .keyId(keyHolder.getEncryptionKey("name")); + ; + + Document source = new Document("_id", "id-1"); + + source.put("name", + clientEncryption.getClientEncryption().encrypt(new BsonString("It's a Me, Mario!"), equalityEncOptionsString)); + source.put("age", clientEncryption.getClientEncryption().encrypt(new BsonInt32(101), equalityEncOptions)); 
+ source.put("encryptedInt", clientEncryption.getClientEncryption().encrypt(new BsonInt32(101), encryptOptions)); + source.put("_class", Person.class.getName()); + + template.execute(Person.class, col -> col.insertOne(source)); + + Document result = template.execute(Person.class, col -> { + + BsonDocument filterSource = new BsonDocument("encryptedInt", new BsonDocument("$gte", new BsonInt32(100))); + BsonDocument filter = clientEncryption.getClientEncryption() + .encryptExpression(new Document("$and", List.of(filterSource)), encryptExpressionOptions); + + return col.find(filter).first(); + }); + + assertThat(result).containsEntry("encryptedInt", 101); + } + + @Test // GH-4185 + void canLesserThanEqualMatchRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + Person loaded = template.query(Person.class).matching(where("encryptedInt").lte(source.encryptedInt)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canQueryMixOfEqualityEncryptedAndUnencrypted() { + + Person source = template.insert(createPerson()); + + Person loaded = template.query(Person.class) + .matching(where("name").is(source.name).and("unencryptedValue").is(source.unencryptedValue)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canQueryMixOfRangeEncryptedAndUnencrypted() { + + Person source = template.insert(createPerson()); + + Person loaded = template.query(Person.class) + .matching(where("encryptedInt").lte(source.encryptedInt).and("unencryptedValue").is(source.unencryptedValue)) + .firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canQueryEqualityEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + Person loaded = template.query(Person.class).matching(where("age").is(source.age)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canExcludeSafeContentFromResult() { + + Person source 
= createPerson(); + template.insert(source); + + Query q = Query.query(where("encryptedLong").lte(1001L).gte(1001L)); + q.fields().exclude("__safeContent__"); + + Person loaded = template.query(Person.class).matching(q).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canRangeMatchRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + Query q = Query.query(where("encryptedLong").lte(1001L).gte(1001L)); + Person loaded = template.query(Person.class).matching(q).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canReplaceEntityWithRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + source.encryptedInt = 123; + source.encryptedLong = 9999L; + template.save(source); + + Person loaded = template.query(Person.class).matching(where("id").is(source.id)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canUpdateRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + UpdateResult updateResult = template.update(Person.class).matching(where("id").is(source.id)) + .apply(Update.update("encryptedLong", 5000L)).first(); + assertThat(updateResult.getModifiedCount()).isOne(); + + Person loaded = template.query(Person.class).matching(where("id").is(source.id)).firstValue(); + assertThat(loaded.encryptedLong).isEqualTo(5000L); + } + + @Test // GH-4185 + void errorsWhenUsingNonRangeOperatorEqOnRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + assertThatThrownBy( + () -> template.query(Person.class).matching(where("encryptedInt").is(source.encryptedInt)).firstValue()) + .isInstanceOf(AssertionError.class) + .hasMessageStartingWith("Not a valid range query. 
Querying a range encrypted field but " + + "the query operator '$eq' for field path 'encryptedInt' is not a range query."); + } + + @Test // GH-4185 + void errorsWhenUsingNonRangeOperatorInOnRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + assertThatThrownBy( + () -> template.query(Person.class).matching(where("encryptedLong").in(1001L, 9999L)).firstValue()) + .isInstanceOf(AssertionError.class) + .hasMessageStartingWith("Not a valid range query. Querying a range encrypted field but " + + "the query operator '$in' for field path 'encryptedLong' is not a range query."); + } + + private Person createPerson() { + + Person source = new Person(); + source.id = "id-1"; + source.unencryptedValue = "y2k"; + source.name = "it's a me mario!"; + source.age = 42; + source.encryptedInt = 101; + source.encryptedLong = 1001L; + source.nested = new NestedWithQEFields(); + source.nested.value = "Luigi time!"; + return source; + } + + protected static class EncryptionConfig extends AbstractMongoClientConfiguration { + + private static final String LOCAL_KMS_PROVIDER = "local"; + + private static final Lazy>> LAZY_KMS_PROVIDERS = Lazy.of(() -> { + byte[] localMasterKey = new byte[96]; + new SecureRandom().nextBytes(localMasterKey); + return Map.of(LOCAL_KMS_PROVIDER, Map.of("key", localMasterKey)); + }); + + @Autowired ApplicationContext applicationContext; + + @Override + protected String getDatabaseName() { + return "qe-test"; + } + + @Bean + public MongoClient mongoClient() { + return super.mongoClient(); + } + + @Override + protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) { + converterConfigurationAdapter + .registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext)) + .useNativeDriverJavaTimeCodecs(); + } + + @Bean + EncryptionKeyHolder keyHolder(MongoClientEncryption mongoClientEncryption) { + + Lazy> lazyDataKeyMap = Lazy.of(() -> { + try 
(MongoClient client = mongoClient()) { + + MongoDatabase database = client.getDatabase(getDatabaseName()); + database.getCollection("test").drop(); + + ClientEncryption clientEncryption = mongoClientEncryption.getClientEncryption(); + + MongoJsonSchema personSchema = MongoJsonSchemaCreator.create(new MongoMappingContext()) // init schema creator + .filter(MongoJsonSchemaCreator.encryptedOnly()) // + .createSchemaFor(Person.class); // + + Document encryptedFields = CollectionOptions.encryptedCollection(personSchema) // + .getEncryptedFieldsOptions() // + .map(EncryptedFieldsOptions::toDocument) // + .orElseThrow(); + + CreateCollectionOptions createCollectionOptions = new CreateCollectionOptions() + .encryptedFields(encryptedFields); + + BsonDocument local = clientEncryption.createEncryptedCollection(database, "test", createCollectionOptions, + new CreateEncryptedCollectionParams(LOCAL_KMS_PROVIDER)); + + Map keyMap = new LinkedHashMap<>(); + for (Object o : local.getArray("fields")) { + if (o instanceof BsonDocument db) { + String path = db.getString("path").getValue(); + BsonBinary binary = db.getBinary("keyId"); + for (String part : path.split("\\.")) { + keyMap.put(part, binary); + } + } + } + return keyMap; + } + }); + + return new EncryptionKeyHolder(lazyDataKeyMap); + } + + @Bean + MongoEncryptionConverter encryptingConverter(MongoClientEncryption mongoClientEncryption, + EncryptionKeyHolder keyHolder) { + return new MongoEncryptionConverter(mongoClientEncryption, EncryptionKeyResolver.annotated((ctx) -> { + + String path = ctx.getProperty().getFieldName(); + + if (ctx.getProperty().getMongoField().getName().isPath()) { + path = StringUtils.arrayToDelimitedString(ctx.getProperty().getMongoField().getName().parts(), "."); + } + if (ctx.getOperatorContext() != null) { + path = ctx.getOperatorContext().path(); + } + return EncryptionKey.keyId(keyHolder.getEncryptionKey(path)); + })); + } + + @Bean + CachingMongoClientEncryption 
clientEncryption(ClientEncryptionSettings encryptionSettings) { + return new CachingMongoClientEncryption(() -> ClientEncryptions.create(encryptionSettings)); + } + + @Override + protected void configureClientSettings(MongoClientSettings.Builder builder) { + try (MongoClient client = MongoClients.create()) { + ClientEncryptionSettings clientEncryptionSettings = encryptionSettings(client); + + builder.autoEncryptionSettings(AutoEncryptionSettings.builder() // + .kmsProviders(clientEncryptionSettings.getKmsProviders()) // + .keyVaultNamespace(clientEncryptionSettings.getKeyVaultNamespace()) // + .bypassQueryAnalysis(true).build()); + } + } + + @Bean + ClientEncryptionSettings encryptionSettings(MongoClient mongoClient) { + MongoNamespace keyVaultNamespace = new MongoNamespace("encryption.testKeyVault"); + MongoCollection keyVaultCollection = mongoClient.getDatabase(keyVaultNamespace.getDatabaseName()) + .getCollection(keyVaultNamespace.getCollectionName()); + keyVaultCollection.drop(); + // Ensure that two data keys cannot share the same keyAltName. 
+ keyVaultCollection.createIndex(Indexes.ascending("keyAltNames"), + new IndexOptions().unique(true).partialFilterExpression(Filters.exists("keyAltNames"))); + + mongoClient.getDatabase(getDatabaseName()).getCollection("test").drop(); // Clear old data + + // Create the ClientEncryption instance + return ClientEncryptionSettings.builder() // + .keyVaultMongoClientSettings( + MongoClientSettings.builder().applyConnectionString(new ConnectionString("mongodb://localhost")).build()) // + .keyVaultNamespace(keyVaultNamespace.getFullName()) // + .kmsProviders(LAZY_KMS_PROVIDERS.get()) // + .build(); + } + } + + static class CachingMongoClientEncryption extends MongoClientEncryption implements DisposableBean { + + static final AtomicReference cache = new AtomicReference<>(); + + CachingMongoClientEncryption(Supplier source) { + super(() -> { + ClientEncryption clientEncryption = cache.get(); + if (clientEncryption == null) { + clientEncryption = source.get(); + cache.set(clientEncryption); + } + + return clientEncryption; + }); + } + + @Override + public void destroy() { + ClientEncryption clientEncryption = cache.get(); + if (clientEncryption != null) { + clientEncryption.close(); + cache.set(null); + } + } + } + + static class EncryptionKeyHolder { + + Supplier> lazyDataKeyMap; + + public EncryptionKeyHolder(Supplier> lazyDataKeyMap) { + this.lazyDataKeyMap = Lazy.of(lazyDataKeyMap); + } + + BsonBinary getEncryptionKey(String path) { + return lazyDataKeyMap.get().get(path); + } + } + + @org.springframework.data.mongodb.core.mapping.Document("test") + static class Person { + + String id; + + String unencryptedValue; + + @ValueConverter(MongoEncryptionConverter.class) + @Encrypted(algorithm = "Indexed") // + @Queryable(queryType = "equality", contentionFactor = 0) // + String name; + + @ValueConverter(MongoEncryptionConverter.class) + @Encrypted(algorithm = "Indexed") // + @Queryable(queryType = "equality", contentionFactor = 0) // + Integer age; + + 
@ValueConverter(MongoEncryptionConverter.class) + @RangeEncrypted(contentionFactor = 0L, + rangeOptions = "{\"min\": 0, \"max\": 200, \"trimFactor\": 1, \"sparsity\": 1}") // + Integer encryptedInt; + + @ValueConverter(MongoEncryptionConverter.class) + @RangeEncrypted(contentionFactor = 0L, + rangeOptions = "{\"min\": {\"$numberLong\": \"1000\"}, \"max\": {\"$numberLong\": \"9999\"}, \"trimFactor\": 1, \"sparsity\": 1}") // + Long encryptedLong; + + NestedWithQEFields nested; + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public Integer getEncryptedInt() { + return this.encryptedInt; + } + + public void setEncryptedInt(Integer encryptedInt) { + this.encryptedInt = encryptedInt; + } + + public Long getEncryptedLong() { + return this.encryptedLong; + } + + public void setEncryptedLong(Long encryptedLong) { + this.encryptedLong = encryptedLong; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(unencryptedValue, person.unencryptedValue) + && Objects.equals(name, person.name) && Objects.equals(age, person.age) + && Objects.equals(encryptedInt, person.encryptedInt) && Objects.equals(encryptedLong, person.encryptedLong); + } + + @Override + public int hashCode() { + return Objects.hash(id, unencryptedValue, name, age, encryptedInt, encryptedLong); + } + + @Override + public String toString() { + return "Person{" + "id='" + id + '\'' + ", unencryptedValue='" + unencryptedValue + '\'' + ", name='" + name + + '\'' + ", age=" + age + ", encryptedInt=" + encryptedInt + ", encryptedLong=" + encryptedLong + '}'; + } + } + + static class NestedWithQEFields { + + @ValueConverter(MongoEncryptionConverter.class) 
+ @Encrypted(algorithm = "Indexed") // + @Queryable(queryType = "equality", contentionFactor = 0) // + String value; + + @Override + public String toString() { + return "NestedWithQEFields{" + "value='" + value + '\'' + '}'; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + NestedWithQEFields that = (NestedWithQEFields) o; + return Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(value); + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/AbstractGeoSpatialTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/AbstractGeoSpatialTests.java index 6bf8b02e2b..ed5ab78cdc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/AbstractGeoSpatialTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/AbstractGeoSpatialTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,15 @@ */ package org.springframework.data.mongodb.core.geo; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; +import java.util.Collections; +import java.util.Date; import java.util.List; +import java.util.Set; -import org.joda.time.LocalDate; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -32,32 +33,33 @@ import org.springframework.data.geo.Box; import org.springframework.data.geo.Circle; import org.springframework.data.geo.GeoResults; -import org.springframework.data.geo.Metric; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; import org.springframework.data.geo.Polygon; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.TestEntities; import org.springframework.data.mongodb.core.Venue; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import com.mongodb.Mongo; -import 
com.mongodb.MongoClient; import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; /** * @author Christoph Strobl * @author Oliver Gierke */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public abstract class AbstractGeoSpatialTests { @Configuration - static class TestConfig extends AbstractMongoConfiguration { + static class TestConfig extends MongoClientClosingTestConfiguration { @Override protected String getDatabaseName() { @@ -66,7 +68,12 @@ protected String getDatabaseName() { @Override public MongoClient mongoClient() { - return new MongoClient(); + return MongoTestUtils.client(); + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); } } @@ -75,7 +82,7 @@ public MongoClient mongoClient() { @Before public void setUp() { - template.setWriteConcern(WriteConcern.FSYNC_SAFE); + template.setWriteConcern(WriteConcern.JOURNALED); createIndex(); addVenues(); @@ -103,54 +110,51 @@ protected void removeVenues() { } protected void addVenues() { - - template.insert(new Venue("Penn Station", -73.99408, 40.75057)); - template.insert(new Venue("10gen Office", -73.99171, 40.738868)); - template.insert(new Venue("Flatiron Building", -73.988135, 40.741404)); - template.insert(new Venue("Players Club", -73.997812, 40.739128)); - template.insert(new Venue("City Bakery ", -73.992491, 40.738673)); - template.insert(new Venue("Splash Bar", -73.992491, 40.738673)); - template.insert(new Venue("Momofuku Milk Bar", -73.985839, 40.731698)); - template.insert(new Venue("Shake Shack", -73.98820, 40.74164)); - template.insert(new Venue("Penn Station", -73.99408, 40.75057)); - template.insert(new Venue("Empire State Building", -73.98602, 40.74894)); - template.insert(new Venue("Ulaanbaatar, Mongolia", 106.9154, 47.9245)); - template.insert(new Venue("Maplewood, NJ", -74.2713, 40.73137)); + template.bulkOps(BulkMode.UNORDERED, 
Venue.class).insert(TestEntities.geolocation().newYork()).execute(); } @Test public void geoNear() { - NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).num(10).maxDistance(150); + NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).limit(10).maxDistance(150); GeoResults result = template.geoNear(geoNear, Venue.class); - assertThat(result.getContent().size(), is(not(0))); - assertThat(result.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS)); + assertThat(result.getContent()).isNotEmpty(); + assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.KILOMETERS); } @Test public void withinCenter() { Circle circle = new Circle(-73.99171, 40.738868, 0.01); - List venues = template.find(query(where("location").within(circle)), Venue.class); - assertThat(venues.size(), is(7)); + Query query = query(where("location").within(circle)); + List venues = template.find(query, Venue.class); + + assertThat(venues).hasSize(7); + assertThat(template.count(query, Venue.class)).isEqualTo(7); } @Test public void withinCenterSphere() { Circle circle = new Circle(-73.99171, 40.738868, 0.003712240453784); - List venues = template.find(query(where("location").withinSphere(circle)), Venue.class); - assertThat(venues.size(), is(11)); + Query query = query(where("location").withinSphere(circle)); + + List venues = template.find(query, Venue.class); + assertThat(venues).hasSize(11); + assertThat(template.count(query, Venue.class)).isEqualTo(11); } @Test public void withinBox() { Box box = new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404)); - List venues = template.find(query(where("location").within(box)), Venue.class); - assertThat(venues.size(), is(4)); + Query query = query(where("location").within(box)); + + List venues = template.find(query, Venue.class); + assertThat(venues).hasSize(4); + assertThat(template.count(query, Venue.class)).isEqualTo(4); } @Test @@ -163,22 +167,27 @@ public void withinPolygon() { 
Polygon polygon = new Polygon(first, second, third, fourth); - List venues = template.find(query(where("location").within(polygon)), Venue.class); - assertThat(venues.size(), is(4)); + Query query = query(where("location").within(polygon)); + List venues = template.find(query, Venue.class); + assertThat(venues).hasSize(4); + assertThat(template.count(query, Venue.class)).isEqualTo(4); } @Test public void nearSphere() { + Point point = new Point(-73.99171, 40.738868); Query query = query(where("location").nearSphere(point).maxDistance(0.003712240453784)); + List venues = template.find(query, Venue.class); - assertThat(venues.size(), is(11)); + assertThat(venues).hasSize(11); + assertThat(template.count(query, Venue.class)).isEqualTo(11); } @Test // DATAMONGO-1360 public void mapsQueryContainedInNearQuery() { - Query query = query(where("openingDate").lt(LocalDate.now())); + Query query = query(where("openingDate").lt(new Date())); template.geoNear(NearQuery.near(1.5, 1.7).spherical(true).query(query), Venue.class); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonModuleUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonModuleUnitTests.java index e31fa5044f..e65101177c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonModuleUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonModuleUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,14 @@ */ package org.springframework.data.mongodb.core.geo; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.io.IOException; import java.util.Arrays; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + import org.springframework.data.geo.Point; import com.fasterxml.jackson.core.JsonParseException; @@ -36,7 +36,7 @@ public class GeoJsonModuleUnitTests { ObjectMapper mapper; - @Before + @BeforeEach public void setUp() { mapper = new ObjectMapper(); @@ -48,40 +48,38 @@ public void shouldDeserializeJsonPointCorrectly() throws JsonParseException, Jso String json = "{ \"type\": \"Point\", \"coordinates\": [10.0, 20.0] }"; - assertThat(mapper.readValue(json, GeoJsonPoint.class), is(new GeoJsonPoint(10D, 20D))); + assertThat(mapper.readValue(json, GeoJsonPoint.class)).isEqualTo(new GeoJsonPoint(10D, 20D)); } @Test // DATAMONGO-1181 - public void shouldDeserializeGeoJsonLineStringCorrectly() throws JsonParseException, JsonMappingException, - IOException { + public void shouldDeserializeGeoJsonLineStringCorrectly() + throws JsonParseException, JsonMappingException, IOException { String json = "{ \"type\": \"LineString\", \"coordinates\": [ [10.0, 20.0], [30.0, 40.0], [50.0, 60.0] ]}"; - assertThat(mapper.readValue(json, GeoJsonLineString.class), - is(new GeoJsonLineString(Arrays.asList(new Point(10, 20), new Point(30, 40), new Point(50, 60))))); + assertThat(mapper.readValue(json, GeoJsonLineString.class)) + .isEqualTo(new GeoJsonLineString(Arrays.asList(new Point(10, 20), new Point(30, 40), new Point(50, 60)))); } @Test 
// DATAMONGO-1181 - public void shouldDeserializeGeoJsonMultiPointCorrectly() throws JsonParseException, JsonMappingException, - IOException { + public void shouldDeserializeGeoJsonMultiPointCorrectly() + throws JsonParseException, JsonMappingException, IOException { String json = "{ \"type\": \"MultiPoint\", \"coordinates\": [ [10.0, 20.0], [30.0, 40.0], [50.0, 60.0] ]}"; - assertThat(mapper.readValue(json, GeoJsonLineString.class), - is(new GeoJsonMultiPoint(Arrays.asList(new Point(10, 20), new Point(30, 40), new Point(50, 60))))); + assertThat(mapper.readValue(json, GeoJsonLineString.class)) + .isEqualTo(new GeoJsonMultiPoint(Arrays.asList(new Point(10, 20), new Point(30, 40), new Point(50, 60)))); } @Test // DATAMONGO-1181 @SuppressWarnings("unchecked") - public void shouldDeserializeGeoJsonMultiLineStringCorrectly() throws JsonParseException, JsonMappingException, - IOException { + public void shouldDeserializeGeoJsonMultiLineStringCorrectly() + throws JsonParseException, JsonMappingException, IOException { String json = "{ \"type\": \"MultiLineString\", \"coordinates\": [ [ [10.0, 20.0], [30.0, 40.0] ], [ [50.0, 60.0] , [70.0, 80.0] ] ]}"; - assertThat( - mapper.readValue(json, GeoJsonMultiLineString.class), - is(new GeoJsonMultiLineString(Arrays.asList(new Point(10, 20), new Point(30, 40)), Arrays.asList(new Point(50, - 60), new Point(70, 80))))); + assertThat(mapper.readValue(json, GeoJsonMultiLineString.class)).isEqualTo(new GeoJsonMultiLineString( + Arrays.asList(new Point(10, 20), new Point(30, 40)), Arrays.asList(new Point(50, 60), new Point(70, 80)))); } @Test // DATAMONGO-1181 @@ -89,15 +87,13 @@ public void shouldDeserializeGeoJsonPolygonCorrectly() throws JsonParseException String json = "{ \"type\": \"Polygon\", \"coordinates\": [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ] ]}"; - assertThat( - mapper.readValue(json, GeoJsonPolygon.class), - is(new GeoJsonPolygon(Arrays.asList(new Point(100, 0), new Point(101, 0), new 
Point(101, 1), new Point(100, 1), - new Point(100, 0))))); + assertThat(mapper.readValue(json, GeoJsonPolygon.class)).isEqualTo(new GeoJsonPolygon( + Arrays.asList(new Point(100, 0), new Point(101, 0), new Point(101, 1), new Point(100, 1), new Point(100, 0)))); } @Test // DATAMONGO-1181 - public void shouldDeserializeGeoJsonMultiPolygonCorrectly() throws JsonParseException, JsonMappingException, - IOException { + public void shouldDeserializeGeoJsonMultiPolygonCorrectly() + throws JsonParseException, JsonMappingException, IOException { String json = "{ \"type\": \"Polygon\", \"coordinates\": [" + "[[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]]," @@ -105,15 +101,13 @@ public void shouldDeserializeGeoJsonMultiPolygonCorrectly() throws JsonParseExce + "[[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]"// + "]}"; - assertThat( - mapper.readValue(json, GeoJsonMultiPolygon.class), - is(new GeoJsonMultiPolygon(Arrays.asList( - new GeoJsonPolygon(Arrays.asList(new Point(102, 2), new Point(103, 2), new Point(103, 3), - new Point(102, 3), new Point(102, 2))), - new GeoJsonPolygon(Arrays.asList(new Point(100, 0), new Point(101, 0), new Point(101, 1), - new Point(100, 1), new Point(100, 0))), - new GeoJsonPolygon(Arrays.asList(new Point(100.2, 0.2), new Point(100.8, 0.2), new Point(100.8, 0.8), - new Point(100.2, 0.8), new Point(100.2, 0.2))))))); + assertThat(mapper.readValue(json, GeoJsonMultiPolygon.class)).isEqualTo(new GeoJsonMultiPolygon(Arrays.asList( + new GeoJsonPolygon(Arrays.asList(new Point(102, 2), new Point(103, 2), new Point(103, 3), new Point(102, 3), + new Point(102, 2))), + new GeoJsonPolygon(Arrays.asList(new Point(100, 0), new Point(101, 0), new Point(101, 1), new Point(100, 1), + new Point(100, 0))), + new GeoJsonPolygon(Arrays.asList(new Point(100.2, 0.2), new Point(100.8, 0.2), new Point(100.8, 0.8), + new Point(100.2, 0.8), new Point(100.2, 0.2)))))); } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonSerializersUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonSerializersUnitTests.java new file mode 100644 index 0000000000..43ea3945f0 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonSerializersUnitTests.java @@ -0,0 +1,114 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.geo; + +import static org.assertj.core.api.Assertions.*; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.geo.Point; + +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * Unit tests for {@link GeoJsonSerializersModule}. 
+ * + * @author Bjorn Harvold + * @author Christoph Strobl + */ +class GeoJsonSerializersUnitTests { + + private ObjectMapper mapper; + + @BeforeEach + void beforeEach() { + + mapper = new ObjectMapper(); + mapper.registerModule(new GeoJsonSerializersModule()); + } + + @Test // GH-3517 + void shouldSerializeJsonPointCorrectly() throws IOException { + + GeoJsonPoint geoJsonPoint = new GeoJsonPoint(10D, 20D); + + assertThat(mapper.writeValueAsString(geoJsonPoint)).isEqualTo("{\"type\":\"Point\",\"coordinates\":[10.0,20.0]}"); + } + + @Test // GH-3517 + void shouldSerializeGeoJsonLineStringCorrectly() throws IOException { + + GeoJsonLineString lineString = new GeoJsonLineString( + Arrays.asList(new Point(10, 20), new Point(30, 40), new Point(50, 60))); + + assertThat(mapper.writeValueAsString(lineString)) + .isEqualTo("{\"type\":\"LineString\",\"coordinates\":[[10.0,20.0],[30.0,40.0],[50.0,60.0]]}"); + } + + @Test // GH-3517 + void shouldSerializeGeoJsonMultiPointCorrectly() throws IOException { + + GeoJsonMultiPoint multiPoint = new GeoJsonMultiPoint( + Arrays.asList(new Point(10, 20), new Point(30, 40), new Point(50, 60))); + + assertThat(mapper.writeValueAsString(multiPoint)) + .isEqualTo("{\"type\":\"MultiPoint\",\"coordinates\":[[10.0,20.0],[30.0,40.0],[50.0,60.0]]}"); + } + + @Test // GH-3517 + void shouldSerializeJsonMultiLineStringCorrectly() throws IOException { + + GeoJsonMultiLineString multiLineString = new GeoJsonMultiLineString( + Arrays.asList(new Point(10, 20), new Point(30, 40)), Arrays.asList(new Point(50, 60), new Point(70, 80))); + + assertThat(mapper.writeValueAsString(multiLineString)).isEqualTo( + "{\"type\":\"MultiLineString\",\"coordinates\":[[[10.0,20.0],[30.0,40.0]],[[50.0,60.0],[70.0,80.0]]]}"); + } + + @Test // GH-3517 + void shouldSerializeGeoJsonPolygonCorrectly() throws IOException { + + List points = Arrays.asList(new Point(100, 0), new Point(101, 0), new Point(101, 1), new Point(100, 1), + new Point(100, 0)); + GeoJsonPolygon polygon 
= new GeoJsonPolygon(points); + + assertThat(mapper.writeValueAsString(polygon)).isEqualTo( + "{\"type\":\"Polygon\",\"coordinates\":[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]]]}"); + } + + @Test // GH-3517 + void shouldSerializeGeoJsonMultiPolygonCorrectly() throws IOException { + + String json = "{\"type\":\"MultiPolygon\",\"coordinates\":[" + "[" + "[" + + "[102.0,2.0],[103.0,2.0],[103.0,3.0],[102.0,3.0],[102.0,2.0]" + "]" + "]," + "[" + "[" + + "[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]" + "]" + "]," + "[" + "[" + + "[100.2,0.2],[100.8,0.2],[100.8,0.8],[100.2,0.8],[100.2,0.2]" + "]" + "]" + "]" + "}"; + + GeoJsonMultiPolygon multiPolygon = new GeoJsonMultiPolygon(Arrays.asList( + new GeoJsonPolygon(Arrays.asList(new Point(102, 2), new Point(103, 2), new Point(103, 3), new Point(102, 3), + new Point(102, 2))), + new GeoJsonPolygon(Arrays.asList(new Point(100, 0), new Point(101, 0), new Point(101, 1), new Point(100, 1), + new Point(100, 0))), + new GeoJsonPolygon(Arrays.asList(new Point(100.2, 0.2), new Point(100.8, 0.2), new Point(100.8, 0.8), + new Point(100.2, 0.8), new Point(100.2, 0.2))))); + + assertThat(mapper.writeValueAsString(multiPolygon)).isEqualTo(json); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java index a03236762b..b81b51abd5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,54 +15,61 @@ */ package org.springframework.data.mongodb.core.geo; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Set; -import com.mongodb.client.MongoCollection; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.assertj.core.data.Percentage; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.dao.DataAccessException; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.PersistenceConstructor; import org.springframework.data.geo.GeoResults; -import org.springframework.data.geo.Metric; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; import org.springframework.data.mongodb.core.CollectionCallback; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import 
org.springframework.data.mongodb.core.index.GeoSpatialIndexed; import org.springframework.data.mongodb.core.index.GeospatialIndex; +import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.test.util.BasicDbListBuilder; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; import com.mongodb.MongoException; -import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; /** * @author Christoph Strobl + * @author Mark Paluch + * @author Ivan Volzhev */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) @ContextConfiguration public class GeoJsonTests { + static @Client MongoClient mongoClient; + @Configuration - static class TestConfig extends AbstractMongoConfiguration { + static class TestConfig extends AbstractMongoClientConfiguration { @Override protected String getDatabaseName() { @@ -71,42 +78,141 @@ protected String getDatabaseName() { @Override public MongoClient mongoClient() { - return new MongoClient(); + return mongoClient; + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Set.of(Venue2DSphere.class, VenueWithDistanceField.class, OpenGeoJson.class, + DocumentWithPropertyUsingGeoJsonType.class); } } @Autowired MongoTemplate template; - @Before + @BeforeEach public void setUp() { - template.setWriteConcern(WriteConcern.FSYNC_SAFE); + // template.setWriteConcern(WriteConcern.JOURNALED); + + // 
createIndex(); + // addVenues(); + } + + private void createIndexAndAddVenues() { createIndex(); addVenues(); } - @After + @AfterEach public void tearDown() { dropIndex(); removeCollections(); } - @Test // DATAMONGO-1135 + @Test // DATAMONGO-1135, DATAMONGO-2264 public void geoNear() { - NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 40), Metrics.KILOMETERS).num(10).maxDistance(150); + createIndexAndAddVenues(); + + NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 40), Metrics.KILOMETERS).limit(10).maxDistance(150); GeoResults result = template.geoNear(geoNear, Venue2DSphere.class); - assertThat(result.getContent().size(), is(not(0))); - assertThat(result.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS)); + assertThat(result.getContent()).isNotEmpty(); + assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.KILOMETERS); + assertThat(result.getAverageDistance().getValue()).isCloseTo(117.84629457941556, Percentage.withPercentage(0.001)); + } + + @Test // DATAMONGO-2264 + public void geoNearShouldNotOverridePropertyWithDefaultNameForCalculatedDistance/* namely "dis" */() { + + createIndexAndAddVenues(); + + NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 40), Metrics.KILOMETERS).limit(10).maxDistance(150); + + GeoResults result = template.geoNear(geoNear, VenueWithDistanceField.class); + + assertThat(result.getContent()).isNotEmpty(); + assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.KILOMETERS); + assertThat(result.getAverageDistance().getValue()).isCloseTo(117.84629457941556, Percentage.withPercentage(0.001)); + result.getContent().forEach(it -> { + + assertThat(it.getDistance().getValue()).isNotZero(); + assertThat(it.getContent().getDis()).isNull(); + }); + } + + @Test // DATAMONGO-2264 + public void geoNearShouldAllowToReadBackCalculatedDistanceIntoTargetTypeProperty/* namely "dis" */() { + + createIndexAndAddVenues(); + + NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 
40), Metrics.KILOMETERS).limit(10).maxDistance(150); + + GeoResults result = template.geoNear(geoNear, Venue2DSphere.class, + template.getCollectionName(Venue2DSphere.class), VenueWithDistanceField.class); + + assertThat(result.getContent()).isNotEmpty(); + assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.KILOMETERS); + assertThat(result.getAverageDistance().getValue()).isCloseTo(117.84629457941556, Percentage.withPercentage(0.001)); + result.getContent().forEach(it -> { + + assertThat(it.getDistance().getValue()).isNotZero(); + assertThat(it.getContent().getDis()).isEqualTo(it.getDistance().getValue()); + }); + } + + @Test // DATAMONGO-1148 + public void geoNearShouldReturnDistanceCorrectlyUsingGeoJson/*which is using the meters*/() { + + createIndexAndAddVenues(); + NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73.99171, 40.738868), Metrics.KILOMETERS).limit(10) + .maxDistance(0.4); + + GeoResults result = template.geoNear(geoNear, Venue2DSphere.class); + + assertThat(result.getContent()).hasSize(3); + assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.KILOMETERS); + assertThat(result.getContent().get(0).getDistance().getValue()).isCloseTo(0.0, offset(0.000001)); + assertThat(result.getContent().get(1).getDistance().getValue()).isCloseTo(0.0693582, offset(0.000001)); + assertThat(result.getContent().get(2).getDistance().getValue()).isCloseTo(0.0693582, offset(0.000001)); + } + + @Test // DATAMONGO-1348 + public void geoNearShouldReturnDistanceCorrectly/*which is using the meters*/() { + + createIndexAndAddVenues(); + NearQuery geoNear = NearQuery.near(new Point(-73.99171, 40.738868), Metrics.KILOMETERS).limit(10).maxDistance(0.4); + + GeoResults result = template.geoNear(geoNear, Venue2DSphere.class); + + assertThat(result.getContent()).hasSize(3); + assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.KILOMETERS); + assertThat(result.getContent().get(0).getDistance().getValue()).isCloseTo(0.0, 
offset(0.000001)); + assertThat(result.getContent().get(1).getDistance().getValue()).isCloseTo(0.0693582, offset(0.000001)); + assertThat(result.getContent().get(2).getDistance().getValue()).isCloseTo(0.0693582, offset(0.000001)); + } + + @Test // DATAMONGO-1135 + public void geoNearWithMiles() { + + createIndexAndAddVenues(); + NearQuery geoNear = NearQuery.near(new GeoJsonPoint(-73, 40), Metrics.MILES).limit(10).maxDistance(93.2057); + + GeoResults result = template.geoNear(geoNear, Venue2DSphere.class); + + assertThat(result.getContent()).isNotEmpty(); + assertThat(result.getAverageDistance().getMetric()).isEqualTo(Metrics.MILES); } @Test // DATAMONGO-1135 public void withinPolygon() { + createIndexAndAddVenues(); + Point first = new Point(-73.99756, 40.73083); Point second = new Point(-73.99756, 40.741404); Point third = new Point(-73.988135, 40.741404); @@ -115,32 +221,36 @@ public void withinPolygon() { GeoJsonPolygon polygon = new GeoJsonPolygon(first, second, third, fourth, first); List venues = template.find(query(where("location").within(polygon)), Venue2DSphere.class); - assertThat(venues.size(), is(4)); + assertThat(venues).hasSize(4); } @Test // DATAMONGO-1135 public void nearPoint() { + createIndexAndAddVenues(); + GeoJsonPoint point = new GeoJsonPoint(-73.99171, 40.738868); Query query = query(where("location").near(point).maxDistance(0.01)); List venues = template.find(query, Venue2DSphere.class); - assertThat(venues.size(), is(1)); + assertThat(venues).hasSize(1); } @Test // DATAMONGO-1135 public void nearSphere() { + createIndexAndAddVenues(); + GeoJsonPoint point = new GeoJsonPoint(-73.99171, 40.738868); Query query = query(where("location").nearSphere(point).maxDistance(0.003712240453784)); List venues = template.find(query, Venue2DSphere.class); - assertThat(venues.size(), is(1)); + assertThat(venues).hasSize(1); } @Test // DATAMONGO-1137 - public void shouleSaveAndRetrieveDocumentWithGeoJsonPointTypeCorrectly() { + public void 
shouldSaveAndRetrieveDocumentWithGeoJsonPointTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonPoint"; @@ -151,11 +261,11 @@ public void shouleSaveAndRetrieveDocumentWithGeoJsonPointTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonPoint, equalTo(obj.geoJsonPoint)); + assertThat(result.geoJsonPoint).isEqualTo(obj.geoJsonPoint); } @Test // DATAMONGO-1137 - public void shouleSaveAndRetrieveDocumentWithGeoJsonPolygonTypeCorrectly() { + public void shouldSaveAndRetrieveDocumentWithGeoJsonPolygonTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonPolygon"; @@ -167,11 +277,11 @@ public void shouleSaveAndRetrieveDocumentWithGeoJsonPolygonTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonPolygon, equalTo(obj.geoJsonPolygon)); + assertThat(result.geoJsonPolygon).isEqualTo(obj.geoJsonPolygon); } @Test // DATAMONGO-1137 - public void shouleSaveAndRetrieveDocumentWithGeoJsonLineStringTypeCorrectly() { + public void shouldSaveAndRetrieveDocumentWithGeoJsonLineStringTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonLineString"; @@ -182,27 +292,28 @@ public void shouleSaveAndRetrieveDocumentWithGeoJsonLineStringTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonLineString, equalTo(obj.geoJsonLineString)); + assertThat(result.geoJsonLineString).isEqualTo(obj.geoJsonLineString); } @Test // DATAMONGO-1137 - public void 
shouleSaveAndRetrieveDocumentWithGeoJsonMultiLineStringTypeCorrectly() { + public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiLineStringTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonMultiLineString"; - obj.geoJsonMultiLineString = new GeoJsonMultiLineString(Arrays.asList(new GeoJsonLineString(new Point(0, 0), - new Point(0, 1), new Point(1, 1)), new GeoJsonLineString(new Point(199, 0), new Point(2, 3)))); + obj.geoJsonMultiLineString = new GeoJsonMultiLineString( + Arrays.asList(new GeoJsonLineString(new Point(0, 0), new Point(0, 1), new Point(1, 1)), + new GeoJsonLineString(new Point(199, 0), new Point(2, 3)))); template.save(obj); DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonMultiLineString, equalTo(obj.geoJsonMultiLineString)); + assertThat(result.geoJsonMultiLineString).isEqualTo(obj.geoJsonMultiLineString); } @Test // DATAMONGO-1137 - public void shouleSaveAndRetrieveDocumentWithGeoJsonMultiPointTypeCorrectly() { + public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPointTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonMultiPoint"; @@ -213,70 +324,90 @@ public void shouleSaveAndRetrieveDocumentWithGeoJsonMultiPointTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonMultiPoint, equalTo(obj.geoJsonMultiPoint)); + assertThat(result.geoJsonMultiPoint).isEqualTo(obj.geoJsonMultiPoint); + } + + @Test // DATAMONGO-3776 + public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPointTypeWithOnePointCorrectly() { + + DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); + obj.id = "geoJsonMultiPoint"; + obj.geoJsonMultiPoint = new 
GeoJsonMultiPoint(new Point(0, 0)); + + template.save(obj); + + DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), + DocumentWithPropertyUsingGeoJsonType.class); + + assertThat(result.geoJsonMultiPoint).isEqualTo(obj.geoJsonMultiPoint); } @Test // DATAMONGO-1137 - public void shouleSaveAndRetrieveDocumentWithGeoJsonMultiPolygonTypeCorrectly() { + public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPolygonTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonMultiPolygon"; - obj.geoJsonMultiPolygon = new GeoJsonMultiPolygon(Arrays.asList(new GeoJsonPolygon(new Point(0, 0), - new Point(0, 1), new Point(1, 1), new Point(0, 0)))); + obj.geoJsonMultiPolygon = new GeoJsonMultiPolygon( + Arrays.asList(new GeoJsonPolygon(new Point(0, 0), new Point(0, 1), new Point(1, 1), new Point(0, 0)))); template.save(obj); DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonMultiPolygon, equalTo(obj.geoJsonMultiPolygon)); + assertThat(result.geoJsonMultiPolygon).isEqualTo(obj.geoJsonMultiPolygon); } @Test // DATAMONGO-1137 - public void shouleSaveAndRetrieveDocumentWithGeoJsonGeometryCollectionTypeCorrectly() { + public void shouldSaveAndRetrieveDocumentWithGeoJsonGeometryCollectionTypeCorrectly() { DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); obj.id = "geoJsonGeometryCollection"; - obj.geoJsonGeometryCollection = new GeoJsonGeometryCollection(Arrays.> asList( - new GeoJsonPoint(100, 200), new GeoJsonPolygon(new Point(0, 0), new Point(0, 1), new Point(1, 1), new Point(1, - 0), new Point(0, 0)))); + obj.geoJsonGeometryCollection = new GeoJsonGeometryCollection(Arrays.> asList(new GeoJsonPoint(100, 200), + new GeoJsonPolygon(new Point(0, 0), new Point(0, 1), new Point(1, 1), new Point(1, 0), new Point(0, 0)))); 
template.save(obj); DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), DocumentWithPropertyUsingGeoJsonType.class); - assertThat(result.geoJsonGeometryCollection, equalTo(obj.geoJsonGeometryCollection)); + assertThat(result.geoJsonGeometryCollection).isEqualTo(obj.geoJsonGeometryCollection); } @Test // DATAMONGO-1110 public void nearWithMinDistance() { + createIndexAndAddVenues(); + Point point = new GeoJsonPoint(-73.99171, 40.738868); List venues = template.find(query(where("location").near(point).minDistance(0.01)), Venue2DSphere.class); - assertThat(venues.size(), is(11)); + assertThat(venues).hasSize(11); } @Test // DATAMONGO-1110 public void nearSphereWithMinDistance() { + createIndexAndAddVenues(); + Point point = new GeoJsonPoint(-73.99171, 40.738868); List venues = template.find(query(where("location").nearSphere(point).minDistance(0.01)), Venue2DSphere.class); - assertThat(venues.size(), is(11)); + assertThat(venues).hasSize(11); } @Test // DATAMONGO-1135 public void nearWithMinAndMaxDistance() { + createIndexAndAddVenues(); + GeoJsonPoint point = new GeoJsonPoint(-73.99171, 40.738868); Query query = query(where("location").near(point).minDistance(0.01).maxDistance(100)); List venues = template.find(query, Venue2DSphere.class); - assertThat(venues.size(), is(2)); + assertThat(venues).hasSize(2); } @Test // DATAMONGO-1453 @@ -286,7 +417,8 @@ public void shouldConvertPointRepresentationCorrectlyWhenSourceCoordinatesUsesIn new CollectionCallback() { @Override - public Object doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + public Object doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { org.bson.Document pointRepresentation = new org.bson.Document(); pointRepresentation.put("type", "Point"); @@ -303,7 +435,7 @@ public Object doInCollection(MongoCollection collection) thro }); 
assertThat(template.findOne(query(where("id").is("datamongo-1453")), - DocumentWithPropertyUsingGeoJsonType.class).geoJsonPoint, is(equalTo(new GeoJsonPoint(0D, 0D)))); + DocumentWithPropertyUsingGeoJsonType.class).geoJsonPoint).isEqualTo(new GeoJsonPoint(0D, 0D)); } @Test // DATAMONGO-1453 @@ -313,7 +445,8 @@ public void shouldConvertLineStringRepresentationCorrectlyWhenSourceCoordinatesU new CollectionCallback() { @Override - public Object doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + public Object doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { org.bson.Document lineStringRepresentation = new org.bson.Document(); lineStringRepresentation.put("type", "LineString"); @@ -331,32 +464,56 @@ public Object doInCollection(MongoCollection collection) thro } }); - assertThat( - template.findOne(query(where("id").is("datamongo-1453")), - DocumentWithPropertyUsingGeoJsonType.class).geoJsonLineString, - is(equalTo(new GeoJsonLineString(new Point(0D, 0D), new Point(1, 1))))); + assertThat(template.findOne(query(where("id").is("datamongo-1453")), + DocumentWithPropertyUsingGeoJsonType.class).geoJsonLineString) + .isEqualTo(new GeoJsonLineString(new Point(0D, 0D), new Point(1, 1))); + } + + @Test // DATAMONGO-1466 + public void readGeoJsonBasedOnEmbeddedTypeInformation() { + + Point first = new Point(-73.99756, 40.73083); + Point second = new Point(-73.99756, 40.741404); + Point third = new Point(-73.988135, 40.741404); + Point fourth = new Point(-73.988135, 40.73083); + + GeoJsonPolygon polygon = new GeoJsonPolygon(first, second, third, fourth, first); + + ConcreteGeoJson source = new ConcreteGeoJson(); + source.shape = polygon; + source.id = "id-1"; + + template.save(source); + + OpenGeoJson target = template.findOne(query(where("id").is(source.id)), OpenGeoJson.class); + + assertThat(target.shape).isEqualTo(source.shape); } private void addVenues() { - template.insert(new 
Venue2DSphere("Penn Station", -73.99408, 40.75057)); - template.insert(new Venue2DSphere("10gen Office", -73.99171, 40.738868)); - template.insert(new Venue2DSphere("Flatiron Building", -73.988135, 40.741404)); - template.insert(new Venue2DSphere("Players Club", -73.997812, 40.739128)); - template.insert(new Venue2DSphere("City Bakery ", -73.992491, 40.738673)); - template.insert(new Venue2DSphere("Splash Bar", -73.992491, 40.738673)); - template.insert(new Venue2DSphere("Momofuku Milk Bar", -73.985839, 40.731698)); - template.insert(new Venue2DSphere("Shake Shack", -73.98820, 40.74164)); - template.insert(new Venue2DSphere("Penn Station", -73.99408, 40.75057)); - template.insert(new Venue2DSphere("Empire State Building", -73.98602, 40.74894)); - template.insert(new Venue2DSphere("Ulaanbaatar, Mongolia", 106.9154, 47.9245)); - template.insert(new Venue2DSphere("Maplewood, NJ", -74.2713, 40.73137)); + List venues = new ArrayList<>(); + + venues.add(new Venue2DSphere("Penn Station", -73.99408, 40.75057)); + venues.add(new Venue2DSphere("10gen Office", -73.99171, 40.738868)); + venues.add(new Venue2DSphere("Flatiron Building", -73.988135, 40.741404)); + venues.add(new Venue2DSphere("Players Club", -73.997812, 40.739128)); + venues.add(new Venue2DSphere("City Bakery ", -73.992491, 40.738673)); + venues.add(new Venue2DSphere("Splash Bar", -73.992491, 40.738673)); + venues.add(new Venue2DSphere("Momofuku Milk Bar", -73.985839, 40.731698)); + venues.add(new Venue2DSphere("Shake Shack", -73.98820, 40.74164)); + venues.add(new Venue2DSphere("Penn Station", -73.99408, 40.75057)); + venues.add(new Venue2DSphere("Empire State Building", -73.98602, 40.74894)); + venues.add(new Venue2DSphere("Ulaanbaatar, Mongolia", 106.9154, 47.9245)); + venues.add(new Venue2DSphere("Maplewood, NJ", -74.2713, 40.73137)); + + template.bulkOps(BulkMode.UNORDERED, Venue2DSphere.class).insert(venues).execute(); } protected void createIndex() { dropIndex(); - 
template.indexOps(Venue2DSphere.class).ensureIndex( - new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE)); + template.indexOps(Venue2DSphere.class) + .ensureIndex(new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE)); } protected void dropIndex() { @@ -404,6 +561,23 @@ public String toString() { } } + static class VenueWithDistanceField extends Venue2DSphere { + + private Double dis; // geoNear command default distance field name + + public VenueWithDistanceField(String name, double[] location) { + super(name, location); + } + + public Double getDis() { + return dis; + } + + public void setDis(Double dis) { + this.dis = dis; + } + } + static class DocumentWithPropertyUsingGeoJsonType { String id; @@ -416,4 +590,58 @@ static class DocumentWithPropertyUsingGeoJsonType { GeoJsonGeometryCollection geoJsonGeometryCollection; } + @Document("geo-json-shapes") + static class ConcreteGeoJson { + + String id; + GeoJsonPolygon shape; + + public String getId() { + return this.id; + } + + public GeoJsonPolygon getShape() { + return this.shape; + } + + public void setId(String id) { + this.id = id; + } + + public void setShape(GeoJsonPolygon shape) { + this.shape = shape; + } + + public String toString() { + return "GeoJsonTests.ConcreteGeoJson(id=" + this.getId() + ", shape=" + this.getShape() + ")"; + } + } + + @Document("geo-json-shapes") + static class OpenGeoJson { + + String id; + GeoJson shape; + + public String getId() { + return this.id; + } + + public GeoJson getShape() { + return this.shape; + } + + public void setId(String id) { + this.id = id; + } + + public void setShape(GeoJson shape) { + this.shape = shape; + } + + public String toString() { + return "GeoJsonTests.OpenGeoJson(id=" + this.getId() + ", shape=" + this.getShape() + ")"; + } + } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DSphereTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DSphereTests.java index 3154ceacee..3a9140d34c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DSphereTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DSphereTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,8 +16,7 @@ package org.springframework.data.mongodb.core.geo; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; @@ -29,13 +28,14 @@ import org.springframework.data.geo.Metric; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.Venue; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import org.springframework.data.mongodb.core.index.GeospatialIndex; import org.springframework.data.mongodb.core.index.IndexField; import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; /** * @author Christoph 
Strobl @@ -48,33 +48,45 @@ public void indexInfoIsCorrect() { IndexOperations operations = template.indexOps(Venue.class); List indexInfo = operations.getIndexInfo(); - assertThat(indexInfo.size(), is(2)); + assertThat(indexInfo.size()).isEqualTo(2); List fields = indexInfo.get(0).getIndexFields(); - assertThat(fields.size(), is(1)); - assertThat(fields, hasItem(IndexField.create("_id", Direction.ASC))); + assertThat(fields.size()).isEqualTo(1); + assertThat(fields).contains(IndexField.create("_id", Direction.ASC)); fields = indexInfo.get(1).getIndexFields(); - assertThat(fields.size(), is(1)); - assertThat(fields, hasItem(IndexField.geo("location"))); + assertThat(fields.size()).isEqualTo(1); + assertThat(fields).contains(IndexField.geo("location")); } @Test // DATAMONGO-1110 public void geoNearWithMinDistance() { - NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).num(10).minDistance(1); + NearQuery geoNear = NearQuery.near(-73, 40, Metrics.KILOMETERS).limit(10).minDistance(1); GeoResults result = template.geoNear(geoNear, Venue.class); - assertThat(result.getContent().size(), is(not(0))); - assertThat(result.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS)); + assertThat(result.getContent().size()).isNotEqualTo(0); + assertThat(result.getAverageDistance().getMetric()).isEqualTo((Metric) Metrics.KILOMETERS); } @Test // DATAMONGO-1110 public void nearSphereWithMinDistance() { + Point point = new Point(-73.99171, 40.738868); - List venues = template.find(query(where("location").nearSphere(point).minDistance(0.01)), Venue.class); - assertThat(venues.size(), is(1)); + Query query = query(where("location").nearSphere(point).minDistance(0.01)); + + List venues = template.find(query, Venue.class); + assertThat(venues.size()).isEqualTo(1); + } + + @Test + public void countNearSphereWithMinDistance() { + + Point point = new Point(-73.99171, 40.738868); + Query query = query(where("location").nearSphere(point).minDistance(0.01)); + + 
assertThat(template.count(query, Venue.class)).isEqualTo(1); } @Override diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DTests.java index 85928c2114..15c3a89dab 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatial2DTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,22 +16,23 @@ package org.springframework.data.mongodb.core.geo; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; import java.util.List; import org.junit.Test; + import org.springframework.data.domain.Sort.Direction; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.Venue; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import org.springframework.data.mongodb.core.index.GeospatialIndex; import org.springframework.data.mongodb.core.index.IndexField; import org.springframework.data.mongodb.core.index.IndexInfo; +import 
org.springframework.data.mongodb.core.index.IndexOperations; +import org.springframework.data.mongodb.core.query.Query; /** * Modified from https://github.com/deftlabs/mongo-java-geospatial-example @@ -45,9 +46,13 @@ public class GeoSpatial2DTests extends AbstractGeoSpatialTests { @Test public void nearPoint() { + Point point = new Point(-73.99171, 40.738868); - List venues = template.find(query(where("location").near(point).maxDistance(0.01)), Venue.class); - assertThat(venues.size(), is(7)); + Query query = query(where("location").near(point).maxDistance(0.01)); + + List venues = template.find(query, Venue.class); + assertThat(venues.size()).isEqualTo(7); + assertThat(template.count(query, Venue.class)).isEqualTo(7); } @Test // DATAMONGO-360 @@ -56,15 +61,15 @@ public void indexInfoIsCorrect() { IndexOperations operations = template.indexOps(Venue.class); List indexInfo = operations.getIndexInfo(); - assertThat(indexInfo.size(), is(2)); + assertThat(indexInfo.size()).isEqualTo(2); List fields = indexInfo.get(0).getIndexFields(); - assertThat(fields.size(), is(1)); - assertThat(fields, hasItem(IndexField.create("_id", Direction.ASC))); + assertThat(fields.size()).isEqualTo(1); + assertThat(fields).contains(IndexField.create("_id", Direction.ASC)); fields = indexInfo.get(1).getIndexFields(); - assertThat(fields.size(), is(1)); - assertThat(fields, hasItem(IndexField.geo("location"))); + assertThat(fields.size()).isEqualTo(1); + assertThat(fields).contains(IndexField.geo("location")); } @Override diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java index addab731f2..37ae08dff4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java @@ 
-1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,27 +15,30 @@ */ package org.springframework.data.mongodb.core.geo; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.ArrayList; import java.util.List; import java.util.Map; -import org.junit.Before; -import org.junit.Test; +import org.assertj.core.api.Assumptions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataAccessException; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.config.AbstractIntegrationTests; import org.springframework.data.mongodb.core.CollectionCallback; -import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.WriteResultChecking; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import org.springframework.data.mongodb.core.index.GeoSpatialIndexed; import org.springframework.data.mongodb.core.index.IndexInfo; +import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.util.MongoClientVersion; import com.mongodb.MongoException; 
import com.mongodb.WriteConcern; @@ -53,10 +56,10 @@ public class GeoSpatialIndexTests extends AbstractIntegrationTests { @Autowired private MongoTemplate template; - @Before + @BeforeEach public void setUp() { - template.setWriteConcern(WriteConcern.FSYNC_SAFE); + template.setWriteConcern(WriteConcern.JOURNALED); template.setWriteResultChecking(WriteResultChecking.EXCEPTION); } @@ -65,7 +68,7 @@ public void test2dIndex() { try { template.save(new GeoSpatialEntity2D(45.2, 4.6)); - assertThat(hasIndexOfType(GeoSpatialEntity2D.class, "2d"), is(true)); + assertThat(hasIndexOfType(GeoSpatialEntity2D.class, "2d")).isTrue(); } finally { template.dropCollection(GeoSpatialEntity2D.class); } @@ -76,18 +79,21 @@ public void test2dSphereIndex() { try { template.save(new GeoSpatialEntity2DSphere(45.2, 4.6)); - assertThat(hasIndexOfType(GeoSpatialEntity2DSphere.class, "2dsphere"), is(true)); + assertThat(hasIndexOfType(GeoSpatialEntity2DSphere.class, "2dsphere")).isTrue(); } finally { template.dropCollection(GeoSpatialEntity2DSphere.class); } } @Test // DATAMONGO-778 + @EnableIfMongoServerVersion(isLessThan = "5.0") public void testHaystackIndex() { + Assumptions.assumeThat(MongoClientVersion.isVersion5orNewer()).isFalse(); + try { template.save(new GeoSpatialEntityHaystack(45.2, 4.6, "Paris")); - assertThat(hasIndexOfType(GeoSpatialEntityHaystack.class, "geoHaystack"), is(true)); + assertThat(hasIndexOfType(GeoSpatialEntityHaystack.class, "geoHaystack")).isTrue(); } finally { template.dropCollection(GeoSpatialEntityHaystack.class); } @@ -104,9 +110,9 @@ public void useGeneratedNameShouldGenerateAnIndexName() { IndexOperations indexOps = template.indexOps(GeoSpatialEntity2dWithGeneratedIndex.class); List indexInfo = indexOps.getIndexInfo(); - assertThat(indexInfo, hasSize(2)); - assertThat(indexInfo.get(1), is(notNullValue())); - assertThat(indexInfo.get(1).getName(), is("location_2d")); + assertThat(indexInfo).hasSize(2); + assertThat(indexInfo.get(1)).isNotNull(); + 
assertThat(indexInfo.get(1).getName()).isEqualTo("location_2d"); } finally { template.dropCollection(GeoSpatialEntity2D.class); @@ -116,8 +122,8 @@ public void useGeneratedNameShouldGenerateAnIndexName() { /** * Returns whether an index with the given name exists for the given entity type. * - * @param indexName * @param entityType + * @param type * @return */ private boolean hasIndexOfType(Class entityType, final String type) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexFieldUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexFieldUnitTests.java index d8964a0c18..6f505289e6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexFieldUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexFieldUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,18 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; -import org.junit.Test; import org.springframework.data.domain.Sort.Direction; /** * Unit tests for {@link IndexField}. 
* * @author Oliver Gierke + * @author Christoph Strobl */ -@SuppressWarnings("deprecation") public class IndexFieldUnitTests { @Test @@ -34,9 +34,9 @@ public void createsPlainIndexFieldCorrectly() { IndexField field = IndexField.create("foo", Direction.ASC); - assertThat(field.getKey(), is("foo")); - assertThat(field.getDirection(), is(Direction.ASC)); - assertThat(field.isGeo(), is(false)); + assertThat(field.getKey()).isEqualTo("foo"); + assertThat(field.getDirection()).isEqualTo(Direction.ASC); + assertThat(field.isGeo()).isFalse(); } @Test @@ -44,9 +44,9 @@ public void createsGeoIndexFieldCorrectly() { IndexField field = IndexField.geo("foo"); - assertThat(field.getKey(), is("foo")); - assertThat(field.getDirection(), is(nullValue())); - assertThat(field.isGeo(), is(true)); + assertThat(field.getKey()).isEqualTo("foo"); + assertThat(field.getDirection()).isNull(); + assertThat(field.isGeo()).isTrue(); } @Test @@ -55,8 +55,8 @@ public void correctEqualsForPlainFields() { IndexField first = IndexField.create("foo", Direction.ASC); IndexField second = IndexField.create("foo", Direction.ASC); - assertThat(first, is(second)); - assertThat(second, is(first)); + assertThat(first).isEqualTo(second); + assertThat(second).isEqualTo(first); } @Test @@ -65,7 +65,22 @@ public void correctEqualsForGeoFields() { IndexField first = IndexField.geo("bar"); IndexField second = IndexField.geo("bar"); - assertThat(first, is(second)); - assertThat(second, is(first)); + assertThat(first).isEqualTo(second); + assertThat(second).isEqualTo(first); + } + + @Test // DATAMONGO-1183 + public void correctTypeForHashedFields() { + assertThat(IndexField.hashed("key").isHashed()).isTrue(); + } + + @Test // DATAMONGO-1183 + public void correctEqualsForHashedFields() { + + IndexField first = IndexField.hashed("bar"); + IndexField second = IndexField.hashed("bar"); + + assertThat(first).isEqualTo(second); + assertThat(second).isEqualTo(first); } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java index 8eff042629..12fc967c83 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,28 +15,108 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import java.time.Duration; import java.util.Arrays; -import org.junit.Test; +import org.bson.Document; +import org.junit.jupiter.api.Test; import org.springframework.data.domain.Sort.Direction; /** * Unit tests for {@link IndexInfo}. 
* * @author Oliver Gierke + * @author Christoph Strobl + * @author Stefan Tirea */ -public class IndexInfoUnitTests { +class IndexInfoUnitTests { + + static final String ID_INDEX = "{ \"v\" : 2, \"key\" : { \"_id\" : 1 }, \"name\" : \"_id_\", \"ns\" : \"db.collection\" }"; + static final String INDEX_WITH_PARTIAL_FILTER = "{ \"v\" : 2, \"key\" : { \"k3y\" : 1 }, \"name\" : \"partial-filter-index\", \"ns\" : \"db.collection\", \"partialFilterExpression\" : { \"quantity\" : { \"$gte\" : 10 } } }"; + static final String INDEX_WITH_EXPIRATION_TIME = "{ \"v\" : 2, \"key\" : { \"lastModifiedDate\" : 1 },\"name\" : \"expire-after-last-modified\", \"ns\" : \"db.collectio\", \"expireAfterSeconds\" : 3600 }"; + static final String HASHED_INDEX = "{ \"v\" : 2, \"key\" : { \"score\" : \"hashed\" }, \"name\" : \"score_hashed\", \"ns\" : \"db.collection\" }"; + static final String WILDCARD_INDEX = "{ \"v\" : 2, \"key\" : { \"$**\" : 1 }, \"name\" : \"$**_1\", \"wildcardProjection\" : { \"fieldA\" : 0, \"fieldB.fieldC\" : 0 } }"; + static final String INDEX_WITH_COLLATION = "{ \"v\" : 2, \"key\" : { \"_id\" : 1 }, \"name\" : \"projectName\", \"collation\": { \"locale\": \"en_US\", \"strength\": 2 } }"; + static final String HIDDEN_INDEX = """ + { + "v" : 2, + "key" : { + "borough" : 1 + }, + "name" : "borough_1", + "hidden" : true + } + """; @Test - public void isIndexForFieldsCorrectly() { + void isIndexForFieldsCorrectly() { IndexField fooField = IndexField.create("foo", Direction.ASC); IndexField barField = IndexField.create("bar", Direction.DESC); IndexInfo info = new IndexInfo(Arrays.asList(fooField, barField), "myIndex", false, false, ""); - assertThat(info.isIndexForFields(Arrays.asList("foo", "bar")), is(true)); + assertThat(info.isIndexForFields(Arrays.asList("foo", "bar"))).isTrue(); + } + + @Test // DATAMONGO-2170 + void partialFilterExpressionShouldBeNullIfNotSetInSource() { + assertThat(getIndexInfo(ID_INDEX).getPartialFilterExpression()).isNull(); + } + + @Test // 
DATAMONGO-2170 + void partialFilterExpressionShouldMatchSource() { + + assertThat(Document.parse(getIndexInfo(INDEX_WITH_PARTIAL_FILTER).getPartialFilterExpression())) + .isEqualTo(Document.parse("{ \"quantity\" : { \"$gte\" : 10 } }")); + } + + @Test // DATAMONGO-2081 + void expireAfterIsParsedCorrectly() { + assertThat(getIndexInfo(INDEX_WITH_EXPIRATION_TIME).getExpireAfter()).contains(Duration.ofHours(1)); + } + + @Test // DATAMONGO-2081 + void expireAfterIsEmptyIfNotSet() { + assertThat(getIndexInfo(ID_INDEX).getExpireAfter()).isEmpty(); + } + + @Test // DATAMONGO-1183 + void readsHashedIndexCorrectly() { + assertThat(getIndexInfo(HASHED_INDEX).getIndexFields()).containsExactly(IndexField.hashed("score")); + } + + @Test // DATAMONGO-1183 + public void hashedIndexIsMarkedAsSuch() { + assertThat(getIndexInfo(HASHED_INDEX).isHashed()).isTrue(); + } + + @Test // GH-3225 + void identifiesWildcardIndexCorrectly() { + assertThat(getIndexInfo(WILDCARD_INDEX).isWildcard()).isTrue(); + } + + @Test // GH-3225 + void readsWildcardIndexProjectionCorrectly() { + assertThat(getIndexInfo(WILDCARD_INDEX).getWildcardProjection()) + .contains(new Document("fieldA", 0).append("fieldB.fieldC", 0)); + } + + @Test // GH-3002 + void collationParsedCorrectly() { + assertThat(getIndexInfo(INDEX_WITH_COLLATION).getCollation()) + .contains(Document.parse("{ \"locale\": \"en_US\", \"strength\": 2 }")); + } + + @Test // GH-4348 + void hiddenInfoSetCorrectly() { + + assertThat(getIndexInfo(ID_INDEX).isHidden()).isFalse(); + assertThat(getIndexInfo(HIDDEN_INDEX).isHidden()).isTrue(); + } + + private static IndexInfo getIndexInfo(String documentJson) { + return IndexInfo.indexInfoOf(Document.parse(documentJson)); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexOptionsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexOptionsUnitTests.java new file mode 100644 index 0000000000..3a193f0133 --- 
/dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexOptionsUnitTests.java @@ -0,0 +1,82 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; + +import java.time.Duration; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.index.IndexOptions.Unique; + +/** + * @author Christoph Strobl + */ +class IndexOptionsUnitTests { + + @Test // GH-4851 + void noneIsEmpty() { + + IndexOptions options = IndexOptions.none(); + + assertThat(options.getExpire()).isNull(); + assertThat(options.getUnique()).isNull(); + assertThat(options.isHidden()).isNull(); + assertThat(options.toDocument()).isEqualTo(new Document()); + } + + @Test // GH-4851 + void uniqueSetsFlag() { + + IndexOptions options = IndexOptions.unique(); + + assertThat(options.getUnique()).isEqualTo(Unique.YES); + assertThat(options.toDocument()).containsEntry("unique", true); + + options.setUnique(Unique.NO); + assertThat(options.toDocument()).containsEntry("unique", false); + + options.setUnique(Unique.PREPARE); + assertThat(options.toDocument()).containsEntry("prepareUnique", true); + } + + @Test // GH-4851 + void hiddenSetsFlag() { + + IndexOptions options = IndexOptions.hidden(); + + 
assertThat(options.isHidden()).isTrue(); + assertThat(options.toDocument()).containsEntry("hidden", true); + } + + @Test // GH-4851 + void expireAfterSetsExpiration() { + + Duration duration = Duration.ofMinutes(2); + IndexOptions options = IndexOptions.expireAfter(duration); + + assertThat(options.getExpire()).isEqualTo(duration); + assertThat(options.toDocument()).containsEntry("expireAfterSeconds", duration.toSeconds()); + } + + @Test // GH-4851 + void expireAfterForZeroAndNegativeDuration() { + + assertThat(IndexOptions.expireAfter(Duration.ZERO).toDocument()).containsEntry("expireAfterSeconds", 0L); + assertThat(IndexOptions.expireAfter(Duration.ofSeconds(-1)).toDocument()).isEmpty(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java index 014be0cfd3..caa40e96c0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,45 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import java.time.Instant; import java.util.ArrayList; +import java.util.Collections; import java.util.List; +import java.util.Optional; +import java.util.Set; -import org.junit.After; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ConfigurableApplicationContext; -import org.springframework.dao.DataAccessException; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.CollectionCallback; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.MongoCollectionUtils; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import 
org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientExtension; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.MongoException; -import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoClient; /** * Integration tests for index handling. @@ -49,18 +61,52 @@ * @author Oliver Gierke * @author Christoph Strobl * @author Jordi Llach + * @author Mark Paluch + * @author Ben Foster */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@ContextConfiguration public class IndexingIntegrationTests { + static @Client MongoClient mongoClient; + @Autowired MongoOperations operations; - @Autowired MongoDbFactory mongoDbFactory; + @Autowired MongoDatabaseFactory mongoDbFactory; @Autowired ConfigurableApplicationContext context; - @After + @Configuration + static class Config extends AbstractMongoClientConfiguration { + + @Override + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return "database"; + } + + @Bean + TimeoutResolver myTimeoutResolver() { + return new TimeoutResolver("11s"); + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } + + @Override + protected boolean 
autoIndexCreation() { + return true; + } + } + + @AfterEach public void tearDown() { operations.dropCollection(IndexedPerson.class); + operations.dropCollection(TimeSeriesWithSpelIndexTimeout.class); } @Test // DATAMONGO-237 @@ -69,7 +115,22 @@ public void createsIndexWithFieldName() { operations.getConverter().getMappingContext().getPersistentEntity(IndexedPerson.class); - assertThat(hasIndex("_firstname", IndexedPerson.class), is(true)); + assertThat(hasIndex("_firstname", IndexedPerson.class)).isTrue(); + } + + @Test // DATAMONGO-2188 + @DirtiesContext + public void shouldNotCreateIndexOnIndexingDisabled() { + + MongoMappingContext context = new MongoMappingContext(); + context.setAutoIndexCreation(false); + + MongoTemplate template = new MongoTemplate(mongoDbFactory, + new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context)); + + template.getConverter().getMappingContext().getPersistentEntity(IndexedPerson.class); + + assertThat(hasIndex("_firstname", MongoCollectionUtils.getPreferredCollectionName(IndexedPerson.class))).isFalse(); } @Test // DATAMONGO-1163 @@ -78,7 +139,51 @@ public void createsIndexFromMetaAnnotation() { operations.getConverter().getMappingContext().getPersistentEntity(IndexedPerson.class); - assertThat(hasIndex("_lastname", IndexedPerson.class), is(true)); + assertThat(hasIndex("_lastname", IndexedPerson.class)).isTrue(); + } + + @Test // DATAMONGO-2112 + @DirtiesContext + public void evaluatesTimeoutSpelExpresssionWithBeanReference() { + + operations.getConverter().getMappingContext().getPersistentEntity(WithSpelIndexTimeout.class); + + Optional indexInfo = operations.execute("withSpelIndexTimeout", collection -> { + + return collection.listIndexes(org.bson.Document.class).into(new ArrayList<>()) // + .stream() // + .filter(it -> it.get("name").equals("someString")) // + .findFirst(); + }); + + assertThat(indexInfo).isPresent(); + assertThat(indexInfo.get()).hasEntrySatisfying("expireAfterSeconds", timeout -> { + + // MongoDB 5 
returns int not long + assertThat(timeout).isIn(11, 11L); + }); + } + + @Test // GH-4099 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + @DirtiesContext + public void evaluatesTimeSeriesTimeoutSpelExpresssionWithBeanReference() { + + operations.createCollection(TimeSeriesWithSpelIndexTimeout.class); + + final Optional collectionInfo = operations.execute(db -> { + return db.listCollections().into(new ArrayList<>()) + .stream() + .filter(c -> "timeSeriesWithSpelIndexTimeout".equals(c.get("name"))) + .findFirst(); + }); + + assertThat(collectionInfo).isPresent(); + assertThat(collectionInfo.get()).hasEntrySatisfying("options", options -> { + final org.bson.Document optionsDoc = (org.bson.Document) options; + // MongoDB 5 returns int not long + assertThat(optionsDoc.get("expireAfterSeconds")).isIn(11, 11L); + }); } @Target({ ElementType.FIELD }) @@ -94,6 +199,28 @@ class IndexedPerson { @Field("_lastname") @IndexedFieldAnnotation String lastname; } + static class TimeoutResolver { + final String timeout; + + public TimeoutResolver(String timeout) { + this.timeout = timeout; + } + + public String getTimeout() { + return this.timeout; + } + } + + @Document + class WithSpelIndexTimeout { + @Indexed(expireAfter = "#{@myTimeoutResolver?.timeout}") String someString; + } + + @TimeSeries(expireAfter = "#{@myTimeoutResolver?.timeout}", timeField = "timestamp") + class TimeSeriesWithSpelIndexTimeout { + Instant timestamp; + } + /** * Returns whether an index with the given name exists for the given entity type. * @@ -101,22 +228,30 @@ class IndexedPerson { * @param entityType * @return */ - private boolean hasIndex(final String indexName, Class entityType) { + private boolean hasIndex(String indexName, Class entityType) { + return hasIndex(indexName, operations.getCollectionName(entityType)); + } + + /** + * Returns whether an index with the given name exists for the given collection. 
+ * + * @param indexName + * @param collectionName + * @return + */ + private boolean hasIndex(String indexName, String collectionName) { - return operations.execute(entityType, new CollectionCallback() { - public Boolean doInCollection(MongoCollection collection) - throws MongoException, DataAccessException { + return operations.execute(collectionName, collection -> { - List indexes = new ArrayList(); - collection.listIndexes(org.bson.Document.class).into(indexes); + List indexes = new ArrayList<>(); + collection.listIndexes(org.bson.Document.class).into(indexes); - for (org.bson.Document indexInfo : indexes) { - if (indexName.equals(indexInfo.get("name"))) { - return true; - } + for (org.bson.Document indexInfo : indexes) { + if (indexName.equals(indexInfo.get("name"))) { + return true; } - return false; } + return false; }); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests.java index da61dd7ff5..a506decf67 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,11 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import java.net.UnknownHostException; import java.util.Arrays; import java.util.List; -import org.hamcrest.Matchers; import org.hamcrest.core.IsInstanceOf; import org.junit.ClassRule; import org.junit.Rule; @@ -30,6 +27,7 @@ import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.dao.DataIntegrityViolationException; @@ -41,13 +39,14 @@ import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.test.util.CleanMongoDB; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.data.mongodb.test.util.MongoVersionRule; import org.springframework.data.util.Version; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import com.mongodb.MongoClient; import com.mongodb.MongoCommandException; +import com.mongodb.client.MongoClient; /** * Integration tests for {@link MongoPersistentEntityIndexCreator}. 
@@ -56,7 +55,7 @@ * @author Christoph Strobl * @author Thomas Darimont */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class MongoPersistentEntityIndexCreatorIntegrationTests { @@ -76,11 +75,11 @@ public class MongoPersistentEntityIndexCreatorIntegrationTests { public void createsIndexForConfiguredMappingContextOnly() { List indexInfo = templateOne.indexOps(SampleEntity.class).getIndexInfo(); - assertThat(indexInfo, hasSize(greaterThan(0))); - assertThat(indexInfo, Matchers. hasItem(hasProperty("name", is("prop")))); + assertThat(indexInfo).isNotEmpty(); + assertThat(indexInfo).extracting(IndexInfo::getName).contains("prop"); indexInfo = templateTwo.indexOps(SAMPLE_TYPE_COLLECTION_NAME).getIndexInfo(); - assertThat(indexInfo, hasSize(0)); + assertThat(indexInfo).hasSize(0); } @Test // DATAMONGO-1202 @@ -88,12 +87,12 @@ public void shouldHonorIndexedPropertiesWithRecursiveMappings() { List indexInfo = templateOne.indexOps(RecursiveConcreteType.class).getIndexInfo(); - assertThat(indexInfo, hasSize(greaterThan(0))); - assertThat(indexInfo, Matchers. hasItem(hasProperty("name", is("firstName")))); + assertThat(indexInfo).isNotEmpty(); + assertThat(indexInfo).extracting(IndexInfo::getName).contains("firstName"); } @Test // DATAMONGO-1125 - public void createIndexShouldThrowMeaningfulExceptionWhenIndexCreationFails() throws UnknownHostException { + public void createIndexShouldThrowMeaningfulExceptionWhenIndexCreationFails() { expectedException.expect(DataIntegrityViolationException.class); expectedException.expectMessage("collection 'datamongo-1125'"); @@ -101,16 +100,18 @@ public void createIndexShouldThrowMeaningfulExceptionWhenIndexCreationFails() th expectedException.expectMessage("lastname"); expectedException.expectCause(IsInstanceOf. 
instanceOf(MongoCommandException.class)); - MongoTemplate mongoTemplate = new MongoTemplate(new MongoClient(), "issue"); + try (MongoClient client = MongoTestUtils.client()) { + MongoTemplate mongoTemplate = new MongoTemplate(client, "issue"); - MongoPersistentEntityIndexCreator indexCreator = new MongoPersistentEntityIndexCreator(new MongoMappingContext(), - mongoTemplate); + MongoPersistentEntityIndexCreator indexCreator = new MongoPersistentEntityIndexCreator(new MongoMappingContext(), + mongoTemplate); - indexCreator.createIndex(new IndexDefinitionHolder("dalinar.kohlin", - new Index().named("stormlight").on("lastname", Direction.ASC).unique(), "datamongo-1125")); + indexCreator.createIndex(new IndexDefinitionHolder("dalinar.kohlin", + new Index().named("stormlight").on("lastname", Direction.ASC).unique(), "datamongo-1125")); - indexCreator.createIndex(new IndexDefinitionHolder("dalinar.kohlin", - new Index().named("stormlight").on("lastname", Direction.ASC).sparse(), "datamongo-1125")); + indexCreator.createIndex(new IndexDefinitionHolder("dalinar.kohlin", + new Index().named("stormlight").on("lastname", Direction.ASC).sparse(), "datamongo-1125")); + } } @Document(RECURSIVE_TYPE_COLLECTION_NAME) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorUnitTests.java index 1ce59c9648..0e48b39e56 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,26 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.mockito.Matchers.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import java.util.Collections; import java.util.Date; import java.util.concurrent.TimeUnit; -import org.hamcrest.core.IsEqual; -import org.hamcrest.number.IsCloseTo; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.context.ApplicationContext; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + import org.springframework.dao.DataAccessException; import org.springframework.data.geo.Point; import org.springframework.data.mapping.context.MappingContextEvent; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.DefaultIndexOperations; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.MongoExceptionTranslator; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.mapping.Document; @@ -62,29 +58,30 @@ * @author Thomas Darimont 
* @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class MongoPersistentEntityIndexCreatorUnitTests { - private @Mock MongoDbFactory factory; - private @Mock ApplicationContext context; + private @Mock MongoDatabaseFactory factory; private @Mock MongoDatabase db; private @Mock MongoCollection collection; private MongoTemplate mongoTemplate; - ArgumentCaptor keysCaptor; - ArgumentCaptor optionsCaptor; - ArgumentCaptor collectionCaptor; + private ArgumentCaptor keysCaptor; + private ArgumentCaptor optionsCaptor; + private ArgumentCaptor collectionCaptor; - @Before - public void setUp() { + @BeforeEach + void setUp() { keysCaptor = ArgumentCaptor.forClass(org.bson.Document.class); optionsCaptor = ArgumentCaptor.forClass(IndexOptions.class); collectionCaptor = ArgumentCaptor.forClass(String.class); - when(factory.getDb()).thenReturn(db); + when(factory.getMongoDatabase()).thenReturn(db); when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); - when(db.getCollection(collectionCaptor.capture())).thenReturn(collection); + when(db.getCollection(collectionCaptor.capture(), eq(org.bson.Document.class))) + .thenReturn((MongoCollection) collection); mongoTemplate = new MongoTemplate(factory); @@ -92,21 +89,20 @@ public void setUp() { } @Test - public void buildsIndexDefinitionUsingFieldName() { + void buildsIndexDefinitionUsingFieldName() { MongoMappingContext mappingContext = prepareMappingContext(Person.class); new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); - assertThat(keysCaptor.getValue(), is(notNullValue())); - assertThat(keysCaptor.getValue().keySet(), hasItem("fieldname")); - assertThat(optionsCaptor.getValue().getName(), is("indexName")); - assertThat(optionsCaptor.getValue().isBackground(), is(false)); - assertThat(optionsCaptor.getValue().getExpireAfter(TimeUnit.SECONDS), nullValue()); + 
assertThat(keysCaptor.getValue()).isNotNull().containsKey("fieldname"); + assertThat(optionsCaptor.getValue().getName()).isEqualTo("indexName"); + assertThat(optionsCaptor.getValue().isBackground()).isFalse(); + assertThat(optionsCaptor.getValue().getExpireAfter(TimeUnit.SECONDS)).isNull(); } @Test - public void doesNotCreateIndexForEntityComingFromDifferentMappingContext() { + void doesNotCreateIndexForEntityComingFromDifferentMappingContext() { MongoMappingContext mappingContext = new MongoMappingContext(); MongoMappingContext personMappingContext = prepareMappingContext(Person.class); @@ -119,117 +115,113 @@ public void doesNotCreateIndexForEntityComingFromDifferentMappingContext() { creator.onApplicationEvent(event); - verifyZeroInteractions(collection); + verifyNoInteractions(collection); } @Test // DATAMONGO-530 - public void isIndexCreatorForMappingContextHandedIntoConstructor() { + void isIndexCreatorForMappingContextHandedIntoConstructor() { MongoMappingContext mappingContext = new MongoMappingContext(); mappingContext.initialize(); MongoPersistentEntityIndexCreator creator = new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); - assertThat(creator.isIndexCreatorFor(mappingContext), is(true)); - assertThat(creator.isIndexCreatorFor(new MongoMappingContext()), is(false)); + assertThat(creator.isIndexCreatorFor(mappingContext)).isTrue(); + assertThat(creator.isIndexCreatorFor(new MongoMappingContext())).isFalse(); } @Test // DATAMONGO-554 - public void triggersBackgroundIndexingIfConfigured() { + void triggersBackgroundIndexingIfConfigured() { MongoMappingContext mappingContext = prepareMappingContext(AnotherPerson.class); new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); - assertThat(keysCaptor.getValue(), is(notNullValue())); - assertThat(keysCaptor.getValue().keySet(), hasItem("lastname")); - assertThat(optionsCaptor.getValue().getName(), is("lastname")); - assertThat(optionsCaptor.getValue().isBackground(), IsEqual. 
equalTo(true)); - assertThat(optionsCaptor.getValue().getExpireAfter(TimeUnit.SECONDS), nullValue()); + assertThat(keysCaptor.getValue()).isNotNull().containsKey("lastname"); + assertThat(optionsCaptor.getValue().getName()).isEqualTo("lastname"); + assertThat(optionsCaptor.getValue().isBackground()).isTrue(); + assertThat(optionsCaptor.getValue().getExpireAfter(TimeUnit.SECONDS)).isNull(); } @Test // DATAMONGO-544 - public void expireAfterSecondsIfConfigured() { + void expireAfterSecondsIfConfigured() { MongoMappingContext mappingContext = prepareMappingContext(Milk.class); new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); - assertThat(keysCaptor.getValue(), is(notNullValue())); - assertThat(keysCaptor.getValue().keySet(), hasItem("expiry")); - assertThat(optionsCaptor.getValue().getExpireAfter(TimeUnit.SECONDS), IsEqual. equalTo(60L)); + assertThat(keysCaptor.getValue()).isNotNull().containsKey("expiry"); + assertThat(optionsCaptor.getValue().getExpireAfter(TimeUnit.SECONDS)).isEqualTo(60); } @Test // DATAMONGO-899 - public void createsNotNestedGeoSpatialIndexCorrectly() { + void createsNotNestedGeoSpatialIndexCorrectly() { MongoMappingContext mappingContext = prepareMappingContext(Wrapper.class); new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); - assertThat(keysCaptor.getValue(), equalTo(new org.bson.Document().append("company.address.location", "2d"))); + assertThat(keysCaptor.getValue()).isEqualTo(new org.bson.Document("company.address.location", "2d")); IndexOptions opts = optionsCaptor.getValue(); - assertThat(opts.getName(), is(equalTo("company.address.location"))); - assertThat(opts.getMin(), IsCloseTo.closeTo(-180, 0)); - assertThat(opts.getMax(), IsCloseTo.closeTo(180, 0)); - assertThat(opts.getBits(), is(26)); + assertThat(opts.getName()).isEqualTo("company.address.location"); + assertThat(opts.getMin()).isCloseTo(-180d, offset(0d)); + assertThat(opts.getMax()).isCloseTo(180d, offset(0d)); + 
assertThat(opts.getBits()).isEqualTo(26); } @Test // DATAMONGO-827 - public void autoGeneratedIndexNameShouldGenerateNoName() { + void autoGeneratedIndexNameShouldGenerateNoName() { MongoMappingContext mappingContext = prepareMappingContext(EntityWithGeneratedIndexName.class); new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); - assertThat(keysCaptor.getValue().containsKey("name"), is(false)); - assertThat(keysCaptor.getValue().keySet(), hasItem("lastname")); - - assertThat(optionsCaptor.getValue().getName(), nullValue()); + assertThat(keysCaptor.getValue()).doesNotContainKey("name").containsKey("lastname"); + assertThat(optionsCaptor.getValue().getName()).isNull(); } @Test // DATAMONGO-367 - public void indexCreationShouldNotCreateNewCollectionForNestedGeoSpatialIndexStructures() { + void indexCreationShouldNotCreateNewCollectionForNestedGeoSpatialIndexStructures() { MongoMappingContext mappingContext = prepareMappingContext(Wrapper.class); new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); ArgumentCaptor collectionNameCapturer = ArgumentCaptor.forClass(String.class); - verify(db, times(1)).getCollection(collectionNameCapturer.capture()); - assertThat(collectionNameCapturer.getValue(), equalTo("wrapper")); + verify(db, times(1)).getCollection(collectionNameCapturer.capture(), any()); + assertThat(collectionNameCapturer.getValue()).isEqualTo("wrapper"); } @Test // DATAMONGO-367 - public void indexCreationShouldNotCreateNewCollectionForNestedIndexStructures() { + void indexCreationShouldNotCreateNewCollectionForNestedIndexStructures() { MongoMappingContext mappingContext = prepareMappingContext(IndexedDocumentWrapper.class); new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); ArgumentCaptor collectionNameCapturer = ArgumentCaptor.forClass(String.class); - verify(db, times(1)).getCollection(collectionNameCapturer.capture()); - assertThat(collectionNameCapturer.getValue(), equalTo("indexedDocumentWrapper")); + 
verify(db, times(1)).getCollection(collectionNameCapturer.capture(), any()); + assertThat(collectionNameCapturer.getValue()).isEqualTo("indexedDocumentWrapper"); } - @Test(expected = DataAccessException.class) // DATAMONGO-1125 - public void createIndexShouldUsePersistenceExceptionTranslatorForNonDataIntegrityConcerns() { + @Test // DATAMONGO-1125 + void createIndexShouldUsePersistenceExceptionTranslatorForNonDataIntegrityConcerns() { - when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); - doThrow(new MongoException(6, "HostUnreachable")).when(collection).createIndex(Mockito.any(org.bson.Document.class), - Mockito.any(IndexOptions.class)); + doThrow(new MongoException(6, "HostUnreachable")).when(collection).createIndex(any(org.bson.Document.class), + any(IndexOptions.class)); MongoMappingContext mappingContext = prepareMappingContext(Person.class); - new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); + assertThatThrownBy(() -> new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate)) + .isInstanceOf(DataAccessException.class); } - @Test(expected = ClassCastException.class) // DATAMONGO-1125 - public void createIndexShouldNotConvertUnknownExceptionTypes() { + @Test // DATAMONGO-1125 + void createIndexShouldNotConvertUnknownExceptionTypes() { - when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); - doThrow(new ClassCastException("o_O")).when(collection).createIndex(Mockito.any(org.bson.Document.class), - Mockito.any(IndexOptions.class)); + doThrow(new ClassCastException("o_O")).when(collection).createIndex(any(org.bson.Document.class), + any(IndexOptions.class)); MongoMappingContext mappingContext = prepareMappingContext(Person.class); - new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate); + assertThatThrownBy(() -> new MongoPersistentEntityIndexCreator(mappingContext, mongoTemplate)) + .isInstanceOf(ClassCastException.class); } private static 
MongoMappingContext prepareMappingContext(Class type) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java index 5443db2e00..aa26445f2d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,25 +15,28 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import java.util.Collections; +import java.util.Arrays; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses; + import org.springframework.core.annotation.AliasFor; 
+import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.annotation.Id; import org.springframework.data.geo.Point; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolverUnitTests.CompoundIndexResolutionTests; @@ -49,21 +52,28 @@ import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.util.ClassTypeInformation; /** + * Tests for {@link MongoPersistentEntityIndexResolver}. + * * @author Christoph Strobl * @author Mark Paluch + * @author Dave Perryman + * @author Stefan Tirea */ @RunWith(Suite.class) @SuiteClasses({ IndexResolutionTests.class, GeoSpatialIndexResolutionTests.class, CompoundIndexResolutionTests.class, TextIndexedResolutionTests.class, MixedIndexResolutionTests.class }) +@SuppressWarnings("unused") public class MongoPersistentEntityIndexResolverUnitTests { /** * Test resolution of {@link Indexed}. 
* * @author Christoph Strobl + * @author Mark Paluch */ public static class IndexResolutionTests { @@ -73,7 +83,7 @@ public void indexPathOnLevelZeroIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( IndexOnLevelZero.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("indexedProperty", "Zero", indexDefinitions.get(0)); } @@ -82,16 +92,26 @@ public void indexPathOnLevelOneIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(IndexOnLevelOne.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("zero.indexedProperty", "One", indexDefinitions.get(0)); } + @Test // DATAMONGO-899, DATAMONGO-2188 + public void shouldResolveIndexViaClass() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + IndexResolver indexResolver = IndexResolver.create(mappingContext); + Iterable definitions = indexResolver.resolveIndexFor(IndexOnLevelOne.class); + + assertThat(definitions).isNotEmpty(); + } + @Test // DATAMONGO-899 - public void depplyNestedIndexPathIsResolvedCorrectly() { + public void deeplyNestedIndexPathIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(IndexOnLevelTwo.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("one.zero.indexedProperty", "Two", indexDefinitions.get(0)); } @@ -101,7 +121,7 @@ public void resolvesIndexPathNameForNamedPropertiesCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( IndexOnLevelOneWithExplicitlyNamedField.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("customZero.customFieldName", "indexOnLevelOneWithExplicitlyNamedField", indexDefinitions.get(0)); } @@ -113,7 +133,7 @@ public void 
resolvesIndexDefinitionCorrectly() { IndexOnLevelZero.class); IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexOptions(), equalTo(new org.bson.Document().append("name", "indexedProperty"))); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new org.bson.Document("name", "indexedProperty")); } @Test // DATAMONGO-899 @@ -123,10 +143,8 @@ public void resolvesIndexDefinitionOptionsCorrectly() { WithOptionsOnIndexedProperty.class); IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexOptions(), - equalTo( - new org.bson.Document().append("name", "indexedProperty").append("unique", true) - .append("sparse", true).append("background", true).append("expireAfterSeconds", 10L))); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new org.bson.Document().append("name", "indexedProperty") + .append("unique", true).append("sparse", true).append("background", true).append("expireAfterSeconds", 10L)); } @Test // DATAMONGO-1297 @@ -134,9 +152,9 @@ public void resolvesIndexOnDbrefWhenDefined() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(WithDbRef.class); - assertThat(indexDefinitions, hasSize(1)); - assertThat(indexDefinitions.get(0).getCollection(), equalTo("withDbRef")); - assertThat(indexDefinitions.get(0).getIndexKeys(), equalTo(new org.bson.Document().append("indexedDbRef", 1))); + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getCollection()).isEqualTo("withDbRef"); + assertThat(indexDefinitions.get(0).getIndexKeys()).isEqualTo(new org.bson.Document("indexedDbRef", 1)); } @Test // DATAMONGO-1297 @@ -145,10 +163,9 @@ public void resolvesIndexOnDbrefWhenDefinedOnNestedElement() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( WrapperOfWithDbRef.class); - assertThat(indexDefinitions, hasSize(1)); - assertThat(indexDefinitions.get(0).getCollection(), 
equalTo("wrapperOfWithDbRef")); - assertThat(indexDefinitions.get(0).getIndexKeys(), - equalTo(new org.bson.Document().append("nested.indexedDbRef", 1))); + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getCollection()).isEqualTo("wrapperOfWithDbRef"); + assertThat(indexDefinitions.get(0).getIndexKeys()).isEqualTo(new org.bson.Document("nested.indexedDbRef", 1)); } @Test // DATAMONGO-1163 @@ -157,9 +174,9 @@ public void resolveIndexDefinitionInMetaAnnotatedFields() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( IndexOnMetaAnnotatedField.class); - assertThat(indexDefinitions, hasSize(1)); - assertThat(indexDefinitions.get(0).getCollection(), equalTo("indexOnMetaAnnotatedField")); - assertThat(indexDefinitions.get(0).getIndexOptions(), equalTo(new org.bson.Document().append("name", "_name"))); + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getCollection()).isEqualTo("indexOnMetaAnnotatedField"); + assertThat(indexDefinitions.get(0).getIndexOptions()).isEqualTo(new org.bson.Document("name", "_name")); } @Test // DATAMONGO-1373 @@ -168,13 +185,15 @@ public void resolveIndexDefinitionInComposedAnnotatedFields() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( IndexedDocumentWithComposedAnnotations.class); - assertThat(indexDefinitions, hasSize(2)); + assertThat(indexDefinitions).hasSize(2); IndexDefinitionHolder indexDefinitionHolder = indexDefinitions.get(1); - assertThat(indexDefinitionHolder.getIndexKeys(), isBsonObject().containing("fieldWithMyIndexName", 1)); - assertThat(indexDefinitionHolder.getIndexOptions(), - isBsonObject().containing("sparse", true).containing("unique", true).containing("name", "my_index_name")); + assertThat(indexDefinitionHolder.getIndexKeys()).containsEntry("fieldWithMyIndexName", 1); + assertThat(indexDefinitionHolder.getIndexOptions()) // + .containsEntry("sparse", true) // + .containsEntry("unique", true) // + 
.containsEntry("name", "my_index_name"); } @Test // DATAMONGO-1373 @@ -183,56 +202,143 @@ public void resolveIndexDefinitionInCustomComposedAnnotatedFields() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( IndexedDocumentWithComposedAnnotations.class); - assertThat(indexDefinitions, hasSize(2)); + assertThat(indexDefinitions).hasSize(2); IndexDefinitionHolder indexDefinitionHolder = indexDefinitions.get(0); - assertThat(indexDefinitionHolder.getIndexKeys(), isBsonObject().containing("fieldWithDifferentIndexName", 1)); - assertThat(indexDefinitionHolder.getIndexOptions(), - isBsonObject().containing("sparse", true).containing("name", "different_name").notContaining("unique")); + assertThat(indexDefinitionHolder.getIndexKeys()).containsEntry("fieldWithDifferentIndexName", 1); + assertThat(indexDefinitionHolder.getIndexOptions()) // + .containsEntry("sparse", true) // + .containsEntry("name", "different_name") // + .doesNotContainKey("unique"); + } + + @Test // DATAMONGO-2112 + public void shouldResolveTimeoutFromString() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithExpireAfterAsPlainString.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("expireAfterSeconds", 600L); + } + + @Test // GH-4844 + public void shouldResolveZeroTimeoutFromString() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithExpireAfterZeroSecondsAsPlainString.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("expireAfterSeconds", 0L); + } + + @Test // DATAMONGO-2112 + public void shouldResolveTimeoutFromIso8601String() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithIso8601Style.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("expireAfterSeconds", 86400L); + } + + @Test // DATAMONGO-2112 + public void shouldResolveTimeoutFromExpression() { + + List indexDefinitions = 
prepareMappingContextAndResolveIndexForType( + WithExpireAfterAsExpression.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("expireAfterSeconds", 11L); + } + + @Test // DATAMONGO-2112 + public void shouldResolveTimeoutFromExpressionReturningDuration() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithExpireAfterAsExpressionResultingInDuration.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("expireAfterSeconds", 100L); + } + + @Test // DATAMONGO-2112 + public void shouldErrorOnInvalidTimeoutExpression() { + + MongoMappingContext mappingContext = prepareMappingContext(WithInvalidExpireAfter.class); + MongoPersistentEntityIndexResolver indexResolver = new MongoPersistentEntityIndexResolver(mappingContext); + + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> indexResolver + .resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(WithInvalidExpireAfter.class))); + } + + @Test // DATAMONGO-2112 + public void shouldErrorOnDuplicateTimeoutExpression() { + + MongoMappingContext mappingContext = prepareMappingContext(WithDuplicateExpiry.class); + MongoPersistentEntityIndexResolver indexResolver = new MongoPersistentEntityIndexResolver(mappingContext); + + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> indexResolver + .resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(WithDuplicateExpiry.class))); + } + + @Test // DATAMONGO-2112 + public void resolveExpressionIndexName() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithIndexNameAsExpression.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "my1st"); + } + + @Test // DATAMONGO-1569 + public void resolvesPartialFilter() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithPartialFilter.class); + + 
assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("partialFilterExpression", + org.bson.Document.parse("{'value': {'$exists': true}}")); } @Document("Zero") - static class IndexOnLevelZero { + class IndexOnLevelZero { @Indexed String indexedProperty; } @Document("One") - static class IndexOnLevelOne { + class IndexOnLevelOne { IndexOnLevelZero zero; } @Document("Two") - static class IndexOnLevelTwo { + class IndexOnLevelTwo { IndexOnLevelOne one; } @Document("WithOptionsOnIndexedProperty") - static class WithOptionsOnIndexedProperty { + class WithOptionsOnIndexedProperty { - @Indexed(background = true, direction = IndexDirection.DESCENDING, - dropDups = true, expireAfterSeconds = 10, sparse = true, unique = true) // + @Indexed(background = true, direction = IndexDirection.DESCENDING, expireAfterSeconds = 10, sparse = true, + unique = true) // String indexedProperty; } @Document - static class IndexOnLevelOneWithExplicitlyNamedField { + class IndexOnLevelOneWithExplicitlyNamedField { @Field("customZero") IndexOnLevelZeroWithExplicityNamedField zero; } - static class IndexOnLevelZeroWithExplicityNamedField { + class IndexOnLevelZeroWithExplicityNamedField { - @Indexed @Field("customFieldName") String namedProperty; + @Indexed + @Field("customFieldName") String namedProperty; } @Document - static class WrapperOfWithDbRef { + class WrapperOfWithDbRef { WithDbRef nested; } @Document - static class WithDbRef { + class WithDbRef { @Indexed // @DBRef // @@ -240,12 +346,12 @@ static class WithDbRef { } @Document("no-index") - static class NoIndex { + class NoIndex { @Id String id; } @Document - static class IndexedDocumentWithComposedAnnotations { + class IndexedDocumentWithComposedAnnotations { @Id String id; @CustomIndexedAnnotation String fieldWithDifferentIndexName; @@ -255,13 +361,13 @@ static class IndexedDocumentWithComposedAnnotations { @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.FIELD }) @ComposedIndexedAnnotation(indexName = 
"different_name", beUnique = false) - static @interface CustomIndexedAnnotation { + @interface CustomIndexedAnnotation { } @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE }) @Indexed - static @interface ComposedIndexedAnnotation { + @interface ComposedIndexedAnnotation { @AliasFor(annotation = Indexed.class, attribute = "unique") boolean beUnique() default true; @@ -276,23 +382,68 @@ static class IndexedDocumentWithComposedAnnotations { @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD) @org.springframework.data.mongodb.core.mapping.Field - static @interface ComposedFieldAnnotation { + @interface ComposedFieldAnnotation { @AliasFor(annotation = org.springframework.data.mongodb.core.mapping.Field.class, attribute = "value") String name() default "_id"; } + + @Document + class WithExpireAfterAsPlainString { + @Indexed(expireAfter = "10m") String withTimeout; + } + + @Document + class WithExpireAfterZeroSecondsAsPlainString { + @Indexed(expireAfter = "0s") String withTimeout; + } + + @Document + class WithIso8601Style { + @Indexed(expireAfter = "P1D") String withTimeout; + } + + @Document + class WithExpireAfterAsExpression { + @Indexed(expireAfter = "#{10 + 1 + 's'}") String withTimeout; + } + + @Document + class WithExpireAfterAsExpressionResultingInDuration { + @Indexed(expireAfter = "#{T(java.time.Duration).ofSeconds(100)}") String withTimeout; + } + + @Document + class WithInvalidExpireAfter { + @Indexed(expireAfter = "123ops") String withTimeout; + } + + @Document + class WithDuplicateExpiry { + @Indexed(expireAfter = "1s", expireAfterSeconds = 2) String withTimeout; + } + + @Document + class WithIndexNameAsExpression { + @Indexed(name = "#{'my' + 1 + 'st'}") String spelIndexName; + } + + @Document + class WithPartialFilter { + @Indexed(partialFilter = "{'value': {'$exists': true}}") String withPartialFilter; + } } @Target({ ElementType.FIELD }) @Retention(RetentionPolicy.RUNTIME) @Indexed @interface 
IndexedFieldAnnotation { - } @Document - static class IndexOnMetaAnnotatedField { - @Field("_name") @IndexedFieldAnnotation String lastname; + class IndexOnMetaAnnotatedField { + @Field("_name") + @IndexedFieldAnnotation String lastname; } /** @@ -308,7 +459,7 @@ public void geoSpatialIndexPathOnLevelZeroIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( GeoSpatialIndexOnLevelZero.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("geoIndexedProperty", "Zero", indexDefinitions.get(0)); } @@ -318,7 +469,7 @@ public void geoSpatialIndexPathOnLevelOneIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( GeoSpatialIndexOnLevelOne.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("zero.geoIndexedProperty", "One", indexDefinitions.get(0)); } @@ -328,7 +479,7 @@ public void depplyNestedGeoSpatialIndexPathIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( GeoSpatialIndexOnLevelTwo.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("one.zero.geoIndexedProperty", "Two", indexDefinitions.get(0)); } @@ -340,8 +491,8 @@ public void resolvesIndexDefinitionOptionsCorrectly() { IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexOptions(), equalTo( - new org.bson.Document().append("name", "location").append("min", 1).append("max", 100).append("bits", 2))); + assertThat(indexDefinition.getIndexOptions()).isEqualTo( + new org.bson.Document().append("name", "location").append("min", 1).append("max", 100).append("bits", 2)); } @Test // DATAMONGO-1373 @@ -352,37 +503,45 @@ public void resolvesComposedAnnotationIndexDefinitionOptionsCorrectly() { IndexDefinition indexDefinition = 
indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexKeys(), - isBsonObject().containing("location", "geoHaystack").containing("What light?", 1)); - assertThat(indexDefinition.getIndexOptions(), - isBsonObject().containing("name", "my_geo_index_name").containing("bucketSize", 2.0)); + assertThat(indexDefinition.getIndexKeys()).containsEntry("location", "geoHaystack").containsEntry("What light?", + 1); + assertThat(indexDefinition.getIndexOptions()).containsEntry("name", "my_geo_index_name") + .containsEntry("bucketSize", 2.0); + } + + @Test // DATAMONGO-2112 + public void resolveExpressionIndexNameForGeoIndex() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + GeoIndexWithNameAsExpression.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "my1st"); } @Document("Zero") - static class GeoSpatialIndexOnLevelZero { + class GeoSpatialIndexOnLevelZero { @GeoSpatialIndexed Point geoIndexedProperty; } @Document("One") - static class GeoSpatialIndexOnLevelOne { + class GeoSpatialIndexOnLevelOne { GeoSpatialIndexOnLevelZero zero; } @Document("Two") - static class GeoSpatialIndexOnLevelTwo { + class GeoSpatialIndexOnLevelTwo { GeoSpatialIndexOnLevelOne one; } @Document("WithOptionsOnGeoSpatialIndexProperty") - static class WithOptionsOnGeoSpatialIndexProperty { + class WithOptionsOnGeoSpatialIndexProperty { - @GeoSpatialIndexed(bits = 2, max = 100, min = 1, - type = GeoSpatialIndexType.GEO_2D) // + @GeoSpatialIndexed(bits = 2, max = 100, min = 1, type = GeoSpatialIndexType.GEO_2D) // Point location; } @Document("WithComposedAnnotation") - static class GeoSpatialIndexedDocumentWithComposedAnnotation { + class GeoSpatialIndexedDocumentWithComposedAnnotation { @ComposedGeoSpatialIndexed // Point location; @@ -406,6 +565,11 @@ static class GeoSpatialIndexedDocumentWithComposedAnnotation { GeoSpatialIndexType indexType() default GeoSpatialIndexType.GEO_HAYSTACK; } + @Document + class 
GeoIndexWithNameAsExpression { + @GeoSpatialIndexed(name = "#{'my' + 1 + 'st'}") Point spelIndexName; + } + } /** @@ -421,7 +585,7 @@ public void compoundIndexPathOnLevelZeroIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( CompoundIndexOnLevelZero.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "foo", "bar" }, "CompoundIndexOnLevelZero", indexDefinitions.get(0)); } @@ -432,9 +596,9 @@ public void compoundIndexOptionsResolvedCorrectly() { CompoundIndexOnLevelZero.class); IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexOptions(), equalTo(new org.bson.Document().append("name", "compound_index") - .append("unique", true).append("sparse", true).append("background", true))); - assertThat(indexDefinition.getIndexKeys(), equalTo(new org.bson.Document().append("foo", 1).append("bar", -1))); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new org.bson.Document("name", "compound_index") + .append("unique", true).append("sparse", true).append("background", true)); + assertThat(indexDefinition.getIndexKeys()).isEqualTo(new org.bson.Document().append("foo", 1).append("bar", -1)); } @Test // DATAMONGO-909 @@ -444,9 +608,9 @@ public void compoundIndexOnSuperClassResolvedCorrectly() { IndexDefinedOnSuperClass.class); IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexOptions(), equalTo(new org.bson.Document().append("name", "compound_index") - .append("unique", true).append("sparse", true).append("background", true))); - assertThat(indexDefinition.getIndexKeys(), equalTo(new org.bson.Document().append("foo", 1).append("bar", -1))); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new org.bson.Document().append("name", "compound_index") + .append("unique", true).append("sparse", 
true).append("background", true)); + assertThat(indexDefinition.getIndexKeys()).isEqualTo(new org.bson.Document().append("foo", 1).append("bar", -1)); } @Test // DATAMONGO-827 @@ -456,9 +620,9 @@ public void compoundIndexDoesNotSpecifyNameWhenUsingGenerateName() { ComountIndexWithAutogeneratedName.class); IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); - assertThat(indexDefinition.getIndexOptions(), equalTo(new org.bson.Document().append("unique", true) - .append("sparse", true).append("background", true))); - assertThat(indexDefinition.getIndexKeys(), equalTo(new org.bson.Document().append("foo", 1).append("bar", -1))); + assertThat(indexDefinition.getIndexOptions()) + .isEqualTo(new org.bson.Document().append("unique", true).append("sparse", true).append("background", true)); + assertThat(indexDefinition.getIndexKeys()).isEqualTo(new org.bson.Document().append("foo", 1).append("bar", -1)); } @Test // DATAMONGO-929 @@ -467,7 +631,7 @@ public void compoundIndexPathOnLevelOneIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( CompoundIndexOnLevelOne.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "zero.foo", "zero.bar" }, "CompoundIndexOnLevelOne", indexDefinitions.get(0)); } @@ -478,7 +642,7 @@ public void emptyCompoundIndexPathOnLevelOneIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( CompoundIndexOnLevelOneWithEmptyIndexDefinition.class); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "zero" }, "CompoundIndexOnLevelZeroWithEmptyIndexDef", indexDefinitions.get(0)); } @@ -489,7 +653,7 @@ public void singleCompoundIndexPathOnLevelZeroIsResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( SingleCompoundIndex.class); - 
assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "foo", "bar" }, "CompoundIndexOnLevelZero", indexDefinitions.get(0)); } @@ -499,54 +663,133 @@ public void singleCompoundIndexUsingComposedAnnotationsOnTypeResolvedCorrectly() List indexDefinitions = prepareMappingContextAndResolveIndexForType( CompoundIndexDocumentWithComposedAnnotation.class); - assertThat(indexDefinitions, hasSize(1)); - assertThat(indexDefinitions.get(0).getIndexKeys(), isBsonObject().containing("foo", 1).containing("bar", -1)); - assertThat(indexDefinitions.get(0).getIndexOptions(), isBsonObject().containing("name", "my_compound_index_name") - .containing("unique", true).containing("background", true)); + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getIndexKeys()).containsEntry("foo", 1).containsEntry("bar", -1); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "my_compound_index_name") + .containsEntry("unique", true).containsEntry("background", true); + } + + @Test // DATAMONGO-2112 + public void resolveExpressionIndexNameForCompoundIndex() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + CompoundIndexWithNameExpression.class); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "cmp2name"); + } + + @Test // DATAMONGO-2112 + public void resolveExpressionDefForCompoundIndex() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + CompoundIndexWithDefExpression.class); + + assertThat(indexDefinitions).hasSize(1); + assertIndexPathAndCollection(new String[] { "foo", "bar" }, "compoundIndexWithDefExpression", + indexDefinitions.get(0)); + } + + @Test // DATAMONGO-2067 + public void shouldIdentifyRepeatedAnnotationCorrectly() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + RepeatedCompoundIndex.class); + + assertThat(indexDefinitions).hasSize(2); + 
assertIndexPathAndCollection(new String[] { "firstname", "lastname" }, "repeatedCompoundIndex", + indexDefinitions.get(0)); + assertIndexPathAndCollection(new String[] { "address.city", "address.street" }, "repeatedCompoundIndex", + indexDefinitions.get(1)); + } + + @Test // DATAMONGO-1569 + public void singleIndexWithPartialFilter() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + SingleCompoundIndexWithPartialFilter.class); + + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getIndexKeys()).containsEntry("foo", 1).containsEntry("bar", -1); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "compound_index_with_partial") + .containsEntry("unique", true).containsEntry("background", true); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("partialFilterExpression", + org.bson.Document.parse("{'value': {'$exists': true}}")); + } + + @Test // GH-3002 + public void compoundIndexWithCollation() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + CompoundIndexWithCollation.class); + + IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); + assertThat(indexDefinition.getIndexOptions()) + .isEqualTo(new org.bson.Document().append("name", "compound_index_with_collation").append("collation", + new org.bson.Document().append("locale", "en_US").append("strength", 2))); + assertThat(indexDefinition.getIndexKeys()).isEqualTo(new org.bson.Document().append("foo", 1)); + } + + @Test // GH-3002 + public void compoundIndexWithCollationFromDocumentAnnotation() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithCompoundCollationFromDocument.class); + + IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); + assertThat(indexDefinition.getIndexOptions()) + .isEqualTo(new org.bson.Document().append("name", "compound_index_with_collation").append("collation", + new 
org.bson.Document().append("locale", "en_US").append("strength", 2))); + assertThat(indexDefinition.getIndexKeys()).isEqualTo(new org.bson.Document().append("foo", 1)); + } + + @Test // GH-3002 + public void compoundIndexWithEvaluatedCollationFromAnnotation() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithEvaluatedCollationFromCompoundIndex.class); + + IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); + assertThat(indexDefinition.getIndexOptions()) + .isEqualTo(new org.bson.Document().append("name", "compound_index_with_collation").append("collation", + new org.bson.Document().append("locale", "de_AT"))); + assertThat(indexDefinition.getIndexKeys()).isEqualTo(new org.bson.Document().append("foo", 1)); } @Document("CompoundIndexOnLevelOne") - static class CompoundIndexOnLevelOne { + class CompoundIndexOnLevelOne { CompoundIndexOnLevelZero zero; } @Document("CompoundIndexOnLevelZeroWithEmptyIndexDef") - static class CompoundIndexOnLevelOneWithEmptyIndexDefinition { + class CompoundIndexOnLevelOneWithEmptyIndexDefinition { CompoundIndexOnLevelZeroWithEmptyIndexDef zero; } @Document("CompoundIndexOnLevelZero") @CompoundIndexes({ @CompoundIndex(name = "compound_index", def = "{'foo': 1, 'bar': -1}", background = true, - dropDups = true, sparse = true, unique = true) }) - static class CompoundIndexOnLevelZero {} + sparse = true, unique = true) }) + class CompoundIndexOnLevelZero {} - @CompoundIndexes({ - @CompoundIndex(name = "compound_index", background = true, dropDups = true, sparse = true, unique = true) }) - static class CompoundIndexOnLevelZeroWithEmptyIndexDef {} + @CompoundIndexes({ @CompoundIndex(name = "compound_index", background = true, sparse = true, unique = true) }) + class CompoundIndexOnLevelZeroWithEmptyIndexDef {} @Document("CompoundIndexOnLevelZero") - @CompoundIndex(name = "compound_index", def = "{'foo': 1, 'bar': -1}", background = true, dropDups = true, - sparse = true, unique = true) 
- static class SingleCompoundIndex {} + @CompoundIndex(name = "compound_index", def = "{'foo': 1, 'bar': -1}", background = true, sparse = true, + unique = true) + class SingleCompoundIndex {} - static class IndexDefinedOnSuperClass extends CompoundIndexOnLevelZero { - - } + class IndexDefinedOnSuperClass extends CompoundIndexOnLevelZero {} @Document("ComountIndexWithAutogeneratedName") @CompoundIndexes({ @CompoundIndex(useGeneratedName = true, def = "{'foo': 1, 'bar': -1}", background = true, - dropDups = true, sparse = true, unique = true) }) - static class ComountIndexWithAutogeneratedName { - - } + sparse = true, unique = true) }) + class ComountIndexWithAutogeneratedName {} @Document("WithComposedAnnotation") @ComposedCompoundIndex - static class CompoundIndexDocumentWithComposedAnnotation { - - } + class CompoundIndexDocumentWithComposedAnnotation {} @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.TYPE }) @@ -570,6 +813,37 @@ static class CompoundIndexDocumentWithComposedAnnotation { } + @Document + @CompoundIndex(name = "#{'cmp' + 2 + 'name'}", def = "{'foo': 1, 'bar': -1}") + class CompoundIndexWithNameExpression {} + + @Document + @CompoundIndex(def = "#{T(org.bson.Document).parse(\"{ 'foo': 1, 'bar': -1 }\")}") + class CompoundIndexWithDefExpression {} + + @Document + @CompoundIndex(name = "cmp-idx-one", def = "{'firstname': 1, 'lastname': -1}") + @CompoundIndex(name = "cmp-idx-two", def = "{'address.city': -1, 'address.street': 1}") + class RepeatedCompoundIndex {} + + @Document("SingleCompoundIndexWithPartialFilter") + @CompoundIndex(name = "compound_index_with_partial", def = "{'foo': 1, 'bar': -1}", background = true, + unique = true, partialFilter = "{'value': {'$exists': true}}") + class SingleCompoundIndexWithPartialFilter {} + + @Document + @CompoundIndex(name = "compound_index_with_collation", def = "{'foo': 1}", + collation = "{'locale': 'en_US', 'strength': 2}") + class CompoundIndexWithCollation {} + + @Document(collation = 
"{'locale': 'en_US', 'strength': 2}") + @CompoundIndex(name = "compound_index_with_collation", def = "{'foo': 1}") + class WithCompoundCollationFromDocument {} + + @Document(collation = "{'locale': 'en_US', 'strength': 2}") + @CompoundIndex(name = "compound_index_with_collation", def = "{'foo': 1}", + collation = "#{{ 'locale' : 'de' + '_' + 'AT' }}") + class WithEvaluatedCollationFromCompoundIndex {} } public static class TextIndexedResolutionTests { @@ -579,17 +853,31 @@ public void shouldResolveSingleFieldTextIndexCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( TextIndexOnSinglePropertyInRoot.class); - assertThat(indexDefinitions.size(), equalTo(1)); + + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection("bar", "textIndexOnSinglePropertyInRoot", indexDefinitions.get(0)); + assertThat(indexDefinitions.get(0).getIndexOptions()).doesNotContainKey("collation"); + } + + @Test // DATAMONGO-2316 + public void shouldEnforceSimpleCollationOnTextIndex() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + TextIndexWithCollation.class); + + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("collation", + new org.bson.Document("locale", "simple")); } @Test // DATAMONGO-937 public void shouldResolveMultiFieldTextIndexCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( - TextIndexOnMutiplePropertiesInRoot.class); - assertThat(indexDefinitions.size(), equalTo(1)); - assertIndexPathAndCollection(new String[] { "foo", "bar" }, "textIndexOnMutiplePropertiesInRoot", + TextIndexOnMultiplePropertiesInRoot.class); + + assertThat(indexDefinitions).hasSize(1); + assertIndexPathAndCollection(new String[] { "foo", "bar" }, "textIndexOnMultiplePropertiesInRoot", indexDefinitions.get(0)); } @@ -598,7 +886,7 @@ public void shouldResolveTextIndexOnElementCorrectly() { List indexDefinitions = 
prepareMappingContextAndResolveIndexForType( TextIndexOnNestedRoot.class); - assertThat(indexDefinitions.size(), equalTo(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "nested.foo" }, "textIndexOnNestedRoot", indexDefinitions.get(0)); } @@ -607,12 +895,12 @@ public void shouldResolveTextIndexOnElementWithWeightCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( TextIndexOnNestedWithWeightRoot.class); - assertThat(indexDefinitions.size(), equalTo(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "nested.foo" }, "textIndexOnNestedWithWeightRoot", indexDefinitions.get(0)); org.bson.Document weights = DocumentTestUtils.getAsDocument(indexDefinitions.get(0).getIndexOptions(), "weights"); - assertThat(weights.get("nested.foo"), is((Object) 5F)); + assertThat(weights.get("nested.foo")).isEqualTo(5F); } @Test // DATAMONGO-937 @@ -620,13 +908,13 @@ public void shouldResolveTextIndexOnElementWithMostSpecificWeightCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( TextIndexOnNestedWithMostSpecificValueRoot.class); - assertThat(indexDefinitions.size(), equalTo(1)); + assertThat(indexDefinitions).hasSize(1); assertIndexPathAndCollection(new String[] { "nested.foo", "nested.bar" }, "textIndexOnNestedWithMostSpecificValueRoot", indexDefinitions.get(0)); org.bson.Document weights = DocumentTestUtils.getAsDocument(indexDefinitions.get(0).getIndexOptions(), "weights"); - assertThat(weights.get("nested.foo"), is((Object) 5F)); - assertThat(weights.get("nested.bar"), is((Object) 10F)); + assertThat(weights.get("nested.foo")).isEqualTo(5F); + assertThat(weights.get("nested.bar")).isEqualTo(10F); } @Test // DATAMONGO-937 @@ -634,7 +922,7 @@ public void shouldSetDefaultLanguageCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithDefaultLanguage.class); - 
assertThat(indexDefinitions.get(0).getIndexOptions().get("default_language"), is((Object) "spanish")); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("default_language", "spanish"); } @Test // DATAMONGO-937, DATAMONGO-1049 @@ -642,7 +930,7 @@ public void shouldResolveTextIndexLanguageOverrideCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithLanguageOverride.class); - assertThat(indexDefinitions.get(0).getIndexOptions().get("language_override"), is((Object) "lang")); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("language_override", "lang"); } @Test // DATAMONGO-1049 @@ -650,7 +938,7 @@ public void shouldIgnoreTextIndexLanguageOverrideOnNestedElements() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithLanguageOverrideOnNestedElement.class); - assertThat(indexDefinitions.get(0).getIndexOptions().get("language_override"), is(nullValue())); + assertThat(indexDefinitions.get(0).getIndexOptions().get("language_override")).isNull(); } @Test // DATAMONGO-1049 @@ -658,7 +946,8 @@ public void shouldNotCreateIndexDefinitionWhenOnlyLanguageButNoTextIndexPresent( List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNoTextIndexPropertyButReservedFieldLanguage.class); - assertThat(indexDefinitions, is(empty())); + + assertThat(indexDefinitions).isEmpty(); } @Test // DATAMONGO-1049 @@ -666,7 +955,8 @@ public void shouldNotCreateIndexDefinitionWhenOnlyAnnotatedLanguageButNoTextInde List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNoTextIndexPropertyButReservedFieldLanguageAnnotated.class); - assertThat(indexDefinitions, is(empty())); + + assertThat(indexDefinitions).isEmpty(); } @Test // DATAMONGO-1049 @@ -674,7 +964,8 @@ public void shouldPreferExplicitlyAnnotatedLanguageProperty() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithOverlappingLanguageProps.class); - 
assertThat(indexDefinitions.get(0).getIndexOptions().get("language_override"), is((Object) "lang")); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("language_override", "lang"); } @Test // DATAMONGO-1373 @@ -684,19 +975,25 @@ public void shouldResolveComposedAnnotationCorrectly() { TextIndexedDocumentWithComposedAnnotation.class); org.bson.Document weights = DocumentTestUtils.getAsDocument(indexDefinitions.get(0).getIndexOptions(), "weights"); - assertThat(weights, isBsonObject().containing("foo", 99f)); + assertThat(weights).containsEntry("foo", 99f); } @Document - static class TextIndexOnSinglePropertyInRoot { + class TextIndexOnSinglePropertyInRoot { String foo; @TextIndexed String bar; } + @Document(collation = "de_AT") + class TextIndexWithCollation { + + @TextIndexed String foo; + } + @Document - static class TextIndexOnMutiplePropertiesInRoot { + class TextIndexOnMultiplePropertiesInRoot { @TextIndexed String foo; @@ -704,48 +1001,48 @@ static class TextIndexOnMutiplePropertiesInRoot { } @Document - static class TextIndexOnNestedRoot { + class TextIndexOnNestedRoot { String bar; @TextIndexed TextIndexOnNested nested; } - static class TextIndexOnNested { + class TextIndexOnNested { String foo; } @Document - static class TextIndexOnNestedWithWeightRoot { + class TextIndexOnNestedWithWeightRoot { @TextIndexed(weight = 5) TextIndexOnNested nested; } @Document - static class TextIndexOnNestedWithMostSpecificValueRoot { + class TextIndexOnNestedWithMostSpecificValueRoot { @TextIndexed(weight = 5) TextIndexOnNestedWithMostSpecificValue nested; } - static class TextIndexOnNestedWithMostSpecificValue { + class TextIndexOnNestedWithMostSpecificValue { String foo; @TextIndexed(weight = 10) String bar; } @Document(language = "spanish") - static class DocumentWithDefaultLanguage { + class DocumentWithDefaultLanguage { @TextIndexed String foo; } @Document - static class DocumentWithLanguageOverrideOnNestedElement { + class 
DocumentWithLanguageOverrideOnNestedElement { DocumentWithLanguageOverride nested; } @Document - static class DocumentWithLanguageOverride { + class DocumentWithLanguageOverride { @TextIndexed String foo; @@ -753,19 +1050,19 @@ static class DocumentWithLanguageOverride { } @Document - static class DocumentWithNoTextIndexPropertyButReservedFieldLanguage { + class DocumentWithNoTextIndexPropertyButReservedFieldLanguage { String language; } @Document - static class DocumentWithNoTextIndexPropertyButReservedFieldLanguageAnnotated { + class DocumentWithNoTextIndexPropertyButReservedFieldLanguageAnnotated { @Field("language") String lang; } @Document - static class DocumentWithOverlappingLanguageProps { + class DocumentWithOverlappingLanguageProps { @TextIndexed String foo; String language; @@ -773,7 +1070,7 @@ static class DocumentWithOverlappingLanguageProps { } @Document - static class TextIndexedDocumentWithComposedAnnotation { + class TextIndexedDocumentWithComposedAnnotation { @ComposedTextIndexedAnnotation String foo; String lang; @@ -782,7 +1079,7 @@ static class TextIndexedDocumentWithComposedAnnotation { @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE }) @TextIndexed - static @interface ComposedTextIndexedAnnotation { + @interface ComposedTextIndexedAnnotation { @AliasFor(annotation = TextIndexed.class, attribute = "weight") float heavyweight() default 99f; @@ -796,25 +1093,27 @@ public void multipleIndexesResolvedCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(MixedIndexRoot.class); - assertThat(indexDefinitions, hasSize(2)); - assertThat(indexDefinitions.get(0).getIndexDefinition(), instanceOf(Index.class)); - assertThat(indexDefinitions.get(1).getIndexDefinition(), instanceOf(GeospatialIndex.class)); + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0).getIndexDefinition()).isInstanceOf(Index.class); + 
assertThat(indexDefinitions.get(1).getIndexDefinition()).isInstanceOf(GeospatialIndex.class); } @Test // DATAMONGO-899 public void cyclicPropertyReferenceOverDBRefShouldNotBeTraversed() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(Inner.class); - assertThat(indexDefinitions, hasSize(1)); - assertThat(indexDefinitions.get(0).getIndexDefinition().getIndexKeys(), - equalTo(new org.bson.Document().append("outer", 1))); + + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getIndexDefinition().getIndexKeys()) + .isEqualTo(new org.bson.Document().append("outer", 1)); } @Test // DATAMONGO-899 public void associationsShouldNotBeTraversed() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(Outer.class); - assertThat(indexDefinitions, empty()); + + assertThat(indexDefinitions).isEmpty(); } @Test // DATAMONGO-926 @@ -822,7 +1121,8 @@ public void shouldNotRunIntoStackOverflow() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( CycleStartingInBetween.class); - assertThat(indexDefinitions, hasSize(1)); + + assertThat(indexDefinitions).hasSize(1); } @Test // DATAMONGO-926 @@ -831,7 +1131,7 @@ public void indexShouldBeFoundEvenForCyclePropertyReferenceOnLevelZero() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(CycleLevelZero.class); assertIndexPathAndCollection("indexedProperty", "cycleLevelZero", indexDefinitions.get(0)); assertIndexPathAndCollection("cyclicReference.indexedProperty", "cycleLevelZero", indexDefinitions.get(1)); - assertThat(indexDefinitions, hasSize(2)); + assertThat(indexDefinitions).hasSize(2); } @Test // DATAMONGO-926 @@ -839,7 +1139,7 @@ public void indexShouldBeFoundEvenForCyclePropertyReferenceOnLevelOne() { List indexDefinitions = prepareMappingContextAndResolveIndexForType(CycleOnLevelOne.class); assertIndexPathAndCollection("reference.indexedProperty", "cycleOnLevelOne", indexDefinitions.get(0)); - assertThat(indexDefinitions, 
hasSize(1)); + assertThat(indexDefinitions).hasSize(1); } @Test // DATAMONGO-926 @@ -847,11 +1147,12 @@ public void indexBeResolvedCorrectlyWhenPropertiesOfDifferentTypesAreNamedEquall List indexDefinitions = prepareMappingContextAndResolveIndexForType( NoCycleButIdenticallyNamedProperties.class); + + assertThat(indexDefinitions).hasSize(3); assertIndexPathAndCollection("foo", "noCycleButIdenticallyNamedProperties", indexDefinitions.get(0)); assertIndexPathAndCollection("reference.foo", "noCycleButIdenticallyNamedProperties", indexDefinitions.get(1)); assertIndexPathAndCollection("reference.deep.foo", "noCycleButIdenticallyNamedProperties", indexDefinitions.get(2)); - assertThat(indexDefinitions, hasSize(3)); } @Test // DATAMONGO-949 @@ -860,7 +1161,7 @@ public void shouldNotDetectCycleInSimilarlyNamedProperties() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( SimilarityHolingBean.class); assertIndexPathAndCollection("norm", "similarityHolingBean", indexDefinitions.get(0)); - assertThat(indexDefinitions, hasSize(1)); + assertThat(indexDefinitions).hasSize(1); } @Test // DATAMONGO-962 @@ -868,7 +1169,8 @@ public void shouldDetectSelfCycleViaCollectionTypeCorrectly() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( SelfCyclingViaCollectionType.class); - assertThat(indexDefinitions, empty()); + + assertThat(indexDefinitions).isEmpty(); } @Test // DATAMONGO-962 @@ -876,14 +1178,15 @@ public void shouldNotDetectCycleWhenTypeIsUsedMoreThanOnce() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( MultipleObjectsOfSameType.class); - assertThat(indexDefinitions, empty()); + + assertThat(indexDefinitions).isEmpty(); } @Test // DATAMONGO-962 @SuppressWarnings({ "rawtypes", "unchecked" }) public void shouldCatchCyclicReferenceExceptionOnRoot() { - MongoPersistentEntity entity = new BasicMongoPersistentEntity(ClassTypeInformation.from(Object.class)); + MongoPersistentEntity entity = new 
BasicMongoPersistentEntity<>(ClassTypeInformation.from(Object.class)); MongoPersistentProperty propertyMock = mock(MongoPersistentProperty.class); when(propertyMock.isEntity()).thenReturn(true); @@ -891,7 +1194,7 @@ public void shouldCatchCyclicReferenceExceptionOnRoot() { when(propertyMock.getActualType()).thenThrow( new MongoPersistentEntityIndexResolver.CyclicPropertyReferenceException("foo", Object.class, "bar")); - MongoPersistentEntity selfCyclingEntity = new BasicMongoPersistentEntity( + MongoPersistentEntity selfCyclingEntity = new BasicMongoPersistentEntity<>( ClassTypeInformation.from(SelfCyclingViaCollectionType.class)); new MongoPersistentEntityIndexResolver(prepareMappingContext(SelfCyclingViaCollectionType.class)) @@ -904,9 +1207,9 @@ public void shouldAllowMultiplePathsToDeeplyType() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( NoCycleManyPathsToDeepValueObject.class); + assertThat(indexDefinitions).hasSize(2); assertIndexPathAndCollection("l3.valueObject.value", "rules", indexDefinitions.get(0)); assertIndexPathAndCollection("l2.l3.valueObject.value", "rules", indexDefinitions.get(1)); - assertThat(indexDefinitions, hasSize(2)); } @Test // DATAMONGO-1025 @@ -914,8 +1217,9 @@ public void shouldUsePathIndexAsIndexNameForDocumentsHavingNamedNestedCompoundIn List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNestedDocumentHavingNamedCompoundIndex.class); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), - equalTo("propertyOfTypeHavingNamedCompoundIndex.c_index")); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", + "propertyOfTypeHavingNamedCompoundIndex.c_index"); } @Test // DATAMONGO-1025 @@ -923,8 +1227,8 @@ public void shouldUseIndexNameForNestedTypesWithNamedCompoundIndexDefinition() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNestedTypeHavingNamedCompoundIndex.class); - assertThat((String) 
indexDefinitions.get(0).getIndexOptions().get("name"), - equalTo("propertyOfTypeHavingNamedCompoundIndex.c_index")); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", + "propertyOfTypeHavingNamedCompoundIndex.c_index"); } @Test // DATAMONGO-1025 @@ -932,8 +1236,9 @@ public void shouldUsePathIndexAsIndexNameForDocumentsHavingNamedNestedIndexFixed List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNestedDocumentHavingNamedIndex.class); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), - equalTo("propertyOfTypeHavingNamedIndex.property_index")); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", + "propertyOfTypeHavingNamedIndex.property_index"); } @Test // DATAMONGO-1025 @@ -941,8 +1246,9 @@ public void shouldUseIndexNameForNestedTypesWithNamedIndexDefinition() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNestedTypeHavingNamedIndex.class); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), - equalTo("propertyOfTypeHavingNamedIndex.property_index")); + + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", + "propertyOfTypeHavingNamedIndex.property_index"); } @Test // DATAMONGO-1025 @@ -950,7 +1256,7 @@ public void shouldUseIndexNameOnRootLevel() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( DocumentWithNamedIndex.class); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("property_index")); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "property_index"); } @Test // DATAMONGO-1087 @@ -959,9 +1265,9 @@ public void shouldAllowMultiplePropertiesOfSameTypeWithMatchingStartLettersOnRoo List indexDefinitions = prepareMappingContextAndResolveIndexForType( MultiplePropertiesOfSameTypeWithMatchingStartLetters.class); - assertThat(indexDefinitions, hasSize(2)); - assertThat((String) 
indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("name.component")); - assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"), equalTo("nameLast.component")); + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "name.component"); + assertThat(indexDefinitions.get(1).getIndexOptions()).containsEntry("name", "nameLast.component"); } @Test // DATAMONGO-1087 @@ -970,9 +1276,9 @@ public void shouldAllowMultiplePropertiesOfSameTypeWithMatchingStartLettersOnNes List indexDefinitions = prepareMappingContextAndResolveIndexForType( MultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty.class); - assertThat(indexDefinitions, hasSize(2)); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("component.nameLast")); - assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"), equalTo("component.name")); + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "component.nameLast"); + assertThat(indexDefinitions.get(1).getIndexOptions()).containsEntry("name", "component.name"); } @Test // DATAMONGO-1121 @@ -981,11 +1287,10 @@ public void shouldOnlyConsiderEntitiesAsPotentialCycleCandidates() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( OuterDocumentReferingToIndexedPropertyViaDifferentNonCyclingPaths.class); - assertThat(indexDefinitions, hasSize(2)); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), equalTo("path1.foo")); - assertThat((String) indexDefinitions.get(1).getIndexOptions().get("name"), - equalTo("path2.propertyWithIndexedStructure.foo")); - + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", "path1.foo"); + assertThat(indexDefinitions.get(1).getIndexOptions()).containsEntry("name", + 
"path2.propertyWithIndexedStructure.foo"); } @Test // DATAMONGO-1263 @@ -994,110 +1299,312 @@ public void shouldConsiderGenericTypeArgumentsOfCollectionElements() { List indexDefinitions = prepareMappingContextAndResolveIndexForType( EntityWithGenericTypeWrapperAsElement.class); - assertThat(indexDefinitions, hasSize(1)); - assertThat((String) indexDefinitions.get(0).getIndexOptions().get("name"), - equalTo("listWithGeneircTypeElement.entity.property_index")); + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0).getIndexOptions()).containsEntry("name", + "listWithGeneircTypeElement.entity.property_index"); + } + + @Test // DATAMONGO-1183 + public void hashedIndexOnId() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithHashedIndexOnId.class); + + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).hasSize(1).containsEntry("_id", "hashed"); + }); + } + + @Test // DATAMONGO-1183 + public void hashedIndex() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType(WithHashedIndex.class); + + assertThat(indexDefinitions).hasSize(1); + assertThat(indexDefinitions.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).hasSize(1).containsEntry("value", "hashed"); + }); + } + + @Test // DATAMONGO-1183 + public void hashedIndexAndIndex() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithHashedIndexAndIndex.class); + + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value", 1); + }); + assertThat(indexDefinitions.get(1)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value", "hashed"); + }); + } + + @Test // DATAMONGO-1183 + public void hashedIndexAndIndexViaComposedAnnotation() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + 
WithComposedHashedIndexAndIndex.class); + + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value", 1); + assertThat(it.getIndexOptions()).containsEntry("name", "idx-name"); + }); + assertThat(indexDefinitions.get(1)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value", "hashed"); + }); + } + + @Test // DATAMONGO-1902 + public void resolvedIndexOnUnwrappedType() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType(WithUnwrapped.class, + UnwrappableType.class); + + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("stringValue", 1); + }); + assertThat(indexDefinitions.get(1)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("with-at-field-annotation", 1); + }); + } + + @Test // DATAMONGO-1902 + public void resolvedIndexOnNestedUnwrappedType() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WrapperAroundWithUnwrapped.class, WithUnwrapped.class, UnwrappableType.class); + + assertThat(indexDefinitions).hasSize(2); + assertThat(indexDefinitions.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("withEmbedded.stringValue", 1); + }); + assertThat(indexDefinitions.get(1)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("withEmbedded.with-at-field-annotation", 1); + }); + } + + @Test // DATAMONGO-1902 + public void errorsOnIndexOnEmbedded() { + + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> prepareMappingContextAndResolveIndexForType(InvalidIndexOnUnwrapped.class)); + + } + + @Test // GH-3225 + public void resolvesWildcardOnRoot() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardIndexOnEntity.class); + assertThat(indices).hasSize(1); + assertThat(indices.get(0)).satisfies(it -> { + 
assertThat(it.getIndexKeys()).containsEntry("$**", 1); + assertThat(it.getIndexOptions()).isEmpty(); + }); + } + + @Test // GH-3225 + public void resolvesWildcardWithProjectionOnRoot() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardIndexHavingProjectionOnEntity.class); + assertThat(indices).hasSize(1); + assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("$**", 1); + assertThat(it.getIndexOptions()).containsEntry("wildcardProjection", + org.bson.Document.parse("{'_id' : 1, 'value' : 0}")); + }); + } + + @Test // GH-3225 + public void resolvesWildcardOnProperty() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardIndexOnProperty.class); + assertThat(indices).hasSize(3); + assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value.$**", 1); + }); + assertThat(indices.get(1)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("the_field.$**", 1); + }); + assertThat(indices.get(2)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("withOptions.$**", 1); + assertThat(it.getIndexOptions()).containsEntry("name", "withOptions.idx") + .containsEntry("collation", new org.bson.Document("locale", "en_US")) + .containsEntry("partialFilterExpression", new org.bson.Document("$eq", 1)); + }); + } + + @Test // GH-3225 + public void resolvesWildcardTypeOfNestedProperty() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardOnEntityOfNested.class); + assertThat(indices).hasSize(1); + assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value.$**", 1); + assertThat(it.getIndexOptions()).hasSize(1).containsKey("name"); + }); + } + + @Test // GH-3225 + public void rejectsWildcardProjectionOnNestedPaths() { + + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> { + 
prepareMappingContextAndResolveIndexForType(WildcardIndexedProjectionOnNestedPath.class); + }); + } + + @Test // GH-3914 + public void shouldSkipMapStructuresUnlessAnnotatedWithWildcardIndex() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithMapStructures.class); + + assertThat(indexDefinitions).hasSize(1); + } + + @Test // GH-3002 + public void indexedWithCollation() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithCollationFromIndexedAnnotation.class); + + IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); + assertThat(indexDefinition.getIndexOptions()) + .isEqualTo(new org.bson.Document().append("name", "value").append("unique", true).append("collation", + new org.bson.Document().append("locale", "en_US").append("strength", 2))); + } + + @Test // GH-3002 + public void indexedWithCollationFromDocumentAnnotation() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithCollationFromDocumentAnnotation.class); + + IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); + assertThat(indexDefinition.getIndexOptions()) + .isEqualTo(new org.bson.Document().append("name", "value").append("unique", true).append("collation", + new org.bson.Document().append("locale", "en_US").append("strength", 2))); + } + + @Test // GH-3002 + public void indexedWithEvaluatedCollation() { + + List indexDefinitions = prepareMappingContextAndResolveIndexForType( + WithEvaluatedCollationFromIndexedAnnotation.class); + + IndexDefinition indexDefinition = indexDefinitions.get(0).getIndexDefinition(); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new org.bson.Document().append("name", "value") + .append("collation", new org.bson.Document().append("locale", "de_AT"))); } @Document - static class MixedIndexRoot { + class MixedIndexRoot { @Indexed String first; NestedGeoIndex nestedGeo; } - static class NestedGeoIndex { + class NestedGeoIndex { 
@GeoSpatialIndexed Point location; } @Document - static class Outer { + class Outer { @DBRef Inner inner; } @Document - static class Inner { + class Inner { @Indexed Outer outer; } @Document - static class CycleLevelZero { + class CycleLevelZero { @Indexed String indexedProperty; CycleLevelZero cyclicReference; } @Document - static class CycleOnLevelOne { + class CycleOnLevelOne { CycleOnLevelOneReferenced reference; } - static class CycleOnLevelOneReferenced { + class CycleOnLevelOneReferenced { @Indexed String indexedProperty; CycleOnLevelOne cyclicReference; } @Document - public static class CycleStartingInBetween { + static class CycleStartingInBetween { CycleOnLevelOne referenceToCycleStart; } @Document - static class NoCycleButIdenticallyNamedProperties { + class NoCycleButIdenticallyNamedProperties { @Indexed String foo; NoCycleButIdenticallyNamedPropertiesNested reference; } - static class NoCycleButIdenticallyNamedPropertiesNested { + class NoCycleButIdenticallyNamedPropertiesNested { @Indexed String foo; NoCycleButIndenticallNamedPropertiesDeeplyNested deep; } - static class NoCycleButIndenticallNamedPropertiesDeeplyNested { + class NoCycleButIndenticallNamedPropertiesDeeplyNested { @Indexed String foo; } @Document("rules") - static class NoCycleManyPathsToDeepValueObject { + class NoCycleManyPathsToDeepValueObject { private NoCycleLevel3 l3; private NoCycleLevel2 l2; } - static class NoCycleLevel2 { + class NoCycleLevel2 { private NoCycleLevel3 l3; } - static class NoCycleLevel3 { + class NoCycleLevel3 { private ValueObject valueObject; } - static class ValueObject { + class ValueObject { @Indexed private String value; } @Document - static class SimilarityHolingBean { + class SimilarityHolingBean { - @Indexed @Field("norm") String normalProperty; + @Indexed + @Field("norm") String normalProperty; @Field("similarityL") private List listOfSimilarilyNamedEntities = null; } - static class SimilaritySibling { + class SimilaritySibling { @Field("similarity") 
private String similarThoughNotEqualNamedProperty; } @Document - static class MultipleObjectsOfSameType { + class MultipleObjectsOfSameType { SelfCyclingViaCollectionType cycleOne; @@ -1105,7 +1612,7 @@ static class MultipleObjectsOfSameType { } @Document - static class SelfCyclingViaCollectionType { + class SelfCyclingViaCollectionType { List cyclic; @@ -1113,55 +1620,55 @@ static class SelfCyclingViaCollectionType { @Document @CompoundIndex(name = "c_index", def = "{ foo:1, bar:1 }") - static class DocumentWithNamedCompoundIndex { + class DocumentWithNamedCompoundIndex { String property; } @Document - static class DocumentWithNamedIndex { + class DocumentWithNamedIndex { @Indexed(name = "property_index") String property; } - static class TypeWithNamedIndex { + class TypeWithNamedIndex { @Indexed(name = "property_index") String property; } @Document - static class DocumentWithNestedDocumentHavingNamedCompoundIndex { + class DocumentWithNestedDocumentHavingNamedCompoundIndex { DocumentWithNamedCompoundIndex propertyOfTypeHavingNamedCompoundIndex; } @CompoundIndex(name = "c_index", def = "{ foo:1, bar:1 }") - static class TypeWithNamedCompoundIndex { + class TypeWithNamedCompoundIndex { String property; } @Document - static class DocumentWithNestedTypeHavingNamedCompoundIndex { + class DocumentWithNestedTypeHavingNamedCompoundIndex { TypeWithNamedCompoundIndex propertyOfTypeHavingNamedCompoundIndex; } @Document - static class DocumentWithNestedDocumentHavingNamedIndex { + class DocumentWithNestedDocumentHavingNamedIndex { DocumentWithNamedIndex propertyOfTypeHavingNamedIndex; } @Document - static class DocumentWithNestedTypeHavingNamedIndex { + class DocumentWithNestedTypeHavingNamedIndex { TypeWithNamedIndex propertyOfTypeHavingNamedIndex; } @Document - public class MultiplePropertiesOfSameTypeWithMatchingStartLetters { + class MultiplePropertiesOfSameTypeWithMatchingStartLetters { - public class NameComponent { + class NameComponent { @Indexed String component; } 
@@ -1171,9 +1678,9 @@ public class NameComponent { } @Document - public class MultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty { + class MultiplePropertiesOfSameTypeWithMatchingStartLettersOnNestedProperty { - public class NameComponent { + class NameComponent { @Indexed String nameLast; @Indexed String name; @@ -1183,37 +1690,182 @@ public class NameComponent { } @Document - public static class OuterDocumentReferingToIndexedPropertyViaDifferentNonCyclingPaths { + static class OuterDocumentReferingToIndexedPropertyViaDifferentNonCyclingPaths { NoCycleButIndenticallNamedPropertiesDeeplyNested path1; AlternatePathToNoCycleButIndenticallNamedPropertiesDeeplyNestedDocument path2; } - public static class AlternatePathToNoCycleButIndenticallNamedPropertiesDeeplyNestedDocument { + @Document + static class WrapperAroundWithUnwrapped { + + String id; + WithUnwrapped withEmbedded; + } + + @Document + static class WithUnwrapped { + + String id; + + @Unwrapped.Nullable UnwrappableType unwrappableType; + } + + @Document + class InvalidIndexOnUnwrapped { + + @Indexed // + @Unwrapped.Nullable // + UnwrappableType unwrappableType; + + } + + static class UnwrappableType { + + @Indexed String stringValue; + + List listValue; + + @Indexed // + @Field("with-at-field-annotation") // + String atFieldAnnotatedValue; + } + + static class AlternatePathToNoCycleButIndenticallNamedPropertiesDeeplyNestedDocument { NoCycleButIndenticallNamedPropertiesDeeplyNested propertyWithIndexedStructure; } - static class GenericEntityWrapper { + class GenericEntityWrapper { T entity; } @Document - static class EntityWithGenericTypeWrapperAsElement { + class WithMapStructures { + Map rootMap; + NestedInMapWithStructures nested; + ValueObject plainValue; + } + + class NestedInMapWithStructures { + Map nestedMap; + } + + @Document + class EntityWithGenericTypeWrapperAsElement { List> listWithGeneircTypeElement; } + + @Document + class WithHashedIndexOnId { + + @HashIndexed + @Id String 
id; + } + + @Document + class WithHashedIndex { + + @HashIndexed String value; + } + + @Document + @WildcardIndexed + class WithWildCardIndexOnEntity { + + String value; + } + + @Document + @WildcardIndexed(wildcardProjection = "{'_id' : 1, 'value' : 0}") + class WithWildCardIndexHavingProjectionOnEntity { + + String value; + } + + @Document + class WithWildCardIndexOnProperty { + + @WildcardIndexed // + Map value; + + @WildcardIndexed // + @Field("the_field") // + Map renamedField; + + @WildcardIndexed(name = "idx", partialFilter = "{ '$eq' : 1 }", collation = "en_US") // + Map withOptions; + + } + + @Document + class WildcardIndexedProjectionOnNestedPath { + + @WildcardIndexed(wildcardProjection = "{}") String foo; + } + + @Document + class WithWildCardOnEntityOfNested { + + WithWildCardIndexHavingProjectionOnEntity value; + + } + + @Document + class WithHashedIndexAndIndex { + + @Indexed // + @HashIndexed // + String value; + } + + @Document + class WithComposedHashedIndexAndIndex { + + @ComposedHashIndexed(name = "idx-name") String value; + } + + @Document + class WithCollationFromIndexedAnnotation { + + @Indexed(collation = "{'locale': 'en_US', 'strength': 2}", unique = true) // + private String value; + } + + @Document(collation = "{'locale': 'en_US', 'strength': 2}") + class WithCollationFromDocumentAnnotation { + + @Indexed(unique = true) // + private String value; + } + + @Document(collation = "en_US") + class WithEvaluatedCollationFromIndexedAnnotation { + + @Indexed(collation = "#{{'locale' : 'de' + '_' + 'AT'}}") // + private String value; + } + + @HashIndexed + @Indexed + @Retention(RetentionPolicy.RUNTIME) + @interface ComposedHashIndexed { + + @AliasFor(annotation = Indexed.class, attribute = "name") + String name() default ""; + } } - private static List prepareMappingContextAndResolveIndexForType(Class type) { + private static List prepareMappingContextAndResolveIndexForType(Class... 
types) { - MongoMappingContext mappingContext = prepareMappingContext(type); + MongoMappingContext mappingContext = prepareMappingContext(types); MongoPersistentEntityIndexResolver resolver = new MongoPersistentEntityIndexResolver(mappingContext); - return resolver.resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(type)); + return resolver.resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(types[0])); } - private static MongoMappingContext prepareMappingContext(Class type) { + private static MongoMappingContext prepareMappingContext(Class... types) { MongoMappingContext mappingContext = new MongoMappingContext(); - mappingContext.setInitialEntitySet(Collections.singleton(type)); + mappingContext.setInitialEntitySet(new LinkedHashSet<>(Arrays.asList(types))); mappingContext.initialize(); return mappingContext; @@ -1228,9 +1880,9 @@ private static void assertIndexPathAndCollection(String[] expectedPaths, String IndexDefinitionHolder holder) { for (String expectedPath : expectedPaths) { - assertThat(holder.getIndexDefinition().getIndexKeys().containsKey(expectedPath), equalTo(true)); + assertThat(holder.getIndexDefinition().getIndexKeys()).containsKey(expectedPath); } - assertThat(holder.getCollection(), equalTo(expectedCollection)); + assertThat(holder.getCollection()).isEqualTo(expectedCollection); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/PathUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/PathUnitTests.java index ccf455e33f..d8f6b9b698 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/PathUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/PathUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,7 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import org.junit.Before; @@ -25,6 +24,7 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; + import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.CycleGuard.Path; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; @@ -54,9 +54,9 @@ public void shouldIdentifyCycle() { Path path = Path.of(foo).append(bar).append(bar); - assertThat(path.isCycle(), is(true)); - assertThat(path.toCyclePath(), is(equalTo("bar -> bar"))); - assertThat(path.toString(), is(equalTo("foo -> bar -> bar"))); + assertThat(path.isCycle()).isTrue(); + assertThat(path.toCyclePath()).isEqualTo("bar -> bar"); + assertThat(path.toString()).isEqualTo("foo -> bar -> bar"); } @Test // DATAMONGO-1782 @@ -67,9 +67,9 @@ public void isCycleShouldReturnFalseWhenNoCyclePresent() { Path path = Path.of(foo).append(bar); - assertThat(path.isCycle(), is(false)); - assertThat(path.toCyclePath(), is(equalTo(""))); - assertThat(path.toString(), is(equalTo("foo -> bar"))); + assertThat(path.isCycle()).isFalse(); + assertThat(path.toCyclePath()).isEqualTo(""); + assertThat(path.toString()).isEqualTo("foo -> bar"); } @Test // DATAMONGO-1782 @@ -79,7 +79,7 @@ public void 
isCycleShouldReturnFalseCycleForNonEqualProperties() { MongoPersistentProperty bar = createPersistentPropertyMock(entityMock, "bar"); MongoPersistentProperty bar2 = createPersistentPropertyMock(mock(MongoPersistentEntity.class), "bar"); - assertThat(Path.of(foo).append(bar).append(bar2).isCycle(), is(false)); + assertThat(Path.of(foo).append(bar).append(bar2).isCycle()).isFalse(); } @SuppressWarnings({ "rawtypes", "unchecked" }) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/ReactiveMongoPersistentEntityIndexCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/ReactiveMongoPersistentEntityIndexCreatorUnitTests.java new file mode 100644 index 0000000000..4b4693ed75 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/ReactiveMongoPersistentEntityIndexCreatorUnitTests.java @@ -0,0 +1,152 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.index; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.Collections; +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.dao.DataAccessResourceFailureException; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +import com.mongodb.MongoException; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Unit tests for {@link ReactiveMongoPersistentEntityIndexCreator}. 
+ * + * @author Mark Paluch + * @author Mathieu Ouellet + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +public class ReactiveMongoPersistentEntityIndexCreatorUnitTests { + + private ReactiveIndexOperations indexOperations; + + @Mock ReactiveMongoDatabaseFactory factory; + @Mock MongoDatabase db; + @Mock MongoCollection collection; + + private ArgumentCaptor keysCaptor; + private ArgumentCaptor optionsCaptor; + private ArgumentCaptor collectionCaptor; + + @BeforeEach + @SuppressWarnings("unchecked") + void setUp() { + + when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + when(factory.getMongoDatabase()).thenReturn(Mono.just(db)); + when(db.getCollection(any(), any(Class.class))).thenReturn(collection); + + indexOperations = new ReactiveMongoTemplate(factory).indexOps("foo"); + + keysCaptor = ArgumentCaptor.forClass(org.bson.Document.class); + optionsCaptor = ArgumentCaptor.forClass(IndexOptions.class); + collectionCaptor = ArgumentCaptor.forClass(String.class); + + when(collection.createIndex(keysCaptor.capture(), optionsCaptor.capture())).thenReturn(Mono.just("OK")); + } + + @Test // DATAMONGO-1928 + void buildsIndexDefinitionUsingFieldName() { + + MongoMappingContext mappingContext = prepareMappingContext(Person.class); + + Mono publisher = checkForIndexes(mappingContext); + + verifyNoInteractions(collection); + + publisher.as(StepVerifier::create).verifyComplete(); + + assertThat(keysCaptor.getValue()).isNotNull().containsKey("fieldname"); + assertThat(optionsCaptor.getValue().getName()).isEqualTo("indexName"); + assertThat(optionsCaptor.getValue().isBackground()).isFalse(); + assertThat(optionsCaptor.getValue().getExpireAfter(TimeUnit.SECONDS)).isNull(); + } + + @Test // DATAMONGO-1928 + void createIndexShouldUsePersistenceExceptionTranslatorForNonDataIntegrityConcerns() { + + when(collection.createIndex(any(org.bson.Document.class), any(IndexOptions.class))) + 
.thenReturn(Mono.error(new MongoException(6, "HostUnreachable"))); + + MongoMappingContext mappingContext = prepareMappingContext(Person.class); + + Mono publisher = checkForIndexes(mappingContext); + + publisher.as(StepVerifier::create).expectError(DataAccessResourceFailureException.class).verify(); + } + + @Test // DATAMONGO-1928 + void createIndexShouldNotConvertUnknownExceptionTypes() { + + when(collection.createIndex(any(org.bson.Document.class), any(IndexOptions.class))) + .thenReturn(Mono.error(new ClassCastException("o_O"))); + + MongoMappingContext mappingContext = prepareMappingContext(Person.class); + + Mono publisher = checkForIndexes(mappingContext); + + publisher.as(StepVerifier::create).expectError(ClassCastException.class).verify(); + } + + private static MongoMappingContext prepareMappingContext(Class type) { + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setInitialEntitySet(Collections.singleton(type)); + mappingContext.initialize(); + + return mappingContext; + } + + private Mono checkForIndexes(MongoMappingContext mappingContext) { + + return new ReactiveMongoPersistentEntityIndexCreator(mappingContext, it -> indexOperations) + .checkForIndexes(mappingContext.getRequiredPersistentEntity(Person.class)); + } + + @Document + static class Person { + + @Indexed(name = "indexName") // + @Field("fieldname") // + String field; + + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SampleEntity.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SampleEntity.java index a099c4cb08..e419a75012 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SampleEntity.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SampleEntity.java @@ -6,9 +6,7 @@ @Document public class SampleEntity { - @Id - String id; + @Id String id; - @Indexed - String prop; + @Indexed String prop; } 
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SearchIndexInfoUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SearchIndexInfoUnitTests.java new file mode 100644 index 0000000000..1d7e5b63b6 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/SearchIndexInfoUnitTests.java @@ -0,0 +1,90 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.index; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +/** + * @author Christoph Strobl + */ +class SearchIndexInfoUnitTests { + + @ParameterizedTest + @ValueSource(strings = { """ + { + "id": "679b7637a580c270015ef6fb", + "name": "vector_index", + "type": "vectorSearch", + "status": "READY", + "queryable": true, + "latestVersion": 0, + "latestDefinition": { + "fields": [ + { + "type": "vector", + "path": "plot_embedding", + "numDimensions": 1536, + "similarity": "euclidean" + } + ] + } + }""", """ + { + id: '648b4ad4d697b73bf9d2e5e1', + name: 'search-index', + status: 'PENDING', + queryable: false, + latestDefinition: { + mappings: { dynamic: false, fields: { text: { type: 'string' } } } + } + }""", """ + { + name: 'search-index-not-yet-created', + definition: { + mappings: { dynamic: false, fields: { text: { type: 'string' } } } + } + }""", """ + { + name: 'vector-index-with-filter', + type: "vectorSearch", + definition: { + fields: [ + { + type: "vector", + path: "plot_embedding", + numDimensions: 1536, + similarity: "euclidean" + }, { + type: "filter", + path: "year" + } + ] + } + }""" }) + void parsesIndexInfo(String indexInfoSource) { + + SearchIndexInfo indexInfo = SearchIndexInfo.parse(indexInfoSource); + + if (indexInfo.getId() != null) { + assertThat(indexInfo.getId()).isInstanceOf(String.class); + } + assertThat(indexInfo.getStatus()).isNotNull(); + assertThat(indexInfo.getIndexDefinition()).isNotNull(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/TextIndexTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/TextIndexTests.java index 752ea249ec..aa37b8bced 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/TextIndexTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/TextIndexTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,73 +15,80 @@ */ package org.springframework.data.mongodb.core.index; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.List; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.domain.Sort.Direction; -import org.springframework.data.mongodb.config.AbstractIntegrationTests; -import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.CollectionOptions; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Language; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.Version; - -import com.mongodb.WriteConcern; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; /** * @author Christoph Strobl + * @author Mark 
Paluch */ -public class TextIndexTests extends AbstractIntegrationTests { +@ExtendWith(MongoTemplateExtension.class) +public class TextIndexTests { - public static @ClassRule MongoVersionRule version = MongoVersionRule.atLeast(new Version(2, 6)); + @Template(initialEntitySet = TextIndexedDocumentRoot.class) + static MongoTestTemplate template; - private @Autowired MongoTemplate template; private IndexOperations indexOps; - @Before - public void setUp() throws Exception { + @BeforeEach + public void beforeEach() throws Exception { - template.setWriteConcern(WriteConcern.FSYNC_SAFE); this.indexOps = template.indexOps(TextIndexedDocumentRoot.class); + + template.dropDatabase(); + + template.createCollection(TextIndexedDocumentRoot.class, + CollectionOptions.empty().collation(Collation.of("de_AT"))); } - @Test // DATAMONGO-937 + @Test // DATAMONGO-937, DATAMONGO-2316 public void indexInfoShouldHaveBeenCreatedCorrectly() { + IndexResolver indexResolver = IndexResolver.create(template.getConverter().getMappingContext()); + + for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(TextIndexedDocumentRoot.class)) { + indexOps.ensureIndex(indexDefinition); + } + List indexInfos = indexOps.getIndexInfo(); - assertThat(indexInfos.size(), is(2)); + assertThat(indexInfos.size()).isEqualTo(2); List fields = indexInfos.get(0).getIndexFields(); - assertThat(fields.size(), is(1)); - assertThat(fields, hasItem(IndexField.create("_id", Direction.ASC))); + assertThat(fields).containsExactly(IndexField.create("_id", Direction.ASC)); IndexInfo textIndexInfo = indexInfos.get(1); List textIndexFields = textIndexInfo.getIndexFields(); - assertThat(textIndexFields.size(), is(4)); - assertThat(textIndexFields, hasItem(IndexField.text("textIndexedPropertyWithDefaultWeight", 1F))); - assertThat(textIndexFields, hasItem(IndexField.text("textIndexedPropertyWithWeight", 5F))); - assertThat(textIndexFields, hasItem(IndexField.text("nestedDocument.textIndexedPropertyInNestedDocument", 
1F))); - assertThat(textIndexFields, hasItem(IndexField.create("_ftsx", Direction.ASC))); - assertThat(textIndexInfo.getLanguage(), is("spanish")); + assertThat(textIndexFields).hasSize(4).contains(IndexField.text("textIndexedPropertyWithDefaultWeight", 1F), + IndexField.text("textIndexedPropertyWithWeight", 5F), + IndexField.text("nestedDocument.textIndexedPropertyInNestedDocument", 1F), + IndexField.create("_ftsx", Direction.ASC)); + assertThat(textIndexInfo.getLanguage()).isEqualTo("spanish"); } - @Document(language = "spanish") + @Document(language = "spanish", collation = "de_AT") static class TextIndexedDocumentRoot { @TextIndexed String textIndexedPropertyWithDefaultWeight; @TextIndexed(weight = 5) String textIndexedPropertyWithWeight; - TextIndexedDocumentWihtLanguageOverride nestedDocument; + TextIndexedDocumentWithLanguageOverride nestedDocument; } - static class TextIndexedDocumentWihtLanguageOverride { + static class TextIndexedDocumentWithLanguageOverride { @Language String lang; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/VectorIndexIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/VectorIndexIntegrationTests.java new file mode 100644 index 0000000000..dcd447f81a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/VectorIndexIntegrationTests.java @@ -0,0 +1,223 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import static org.assertj.core.api.Assertions.*; +import static org.awaitility.Awaitility.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; + +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.index.VectorIndex.SimilarityFunction; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.test.util.AtlasContainer; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.lang.Nullable; + +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; + +import com.mongodb.ConnectionString; +import com.mongodb.client.AggregateIterable; + +/** + * Integration tests for vector index creation. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +@Testcontainers(disabledWithoutDocker = true) +class VectorIndexIntegrationTests { + + private static final @Container AtlasContainer atlasLocal = AtlasContainer.bestMatch(); + + MongoTestTemplate template = new MongoTestTemplate(cfg -> { + cfg.configureDatabaseFactory(ctx -> { + ctx.client(MongoTestUtils.client(new ConnectionString(atlasLocal.getConnectionString()))); + }); + cfg.configureMappingContext(ctx -> { + ctx.initialEntitySet(Movie.class); + }); + }); + + SearchIndexOperations indexOps; + + @BeforeEach + void init() { + template.createCollection(Movie.class); + indexOps = template.searchIndexOps(Movie.class); + } + + @AfterEach + void cleanup() { + + template.searchIndexOps(Movie.class).dropAllIndexes(); + template.dropCollection(Movie.class); + } + + @ParameterizedTest // GH-4706 + @ValueSource(strings = { "euclidean", "cosine", "dotProduct" }) + void createsSimpleVectorIndex(String similarityFunction) { + + VectorIndex idx = new VectorIndex("vector_index").addVector("plotEmbedding", + builder -> builder.dimensions(1536).similarity(similarityFunction)); + + indexOps.createIndex(idx); + + await().untilAsserted(() -> { + Document raw = readRawIndexInfo(idx.getName()); + assertThat(raw).containsEntry("name", idx.getName()) // + .containsEntry("type", "vectorSearch") // + .containsEntry("latestDefinition.fields.[0].type", "vector") // + .containsEntry("latestDefinition.fields.[0].path", "plot_embedding") // + .containsEntry("latestDefinition.fields.[0].numDimensions", 1536) // + .containsEntry("latestDefinition.fields.[0].similarity", similarityFunction); // + }); + } + + @Test // GH-4706 + void dropIndex() { + + VectorIndex idx = new VectorIndex("vector_index").addVector("plotEmbedding", + builder -> builder.dimensions(1536).similarity("cosine")); + + indexOps.createIndex(idx); + + template.awaitIndexCreation(Movie.class, idx.getName()); + + indexOps.dropIndex(idx.getName()); + + 
assertThat(readRawIndexInfo(idx.getName())).isNull(); + } + + @Test // GH-4706 + void statusChanges() throws InterruptedException { + + String indexName = "vector_index"; + assertThat(indexOps.status(indexName)).isEqualTo(SearchIndexStatus.DOES_NOT_EXIST); + + VectorIndex idx = new VectorIndex(indexName).addVector("plotEmbedding", + builder -> builder.dimensions(1536).similarity("cosine")); + + indexOps.createIndex(idx); + + // without synchronization, the container might crash. + Thread.sleep(500); + + assertThat(indexOps.status(indexName)).isIn(SearchIndexStatus.PENDING, SearchIndexStatus.BUILDING, + SearchIndexStatus.READY); + } + + @Test // GH-4706 + void exists() throws InterruptedException { + + String indexName = "vector_index"; + assertThat(indexOps.exists(indexName)).isFalse(); + + VectorIndex idx = new VectorIndex(indexName).addVector("plotEmbedding", + builder -> builder.dimensions(1536).similarity("cosine")); + + indexOps.createIndex(idx); + + // without synchronization, the container might crash. + Thread.sleep(500); + + assertThat(indexOps.exists(indexName)).isTrue(); + } + + @Test // GH-4706 + void updatesVectorIndex() throws InterruptedException { + + String indexName = "vector_index"; + VectorIndex idx = new VectorIndex(indexName).addVector("plotEmbedding", + builder -> builder.dimensions(1536).similarity("cosine")); + + indexOps.createIndex(idx); + + // without synchronization, the container might crash. 
+ Thread.sleep(500); + + await().untilAsserted(() -> { + Document raw = readRawIndexInfo(idx.getName()); + assertThat(raw).containsEntry("name", idx.getName()) // + .containsEntry("type", "vectorSearch") // + .containsEntry("latestDefinition.fields.[0].type", "vector") // + .containsEntry("latestDefinition.fields.[0].path", "plot_embedding") // + .containsEntry("latestDefinition.fields.[0].numDimensions", 1536) // + .containsEntry("latestDefinition.fields.[0].similarity", "cosine"); // + }); + + VectorIndex updatedIdx = new VectorIndex(indexName).addVector("plotEmbedding", + builder -> builder.dimensions(1536).similarity(SimilarityFunction.DOT_PRODUCT)); + + // updating vector index does currently not work, one needs to delete and recreate + assertThatRuntimeException().isThrownBy(() -> indexOps.updateIndex(updatedIdx)); + } + + @Test // GH-4706 + void createsVectorIndexWithFilters() throws InterruptedException { + + VectorIndex idx = new VectorIndex("vector_index") + .addVector("plotEmbedding", builder -> builder.dimensions(1536).cosine()).addFilter("description") + .addFilter("year"); + + indexOps.createIndex(idx); + + // without synchronization, the container might crash.
+ Thread.sleep(500); + + await().untilAsserted(() -> { + Document raw = readRawIndexInfo(idx.getName()); + assertThat(raw).containsEntry("name", idx.getName()) // + .containsEntry("type", "vectorSearch") // + .containsEntry("latestDefinition.fields.[0].type", "vector") // + .containsEntry("latestDefinition.fields.[1].type", "filter") // + .containsEntry("latestDefinition.fields.[1].path", "plot") // + .containsEntry("latestDefinition.fields.[2].type", "filter") // + .containsEntry("latestDefinition.fields.[2].path", "year"); // + }); + } + + @Nullable + private Document readRawIndexInfo(String name) { + + AggregateIterable indexes = template.execute(Movie.class, collection -> { + return collection.aggregate(List.of(new Document("$listSearchIndexes", new Document("name", name)))); + }); + + return indexes.first(); + } + + static class Movie { + + @Id String id; + String title; + + @Field("plot") String description; + int year; + + @Field("plot_embedding") Double[] plotEmbedding; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Account.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Account.java index 3c45717c0e..d9d59c081b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Account.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Account.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +16,6 @@ package org.springframework.data.mongodb.core.mapping; import org.springframework.data.annotation.Id; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/AccountPojo.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/AccountPojo.java index 0a5ca8d45f..e50bffc1fc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/AccountPojo.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/AccountPojo.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Address.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Address.java index ec242622e8..ed7ba975ff 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Address.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Address.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. 
+ * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,8 +20,7 @@ */ public class Address implements Comparable
          { - @SuppressWarnings("unused") - private String id; + @SuppressWarnings("unused") private String id; private String[] lines; private String city; private String provinceOrState; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasePerson.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasePerson.java index ab09160ff0..3f3a326873 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasePerson.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasePerson.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,7 +19,8 @@ /** * {@link QuerySupertype} is necessary for Querydsl 2.2.0-beta4 to compile the query classes directly. Can be removed as - * soon as https://bugs.launchpad.net/querydsl/+bug/776219 is fixed. + * soon as https://bugs.launchpad.net/querydsl/+bug/776219 + * is fixed. 
* * @see https://bugs.launchpad.net/querydsl/+bug/776219 * @author Jon Brisbin diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntityUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntityUnitTests.java index af744e4a9c..9a39042349 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntityUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntityUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,101 +15,123 @@ */ package org.springframework.data.mongodb.core.mapping; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junitpioneer.jupiter.SetSystemProperty; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import 
org.springframework.context.ApplicationContext; import org.springframework.core.annotation.AliasFor; +import org.springframework.core.env.Environment; +import org.springframework.core.env.StandardEnvironment; import org.springframework.data.mapping.MappingException; -import org.springframework.data.util.ClassTypeInformation; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.spel.ExtensionAwareEvaluationContextProvider; +import org.springframework.data.spel.spi.EvaluationContextExtension; +import org.springframework.data.util.TypeInformation; +import org.springframework.mock.env.MockEnvironment; /** * Unit tests for {@link BasicMongoPersistentEntity}. * * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class BasicMongoPersistentEntityUnitTests { @Mock ApplicationContext context; @Mock MongoPersistentProperty propertyMock; @Test - public void subclassInheritsAtDocumentAnnotation() { + void subclassInheritsAtDocumentAnnotation() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(Person.class)); - assertThat(entity.getCollection(), is("contacts")); + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(Person.class)); + assertThat(entity.getCollection()).isEqualTo("contacts"); } @Test - public void evaluatesSpELExpression() { + void evaluatesSpELExpression() { - MongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(Company.class)); - assertThat(entity.getCollection(), is("35")); + MongoPersistentEntity entity = new BasicMongoPersistentEntity<>(TypeInformation.of(Company.class)); + assertThat(entity.getCollection()).isEqualTo("35"); } @Test // DATAMONGO-65, DATAMONGO-1108 - public void collectionAllowsReferencingSpringBean() { + void collectionAllowsReferencingSpringBean() { CollectionProvider 
provider = new CollectionProvider(); provider.collectionName = "reference"; when(context.getBean("myBean")).thenReturn(provider); - when(context.containsBean("myBean")).thenReturn(true); - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(DynamicallyMapped.class)); - entity.setApplicationContext(context); + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(DynamicallyMapped.class)); + entity.setEvaluationContextProvider(new ExtensionAwareEvaluationContextProvider(context)); - assertThat(entity.getCollection(), is("reference")); + assertThat(entity.getCollection()).isEqualTo("reference"); provider.collectionName = "otherReference"; - assertThat(entity.getCollection(), is("otherReference")); + assertThat(entity.getCollection()).isEqualTo("otherReference"); } - @Test // DATAMONGO-937 - public void shouldDetectLanguageCorrectly() { + @Test // GH-2764 + void collectionAllowsReferencingProperties() { + + MockEnvironment environment = new MockEnvironment(); + environment.setProperty("collectionName", "reference"); - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(DocumentWithLanguage.class)); - assertThat(entity.getLanguage(), is("spanish")); + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(DynamicallyMappedUsingPropertyPlaceholder.class)); + entity.setEnvironment(environment); + + assertThat(entity.getCollection()).isEqualTo("reference_cat"); } - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test(expected = MappingException.class) // DATAMONGO-1053 - public void verifyShouldThrowExceptionForInvalidTypeOfExplicitLanguageProperty() { + @Test // DATAMONGO-937 + void shouldDetectLanguageCorrectly() { + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(DocumentWithLanguage.class)); - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - 
ClassTypeInformation.from(AnyDocument.class)); + assertThat(entity.getLanguage()).isEqualTo("spanish"); + } - when(propertyMock.isExplicitLanguageProperty()).thenReturn(true); - when(propertyMock.getActualType()).thenReturn((Class) Number.class); + @Test // DATAMONGO-1053 + void verifyShouldThrowExceptionForInvalidTypeOfExplicitLanguageProperty() { + doReturn(true).when(propertyMock).isExplicitLanguageProperty(); + doReturn(Number.class).when(propertyMock).getActualType(); + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); entity.addPersistentProperty(propertyMock); - entity.verify(); + + assertThatExceptionOfType(MappingException.class).isThrownBy(entity::verify); } - @SuppressWarnings({ "unchecked", "rawtypes" }) @Test // DATAMONGO-1053 - public void verifyShouldPassForStringAsExplicitLanguageProperty() { + void verifyShouldPassForStringAsExplicitLanguageProperty() { + + doReturn(true).when(propertyMock).isExplicitLanguageProperty(); + doReturn(String.class).when(propertyMock).getActualType(); - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(AnyDocument.class)); - when(propertyMock.isExplicitLanguageProperty()).thenReturn(true); - when(propertyMock.getActualType()).thenReturn((Class) String.class); + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); entity.addPersistentProperty(propertyMock); entity.verify(); @@ -118,12 +140,11 @@ public void verifyShouldPassForStringAsExplicitLanguageProperty() { verify(propertyMock, times(1)).getActualType(); } - @SuppressWarnings({ "unchecked", "rawtypes" }) @Test // DATAMONGO-1053 - public void verifyShouldIgnoreNonExplicitLanguageProperty() { + void verifyShouldIgnoreNonExplicitLanguageProperty() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(AnyDocument.class)); + BasicMongoPersistentEntity entity = 
new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); when(propertyMock.isExplicitLanguageProperty()).thenReturn(false); entity.addPersistentProperty(propertyMock); @@ -133,92 +154,179 @@ public void verifyShouldIgnoreNonExplicitLanguageProperty() { verify(propertyMock, never()).getActualType(); } - @SuppressWarnings({ "unchecked", "rawtypes" }) - @Test(expected = MappingException.class) // DATAMONGO-1157 - public void verifyShouldThrowErrorForLazyDBRefOnFinalClass() { + @Test // DATAMONGO-1157 + void verifyShouldThrowErrorForLazyDBRefOnFinalClass() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(AnyDocument.class)); org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock( org.springframework.data.mongodb.core.mapping.DBRef.class); - when(propertyMock.isDbReference()).thenReturn(true); - when(propertyMock.getDBRef()).thenReturn(dbRefMock); - when(dbRefMock.lazy()).thenReturn(true); - when(propertyMock.getActualType()).thenReturn((Class) Class.class); + + doReturn(Class.class).when(propertyMock).getActualType(); + doReturn(true).when(propertyMock).isDbReference(); + doReturn(dbRefMock).when(propertyMock).getDBRef(); + doReturn(true).when(dbRefMock).lazy(); + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); entity.addPersistentProperty(propertyMock); - entity.verify(); + assertThatExceptionOfType(MappingException.class).isThrownBy(entity::verify); } - @Test(expected = MappingException.class) // DATAMONGO-1157 - public void verifyShouldThrowErrorForLazyDBRefArray() { + @Test // DATAMONGO-1157 + void verifyShouldThrowErrorForLazyDBRefArray() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(AnyDocument.class)); org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock( org.springframework.data.mongodb.core.mapping.DBRef.class); - 
when(propertyMock.isDbReference()).thenReturn(true); - when(propertyMock.getDBRef()).thenReturn(dbRefMock); - when(dbRefMock.lazy()).thenReturn(true); - when(propertyMock.isArray()).thenReturn(true); + + doReturn(true).when(propertyMock).isDbReference(); + doReturn(true).when(propertyMock).isArray(); + doReturn(dbRefMock).when(propertyMock).getDBRef(); + doReturn(true).when(dbRefMock).lazy(); + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); entity.addPersistentProperty(propertyMock); - entity.verify(); + assertThatExceptionOfType(MappingException.class).isThrownBy(entity::verify); } @Test // DATAMONGO-1157 - @SuppressWarnings({ "unchecked", "rawtypes" }) - public void verifyShouldPassForLazyDBRefOnNonArrayNonFinalClass() { + void verifyShouldPassForLazyDBRefOnNonArrayNonFinalClass() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(AnyDocument.class)); org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock( org.springframework.data.mongodb.core.mapping.DBRef.class); - when(propertyMock.isDbReference()).thenReturn(true); - when(propertyMock.getDBRef()).thenReturn(dbRefMock); - when(dbRefMock.lazy()).thenReturn(true); - when(propertyMock.getActualType()).thenReturn((Class) Object.class); - entity.addPersistentProperty(propertyMock); + doReturn(true).when(propertyMock).isDbReference(); + doReturn(Object.class).when(propertyMock).getActualType(); + doReturn(dbRefMock).when(propertyMock).getDBRef(); + doReturn(true).when(dbRefMock).lazy(); + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); + entity.addPersistentProperty(propertyMock); entity.verify(); verify(propertyMock, times(1)).isDbReference(); } @Test // DATAMONGO-1157 - @SuppressWarnings({ "unchecked", "rawtypes" }) - public void verifyShouldPassForNonLazyDBRefOnFinalClass() { + void 
verifyShouldPassForNonLazyDBRefOnFinalClass() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(AnyDocument.class)); org.springframework.data.mongodb.core.mapping.DBRef dbRefMock = mock( org.springframework.data.mongodb.core.mapping.DBRef.class); - when(propertyMock.isDbReference()).thenReturn(true); - when(propertyMock.getDBRef()).thenReturn(dbRefMock); - when(dbRefMock.lazy()).thenReturn(false); - entity.addPersistentProperty(propertyMock); + doReturn(true).when(propertyMock).isDbReference(); + doReturn(dbRefMock).when(propertyMock).getDBRef(); + doReturn(false).when(dbRefMock).lazy(); + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(AnyDocument.class)); + entity.addPersistentProperty(propertyMock); entity.verify(); verify(dbRefMock, times(1)).lazy(); } @Test // DATAMONGO-1291 - public void metaInformationShouldBeReadCorrectlyFromInheritedDocumentAnnotation() { + void metaInformationShouldBeReadCorrectlyFromInheritedDocumentAnnotation() { - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(DocumentWithCustomAnnotation.class)); + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(DocumentWithCustomAnnotation.class)); - assertThat(entity.getCollection(), is("collection-1")); + assertThat(entity.getCollection()).isEqualTo("collection-1"); } @Test // DATAMONGO-1373 - public void metaInformationShouldBeReadCorrectlyFromComposedDocumentAnnotation() { + void metaInformationShouldBeReadCorrectlyFromComposedDocumentAnnotation() { + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(DocumentWithComposedAnnotation.class)); + + assertThat(entity.getCollection()).isEqualTo("custom-collection"); + } + + @Test // DATAMONGO-1874 + void usesEvaluationContextExtensionInDynamicDocumentName() { + + BasicMongoPersistentEntity entity = new 
BasicMongoPersistentEntity<>( + TypeInformation.of(MappedWithExtension.class)); + entity.setEvaluationContextProvider( + new ExtensionAwareEvaluationContextProvider(Collections.singletonList(new SampleExtension()))); + + assertThat(entity.getCollection()).isEqualTo("collectionName"); + } + + @Test // GH-4634 + @SetSystemProperty(key = "mongo.entity.collection", value = "collectionName") + void readsCollectionNameFromSystemProperty() { + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(MappedWithExtensionPropertyPlaceholderStyle.class)); + entity.setEnvironment(new StandardEnvironment()); + + assertThat(entity.getCollection()).isEqualTo("collectionName"); + } + + @Test // DATAMONGO-1854 + void readsSimpleCollation() { + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(WithSimpleCollation.class)); - BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity( - ClassTypeInformation.from(DocumentWithComposedAnnotation.class)); + assertThat(entity.getCollation()).isEqualTo(org.springframework.data.mongodb.core.query.Collation.of("en_US")); + } + + @Test // DATAMONGO-1854 + void readsDocumentCollation() { + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(WithDocumentCollation.class)); + + assertThat(entity.getCollation()).isEqualTo(org.springframework.data.mongodb.core.query.Collation.of("en_US")); + } + + @Test // DATAMONGO-2565 + void usesCorrectExpressionsForCollectionAndCollation() { + + BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( + TypeInformation.of(WithCollectionAndCollationFromSpEL.class)); + entity.setEvaluationContextProvider( + new ExtensionAwareEvaluationContextProvider(Collections.singletonList(new SampleExtension()))); + + assertThat(entity.getCollection()).isEqualTo("collectionName"); + assertThat(entity.getCollation()).isEqualTo(Collation.of("en_US")); + } + + @Test // DATAMONGO-2341 + void 
detectsShardedEntityCorrectly() { - assertThat(entity.getCollection(), is("custom-collection")); + assertThat(entityOf(WithDefaultShardKey.class).isSharded()).isTrue(); + assertThat(entityOf(Contact.class).isSharded()).isFalse(); + } + + @Test // DATAMONGO-2341 + void readsDefaultShardKey() { + + assertThat(entityOf(WithDefaultShardKey.class).getShardKey().getDocument()) + .isEqualTo(new org.bson.Document("_id", 1)); + } + + @Test // DATAMONGO-2341 + void readsSingleShardKey() { + + assertThat(entityOf(WithSingleShardKey.class).getShardKey().getDocument()) + .isEqualTo(new org.bson.Document("country", 1)); + } + + @Test // DATAMONGO-2341 + void readsMultiShardKey() { + + assertThat(entityOf(WithMultiShardKey.class).getShardKey().getDocument()) + .isEqualTo(new org.bson.Document("country", 1).append("userid", 1)); + } + + static BasicMongoPersistentEntity entityOf(Class type) { + return new BasicMongoPersistentEntity<>(TypeInformation.of(type)); } @Document("contacts") @@ -229,9 +337,12 @@ class Person extends Contact {} @Document("#{35}") class Company {} - @Document("#{myBean.collectionName}") + @Document("#{@myBean.collectionName}") class DynamicallyMapped {} + @Document("${collectionName}_cat") + class DynamicallyMappedUsingPropertyPlaceholder {} + class CollectionProvider { String collectionName; @@ -243,13 +354,13 @@ public String getCollectionName() { @Document(language = "spanish") static class DocumentWithLanguage {} - static class AnyDocument {} + private static class AnyDocument {} @CustomDocumentAnnotation - static class DocumentWithCustomAnnotation {} + private static class DocumentWithCustomAnnotation {} @ComposedDocumentAnnotation - static class DocumentWithComposedAnnotation {} + private static class DocumentWithComposedAnnotation {} @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.TYPE }) @@ -265,4 +376,52 @@ static class DocumentWithComposedAnnotation {} @AliasFor(annotation = Document.class, attribute = "collection") String name() default 
"custom-collection"; } + + // DATAMONGO-1874 + @Document("#{myProperty}") + class MappedWithExtension {} + + @Document("${mongo.entity.collection}") + class MappedWithExtensionPropertyPlaceholderStyle {} + + @Document("${value.from.file}") + class MappedWithValue {} + + @Document(collation = "#{myCollation}") + class WithCollationFromSpEL {} + + @Document(collection = "#{myProperty}", collation = "#{myCollation}") + class WithCollectionAndCollationFromSpEL {} + + @Document(collation = "en_US") + class WithSimpleCollation {} + + @Document(collation = "{ 'locale' : 'en_US' }") + class WithDocumentCollation {} + + @Sharded + private class WithDefaultShardKey {} + + @Sharded("country") + private class WithSingleShardKey {} + + @Sharded({ "country", "userid" }) + private class WithMultiShardKey {} + + static class SampleExtension implements EvaluationContextExtension { + + @Override + public String getExtensionId() { + return "sampleExtension"; + } + + @Override + public Map getProperties() { + + Map properties = new LinkedHashMap<>(); + properties.put("myProperty", "collectionName"); + properties.put("myCollation", "en_US"); + return properties; + } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java index 26380d884a..116505143e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,7 @@ */ package org.springframework.data.mongodb.core.mapping; -import static org.assertj.core.api.Assertions.assertThat; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.*; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; @@ -28,10 +26,11 @@ import java.util.List; import java.util.Locale; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.bson.Document; +import org.bson.types.ObjectId; +import org.jmolecules.ddd.annotation.Identity; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.core.annotation.AliasFor; import org.springframework.data.annotation.Id; import org.springframework.data.mapping.MappingException; @@ -49,74 +48,73 @@ * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Divya Srivastava */ public class BasicMongoPersistentPropertyUnitTests { - MongoPersistentEntity entity; + private MongoPersistentEntity entity; - @Rule public ExpectedException exception = ExpectedException.none(); - - @Before - public void setup() { - entity = new BasicMongoPersistentEntity(ClassTypeInformation.from(Person.class)); + @BeforeEach + void setup() { + entity = new BasicMongoPersistentEntity<>(ClassTypeInformation.from(Person.class)); } @Test - public void usesAnnotatedFieldName() { + void usesAnnotatedFieldName() { Field field = ReflectionUtils.findField(Person.class, "firstname"); - assertThat(getPropertyFor(field).getFieldName(), is("foo")); + 
assertThat(getPropertyFor(field).getFieldName()).isEqualTo("foo"); } @Test - public void returns_IdForIdProperty() { + void returns_IdForIdProperty() { Field field = ReflectionUtils.findField(Person.class, "id"); MongoPersistentProperty property = getPropertyFor(field); - assertThat(property.isIdProperty(), is(true)); - assertThat(property.getFieldName(), is("_id")); + assertThat(property.isIdProperty()).isTrue(); + assertThat(property.getFieldName()).isEqualTo("_id"); } @Test - public void returnsPropertyNameForUnannotatedProperties() { + void returnsPropertyNameForUnannotatedProperties() { Field field = ReflectionUtils.findField(Person.class, "lastname"); - assertThat(getPropertyFor(field).getFieldName(), is("lastname")); + assertThat(getPropertyFor(field).getFieldName()).isEqualTo("lastname"); } @Test - public void preventsNegativeOrder() { + void preventsNegativeOrder() { getPropertyFor(ReflectionUtils.findField(Person.class, "ssn")); } @Test // DATAMONGO-553 - public void usesPropertyAccessForThrowableCause() { + void usesPropertyAccessForThrowableCause() { BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( ClassTypeInformation.from(Throwable.class)); MongoPersistentProperty property = getPropertyFor(entity, "cause"); - assertThat(property.usePropertyAccess(), is(true)); + assertThat(property.usePropertyAccess()).isTrue(); } @Test // DATAMONGO-607 - public void usesCustomFieldNamingStrategyByDefault() throws Exception { + void usesCustomFieldNamingStrategyByDefault() throws Exception { ClassTypeInformation type = ClassTypeInformation.from(Person.class); Field field = ReflectionUtils.findField(Person.class, "lastname"); MongoPersistentProperty property = new BasicMongoPersistentProperty(Property.of(type, field), entity, SimpleTypeHolder.DEFAULT, UppercaseFieldNamingStrategy.INSTANCE); - assertThat(property.getFieldName(), is("LASTNAME")); + assertThat(property.getFieldName()).isEqualTo("LASTNAME"); field = 
ReflectionUtils.findField(Person.class, "firstname"); property = new BasicMongoPersistentProperty(Property.of(type, field), entity, SimpleTypeHolder.DEFAULT, UppercaseFieldNamingStrategy.INSTANCE); - assertThat(property.getFieldName(), is("foo")); + assertThat(property.getFieldName()).isEqualTo("foo"); } @Test // DATAMONGO-607 - public void rejectsInvalidValueReturnedByFieldNamingStrategy() { + void rejectsInvalidValueReturnedByFieldNamingStrategy() { ClassTypeInformation type = ClassTypeInformation.from(Person.class); Field field = ReflectionUtils.findField(Person.class, "lastname"); @@ -124,76 +122,73 @@ public void rejectsInvalidValueReturnedByFieldNamingStrategy() { MongoPersistentProperty property = new BasicMongoPersistentProperty(Property.of(type, field), entity, SimpleTypeHolder.DEFAULT, InvalidFieldNamingStrategy.INSTANCE); - exception.expect(MappingException.class); - exception.expectMessage(InvalidFieldNamingStrategy.class.getName()); - exception.expectMessage(property.toString()); - - property.getFieldName(); + assertThatExceptionOfType(MappingException.class).isThrownBy(property::getFieldName) + .withMessageContaining(InvalidFieldNamingStrategy.class.getName()).withMessageContaining(property.toString()); } @Test // DATAMONGO-937 - public void shouldDetectAnnotatedLanguagePropertyCorrectly() { + void shouldDetectAnnotatedLanguagePropertyCorrectly() { MongoPersistentProperty property = getPropertyFor(DocumentWithLanguageProperty.class, "lang"); - assertThat(property.isLanguageProperty(), is(true)); + assertThat(property.isLanguageProperty()).isTrue(); } @Test // DATAMONGO-937 - public void shouldDetectIplicitLanguagePropertyCorrectly() { + void shouldDetectImplicitLanguagePropertyCorrectly() { MongoPersistentProperty property = getPropertyFor(DocumentWithImplicitLanguageProperty.class, "language"); - assertThat(property.isLanguageProperty(), is(true)); + assertThat(property.isLanguageProperty()).isTrue(); } @Test // DATAMONGO-976 - public void 
shouldDetectTextScorePropertyCorrectly() { + void shouldDetectTextScorePropertyCorrectly() { MongoPersistentProperty property = getPropertyFor(DocumentWithTextScoreProperty.class, "score"); - assertThat(property.isTextScoreProperty(), is(true)); + assertThat(property.isTextScoreProperty()).isTrue(); } @Test // DATAMONGO-976 - public void shouldDetectTextScoreAsReadOnlyProperty() { + void shouldDetectTextScoreAsReadOnlyProperty() { MongoPersistentProperty property = getPropertyFor(DocumentWithTextScoreProperty.class, "score"); - assertThat(property.isWritable(), is(false)); + assertThat(property.isWritable()).isFalse(); } @Test // DATAMONGO-1050 - public void shouldNotConsiderExplicitlyNameFieldAsIdProperty() { + void shouldNotConsiderExplicitlyNameFieldAsIdProperty() { MongoPersistentProperty property = getPropertyFor(DocumentWithExplicitlyRenamedIdProperty.class, "id"); - assertThat(property.isIdProperty(), is(false)); + assertThat(property.isIdProperty()).isFalse(); } @Test // DATAMONGO-1050 - public void shouldConsiderPropertyAsIdWhenExplicitlyAnnotatedWithIdEvenWhenExplicitlyNamePresent() { + void shouldConsiderPropertyAsIdWhenExplicitlyAnnotatedWithIdEvenWhenExplicitlyNamePresent() { MongoPersistentProperty property = getPropertyFor(DocumentWithExplicitlyRenamedIdPropertyHavingIdAnnotation.class, "id"); - assertThat(property.isIdProperty(), is(true)); + assertThat(property.isIdProperty()).isTrue(); } @Test // DATAMONGO-1373 - public void shouldConsiderComposedAnnotationsForIdField() { + void shouldConsiderComposedAnnotationsForIdField() { MongoPersistentProperty property = getPropertyFor(DocumentWithComposedAnnotations.class, "myId"); - assertThat(property.isIdProperty(), is(true)); - assertThat(property.getFieldName(), is("_id")); + assertThat(property.isIdProperty()).isTrue(); + assertThat(property.getFieldName()).isEqualTo("_id"); } @Test // DATAMONGO-1373 - public void shouldConsiderComposedAnnotationsForFields() { + void 
shouldConsiderComposedAnnotationsForFields() { MongoPersistentProperty property = getPropertyFor(DocumentWithComposedAnnotations.class, "myField"); - assertThat(property.getFieldName(), is("myField")); + assertThat(property.getFieldName()).isEqualTo("myField"); } @Test // DATAMONGO-1737 - public void honorsFieldOrderWhenIteratingOverProperties() { + void honorsFieldOrderWhenIteratingOverProperties() { MongoMappingContext context = new MongoMappingContext(); - BasicMongoPersistentEntity entity = context.getPersistentEntity(Sample.class); + MongoPersistentEntity entity = context.getPersistentEntity(Sample.class); List properties = new ArrayList<>(); @@ -202,12 +197,65 @@ public void honorsFieldOrderWhenIteratingOverProperties() { assertThat(properties).containsExactly("first", "second", "third"); } + @Test // GH-3407 + void shouldDetectWritability() { + + assertThat(getPropertyFor(WithFieldWrite.class, "fieldWithDefaults").writeNullValues()).isFalse(); + assertThat(getPropertyFor(WithFieldWrite.class, "fieldWithField").writeNullValues()).isFalse(); + assertThat(getPropertyFor(WithFieldWrite.class, "writeNonNull").writeNullValues()).isFalse(); + assertThat(getPropertyFor(WithFieldWrite.class, "writeAlways").writeNullValues()).isTrue(); + } + + @Test // DATAMONGO-1798 + void fieldTypeShouldReturnActualTypeForNonIdProperties() { + + MongoPersistentProperty property = getPropertyFor(Person.class, "lastname"); + assertThat(property.getFieldType()).isEqualTo(String.class); + } + + @Test // DATAMONGO-1798 + void fieldTypeShouldBeObjectIdForPropertiesAnnotatedWithCommonsId() { + + MongoPersistentProperty property = getPropertyFor(Person.class, "id"); + assertThat(property.getFieldType()).isEqualTo(ObjectId.class); + } + + @Test // DATAMONGO-1798 + void fieldTypeShouldBeImplicitForPropertiesAnnotatedWithMongoId() { + + MongoPersistentProperty property = getPropertyFor(WithStringMongoId.class, "id"); + assertThat(property.getFieldType()).isEqualTo(String.class); + } + + @Test 
// DATAMONGO-1798 + void fieldTypeShouldBeObjectIdForPropertiesAnnotatedWithMongoIdAndTargetTypeObjectId() { + + MongoPersistentProperty property = getPropertyFor(WithStringMongoIdMappedToObjectId.class, "id"); + assertThat(property.getFieldType()).isEqualTo(ObjectId.class); + } + + @Test // DATAMONGO-2460 + void fieldTypeShouldBeDocumentForPropertiesAnnotatedIdWhenAComplexTypeAndFieldTypeImplicit() { + + MongoPersistentProperty property = getPropertyFor(WithComplexId.class, "id"); + assertThat(property.getFieldType()).isEqualTo(Document.class); + } + + @Test // GH-3803 + void considersJMoleculesIdentityExplicitlyAnnotatedIdentifier() { + + MongoPersistentProperty property = getPropertyFor(WithJMoleculesIdentity.class, "identifier"); + + assertThat(property.isIdProperty()).isTrue(); + assertThat(property.isExplicitIdProperty()).isTrue(); + } + private MongoPersistentProperty getPropertyFor(Field field) { return getPropertyFor(entity, field); } private static MongoPersistentProperty getPropertyFor(Class type, String fieldname) { - return getPropertyFor(new BasicMongoPersistentEntity(ClassTypeInformation.from(type)), fieldname); + return getPropertyFor(new BasicMongoPersistentEntity<>(ClassTypeInformation.from(type)), fieldname); } private static MongoPersistentProperty getPropertyFor(MongoPersistentEntity entity, String fieldname) { @@ -215,8 +263,12 @@ private static MongoPersistentProperty getPropertyFor(MongoPersistentEntity e } private static MongoPersistentProperty getPropertyFor(MongoPersistentEntity entity, Field field) { - return new BasicMongoPersistentProperty(Property.of(entity.getTypeInformation(), field), entity, - SimpleTypeHolder.DEFAULT, PropertyNameFieldNamingStrategy.INSTANCE); + BasicMongoPersistentProperty property = new BasicMongoPersistentProperty( + Property.of(entity.getTypeInformation(), field), entity, SimpleTypeHolder.DEFAULT, + PropertyNameFieldNamingStrategy.INSTANCE); + + entity.addPersistentProperty(property); + return property; } 
class Person { @@ -268,6 +320,18 @@ static class DocumentWithTextScoreProperty { @TextScore Float score; } + static class WithFieldWrite { + + int fieldWithDefaults; + @org.springframework.data.mongodb.core.mapping.Field int fieldWithField; + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.NON_NULL) Integer writeNonNull; + + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.ALWAYS) Integer writeAlways; + + } + static class DocumentWithExplicitlyRenamedIdProperty { @org.springframework.data.mongodb.core.mapping.Field("id") String id; @@ -275,12 +339,14 @@ static class DocumentWithExplicitlyRenamedIdProperty { static class DocumentWithExplicitlyRenamedIdPropertyHavingIdAnnotation { - @Id @org.springframework.data.mongodb.core.mapping.Field("id") String id; + @Id + @org.springframework.data.mongodb.core.mapping.Field("id") String id; } static class DocumentWithComposedAnnotations { - @ComposedIdAnnotation @ComposedFieldAnnotation String myId; + @ComposedIdAnnotation + @ComposedFieldAnnotation String myId; @ComposedFieldAnnotation(name = "myField") String myField; } @@ -298,4 +364,29 @@ static class DocumentWithComposedAnnotations { @Id static @interface ComposedIdAnnotation { } + + static class WithStringMongoId { + + @MongoId String id; + } + + static class WithStringMongoIdMappedToObjectId { + + @MongoId(FieldType.OBJECT_ID) String id; + } + + static class ComplexId { + + String value; + } + + static class WithComplexId { + + @Id + @org.springframework.data.mongodb.core.mapping.Field ComplexId id; + } + + static class WithJMoleculesIdentity { + @Identity ObjectId identifier; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/CustomCollectionWithIndex.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/CustomCollectionWithIndex.java index 
35f7925e63..44727cd81a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/CustomCollectionWithIndex.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/CustomCollectionWithIndex.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,10 +24,8 @@ @Document("foobar") public class CustomCollectionWithIndex { - @Id - private String id; - @Indexed - private String name; + @Id private String id; + @Indexed private String name; public CustomCollectionWithIndex(String name) { this.name = name; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/DetectedCollectionWithIndex.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/DetectedCollectionWithIndex.java index cece9bed87..1eb2628c75 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/DetectedCollectionWithIndex.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/DetectedCollectionWithIndex.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,10 +24,8 @@ @Document public class DetectedCollectionWithIndex { - @Id - private String id; - @Indexed - private String name; + @Id private String id; + @Indexed private String name; public DetectedCollectionWithIndex(String name) { this.name = name; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeneratedId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeneratedId.java index cfe1e611c8..8259e3ed95 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeneratedId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeneratedId.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,8 +16,8 @@ package org.springframework.data.mongodb.core.mapping; import org.bson.types.ObjectId; + import org.springframework.data.annotation.Id; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin @@ -25,10 +25,8 @@ @Document public class GeneratedId { - @Id - private ObjectId id; - @SuppressWarnings("unused") - private String name; + @Id private ObjectId id; + @SuppressWarnings("unused") private String name; public GeneratedId(String name) { this.name = name; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GenericMappingTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GenericMappingTests.java index 5d74c12d18..7933dbb70f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GenericMappingTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GenericMappingTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,17 @@ */ package org.springframework.data.mongodb.core.mapping; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Collections; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; @@ -35,16 +35,16 @@ * * @author Oliver Gierke */ -@RunWith(MockitoJUnitRunner.class) -public class GenericMappingTests { +@ExtendWith(MockitoExtension.class) +class GenericMappingTests { - MongoMappingContext context; - MongoConverter converter; + private MongoMappingContext context; + private MongoConverter converter; @Mock DbRefResolver resolver; - @Before - public void setUp() throws Exception { + @BeforeEach + void setUp() throws Exception { context = new MongoMappingContext(); context.setInitialEntitySet(Collections.singleton(StringWrapper.class)); @@ -54,36 +54,36 @@ public void setUp() throws Exception { } @Test - public void writesGenericTypeCorrectly() { + void writesGenericTypeCorrectly() { StringWrapper wrapper = new StringWrapper(); - wrapper.container = new Container(); - wrapper.container.content = "Foo!"; + wrapper.container = new Container<>(); + wrapper.container.content = 
"Foo"; Document document = new Document(); converter.write(wrapper, document); Object container = document.get("container"); - assertThat(container, is(notNullValue())); - assertTrue(container instanceof Document); + assertThat(container).isNotNull(); + assertThat(container instanceof Document).isTrue(); Object content = ((Document) container).get("content"); - assertTrue(content instanceof String); - assertThat((String) content, is("Foo!")); + assertThat(content instanceof String).isTrue(); + assertThat((String) content).isEqualTo("Foo"); } @Test - public void readsGenericTypeCorrectly() { + void readsGenericTypeCorrectly() { - Document content = new Document("content", "Foo!"); + Document content = new Document("content", "Foo"); Document container = new Document("container", content); StringWrapper result = converter.read(StringWrapper.class, container); - assertThat(result.container, is(notNullValue())); - assertThat(result.container.content, is("Foo!")); + assertThat(result.container).isNotNull(); + assertThat(result.container.content).isEqualTo("Foo"); } - static class StringWrapper extends Wrapper { + private static class StringWrapper extends Wrapper { } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedAppConfig.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedAppConfig.java index 8d2084681d..0f58e6ace5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedAppConfig.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedAppConfig.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,18 @@ */ package org.springframework.data.mongodb.core.mapping; +import java.util.Collection; +import java.util.Collections; +import java.util.Set; + import org.springframework.context.annotation.Bean; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; import org.springframework.data.mongodb.core.mapping.event.LoggingEventListener; +import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration; +import org.springframework.data.mongodb.test.util.MongoTestUtils; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; -public class GeoIndexedAppConfig extends AbstractMongoConfiguration { +public class GeoIndexedAppConfig extends MongoClientClosingTestConfiguration { public static String GEO_DB = "database"; public static String GEO_COLLECTION = "geolocation"; @@ -35,16 +39,26 @@ public String getDatabaseName() { @Override @Bean public MongoClient mongoClient() { - return new MongoClient("127.0.0.1"); + return MongoTestUtils.client(); } @Override - public String getMappingBasePackage() { - return "org.springframework.data.mongodb.core.core.mapping"; + protected Collection getMappingBasePackages() { + return Collections.singleton("org.springframework.data.mongodb.core.core.mapping"); } @Bean public LoggingEventListener mappingEventsListener() { return new LoggingEventListener(); } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } + + @Override + protected boolean autoIndexCreation() { + return true; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedTests.java index 3267a77308..1e1978853f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoIndexedTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,12 @@ */ package org.springframework.data.mongodb.core.mapping; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import java.net.UnknownHostException; import java.util.ArrayList; import java.util.List; +import org.bson.Document; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -30,21 +30,21 @@ import org.springframework.dao.DataAccessException; import org.springframework.data.mongodb.core.CollectionCallback; import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import com.mongodb.DB; -import org.bson.Document; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; import com.mongodb.MongoException; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; /** * @author 
Jon Brisbin * @author Oliver Gierke + * @author Christoph Strobl */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration(classes = GeoIndexedAppConfig.class) public class GeoIndexedTests { @@ -55,27 +55,30 @@ public class GeoIndexedTests { @Autowired MongoMappingContext mappingContext; @Before - public void setUp() throws Exception { + public void setUp() { cleanDb(); } @After - public void cleanUp() throws Exception { + public void cleanUp() { cleanDb(); } - private void cleanDb() throws UnknownHostException { + private void cleanDb() { + + try (MongoClient mongo = MongoTestUtils.client()) { - Mongo mongo = new MongoClient(); - DB db = mongo.getDB(GeoIndexedAppConfig.GEO_DB); + MongoDatabase db = mongo.getDatabase(GeoIndexedAppConfig.GEO_DB); - for (String coll : collectionsToDrop) { - db.getCollection(coll).drop(); + for (String coll : collectionsToDrop) { + db.getCollection(coll).drop(); + } } } @Test public void testGeoLocation() { + GeoLocation geo = new GeoLocation(new double[] { 40.714346, -74.005966 }); template.insert(geo); @@ -94,6 +97,6 @@ public Boolean doInCollection(MongoCollection collection) throws Mongo } }); - assertTrue(hasIndex); + assertThat(hasIndex).isTrue(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoLocation.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoLocation.java index 2296f068e8..e1032e6d84 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoLocation.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/GeoLocation.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,9 +16,9 @@ package org.springframework.data.mongodb.core.mapping; import org.bson.types.ObjectId; + import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.index.GeoSpatialIndexed; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin @@ -26,10 +26,8 @@ @Document("geolocation") public class GeoLocation { - @Id - private ObjectId id; - @GeoSpatialIndexed - private double[] location; + @Id private ObjectId id; + @GeoSpatialIndexed private double[] location; public GeoLocation(double[] location) { this.location = location; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Location.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Location.java index 0e9832be14..d9626343aa 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Location.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Location.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +16,6 @@ package org.springframework.data.mongodb.core.mapping; import org.bson.types.ObjectId; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MappingTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MappingTests.java index cda3435651..eee407701c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MappingTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MappingTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,7 @@ */ package org.springframework.data.mongodb.core.mapping; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; import static org.springframework.data.mongodb.core.query.Update.*; @@ -28,23 +27,31 @@ import java.util.List; import java.util.Map; +import org.bson.Document; import org.bson.types.ObjectId; -import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.dao.DataAccessException; import org.springframework.dao.DuplicateKeyException; import org.springframework.data.annotation.Id; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.MongoCollectionUtils; -import org.springframework.data.mongodb.config.AbstractIntegrationTests; import org.springframework.data.mongodb.core.CollectionCallback; -import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; +import 
org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; -import org.bson.Document; import com.mongodb.MongoException; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; /** @@ -53,16 +60,30 @@ * @author Thomas Darimont * @author Mark Paluch */ -public class MappingTests extends AbstractIntegrationTests { +@ExtendWith(MongoTemplateExtension.class) +public class MappingTests { + + static final String DB_NAME = "mapping-tests"; + + static @Client MongoClient client; - @Autowired MongoOperations template; + @Template(database = DB_NAME, + initialEntitySet = { PersonWithDbRef.class, GeoLocation.class, PersonPojoStringId.class, Account.class, + DetectedCollectionWithIndex.class, Item.class, Container.class, Person.class, PersonCustomCollection1.class, + GeneratedId.class, PersonWithObjectId.class, PersonCustomIdName.class, PersonMapProperty.class }) // + static MongoTestTemplate template; + + @AfterEach + void afterEach() { + template.flush(); + } @Test public void testGeneratedId() { GeneratedId genId = new GeneratedId("test"); template.insert(genId); - assertNotNull(genId.getId()); + assertThat(genId.getId()).isNotNull(); } @Test @@ -70,12 +91,12 @@ public void testPersonPojo() throws Exception { PersonWithObjectId p = new PersonWithObjectId(12345, "Person", "Pojo"); template.insert(p); - assertNotNull(p.getId()); + assertThat(p.getId()).isNotNull(); List result = template.find(new Query(Criteria.where("ssn").is(12345)), PersonWithObjectId.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).getSsn(), is(12345)); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).getSsn()).isEqualTo(12345); } @Test @@ -86,24 +107,24 @@ public void testPersonWithCustomIdName() { List result = template.find(new 
Query(Criteria.where("lastName").is(p.getLastName())), PersonCustomIdName.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).getFirstName(), is("Custom Id")); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).getFirstName()).isEqualTo("Custom Id"); PersonCustomIdName p2 = new PersonCustomIdName(654321, "Custom Id", "LastName"); template.insert(p2); List result2 = template.find(new Query(Criteria.where("lastName").is("LastName")), PersonCustomIdName.class); - assertThat(result2.size(), is(1)); - assertNotNull(result2.get(0).getLastName()); - assertThat(result2.get(0).getLastName(), is("LastName")); + assertThat(result2.size()).isEqualTo(1); + assertThat(result2.get(0).getLastName()).isNotNull(); + assertThat(result2.get(0).getLastName()).isEqualTo("LastName"); // Test "in" query List result3 = template.find(new Query(Criteria.where("lastName").in("LastName")), PersonCustomIdName.class); - assertThat(result3.size(), is(1)); - assertNotNull(result3.get(0).getLastName()); - assertThat(result3.get(0).getLastName(), is("LastName")); + assertThat(result3.size()).isEqualTo(1); + assertThat(result3.get(0).getLastName()).isNotNull(); + assertThat(result3.get(0).getLastName()).isEqualTo("LastName"); } @Test @@ -119,13 +140,13 @@ public void testPersonMapProperty() { p.setAccounts(accounts); template.insert(p); - assertNotNull(p.getId()); + assertThat(p.getId()).isNotNull(); List result = template.find(new Query(Criteria.where("ssn").is(1234567)), PersonMapProperty.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).getAccounts().size(), is(2)); - assertThat(result.get(0).getAccounts().get("checking").getBalance(), is(1000.0f)); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).getAccounts().size()).isEqualTo(2); + assertThat(result.get(0).getAccounts().get("checking").getBalance()).isEqualTo(1000.0f); } @Test @@ -156,18 +177,23 @@ public void testWriteEntity() { accounts.add(newAcct); 
template.save(p, "person"); - assertNotNull(p.getId()); + assertThat(p.getId()).isNotNull(); List result = template.find(new Query(Criteria.where("ssn").is(123456789)), Person.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).getAddress().getCountry(), is("USA")); - assertThat(result.get(0).getAccounts(), notNullValue()); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).getAddress().getCountry()).isEqualTo("USA"); + assertThat(result.get(0).getAccounts()).isNotNull(); } - @Test(expected = DuplicateKeyException.class) + @Test @SuppressWarnings({ "unchecked", "rawtypes" }) public void testUniqueIndex() { + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setAutoIndexCreation(true); + + MongoTemplate template = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, DB_NAME), new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)); + Address addr = new Address(); addr.setLines(new String[] { "1234 W. 1st Street", "Apt. 
12" }); addr.setCity("Anytown"); @@ -177,7 +203,7 @@ public void testUniqueIndex() { Person p1 = new Person(1234567890, "John", "Doe", 37, addr); Person p2 = new Person(1234567890, "Jane", "Doe", 38, addr); - template.insertAll(Arrays.asList(p1, p2)); + assertThatExceptionOfType(DuplicateKeyException.class).isThrownBy(() -> template.insertAll(Arrays.asList(p1, p2))); } @Test @@ -191,8 +217,8 @@ public void testCustomCollectionInList() { PersonCustomCollection1.class, "person1"); List p2Results = template.find(new Query(Criteria.where("ssn").is(66666)), PersonCustomCollection2.class, "person2"); - assertThat(p1Results.size(), is(1)); - assertThat(p2Results.size(), is(1)); + assertThat(p1Results.size()).isEqualTo(1); + assertThat(p2Results.size()).isEqualTo(1); } @Test @@ -201,15 +227,22 @@ public void testPrimitivesAndCustomCollectionName() { template.insert(loc); List result = template.find(new Query(Criteria.where("_id").is(loc.getId())), Location.class, "places"); - assertThat(result.size(), is(1)); + assertThat(result.size()).isEqualTo(1); } @Test public void testIndexesCreatedInRightCollection() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setAutoIndexCreation(true); + + MongoTemplate template = new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, DB_NAME), new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext)); + + CustomCollectionWithIndex ccwi = new CustomCollectionWithIndex("test"); template.insert(ccwi); - assertTrue(template.execute("foobar", new CollectionCallback() { + assertThat(template.execute("foobar", new CollectionCallback() { public Boolean doInCollection(MongoCollection collection) throws MongoException, DataAccessException { List indexes = new ArrayList(); @@ -223,12 +256,12 @@ public Boolean doInCollection(MongoCollection collection) throws Mongo } return false; } - })); + })).isTrue(); DetectedCollectionWithIndex dcwi = new DetectedCollectionWithIndex("test"); 
template.insert(dcwi); - assertTrue(template.execute(MongoCollectionUtils.getPreferredCollectionName(DetectedCollectionWithIndex.class), + assertThat(template.execute(MongoCollectionUtils.getPreferredCollectionName(DetectedCollectionWithIndex.class), new CollectionCallback() { public Boolean doInCollection(MongoCollection collection) throws MongoException, DataAccessException { @@ -244,7 +277,7 @@ public Boolean doInCollection(MongoCollection collection) } return false; } - })); + })).isTrue(); } @Test @@ -256,9 +289,9 @@ public void testMultiDimensionalArrayProperties() { template.insert(p); List result = template.find(new Query(Criteria.where("ssn").is(123)), PersonMultiDimArrays.class); - assertThat(result.size(), is(1)); + assertThat(result.size()).isEqualTo(1); - assertThat(result.get(0).getGrid().length, is(3)); + assertThat(result.get(0).getGrid().length).isEqualTo(3); } @Test @@ -276,9 +309,9 @@ public void testMultiDimensionalCollectionProperties() { List result = template.find(new Query(Criteria.where("ssn").is(321)), PersonMultiCollection.class); - assertThat(result.size(), is(1)); + assertThat(result.size()).isEqualTo(1); - assertThat(result.get(0).getGrid().size(), is(1)); + assertThat(result.get(0).getGrid().size()).isEqualTo(1); } @Test @@ -291,8 +324,8 @@ public void testDbRef() { template.insert(p); List result = template.find(new Query(Criteria.where("ssn").is(4321)), PersonWithDbRef.class); - assertThat(result.size(), is(1)); - assertThat(result.get(0).getHome().getLocation(), is(pos)); + assertThat(result.size()).isEqualTo(1); + assertThat(result.get(0).getHome().getLocation()).isEqualTo(pos); } @Test @@ -300,7 +333,7 @@ public void testPersonWithNullProperties() { PersonNullProperties p = new PersonNullProperties(); template.insert(p); - assertNotNull(p.getId()); + assertThat(p.getId()).isNotNull(); } @Test @@ -319,7 +352,7 @@ public void testQueryUpdate() { template.updateFirst(query(where("ssn").is(1111)), update("address", addr), 
Person.class); Person p2 = template.findOne(query(where("ssn").is(1111)), Person.class); - assertThat(p2.getAddress().getCity(), is("New Town")); + assertThat(p2.getAddress().getCity()).isEqualTo("New Town"); } @Test @@ -332,19 +365,19 @@ public void testUpsert() { addr.setCountry("USA"); Person p2 = template.findOne(query(where("ssn").is(1111)), Person.class); - assertNull(p2); + assertThat(p2).isNull(); template.upsert(query(where("ssn").is(1111).and("firstName").is("Query").and("lastName").is("Update")), update("address", addr), Person.class); p2 = template.findOne(query(where("ssn").is(1111)), Person.class); - assertThat(p2.getAddress().getCity(), is("Anytown")); + assertThat(p2.getAddress().getCity()).isEqualTo("Anytown"); template.dropCollection(Person.class); template.upsert(query(where("ssn").is(1111).and("firstName").is("Query").and("lastName").is("Update")), update("address", addr), "person"); p2 = template.findOne(query(where("ssn").is(1111)), Person.class); - assertThat(p2.getAddress().getCity(), is("Anytown")); + assertThat(p2.getAddress().getCity()).isEqualTo("Anytown"); } @@ -358,9 +391,9 @@ public void testOrQuery() { List results = template .find(new Query(new Criteria().orOperator(where("ssn").is(1), where("ssn").is(2))), PersonWithObjectId.class); - assertNotNull(results); - assertThat(results.size(), is(2)); - assertThat(results.get(1).getSsn(), is(2)); + assertThat(results).isNotNull(); + assertThat(results.size()).isEqualTo(2); + assertThat(results.get(1).getSsn()).isEqualTo(2); } @Test @@ -371,7 +404,7 @@ public void testPrimitivesAsIds() { template.save(p); PrimitiveId p2 = template.findOne(query(where("id").is(1)), PrimitiveId.class); - assertNotNull(p2); + assertThat(p2).isNotNull(); } @Test @@ -381,13 +414,13 @@ public void testNoMappingAnnotationsUsingIntAsId() { template.updateFirst(query(where("id").is(1)), update("text", "New Text"), PersonPojoIntId.class); PersonPojoIntId p2 = template.findOne(query(where("id").is(1)), 
PersonPojoIntId.class); - assertEquals("New Text", p2.getText()); + assertThat(p2.getText()).isEqualTo("New Text"); p.setText("Different Text"); template.save(p); PersonPojoIntId p3 = template.findOne(query(where("id").is(1)), PersonPojoIntId.class); - assertEquals("Different Text", p3.getText()); + assertThat(p3.getText()).isEqualTo("Different Text"); } @@ -398,13 +431,13 @@ public void testNoMappingAnnotationsUsingLongAsId() { template.updateFirst(query(where("id").is(1)), update("text", "New Text"), PersonPojoLongId.class); PersonPojoLongId p2 = template.findOne(query(where("id").is(1)), PersonPojoLongId.class); - assertEquals("New Text", p2.getText()); + assertThat(p2.getText()).isEqualTo("New Text"); p.setText("Different Text"); template.save(p); PersonPojoLongId p3 = template.findOne(query(where("id").is(1)), PersonPojoLongId.class); - assertEquals("Different Text", p3.getText()); + assertThat(p3.getText()).isEqualTo("Different Text"); } @@ -416,13 +449,13 @@ public void testNoMappingAnnotationsUsingStringAsId() { template.updateFirst(query(where("id").is("1")), update("text", "New Text"), PersonPojoStringId.class); PersonPojoStringId p2 = template.findOne(query(where("id").is("1")), PersonPojoStringId.class); - assertEquals("New Text", p2.getText()); + assertThat(p2.getText()).isEqualTo("New Text"); p.setText("Different Text"); template.save(p); PersonPojoStringId p3 = template.findOne(query(where("id").is("1")), PersonPojoStringId.class); - assertEquals("Different Text", p3.getText()); + assertThat(p3.getText()).isEqualTo("Different Text"); PersonPojoStringId p4 = new PersonPojoStringId("2", "Text-2"); template.insert(p4); @@ -430,7 +463,7 @@ public void testNoMappingAnnotationsUsingStringAsId() { Query q = query(where("id").in("1", "2")); q.with(Sort.by(Direction.ASC, "id")); List people = template.find(q, PersonPojoStringId.class); - assertEquals(2, people.size()); + assertThat(people.size()).isEqualTo(2); } @@ -444,9 +477,9 @@ public void 
testPersonWithLongDBRef() { Query q = query(where("ssn").is(21)); PersonWithLongDBRef p2 = template.findOne(q, PersonWithLongDBRef.class); - assertNotNull(p2); - assertNotNull(p2.getPersonPojoLongId()); - assertEquals(12L, p2.getPersonPojoLongId().getId()); + assertThat(p2).isNotNull(); + assertThat(p2.getPersonPojoLongId()).isNotNull(); + assertThat(p2.getPersonPojoLongId().getId()).isEqualTo(12L); } @Test // DATADOC-275 @@ -467,9 +500,9 @@ public void readsAndWritesDBRefsCorrectly() { template.insert(container); Container result = template.findOne(query(where("id").is(container.id)), Container.class); - assertThat(result.item.id, is(item.id)); - assertThat(result.items.size(), is(1)); - assertThat(result.items.get(0).id, is(items.id)); + assertThat(result.item.id).isEqualTo(item.id); + assertThat(result.items.size()).isEqualTo(1); + assertThat(result.items.get(0).id).isEqualTo(items.id); } @Test // DATAMONGO-805 @@ -490,8 +523,8 @@ public void supportExcludeDbRefAssociation() { query.fields().exclude("item"); Container result = template.findOne(query, Container.class); - assertThat(result, is(notNullValue())); - assertThat(result.item, is(nullValue())); + assertThat(result).isNotNull(); + assertThat(result.item).isNull(); } @Test // DATAMONGO-805 @@ -512,14 +545,14 @@ public void shouldMapFieldsOfIterableEntity() { Query query = new Query(Criteria.where("id").is("foo")); Container result = template.findOne(query, Container.class); - assertThat(result, is(notNullValue())); - assertThat(result.item, is(notNullValue())); - assertThat(result.item.value, is("bar")); + assertThat(result).isNotNull(); + assertThat(result.item).isNotNull(); + assertThat(result.item.value).isEqualTo("bar"); } static class Container { - @Id final String id; + @Id String id; public Container() { id = new ObjectId().toString(); @@ -535,7 +568,7 @@ public Container(String id) { static class Item { - @Id final String id; + @Id String id; String value; public Item() { diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java index 89f0634f72..4a4dd54717 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,22 +17,23 @@ import static org.assertj.core.api.Assertions.*; +import java.time.temporal.ChronoUnit; import java.util.AbstractMap; import java.util.Collections; import java.util.Locale; import java.util.Map; +import java.util.Optional; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.context.ApplicationContext; import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mapping.model.FieldNamingStrategy; -import org.springframework.data.mapping.MappingException; import 
com.mongodb.DBRef; @@ -44,15 +45,13 @@ * @author Christoph Strobl * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class MongoMappingContextUnitTests { @Mock ApplicationContext applicationContext; - @Rule public ExpectedException exception = ExpectedException.none(); - @Test - public void addsSelfReferencingPersistentEntityCorrectly() throws Exception { + void addsSelfReferencingPersistentEntityCorrectly() throws Exception { MongoMappingContext context = new MongoMappingContext(); @@ -61,21 +60,21 @@ public void addsSelfReferencingPersistentEntityCorrectly() throws Exception { } @Test - public void doesNotReturnPersistentEntityForMongoSimpleType() { + void doesNotReturnPersistentEntityForMongoSimpleType() { MongoMappingContext context = new MongoMappingContext(); assertThat(context.getPersistentEntity(DBRef.class)).isNull(); } @Test // DATAMONGO-638 - public void doesNotCreatePersistentEntityForAbstractMap() { + void doesNotCreatePersistentEntityForAbstractMap() { MongoMappingContext context = new MongoMappingContext(); assertThat(context.getPersistentEntity(AbstractMap.class)).isNull(); } @Test // DATAMONGO-607 - public void populatesPersistentPropertyWithCustomFieldNamingStrategy() { + void populatesPersistentPropertyWithCustomFieldNamingStrategy() { MongoMappingContext context = new MongoMappingContext(); context.setApplicationContext(applicationContext); @@ -91,21 +90,18 @@ public String getFieldName(PersistentProperty property) { } @Test // DATAMONGO-607 - public void rejectsClassWithAmbiguousFieldMappings() { - - exception.expect(MappingException.class); - exception.expectMessage("firstname"); - exception.expectMessage("lastname"); - exception.expectMessage("foo"); - exception.expectMessage("@Field"); + void rejectsClassWithAmbiguousFieldMappings() { MongoMappingContext context = new MongoMappingContext(); context.setApplicationContext(applicationContext); - 
context.getPersistentEntity(InvalidPerson.class); + + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> context.getPersistentEntity(InvalidPerson.class)) + .withMessageContaining("firstname").withMessageContaining("lastname").withMessageContaining("foo") + .withMessageContaining("@Field"); } @Test // DATAMONGO-694 - public void doesNotConsiderOverrridenAccessorANewField() { + void doesNotConsiderOverrridenAccessorANewField() { MongoMappingContext context = new MongoMappingContext(); context.setApplicationContext(applicationContext); @@ -113,57 +109,89 @@ public void doesNotConsiderOverrridenAccessorANewField() { } @Test // DATAMONGO-688 - public void mappingContextShouldAcceptClassWithImplicitIdProperty() { + void mappingContextShouldAcceptClassWithImplicitIdProperty() { MongoMappingContext context = new MongoMappingContext(); - BasicMongoPersistentEntity pe = context.getRequiredPersistentEntity(ClassWithImplicitId.class); + MongoPersistentEntity pe = context.getRequiredPersistentEntity(ClassWithImplicitId.class); assertThat(pe).isNotNull(); assertThat(pe.isIdProperty(pe.getRequiredPersistentProperty("id"))).isTrue(); } @Test // DATAMONGO-688 - public void mappingContextShouldAcceptClassWithExplicitIdProperty() { + void mappingContextShouldAcceptClassWithExplicitIdProperty() { MongoMappingContext context = new MongoMappingContext(); - BasicMongoPersistentEntity pe = context.getRequiredPersistentEntity(ClassWithExplicitId.class); + MongoPersistentEntity pe = context.getRequiredPersistentEntity(ClassWithExplicitId.class); assertThat(pe).isNotNull(); assertThat(pe.isIdProperty(pe.getRequiredPersistentProperty("myId"))).isTrue(); } @Test // DATAMONGO-688 - public void mappingContextShouldAcceptClassWithExplicitAndImplicitIdPropertyByGivingPrecedenceToExplicitIdProperty() { + void mappingContextShouldAcceptClassWithExplicitAndImplicitIdPropertyByGivingPrecedenceToExplicitIdProperty() { MongoMappingContext context = new MongoMappingContext(); - 
BasicMongoPersistentEntity pe = context.getRequiredPersistentEntity(ClassWithExplicitIdAndImplicitId.class); + MongoPersistentEntity pe = context.getRequiredPersistentEntity(ClassWithExplicitIdAndImplicitId.class); assertThat(pe).isNotNull(); } - @Test(expected = MappingException.class) // DATAMONGO-688 - public void rejectsClassWithAmbiguousExplicitIdPropertyFieldMappings() { + @Test // DATAMONGO-688 + void rejectsClassWithAmbiguousExplicitIdPropertyFieldMappings() { MongoMappingContext context = new MongoMappingContext(); - context.getPersistentEntity(ClassWithMultipleExplicitIds.class); + assertThatThrownBy(() -> context.getPersistentEntity(ClassWithMultipleExplicitIds.class)) + .isInstanceOf(MappingException.class); } - @Test(expected = MappingException.class) // DATAMONGO-688 - public void rejectsClassWithAmbiguousImplicitIdPropertyFieldMappings() { + @Test // DATAMONGO-688 + void rejectsClassWithAmbiguousImplicitIdPropertyFieldMappings() { MongoMappingContext context = new MongoMappingContext(); - context.getPersistentEntity(ClassWithMultipleImplicitIds.class); + assertThatThrownBy(() -> context.getPersistentEntity(ClassWithMultipleImplicitIds.class)) + .isInstanceOf(MappingException.class); } @Test // DATAMONGO-976 - public void shouldRejectClassWithInvalidTextScoreProperty() { + void shouldRejectClassWithInvalidTextScoreProperty() { + + MongoMappingContext context = new MongoMappingContext(); + + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> context.getPersistentEntity(ClassWithInvalidTextScoreProperty.class)) + .withMessageContaining("score").withMessageContaining("Float").withMessageContaining("Double"); + } + + @Test // DATAMONGO-2599 + void shouldNotCreateEntityForEnum() { + + MongoMappingContext context = new MongoMappingContext(); + + MongoPersistentEntity entity = context.getRequiredPersistentEntity(ClassWithChronoUnit.class); - exception.expect(MappingException.class); - exception.expectMessage("score"); - 
exception.expectMessage("Float"); - exception.expectMessage("Double"); + assertThat(entity.getPersistentProperty("unit").isEntity()).isFalse(); + assertThat(context.hasPersistentEntityFor(ChronoUnit.class)).isFalse(); + assertThat(context.getPersistentEntity(ChronoUnit.class)).isNull(); + } + + @Test // GH-3656 + void shouldNotCreateEntityForOptionalGetter() { + + MongoMappingContext context = new MongoMappingContext(); + MongoPersistentEntity entity = context.getRequiredPersistentEntity(InterfaceWithMethodReturningOptional.class); + + assertThat(context.getPersistentEntities()).map(it -> it.getType()).doesNotContain((Class) + Optional.class).contains((Class)Person.class); + } + + @Test // GH-3656 + void shouldNotCreateEntityForOptionalField() { MongoMappingContext context = new MongoMappingContext(); - context.getPersistentEntity(ClassWithInvalidTextScoreProperty.class); + MongoPersistentEntity entity = context.getRequiredPersistentEntity(ClassWithOptionalField.class); + + assertThat(context.getPersistentEntities()).map(it -> it.getType()).doesNotContain((Class) + Optional.class).contains((Class)Person.class); } public class SampleClass { @@ -232,4 +260,18 @@ class ClassWithInvalidTextScoreProperty { @TextScore Locale score; } + + class ClassWithChronoUnit { + + ChronoUnit unit; + } + + interface InterfaceWithMethodReturningOptional { + + Optional getPerson(); + } + + class ClassWithOptionalField { + Optional person; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoPersistentPropertyComparatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoPersistentPropertyComparatorUnitTests.java index 73b52f3767..9ebeb68ceb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoPersistentPropertyComparatorUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoPersistentPropertyComparatorUnitTests.java @@ -1,40 +1,35 @@ package org.springframework.data.mongodb.core.mapping; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; import java.util.Arrays; import java.util.Collections; import java.util.List; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity.MongoPersistentPropertyComparator; +import org.mockito.junit.jupiter.MockitoExtension; -import static org.mockito.Mockito.*; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity.MongoPersistentPropertyComparator; /** * Unit tests for {@link MongoPersistentPropertyComparator}. 
- * + * * @author Oliver Gierke */ -@RunWith(MockitoJUnitRunner.class) -public class MongoPersistentPropertyComparatorUnitTests { +@ExtendWith(MockitoExtension.class) +class MongoPersistentPropertyComparatorUnitTests { - @Mock - MongoPersistentProperty firstName; + @Mock MongoPersistentProperty firstName; - @Mock - MongoPersistentProperty lastName; + @Mock MongoPersistentProperty lastName; - @Mock - MongoPersistentProperty ssn; + @Mock MongoPersistentProperty ssn; @Test - public void ordersPropertiesCorrectly() { + void ordersPropertiesCorrectly() { when(ssn.getFieldOrder()).thenReturn(10); when(firstName.getFieldOrder()).thenReturn(20); @@ -43,8 +38,8 @@ public void ordersPropertiesCorrectly() { List properties = Arrays.asList(firstName, lastName, ssn); Collections.sort(properties, MongoPersistentPropertyComparator.INSTANCE); - assertThat(properties.get(0), is(ssn)); - assertThat(properties.get(1), is(firstName)); - assertThat(properties.get(2), is(lastName)); + assertThat(properties.get(0)).isEqualTo(ssn); + assertThat(properties.get(1)).isEqualTo(firstName); + assertThat(properties.get(2)).isEqualTo(lastName); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoVectorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoVectorUnitTests.java new file mode 100644 index 0000000000..31eeebdb83 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoVectorUnitTests.java @@ -0,0 +1,79 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.bson.BinaryVector; +import org.bson.Float32BinaryVector; +import org.junit.jupiter.api.Test; + +import org.springframework.data.domain.Vector; + +/** + * Unit tests for {@link MongoVector}. + * + * @author Mark Paluch + */ +class MongoVectorUnitTests { + + @Test // GH-4706 + void shouldReturnInt8AsFloatingPoints() { + + MongoVector vector = MongoVector.ofInt8(new byte[] { 1, 2, 3 }); + + assertThat(vector.toDoubleArray()).contains(1, 2, 3); + assertThat(vector.toFloatArray()).contains(1, 2, 3); + } + + @Test // GH-4706 + void shouldReturnFloatAsFloatingPoints() { + + MongoVector vector = MongoVector.ofFloat(1f, 2f, 3f); + + assertThat(vector.toDoubleArray()).contains(1, 2, 3); + assertThat(vector.toFloatArray()).contains(1, 2, 3); + } + + @Test // GH-4706 + void ofFloatIsNotEqualToVectorOf() { + + MongoVector mv = MongoVector.ofFloat(1f, 2f, 3f); + Vector v = Vector.of(1f, 2f, 3f); + + assertThat(v).isNotEqualTo(mv); + } + + @Test // GH-4706 + void mongoVectorCanAdaptToFloatVector() { + + Vector v = Vector.of(1f, 2f, 3f); + MongoVector mv = MongoVector.fromFloat(v); + + assertThat(mv.toFloatArray()).isEqualTo(v.toFloatArray()); + assertThat(mv.getSource()).isInstanceOf(Float32BinaryVector.class); + } + + @Test // GH-4706 + void shouldNotReturnFloatsForPackedBit() { + + MongoVector vector = MongoVector.of(BinaryVector.packedBitVector(new byte[] { 1, 2, 3 }, (byte) 1)); + + 
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(vector::toFloatArray); + assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(vector::toDoubleArray); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Person.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Person.java index 3454794b2a..06f0db6c35 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Person.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/Person.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,8 +23,6 @@ import org.springframework.data.mongodb.core.index.CompoundIndex; import org.springframework.data.mongodb.core.index.CompoundIndexes; import org.springframework.data.mongodb.core.index.Indexed; -import org.springframework.data.mongodb.core.mapping.DBRef; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin @@ -33,18 +31,13 @@ @CompoundIndexes({ @CompoundIndex(name = "age_idx", def = "{'lastName': 1, 'age': -1}") }) public class Person { - @Id - private String id; - @Indexed(unique = true) - private Integer ssn; + @Id private String id; + @Indexed(unique = true) private Integer ssn; private String firstName; - @Indexed - private String lastName; + @Indexed private String lastName; private Integer age; - @Transient - private Integer accountTotal; - 
@DBRef - private List accounts; + @Transient private Integer accountTotal; + @DBRef private List accounts; private T address; public Person(Integer ssn) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection1.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection1.java index 96cfff4212..f45c07984a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection1.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection1.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,8 +23,7 @@ @Document("person1") public class PersonCustomCollection1 extends BasePerson { - @Id - private String id; + @Id private String id; public PersonCustomCollection1(Integer ssn, String firstName, String lastName) { super(ssn, firstName, lastName); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection2.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection2.java index 091d2dca7b..581c56ca7a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection2.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomCollection2.java @@ -1,11 +1,11 @@ /* - * Copyright 
2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,8 +23,7 @@ @Document("person2") public class PersonCustomCollection2 extends BasePerson { - @Id - private String id; + @Id private String id; public PersonCustomCollection2(Integer ssn, String firstName, String lastName) { super(ssn, firstName, lastName); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomIdName.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomIdName.java index b867e5d0d1..a68fe0d531 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomIdName.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonCustomIdName.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMapProperty.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMapProperty.java index e80b510cb4..ce746eed10 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMapProperty.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMapProperty.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,7 +18,6 @@ import java.util.Map; import org.bson.types.ObjectId; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiCollection.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiCollection.java index e467007136..6ce07bbae8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiCollection.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiCollection.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiDimArrays.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiDimArrays.java index 4bf09a9107..d98c617c1f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiDimArrays.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonMultiDimArrays.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonNullProperties.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonNullProperties.java index 4cd8f9cdf8..8846fb161a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonNullProperties.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonNullProperties.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,8 +16,8 @@ package org.springframework.data.mongodb.core.mapping; import org.bson.types.ObjectId; + import org.springframework.data.annotation.Id; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin @@ -25,11 +25,9 @@ @Document public class PersonNullProperties extends BasePerson { - @Id - private ObjectId id; + @Id private ObjectId id; - public PersonNullProperties() { - } + public PersonNullProperties() {} public ObjectId getId() { return id; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoIntId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoIntId.java index 892daff483..6133f8f5a9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoIntId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoIntId.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoLongId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoLongId.java index b945405798..e3034e0f73 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoLongId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoLongId.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoStringId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoStringId.java index 0970b98135..29d8682355 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoStringId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonPojoStringId.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonSimpleList.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonSimpleList.java index 62308a959c..e228c5b832 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonSimpleList.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonSimpleList.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithDbRef.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithDbRef.java index 0e852a5bf2..86baa78a2b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithDbRef.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithDbRef.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. 
+ * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,6 @@ */ package org.springframework.data.mongodb.core.mapping; - /** * @author Jon Brisbin */ diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithObjectId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithObjectId.java index b7ec39b538..68cfa2653b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithObjectId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PersonWithObjectId.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PrimitiveId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PrimitiveId.java index 11a665a265..f21e1ade23 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PrimitiveId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/PrimitiveId.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +16,6 @@ package org.springframework.data.mongodb.core.mapping; import org.springframework.data.annotation.Id; -import org.springframework.data.mongodb.core.mapping.Document; /** * @author Jon Brisbin @@ -24,8 +23,7 @@ @Document public class PrimitiveId { - @Id - int id; + @Id int id; String text; public PrimitiveId(Integer id) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListenerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListenerUnitTests.java index d238cc969f..7ce3a8b9c4 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListenerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AbstractMongoEventListenerUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,20 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; + +import java.time.Instant; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.context.support.AbstractApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.data.mongodb.core.mapping.Account; import org.springframework.data.mongodb.repository.Contact; import org.springframework.data.mongodb.repository.Person; +import com.mongodb.BasicDBObject; + /** * Unit tests for {@link AbstractMongoEventListener}. 
* @@ -40,7 +43,7 @@ public void invokesCallbackForEventForPerson() { MongoMappingEvent event = new BeforeConvertEvent(new Person("Dave", "Matthews"), "collection-1"); SamplePersonEventListener listener = new SamplePersonEventListener(); listener.onApplicationEvent(event); - assertThat(listener.invokedOnBeforeConvert, is(true)); + assertThat(listener.invokedOnBeforeConvert).isTrue(); } @Test @@ -53,11 +56,11 @@ public void dropsEventIfNotForCorrectDomainType() { context.addApplicationListener(listener); context.publishEvent(new BeforeConvertEvent(new Person("Dave", "Matthews"), "collection-1")); - assertThat(listener.invokedOnBeforeConvert, is(true)); + assertThat(listener.invokedOnBeforeConvert).isTrue(); listener.invokedOnBeforeConvert = false; context.publishEvent(new BeforeConvertEvent("Test", "collection-1")); - assertThat(listener.invokedOnBeforeConvert, is(false)); + assertThat(listener.invokedOnBeforeConvert).isFalse(); context.close(); } @@ -67,7 +70,7 @@ public void afterLoadEffectGetsHandledCorrectly() { SamplePersonEventListener listener = new SamplePersonEventListener(); listener.onApplicationEvent(new AfterLoadEvent(new Document(), Person.class, "collection-1")); - assertThat(listener.invokedOnAfterLoad, is(true)); + assertThat(listener.invokedOnAfterLoad).isTrue(); } @Test // DATAMONGO-289 @@ -78,8 +81,8 @@ public void afterLoadEventGetsFilteredForDomainType() { personListener.onApplicationEvent(new AfterLoadEvent(new Document(), Person.class, "collection-1")); accountListener.onApplicationEvent(new AfterLoadEvent(new Document(), Person.class, "collection-1")); - assertThat(personListener.invokedOnAfterLoad, is(true)); - assertThat(accountListener.invokedOnAfterLoad, is(false)); + assertThat(personListener.invokedOnAfterLoad).isTrue(); + assertThat(accountListener.invokedOnAfterLoad).isFalse(); } @Test // DATAMONGO-289 @@ -90,8 +93,8 @@ public void afterLoadEventGetsFilteredForDomainTypeWorksForSubtypes() { personListener.onApplicationEvent(new 
AfterLoadEvent(new Document(), Person.class, "collection-1")); contactListener.onApplicationEvent(new AfterLoadEvent(new Document(), Person.class, "collection-1")); - assertThat(personListener.invokedOnAfterLoad, is(true)); - assertThat(contactListener.invokedOnAfterLoad, is(true)); + assertThat(personListener.invokedOnAfterLoad).isTrue(); + assertThat(contactListener.invokedOnAfterLoad).isTrue(); } @Test // DATAMONGO-289 @@ -102,8 +105,8 @@ public void afterLoadEventGetsFilteredForDomainTypeWorksForSubtypes2() { personListener.onApplicationEvent(new AfterLoadEvent(new Document(), Contact.class, "collection-1")); contactListener.onApplicationEvent(new AfterLoadEvent(new Document(), Contact.class, "collection-1")); - assertThat(personListener.invokedOnAfterLoad, is(false)); - assertThat(contactListener.invokedOnAfterLoad, is(true)); + assertThat(personListener.invokedOnAfterLoad).isFalse(); + assertThat(contactListener.invokedOnAfterLoad).isTrue(); } @Test // DATAMONGO-333 @@ -121,7 +124,7 @@ public void invokeContactCallbackForPersonEvent() { SampleContactEventListener listener = new SampleContactEventListener(); listener.onApplicationEvent(event); - assertThat(listener.invokedOnBeforeDelete, is(true)); + assertThat(listener.invokedOnBeforeDelete).isTrue(); } @Test // DATAMONGO-545 @@ -131,7 +134,7 @@ public void invokePersonCallbackForPersonEvent() { SamplePersonEventListener listener = new SamplePersonEventListener(); listener.onApplicationEvent(event); - assertThat(listener.invokedOnBeforeDelete, is(true)); + assertThat(listener.invokedOnBeforeDelete).isTrue(); } @Test // DATAMONGO-545 @@ -141,7 +144,7 @@ public void dontInvokePersonCallbackForAccountEvent() { SamplePersonEventListener listener = new SamplePersonEventListener(); listener.onApplicationEvent(event); - assertThat(listener.invokedOnBeforeDelete, is(false)); + assertThat(listener.invokedOnBeforeDelete).isFalse(); } @Test // DATAMONGO-545 @@ -151,7 +154,15 @@ public void 
donInvokePersonCallbackForUntypedEvent() { SamplePersonEventListener listener = new SamplePersonEventListener(); listener.onApplicationEvent(event); - assertThat(listener.invokedOnBeforeDelete, is(false)); + assertThat(listener.invokedOnBeforeDelete).isFalse(); + } + + @Test // GH-3968 + public void debugLogShouldNotFailMongoDBCodecError() { + + MongoMappingEvent event = new BeforeConvertEvent<>(new BasicDBObject("date", Instant.now()), "collection-1"); + UntypedEventListener listener = new UntypedEventListener(); + listener.onApplicationEvent(event); } class SamplePersonEventListener extends AbstractMongoEventListener { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveListener.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveListener.java index 26d376c62a..bef2ae92fa 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveListener.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AfterSaveListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,11 +20,11 @@ import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationListener; -public class AfterSaveListener implements ApplicationListener> { +public class AfterSaveListener implements ApplicationListener> { public final ArrayList seenEvents = new ArrayList(); - public void onApplicationEvent(AfterSaveEvent event) { + public void onApplicationEvent(AfterSaveEvent event) { this.seenEvents.add(event); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTests.java index dcca0927a0..9bc1dc78aa 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,29 +20,33 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import lombok.Data; - import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.springframework.context.ApplicationContext; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.mapping.PersonPojoStringId; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.QPerson; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.mongodb.repository.support.MongoRepositoryFactory; +import org.springframework.data.mongodb.repository.support.QuerydslMongoPredicateExecutor; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.annotation.DirtiesContext; -import 
com.mongodb.MongoClient; import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoDatabase; /** @@ -53,6 +57,7 @@ * @author Jordi Llach * @author Mark Paluch */ +@ExtendWith({ MongoClientExtension.class }) public class ApplicationContextEventTests { private static final String COLLECTION_NAME = "personPojoStringId"; @@ -62,40 +67,33 @@ public class ApplicationContextEventTests { private final String[] collectionsToDrop = new String[] { COLLECTION_NAME, ROOT_COLLECTION_NAME, RELATED_COLLECTION_NAME }; - private static MongoClient mongo; - private ApplicationContext applicationContext; + static @Client MongoClient mongoClient; + + private ConfigurableApplicationContext applicationContext; private MongoTemplate template; private SimpleMappingEventListener listener; - @BeforeClass - public static void beforeClass() { - mongo = new MongoClient(); - } - - @AfterClass - public static void afterClass() { - mongo.close(); - } - - @Before + @BeforeEach public void setUp() { cleanDb(); applicationContext = new AnnotationConfigApplicationContext(ApplicationContextEventTestsAppConfig.class); template = applicationContext.getBean(MongoTemplate.class); - template.setWriteConcern(WriteConcern.FSYNC_SAFE); + template.setWriteConcern(WriteConcern.JOURNALED); listener = applicationContext.getBean(SimpleMappingEventListener.class); } - @After + @AfterEach public void cleanUp() { + cleanDb(); + applicationContext.close(); } private void cleanDb() { - MongoDatabase db = mongo.getDatabase("database"); + MongoDatabase db = mongoClient.getDatabase("database"); for (String coll : collectionsToDrop) { db.getCollection(coll).drop(); } @@ -126,8 +124,8 @@ public void beforeSaveEvent() { assertThat(listener.onBeforeSaveEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); assertThat(listener.onAfterSaveEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); - Assert.assertTrue(personBeforeSaveListener.seenEvents.get(0) instanceof 
BeforeSaveEvent); - Assert.assertTrue(afterSaveListener.seenEvents.get(0) instanceof AfterSaveEvent); + assertThat(personBeforeSaveListener.seenEvents.get(0) instanceof BeforeSaveEvent).isTrue(); + assertThat(afterSaveListener.seenEvents.get(0) instanceof AfterSaveEvent).isTrue(); BeforeSaveEvent beforeSaveEvent = (BeforeSaveEvent) personBeforeSaveListener.seenEvents .get(0); @@ -137,7 +135,7 @@ public void beforeSaveEvent() { comparePersonAndDocument(p, p2, document); AfterSaveEvent afterSaveEvent = (AfterSaveEvent) afterSaveListener.seenEvents.get(0); - Assert.assertTrue(afterSaveEvent.getSource() instanceof PersonPojoStringId); + assertThat(afterSaveEvent.getSource() instanceof PersonPojoStringId).isTrue(); p2 = (PersonPojoStringId) afterSaveEvent.getSource(); document = beforeSaveEvent.getDocument(); @@ -162,6 +160,22 @@ public void loadAndConvertEvents() { assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); } + @Test // GH-4107 + @DirtiesContext + public void configurationShouldDisableLifecycleEvents() { + + template.setEntityLifecycleEventsEnabled(false); + + PersonPojoStringId entity = new PersonPojoStringId("1", "Text"); + template.insert(entity); + + template.findOne(query(where("id").is(entity.getId())), PersonPojoStringId.class); + + assertThat(listener.onAfterLoadEvents).isEmpty(); + assertThat(listener.onBeforeConvertEvents).isEmpty(); + assertThat(listener.onAfterConvertEvents).isEmpty(); + } + @Test // DATAMONGO-1256 public void loadEventsOnAggregation() { @@ -386,6 +400,29 @@ public void publishesAfterConvertEventForFindQueriesUsingProjections() { assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo(COLLECTION_NAME); } + @Test // DATAMONGO-700, DATAMONGO-1185, DATAMONGO-1848 + public void publishesEventsForQuerydslFindQueries() { + + template.dropCollection(Person.class); + + template.save(new Person("Boba", "Fett", 40)); + + MongoRepositoryFactory factory = new 
MongoRepositoryFactory(template); + MongoEntityInformation entityInformation = factory.getEntityInformation(Person.class); + QuerydslMongoPredicateExecutor executor = new QuerydslMongoPredicateExecutor<>(entityInformation, template); + + executor.findOne(QPerson.person.lastname.startsWith("Fe")); + + assertThat(listener.onAfterLoadEvents).hasSize(1); + assertThat(listener.onAfterLoadEvents.get(0).getCollectionName()).isEqualTo("person"); + + assertThat(listener.onBeforeConvertEvents).hasSize(1); + assertThat(listener.onBeforeConvertEvents.get(0).getCollectionName()).isEqualTo("person"); + + assertThat(listener.onAfterConvertEvents).hasSize(1); + assertThat(listener.onAfterConvertEvents.get(0).getCollectionName()).isEqualTo("person"); + } + private void comparePersonAndDocument(PersonPojoStringId p, PersonPojoStringId p2, org.bson.Document document) { assertThat(p2.getId()).isEqualTo(p.getId()); @@ -396,7 +433,6 @@ private void comparePersonAndDocument(PersonPojoStringId p, PersonPojoStringId p assertTypeHint(document, PersonPojoStringId.class); } - @Data @org.springframework.data.mongodb.core.mapping.Document public static class Root { @@ -410,13 +446,100 @@ public static class Root { @DBRef Map mapOfReferences; @DBRef(lazy = true) Map lazyMapOfReferences; + + public Long getId() { + return this.id; + } + + public Related getReference() { + return this.reference; + } + + public Related getLazyReference() { + return this.lazyReference; + } + + public List getListOfReferences() { + return this.listOfReferences; + } + + public List getLazyListOfReferences() { + return this.lazyListOfReferences; + } + + public Map getMapOfReferences() { + return this.mapOfReferences; + } + + public Map getLazyMapOfReferences() { + return this.lazyMapOfReferences; + } + + public void setId(Long id) { + this.id = id; + } + + public void setReference(Related reference) { + this.reference = reference; + } + + public void setLazyReference(Related lazyReference) { + this.lazyReference = 
lazyReference; + } + + public void setListOfReferences(List listOfReferences) { + this.listOfReferences = listOfReferences; + } + + public void setLazyListOfReferences(List lazyListOfReferences) { + this.lazyListOfReferences = lazyListOfReferences; + } + + public void setMapOfReferences(Map mapOfReferences) { + this.mapOfReferences = mapOfReferences; + } + + public void setLazyMapOfReferences(Map lazyMapOfReferences) { + this.lazyMapOfReferences = lazyMapOfReferences; + } + + public String toString() { + return "ApplicationContextEventTests.Root(id=" + this.getId() + ", reference=" + this.getReference() + + ", lazyReference=" + this.getLazyReference() + ", listOfReferences=" + this.getListOfReferences() + + ", lazyListOfReferences=" + this.getLazyListOfReferences() + ", mapOfReferences=" + + this.getMapOfReferences() + ", lazyMapOfReferences=" + this.getLazyMapOfReferences() + ")"; + } } - @Data @org.springframework.data.mongodb.core.mapping.Document public static class Related { - final @Id Long id; - final String description; + @Id Long id; + String description; + + public Related(Long id, String description) { + this.id = id; + this.description = description; + } + + public Long getId() { + return this.id; + } + + public String getDescription() { + return this.description; + } + + public void setId(Long id) { + this.id = id; + } + + public void setDescription(String description) { + this.description = description; + } + + public String toString() { + return "ApplicationContextEventTests.Related(id=" + this.getId() + ", description=" + this.getDescription() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTestsAppConfig.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTestsAppConfig.java index 78758676c6..8c5aad8b1a 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTestsAppConfig.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ApplicationContextEventTestsAppConfig.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,12 +17,13 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; +import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration; +import org.springframework.data.mongodb.test.util.MongoTestUtils; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; @Configuration -public class ApplicationContextEventTestsAppConfig extends AbstractMongoConfiguration { +public class ApplicationContextEventTestsAppConfig extends MongoClientClosingTestConfiguration { @Override public String getDatabaseName() { @@ -32,7 +33,7 @@ public String getDatabaseName() { @Override @Bean public MongoClient mongoClient() { - return new MongoClient("127.0.0.1"); + return MongoTestUtils.client(); } @Bean diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AuditingEntityCallbackUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AuditingEntityCallbackUnitTests.java new file mode 100644 index 0000000000..7d01c30345 --- 
/dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AuditingEntityCallbackUnitTests.java @@ -0,0 +1,207 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping.event; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.util.Arrays; +import java.util.Date; +import java.util.Objects; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.core.Ordered; +import org.springframework.data.annotation.CreatedDate; +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.LastModifiedDate; +import org.springframework.data.auditing.IsNewAwareAuditingHandler; +import org.springframework.data.mapping.context.PersistentEntities; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Unwrapped; + +/** + * Unit tests for {@link AuditingEntityCallback}. 
+ * + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +public class AuditingEntityCallbackUnitTests { + + private final MongoMappingContext mappingContext = new MongoMappingContext(); + + private IsNewAwareAuditingHandler handler; + private AuditingEntityCallback callback; + + @BeforeEach + void setUp() { + + mappingContext.getPersistentEntity(Sample.class); + + handler = spy(new IsNewAwareAuditingHandler(new PersistentEntities(Arrays.asList(mappingContext)))); + + callback = new AuditingEntityCallback(() -> handler); + } + + @Test // DATAMONGO-2261 + void rejectsNullAuditingHandler() { + assertThatIllegalArgumentException().isThrownBy(() -> new AuditingEntityCallback(null)); + } + + @Test // DATAMONGO-2261 + void triggersCreationMarkForObjectWithEmptyId() { + + Sample sample = new Sample(); + callback.onBeforeConvert(sample, "foo"); + + verify(handler, times(1)).markCreated(sample); + verify(handler, times(0)).markModified(any()); + } + + @Test // DATAMONGO-2261 + void triggersModificationMarkForObjectWithSetId() { + + Sample sample = new Sample(); + sample.id = "id"; + callback.onBeforeConvert(sample, "foo"); + + verify(handler, times(0)).markCreated(any()); + verify(handler, times(1)).markModified(sample); + } + + @Test // DATAMONGO-2261 + void hasExplicitOrder() { + + assertThat(callback).isInstanceOf(Ordered.class); + assertThat(callback.getOrder()).isEqualTo(100); + } + + @Test // DATAMONGO-2261 + void propagatesChangedInstanceToEvent() { + + ImmutableSample sample = new ImmutableSample(); + + ImmutableSample newSample = new ImmutableSample(); + IsNewAwareAuditingHandler handler = mock(IsNewAwareAuditingHandler.class); + doReturn(newSample).when(handler).markAudited(eq(sample)); + + AuditingEntityCallback listener = new AuditingEntityCallback(() -> handler); + Object result = listener.onBeforeConvert(sample, "foo"); + + assertThat(result).isSameAs(newSample); + } + + @Test // GH-4732 + void shouldApplyAuditingToUnwrappedImmutableObject() { + + 
WithUnwrapped sample = new WithUnwrapped(); + sample.auditingData = new MyAuditingData(null, null); + + IsNewAwareAuditingHandler handler = new IsNewAwareAuditingHandler(PersistentEntities.of(mappingContext)); + + AuditingEntityCallback listener = new AuditingEntityCallback(() -> handler); + WithUnwrapped result = (WithUnwrapped) listener.onBeforeConvert(sample, "foo"); + + assertThat(result.auditingData.created).isNotNull(); + assertThat(result.auditingData.modified).isNotNull(); + } + + static class Sample { + + @Id String id; + @CreatedDate Date created; + @LastModifiedDate Date modified; + } + + static class WithUnwrapped { + + @Id String id; + + @Unwrapped(onEmpty = Unwrapped.OnEmpty.USE_NULL) MyAuditingData auditingData; + + } + + record MyAuditingData(@CreatedDate Date created, @LastModifiedDate Date modified) { + + } + + private static final class ImmutableSample { + + @Id private final String id; + @CreatedDate private final Date created; + @LastModifiedDate private final Date modified; + + public ImmutableSample() { + this(null, null, null); + } + + public ImmutableSample(String id, Date created, Date modified) { + this.id = id; + this.created = created; + this.modified = modified; + } + + public String getId() { + return this.id; + } + + public Date getCreated() { + return this.created; + } + + public Date getModified() { + return this.modified; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ImmutableSample that = (ImmutableSample) o; + return Objects.equals(id, that.id) && Objects.equals(created, that.created) + && Objects.equals(modified, that.modified); + } + + @Override + public int hashCode() { + return Objects.hash(id, created, modified); + } + + public String toString() { + return "AuditingEntityCallbackUnitTests.ImmutableSample(id=" + this.getId() + ", created=" + this.getCreated() + + ", modified=" + this.getModified() + ")"; + } + 
+ public ImmutableSample withId(String id) { + return this.id == id ? this : new ImmutableSample(id, this.created, this.modified); + } + + public ImmutableSample withCreated(Date created) { + return this.created == created ? this : new ImmutableSample(this.id, created, this.modified); + } + + public ImmutableSample withModified(Date modified) { + return this.modified == modified ? this : new ImmutableSample(this.id, this.created, modified); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AuditingEventListenerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AuditingEventListenerUnitTests.java deleted file mode 100644 index de0c32fab2..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/AuditingEventListenerUnitTests.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2012-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core.mapping.event; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; - -import java.util.Arrays; -import java.util.Date; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.core.Ordered; -import org.springframework.data.annotation.CreatedDate; -import org.springframework.data.annotation.Id; -import org.springframework.data.annotation.LastModifiedDate; -import org.springframework.data.auditing.IsNewAwareAuditingHandler; -import org.springframework.data.mapping.context.PersistentEntities; -import org.springframework.data.mongodb.core.mapping.MongoMappingContext; - -/** - * Unit tests for {@link AuditingEventListener}. - * - * @author Oliver Gierke - * @author Thomas Darimont - */ -@RunWith(MockitoJUnitRunner.class) -public class AuditingEventListenerUnitTests { - - IsNewAwareAuditingHandler handler; - AuditingEventListener listener; - - @Before - public void setUp() { - - MongoMappingContext mappingContext = new MongoMappingContext(); - mappingContext.getPersistentEntity(Sample.class); - - handler = spy(new IsNewAwareAuditingHandler(new PersistentEntities(Arrays.asList(mappingContext)))); - doNothing().when(handler).markCreated(any()); - doNothing().when(handler).markModified(any()); - - listener = new AuditingEventListener(() -> handler); - } - - @Test(expected = IllegalArgumentException.class) // DATAMONGO-577 - public void rejectsNullAuditingHandler() { - new AuditingEventListener(null); - } - - @Test // DATAMONGO-577 - public void triggersCreationMarkForObjectWithEmptyId() { - - Sample sample = new Sample(); - listener.onApplicationEvent(new BeforeConvertEvent(sample, "collection-1")); - - verify(handler, times(1)).markCreated(sample); - verify(handler, times(0)).markModified(any()); - } - - 
@Test // DATAMONGO-577 - public void triggersModificationMarkForObjectWithSetId() { - - Sample sample = new Sample(); - sample.id = "id"; - listener.onApplicationEvent(new BeforeConvertEvent(sample, "collection-1")); - - verify(handler, times(0)).markCreated(any()); - verify(handler, times(1)).markModified(sample); - } - - @Test - public void hasExplicitOrder() { - - assertThat(listener, is(instanceOf(Ordered.class))); - assertThat(listener.getOrder(), is(100)); - } - - static class Sample { - - @Id String id; - @CreatedDate Date created; - @LastModifiedDate Date modified; - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListenerTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListenerTests.java index 63c209952a..772ed3cecb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListenerTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/LoggingEventListenerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,16 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import static org.hamcrest.core.StringStartsWith.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.read.ListAppender; import org.bson.Document; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.slf4j.LoggerFactory; /** @@ -40,10 +39,10 @@ public class LoggingEventListenerTests { ch.qos.logback.classic.Logger logger; LoggingEventListener listener; - @Before + @BeforeEach public void setUp() { - appender = new ListAppender(); + appender = new ListAppender<>(); // set log level for LoggingEventListener to "info" and set up an appender capturing events. 
logger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(LoggingEventListener.class); @@ -57,7 +56,7 @@ public void setUp() { listener = new LoggingEventListener(); } - @After + @AfterEach public void tearDown() { // cleanup @@ -78,7 +77,7 @@ public void shouldSerializeAfterConvertEventCorrectly() { listener.onAfterConvert(new AfterConvertEvent(new Document("foo", new Foo()), this, "collection")); - assertThat(appender.list.get(0).getFormattedMessage(), startsWith("onAfterConvert: { \"foo\"")); + assertThat(appender.list.get(0).getFormattedMessage()).startsWith("onAfterConvert: { \"foo\""); } @Test // DATAMONGO-1645 @@ -86,8 +85,8 @@ public void shouldSerializeBeforeSaveEventEventCorrectly() { listener.onBeforeSave(new BeforeSaveEvent(new Foo(), new Document("foo", new Foo()), "collection")); - assertThat(appender.list.get(0).getFormattedMessage(), - startsWith("onBeforeSave: org.springframework.data.mongodb.core.")); + assertThat(appender.list.get(0).getFormattedMessage()) + .startsWith("onBeforeSave: org.springframework.data.mongodb.core."); } @Test // DATAMONGO-1645 @@ -95,8 +94,8 @@ public void shouldSerializeAfterSaveEventEventCorrectly() { listener.onAfterSave(new AfterSaveEvent(new Foo(), new Document("foo", new Foo()), "collection")); - assertThat(appender.list.get(0).getFormattedMessage(), - startsWith("onAfterSave: org.springframework.data.mongodb.core.")); + assertThat(appender.list.get(0).getFormattedMessage()) + .startsWith("onAfterSave: org.springframework.data.mongodb.core."); } @Test // DATAMONGO-1645 @@ -104,7 +103,7 @@ public void shouldSerializeBeforeDeleteEventEventCorrectly() { listener.onBeforeDelete(new BeforeDeleteEvent(new Document("foo", new Foo()), Object.class, "collection")); - assertThat(appender.list.get(0).getFormattedMessage(), startsWith("onBeforeDelete: { \"foo\"")); + assertThat(appender.list.get(0).getFormattedMessage()).startsWith("onBeforeDelete: { \"foo\""); } @Test // DATAMONGO-1645 @@ -112,7 +111,7 @@ public void 
shouldSerializeAfterDeleteEventEventCorrectly() { listener.onAfterDelete(new AfterDeleteEvent(new Document("foo", new Foo()), Object.class, "collection")); - assertThat(appender.list.get(0).getFormattedMessage(), startsWith("onAfterDelete: { \"foo\"")); + assertThat(appender.list.get(0).getFormattedMessage()).startsWith("onAfterDelete: { \"foo\""); } static class Foo { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/PersonBeforeSaveListener.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/PersonBeforeSaveListener.java index 2aa30d476c..e05efb6a45 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/PersonBeforeSaveListener.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/PersonBeforeSaveListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -25,10 +25,6 @@ public class PersonBeforeSaveListener extends AbstractMongoEventListener seenEvents = new ArrayList(); - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener#onBeforeSave(java.lang.Object, com.mongodb.Document) - */ @Override public void onBeforeSave(BeforeSaveEvent event) { seenEvents.add(event); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ReactiveValidatingEntityCallbackUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ReactiveValidatingEntityCallbackUnitTests.java new file mode 100644 index 0000000000..c0db92a3d9 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ReactiveValidatingEntityCallbackUnitTests.java @@ -0,0 +1,79 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping.event; + +import jakarta.validation.ConstraintViolationException; +import jakarta.validation.Validation; +import jakarta.validation.ValidatorFactory; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.NotNull; +import reactor.test.StepVerifier; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ReactiveValidatingEntityCallback}. + * + * @author Mark Paluch + * @author Rene Felgenträger + */ +class ReactiveValidatingEntityCallbackUnitTests { + + private ReactiveValidatingEntityCallback callback; + + @BeforeEach + void setUp() { + try (ValidatorFactory factory = Validation.buildDefaultValidatorFactory()) { + callback = new ReactiveValidatingEntityCallback(factory.getValidator()); + } + } + + @Test // GH-4910 + void validationThrowsException() { + + Coordinates coordinates = new Coordinates(-1, -1); + + callback.onBeforeSave(coordinates, coordinates.toDocument(), "coordinates") // + .as(StepVerifier::create) // + .verifyError(ConstraintViolationException.class); + } + + @Test // GH-4910 + void validateSuccessful() { + + Coordinates coordinates = new Coordinates(0, 0); + + callback.onBeforeSave(coordinates, coordinates.toDocument(), "coordinates") // + .as(StepVerifier::create) // + .expectNext(coordinates) // + .verifyComplete(); + } + + record Coordinates(@NotNull @Min(0) Integer x, @NotNull @Min(0) Integer y) { + + Document toDocument() { + return Document.parse(""" + { + "x": %d, + "y": %d + } + """.formatted(x, y)); + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/SimpleMappingEventListener.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/SimpleMappingEventListener.java index fcdf74e5ec..1d77bb0e2f 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/SimpleMappingEventListener.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/SimpleMappingEventListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/User.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/User.java index 707a5e848e..8727f1dfe2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/User.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/User.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,8 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import javax.validation.constraints.Min; -import javax.validation.constraints.Size; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.Size; /** * Class used to test JSR-303 validation @@ -26,11 +26,9 @@ */ public class User { - @Size(min = 10) - private String name; + @Size(min = 10) private String name; - @Min(18) - private Integer age; + @Min(18) private Integer age; public User(String name, Integer age) { this.name = name; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingEntityCallbackUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingEntityCallbackUnitTests.java new file mode 100644 index 0000000000..e20da176b3 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingEntityCallbackUnitTests.java @@ -0,0 +1,78 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping.event; + +import static org.assertj.core.api.Assertions.*; + +import jakarta.validation.ConstraintViolationException; +import jakarta.validation.Validation; +import jakarta.validation.ValidatorFactory; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.NotNull; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link ValidatingEntityCallback}. + * + * @author Rene Felgenträger + * @author Mark Paluch + */ +class ValidatingEntityCallbackUnitTests { + + private ValidatingEntityCallback callback; + + @BeforeEach + void setUp() { + try (ValidatorFactory factory = Validation.buildDefaultValidatorFactory()) { + callback = new ValidatingEntityCallback(factory.getValidator()); + } + } + + @Test // GH-4910 + void validationThrowsException() { + + Coordinates coordinates = new Coordinates(-1, -1); + + assertThatExceptionOfType(ConstraintViolationException.class).isThrownBy( + () -> callback.onBeforeSave(coordinates, coordinates.toDocument(), "coordinates")) + .satisfies(e -> assertThat(e.getConstraintViolations()).hasSize(2)); + } + + @Test // GH-4910 + void validateSuccessful() { + + Coordinates coordinates = new Coordinates(0, 0); + Object entity = callback.onBeforeSave(coordinates, coordinates.toDocument(), "coordinates"); + + assertThat(entity).isEqualTo(coordinates); + } + + record Coordinates(@NotNull @Min(0) Integer x, @NotNull @Min(0) Integer y) { + + Document toDocument() { + return Document.parse(""" + { + "x": %d, + "y": %d + } + """.formatted(x, y)); + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests.java index 6d71db0879..a1253bf98a 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,20 +15,17 @@ */ package org.springframework.data.mongodb.core.mapping.event; -import static org.hamcrest.core.IsEqual.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import javax.validation.ConstraintViolationException; +import jakarta.validation.ConstraintViolationException; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.Version; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; /** * Integration test for {@link ValidatingMongoEventListener}. 
@@ -37,29 +34,25 @@ * @author Oliver Gierke * @author Christoph Strobl */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith(SpringExtension.class) @ContextConfiguration -public class ValidatingMongoEventListenerTests { - - public static @ClassRule MongoVersionRule version = MongoVersionRule.atLeast(new Version(2, 6)); +class ValidatingMongoEventListenerTests { @Autowired MongoTemplate mongoTemplate; @Test // DATAMONGO-36 - public void shouldThrowConstraintViolationException() { + void shouldThrowConstraintViolationException() { User user = new User("john", 17); - try { - mongoTemplate.save(user); - fail(); - } catch (ConstraintViolationException e) { - assertThat(e.getConstraintViolations().size(), equalTo(2)); - } + assertThatExceptionOfType(ConstraintViolationException.class).isThrownBy(() -> mongoTemplate.save(user)) + .satisfies(e -> { + assertThat(e.getConstraintViolations()).hasSize(2); + }); } @Test - public void shouldNotThrowAnyExceptions() { + void shouldNotThrowAnyExceptions() { mongoTemplate.save(new User("john smith", 18)); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/GroupByTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/GroupByTests.java deleted file mode 100644 index f84d68cb39..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/GroupByTests.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.mapreduce; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.mapreduce.GroupBy.*; -import static org.springframework.data.mongodb.core.query.Criteria.*; - -import org.bson.Document; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -import com.mongodb.client.MongoCollection; - -/** - * Integration tests for group-by operations. - * - * @author Mark Pollack - * @author Oliver Gierke - * @author Christoph Strobl - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") -public class GroupByTests { - - @Autowired MongoTemplate mongoTemplate; - - @Before - public void setUp() { - cleanDb(); - } - - @After - public void cleanUp() { - cleanDb(); - } - - protected void cleanDb() { - mongoTemplate.dropCollection(mongoTemplate.getCollectionName(XObject.class)); - mongoTemplate.dropCollection("group_test_collection"); - } - - @Test - public void singleKeyCreation() { - - Document gc = new GroupBy("a").getGroupByObject(); - - assertThat(gc, is(Document.parse("{ \"key\" : { \"a\" : 1} , \"$reduce\" : null , \"initial\" : null }"))); - } - - @Test - public void multipleKeyCreation() { - - Document gc = GroupBy.key("a", "b").getGroupByObject(); - - assertThat(gc, - is(Document.parse("{ \"key\" : { \"a\" : 1 , \"b\" : 1} , \"$reduce\" : null , \"initial\" : null }"))); - } - - @Test - public void keyFunctionCreation() { - - Document gc = 
GroupBy.keyFunction("classpath:keyFunction.js").getGroupByObject(); - - assertThat(gc, is( - Document.parse("{ \"$keyf\" : \"classpath:keyFunction.js\" , \"$reduce\" : null , \"initial\" : null }"))); - } - - @Test - public void simpleGroupFunction() { - - createGroupByData(); - GroupByResults results = mongoTemplate.group("group_test_collection", GroupBy.key("x") - .initialDocument(new Document("count", 0)).reduceFunction("function(doc, prev) { prev.count += 1 }"), - XObject.class); - - assertMapReduceResults(results); - } - - @Test - public void simpleGroupWithKeyFunction() { - - createGroupByData(); - GroupByResults results = mongoTemplate - .group( - "group_test_collection", GroupBy.keyFunction("function(doc) { return { x : doc.x }; }") - .initialDocument("{ count: 0 }").reduceFunction("function(doc, prev) { prev.count += 1 }"), - XObject.class); - - assertMapReduceResults(results); - } - - @Test - public void simpleGroupWithFunctionsAsResources() { - - createGroupByData(); - GroupByResults results = mongoTemplate.group("group_test_collection", - GroupBy.keyFunction("classpath:keyFunction.js").initialDocument("{ count: 0 }") - .reduceFunction("classpath:groupReduce.js"), - XObject.class); - - assertMapReduceResults(results); - } - - @Test - public void simpleGroupWithQueryAndFunctionsAsResources() { - - createGroupByData(); - GroupByResults results = mongoTemplate.group(where("x").gt(0), "group_test_collection", - keyFunction("classpath:keyFunction.js").initialDocument("{ count: 0 }") - .reduceFunction("classpath:groupReduce.js"), - XObject.class); - - assertMapReduceResults(results); - } - - private void assertMapReduceResults(GroupByResults results) { - - int numResults = 0; - for (XObject xObject : results) { - if (xObject.getX() == 1) { - Assert.assertEquals(2, xObject.getCount(), 0.001); - } - if (xObject.getX() == 2) { - Assert.assertEquals(1, xObject.getCount(), 0.001); - } - if (xObject.getX() == 3) { - Assert.assertEquals(3, xObject.getCount(), 0.001); 
- } - numResults++; - } - assertThat(numResults, is(3)); - assertThat(results.getKeys(), is(3)); - assertEquals(6, results.getCount(), 0.001); - } - - private void createGroupByData() { - - MongoCollection c = mongoTemplate.getDb().getCollection("group_test_collection", Document.class); - - c.insertOne(new Document("x", 1)); - c.insertOne(new Document("x", 1)); - c.insertOne(new Document("x", 2)); - c.insertOne(new Document("x", 3)); - c.insertOne(new Document("x", 3)); - c.insertOne(new Document("x", 3)); - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCountsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCountsUnitTests.java index 9bd7141d91..687786456c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCountsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceCountsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,9 @@ */ package org.springframework.data.mongodb.core.mapreduce; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link MapReduceCounts}. 
@@ -33,9 +32,9 @@ public void equalsForSameNumberValues() { MapReduceCounts left = new MapReduceCounts(1L, 1L, 1L); MapReduceCounts right = new MapReduceCounts(1L, 1L, 1L); - assertThat(left, is(right)); - assertThat(right, is(left)); - assertThat(left.hashCode(), is(right.hashCode())); + assertThat(left).isEqualTo(right); + assertThat(right).isEqualTo(left); + assertThat(left.hashCode()).isEqualTo(right.hashCode()); } @Test // DATACMNS-378 @@ -44,8 +43,8 @@ public void notEqualForDifferentNumberValues() { MapReduceCounts left = new MapReduceCounts(1L, 1L, 1L); MapReduceCounts right = new MapReduceCounts(1L, 2L, 1L); - assertThat(left, is(not(right))); - assertThat(right, is(not(left))); - assertThat(left.hashCode(), is(not(right.hashCode()))); + assertThat(left).isNotEqualTo(right); + assertThat(right).isNotEqualTo(left); + assertThat(left.hashCode()).isNotEqualTo(right.hashCode()); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptionsTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptionsTests.java index 9a73663938..a3a2161845 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptionsTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceOptionsTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,9 @@ */ package org.springframework.data.mongodb.core.mapreduce; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * @author Mark Pollack @@ -38,11 +37,11 @@ public void limitShouldBeIncludedCorrectly() { MapReduceOptions options = new MapReduceOptions(); options.limit(10); - assertThat(options.getOptionsObject(), isBsonObject().containing("limit", 10)); + assertThat(options.getOptionsObject()).containsEntry("limit", 10); } @Test // DATAMONGO-1334 public void limitShouldNotBePresentInDocumentWhenNotSet() { - assertThat(new MapReduceOptions().getOptionsObject(), isBsonObject().notContaining("limit")); + assertThat(new MapReduceOptions().getOptionsObject()).doesNotContainKey("limit"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResultsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResultsUnitTests.java index f6144b711a..c34fa32be0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResultsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceResultsUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,12 @@ */ package org.springframework.data.mongodb.core.mapreduce; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Collections; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link MapReduceResults}. @@ -36,7 +35,7 @@ public void resolvesOutputCollectionForPlainResult() { Document rawResult = new Document("result", "FOO"); MapReduceResults results = new MapReduceResults(Collections.emptyList(), rawResult); - assertThat(results.getOutputCollection(), is("FOO")); + assertThat(results.getOutputCollection()).isEqualTo("FOO"); } @Test // DATAMONGO-428 @@ -45,7 +44,7 @@ public void resolvesOutputCollectionForDocumentResult() { Document rawResult = new Document("result", new Document("collection", "FOO")); MapReduceResults results = new MapReduceResults(Collections.emptyList(), rawResult); - assertThat(results.getOutputCollection(), is("FOO")); + assertThat(results.getOutputCollection()).isEqualTo("FOO"); } @Test // DATAMONGO-378 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTests.java index 03cb53ffe9..c265a9b739 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/MapReduceTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,7 @@ */ package org.springframework.data.mongodb.core.mapreduce; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.mapreduce.MapReduceOptions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; @@ -28,7 +28,6 @@ import org.bson.Document; import org.junit.After; import org.junit.Before; -import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; @@ -36,7 +35,7 @@ import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.query.Query; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; import com.mongodb.client.MongoCollection; @@ -45,13 +44,15 @@ * * @author Mark Pollack * @author Thomas Darimont + * @author Mark Paluch + * @author Christoph Strobl */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("classpath:infrastructure.xml") public class MapReduceTests { - private String mapFunction = "function(){ for ( var i=0; i results = mongoTemplate.mapReduce("jmr1", mapFunction, reduceFunction, - ValueObject.class); - for (ValueObject valueObject : results) { - System.out.println(valueObject); - } - } - - @Test + @Test // DATAMONGO-260 public void testIssue260() { + createContentAndVersionData(); String map = "function () { 
emit(this.document_id, this.version); }"; String reduce = "function (key, values) { return Math.max.apply(Math, values); }"; + MapReduceResults results = mongoTemplate.mapReduce("jmr2", map, reduce, new MapReduceOptions().outputCollection("jmr2_out"), ContentAndVersion.class); - int size = 0; + assertThat(results).hasSize(3); for (ContentAndVersion cv : results) { + if ("Resume".equals(cv.getId())) { - assertEquals(6, cv.getValue().longValue()); + assertThat(cv.getValue().longValue()).isEqualTo(6); } if ("Schema".equals(cv.getId())) { - assertEquals(2, cv.getValue().longValue()); + assertThat(cv.getValue().longValue()).isEqualTo(2); } if ("mongoDB How-To".equals(cv.getId())) { - assertEquals(2, cv.getValue().longValue()); + assertThat(cv.getValue().longValue()).isEqualTo(2); } - size++; } - assertEquals(3, size); + } - @Test + @Test // DATAMONGO-260 public void testIssue260Part2() { + createNumberAndVersionData(); String map = "function () { emit(this.number, this.version); }"; String reduce = "function (key, values) { return Math.max.apply(Math, values); }"; + MapReduceResults results = mongoTemplate.mapReduce("jmr2", map, reduce, new MapReduceOptions().outputCollection("jmr2_out"), NumberAndVersion.class); - int size = 0; + for (NumberAndVersion nv : results) { if ("1".equals(nv.getId())) { - assertEquals(2, nv.getValue().longValue()); + assertThat(nv.getValue().longValue()).isEqualTo(2); } if ("2".equals(nv.getId())) { - assertEquals(6, nv.getValue().longValue()); + assertThat(nv.getValue().longValue()).isEqualTo(6); } if ("3".equals(nv.getId())) { - assertEquals(2, nv.getValue().longValue()); + assertThat(nv.getValue().longValue()).isEqualTo(2); } - size++; } - assertEquals(3, size); - } - - private void createNumberAndVersionData() { - NumberAndVersion nv1 = new NumberAndVersion(); - nv1.setNumber(1L); - nv1.setVersion(1L); - template.save(nv1, "jmr2"); - - NumberAndVersion nv2 = new NumberAndVersion(); - nv2.setNumber(1L); - nv2.setVersion(2L); - 
template.save(nv2, "jmr2"); - - NumberAndVersion nv3 = new NumberAndVersion(); - nv3.setNumber(2L); - nv3.setVersion(6L); - template.save(nv3, "jmr2"); - - NumberAndVersion nv4 = new NumberAndVersion(); - nv4.setNumber(3L); - nv4.setVersion(1L); - template.save(nv4, "jmr2"); - - NumberAndVersion nv5 = new NumberAndVersion(); - nv5.setNumber(3L); - nv5.setVersion(2L); - template.save(nv5, "jmr2"); + assertThat(results).hasSize(3); } - private void createContentAndVersionData() { - /* - { "_id" : 1, "document_id" : "mongoDB How-To", "author" : "Amos King", "content" : "...", "version" : 1 } - { "_id" : 2, "document_id" : "mongoDB How-To", "author" : "Amos King", "content" : "...", "version" : 1.1 } - { "_id" : 3, "document_id" : "Resume", "author" : "Author", "content" : "...", "version" : 6 } - { "_id" : 4, "document_id" : "Schema", "author" : "Someone Else", "content" : "...", "version" : 0.9 } - { "_id" : 5, "document_id" : "Schema", "author" : "Someone Else", "content" : "...", "version" : 1 } + @Test // DATADOC-7, DATAMONGO-2027 + public void testMapReduce() { - */ - ContentAndVersion cv1 = new ContentAndVersion(); - cv1.setDocumentId("mongoDB How-To"); - cv1.setAuthor("Amos King"); - cv1.setContent("..."); - cv1.setVersion(1L); - template.save(cv1, "jmr2"); + performMapReduce(false, false); - ContentAndVersion cv2 = new ContentAndVersion(); - cv2.setDocumentId("mongoDB How-To"); - cv2.setAuthor("Amos King"); - cv2.setContent("..."); - cv2.setVersion(2L); - template.save(cv2, "jmr2"); + List results = mongoTemplate.find(new Query(), ValueObject.class, "jmr1_out"); + assertMapReduceResults(copyToMap(results)); + } - ContentAndVersion cv3 = new ContentAndVersion(); - cv3.setDocumentId("Resume"); - cv3.setAuthor("Author"); - cv3.setContent("..."); - cv3.setVersion(6L); - template.save(cv3, "jmr2"); + @Test // DATADOC-7, DATAMONGO-2027 + public void testMapReduceInline() { - ContentAndVersion cv4 = new ContentAndVersion(); - cv4.setDocumentId("Schema"); - 
cv4.setAuthor("Someone Else"); - cv4.setContent("..."); - cv4.setVersion(1L); - template.save(cv4, "jmr2"); + performMapReduce(true, false); + assertThat(template.collectionExists("jmr1_out")).isFalse(); + } - ContentAndVersion cv5 = new ContentAndVersion(); - cv5.setDocumentId("Schema"); - cv5.setAuthor("Someone Else"); - cv5.setContent("..."); - cv5.setVersion(2L); - template.save(cv5, "jmr2"); + @Test // DATAMONGO-2027 + public void mapReduceWithOutputDatabaseShouldWorkCorrectly() { - } + createMapReduceData(); - @Test - public void testMapReduce() { - performMapReduce(false, false); - } + mongoTemplate.mapReduce("jmr1", MAP_FUNCTION, REDUCE_FUNCTION, + options().outputDatabase("jmr1-out-db").outputCollection("jmr1-out"), ValueObject.class); - @Test - public void testMapReduceInline() { - performMapReduce(true, false); + assertThat( + template.getMongoDatabaseFactory().getMongoDatabase("jmr1-out-db").listCollectionNames().into(new ArrayList<>())) + .contains("jmr1-out"); } - @Test + @Test // DATADOC-7 public void testMapReduceWithQuery() { performMapReduce(false, true); } - @Test + @Test // DATADOC-7 public void testMapReduceInlineWithScope() { + createMapReduceData(); Map scopeVariables = new HashMap(); @@ -231,29 +173,30 @@ public void testMapReduceInlineWithScope() { String mapWithExcludeFunction = "function(){ for ( var i=0; i results = mongoTemplate.mapReduce("jmr1", mapWithExcludeFunction, reduceFunction, - new MapReduceOptions().scopeVariables(scopeVariables).outputTypeInline(), ValueObject.class); - Map m = copyToMap(results); - assertEquals(3, m.size()); - assertEquals(2, m.get("b").intValue()); - assertEquals(2, m.get("c").intValue()); - assertEquals(1, m.get("d").intValue()); + MapReduceResults results = mongoTemplate.mapReduce("jmr1", mapWithExcludeFunction, REDUCE_FUNCTION, + new MapReduceOptions().scopeVariables(scopeVariables), ValueObject.class); + + assertThat(copyToMap(results)) // + .hasSize(3) // + .containsEntry("b", 2F) // + 
.containsEntry("c", 2F) // + .containsEntry("d", 1F); } - @Test + @Test // DATADOC-7 public void testMapReduceExcludeQuery() { + createMapReduceData(); Query query = new Query(where("x").ne(new String[] { "a", "b" })); - MapReduceResults results = mongoTemplate.mapReduce(query, "jmr1", mapFunction, reduceFunction, + MapReduceResults results = mongoTemplate.mapReduce(query, "jmr1", MAP_FUNCTION, REDUCE_FUNCTION, ValueObject.class); - Map m = copyToMap(results); - assertEquals(3, m.size()); - assertEquals(1, m.get("b").intValue()); - assertEquals(2, m.get("c").intValue()); - assertEquals(1, m.get("d").intValue()); - + assertThat(copyToMap(results)) // + .hasSize(3) // + .containsEntry("b", 1F) // + .containsEntry("c", 2F) // + .containsEntry("d", 1F); } @Test // DATAMONGO-938 @@ -261,24 +204,25 @@ public void mapReduceShouldUseQueryMapper() { MongoCollection c = mongoTemplate.getDb().getCollection("jmrWithGeo", Document.class); - c.insertOne(new Document("x", Arrays.asList("a", "b")).append("loc", Arrays. asList(0D, 0D))); - c.insertOne(new Document("x", Arrays.asList("b", "c")).append("loc", Arrays. asList(0D, 0D))); - c.insertOne(new Document("x", Arrays.asList("c", "d")).append("loc", Arrays. 
asList(0D, 0D))); + c.insertOne(new Document("x", Arrays.asList("a", "b")).append("loc", Arrays.asList(0D, 0D))); + c.insertOne(new Document("x", Arrays.asList("b", "c")).append("loc", Arrays.asList(0D, 0D))); + c.insertOne(new Document("x", Arrays.asList("c", "d")).append("loc", Arrays.asList(0D, 0D))); Query query = new Query(where("x").ne(new String[] { "a", "b" }).and("loc") .within(new Box(new double[] { 0, 0 }, new double[] { 1, 1 }))); - MapReduceResults results = template.mapReduce(query, "jmrWithGeo", mapFunction, reduceFunction, + MapReduceResults results = template.mapReduce(query, "jmrWithGeo", MAP_FUNCTION, REDUCE_FUNCTION, ValueObject.class); - Map m = copyToMap(results); - assertEquals(3, m.size()); - assertEquals(1, m.get("b").intValue()); - assertEquals(2, m.get("c").intValue()); - assertEquals(1, m.get("d").intValue()); + assertThat(copyToMap(results)) // + .hasSize(3) // + .containsEntry("b", 1F) // + .containsEntry("c", 2F) // + .containsEntry("d", 1F); } private void performMapReduce(boolean inline, boolean withQuery) { + createMapReduceData(); MapReduceResults results; if (inline) { @@ -286,47 +230,124 @@ private void performMapReduce(boolean inline, boolean withQuery) { results = mongoTemplate.mapReduce(new Query(), "jmr1", "classpath:map.js", "classpath:reduce.js", ValueObject.class); } else { - results = mongoTemplate.mapReduce("jmr1", mapFunction, reduceFunction, ValueObject.class); + results = mongoTemplate.mapReduce("jmr1", MAP_FUNCTION, REDUCE_FUNCTION, ValueObject.class); } } else { if (withQuery) { - results = mongoTemplate.mapReduce(new Query(), "jmr1", mapFunction, reduceFunction, + results = mongoTemplate.mapReduce(new Query(), "jmr1", MAP_FUNCTION, REDUCE_FUNCTION, options().outputCollection("jmr1_out"), ValueObject.class); } else { - results = mongoTemplate.mapReduce("jmr1", mapFunction, reduceFunction, + results = mongoTemplate.mapReduce("jmr1", MAP_FUNCTION, REDUCE_FUNCTION, new MapReduceOptions().outputCollection("jmr1_out"), 
ValueObject.class); } } - Map m = copyToMap(results); - assertMapReduceResults(m); + + assertMapReduceResults(copyToMap(results)); } private void createMapReduceData() { + MongoCollection c = mongoTemplate.getDb().getCollection("jmr1", Document.class); c.insertOne(new Document("x", Arrays.asList("a", "b"))); c.insertOne(new Document("x", Arrays.asList("b", "c"))); c.insertOne(new Document("x", Arrays.asList("c", "d"))); } - private Map copyToMap(MapReduceResults results) { - List valueObjects = new ArrayList(); + private Map copyToMap(Iterable results) { + + List valueObjects = new ArrayList<>(); for (ValueObject valueObject : results) { valueObjects.add(valueObject); } - Map m = new HashMap(); + Map m = new HashMap<>(); for (ValueObject vo : valueObjects) { m.put(vo.getId(), vo.getValue()); } return m; } - private void assertMapReduceResults(Map m) { - assertEquals(4, m.size()); - assertEquals(1, m.get("a").intValue()); - assertEquals(2, m.get("b").intValue()); - assertEquals(2, m.get("c").intValue()); - assertEquals(1, m.get("d").intValue()); + private void assertMapReduceResults(Map map) { + + assertThat(map) // + .hasSize(4) // + .containsEntry("a", 1F) // + .containsEntry("b", 2F) // + .containsEntry("c", 2F) // + .containsEntry("d", 1F); + } + + private void createNumberAndVersionData() { + + NumberAndVersion nv1 = new NumberAndVersion(); + nv1.setNumber(1L); + nv1.setVersion(1L); + template.save(nv1, "jmr2"); + + NumberAndVersion nv2 = new NumberAndVersion(); + nv2.setNumber(1L); + nv2.setVersion(2L); + template.save(nv2, "jmr2"); + + NumberAndVersion nv3 = new NumberAndVersion(); + nv3.setNumber(2L); + nv3.setVersion(6L); + template.save(nv3, "jmr2"); + + NumberAndVersion nv4 = new NumberAndVersion(); + nv4.setNumber(3L); + nv4.setVersion(1L); + template.save(nv4, "jmr2"); + + NumberAndVersion nv5 = new NumberAndVersion(); + nv5.setNumber(3L); + nv5.setVersion(2L); + template.save(nv5, "jmr2"); + } + private void createContentAndVersionData() { + /* + { 
"_id" : 1, "document_id" : "mongoDB How-To", "author" : "Amos King", "content" : "...", "version" : 1 } + { "_id" : 2, "document_id" : "mongoDB How-To", "author" : "Amos King", "content" : "...", "version" : 1.1 } + { "_id" : 3, "document_id" : "Resume", "author" : "Author", "content" : "...", "version" : 6 } + { "_id" : 4, "document_id" : "Schema", "author" : "Someone Else", "content" : "...", "version" : 0.9 } + { "_id" : 5, "document_id" : "Schema", "author" : "Someone Else", "content" : "...", "version" : 1 } + + */ + ContentAndVersion cv1 = new ContentAndVersion(); + cv1.setDocumentId("mongoDB How-To"); + cv1.setAuthor("Amos King"); + cv1.setContent("..."); + cv1.setVersion(1L); + template.save(cv1, "jmr2"); + + ContentAndVersion cv2 = new ContentAndVersion(); + cv2.setDocumentId("mongoDB How-To"); + cv2.setAuthor("Amos King"); + cv2.setContent("..."); + cv2.setVersion(2L); + template.save(cv2, "jmr2"); + + ContentAndVersion cv3 = new ContentAndVersion(); + cv3.setDocumentId("Resume"); + cv3.setAuthor("Author"); + cv3.setContent("..."); + cv3.setVersion(6L); + template.save(cv3, "jmr2"); + + ContentAndVersion cv4 = new ContentAndVersion(); + cv4.setDocumentId("Schema"); + cv4.setAuthor("Someone Else"); + cv4.setContent("..."); + cv4.setVersion(1L); + template.save(cv4, "jmr2"); + + ContentAndVersion cv5 = new ContentAndVersion(); + cv5.setDocumentId("Schema"); + cv5.setAuthor("Someone Else"); + cv5.setContent("..."); + cv5.setVersion(2L); + template.save(cv5, "jmr2"); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ReactiveMapReduceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ReactiveMapReduceTests.java new file mode 100644 index 0000000000..5bd7b284f5 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ReactiveMapReduceTests.java @@ -0,0 +1,218 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapreduce; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.test.StepVerifier; + +import java.util.Arrays; + +import org.bson.Document; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.mongodb.core.Person; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * @author Christoph Strobl + * @author Mark Paluch + * @author Mathieu Ouellet + * @currentRead Beyond the Shadows - Brent Weeks + */ +@RunWith(SpringRunner.class) +@ContextConfiguration("classpath:reactive-infrastructure.xml") +public class ReactiveMapReduceTests { + + @Autowired SimpleReactiveMongoDatabaseFactory factory; + @Autowired ReactiveMongoTemplate template; + + private String mapFunction = "function(){ 
for ( var i=0; i { + assertThat(result).containsExactlyInAnyOrder(new ValueObject("a", 1), new ValueObject("b", 2), + new ValueObject("c", 2), new ValueObject("d", 1)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-2027 + public void shouldStoreResultInCollection() { + + createMapReduceData(); + + template.mapReduce(new Query(), Person.class, "jmr1", ValueObject.class, mapFunction, reduceFunction, // + MapReduceOptions.options().outputCollection("mapreduceout")).as(StepVerifier::create) // + .expectNextCount(4) // + .verifyComplete(); + + template.find(new Query(), ValueObject.class, "mapreduceout").buffer(4).as(StepVerifier::create) // + .consumeNextWith(result -> { + assertThat(result).containsExactlyInAnyOrder(new ValueObject("a", 1), new ValueObject("b", 2), + new ValueObject("c", 2), new ValueObject("d", 1)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1890 + public void mapReduceWithInlineAndFilterQuery() { + + createMapReduceData(); + + template + .mapReduce(query(where("x").ne(new String[] { "a", "b" })), ValueObject.class, "jmr1", ValueObject.class, + mapFunction, reduceFunction, MapReduceOptions.options()) + .buffer(4).as(StepVerifier::create) // + .consumeNextWith(result -> { + assertThat(result).containsExactlyInAnyOrder(new ValueObject("b", 1), new ValueObject("c", 2), + new ValueObject("d", 1)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1890, DATAMONGO-2027 + public void mapReduceWithOutputCollection() { + + createMapReduceData(); + + template + .mapReduce(new Query(), ValueObject.class, "jmr1", ValueObject.class, mapFunction, reduceFunction, + MapReduceOptions.options().outputCollection("jmr1_out")) + .as(StepVerifier::create).expectNextCount(4).verifyComplete(); + + template.find(new Query(), ValueObject.class, "jmr1_out").buffer(4).as(StepVerifier::create) // + .consumeNextWith(result -> { + assertThat(result).containsExactlyInAnyOrder(new ValueObject("a", 1), new ValueObject("b", 2), + new ValueObject("c", 2), 
new ValueObject("d", 1)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-2027 + public void mapReduceWithOutputDatabase() { + + createMapReduceData(); + + template + .mapReduce(new Query(), ValueObject.class, "jmr1", ValueObject.class, mapFunction, reduceFunction, + MapReduceOptions.options().outputDatabase("reactive-jrm1-out-db").outputCollection("jmr1_out")) + .as(StepVerifier::create).expectNextCount(4).verifyComplete(); + + factory.getMongoDatabase("reactive-jrm1-out-db").flatMapMany(MongoDatabase::listCollectionNames).buffer(10) + .map(list -> list.contains("jmr1_out")).as(StepVerifier::create).expectNext(true).verifyComplete(); + } + + @Test // DATAMONGO-1890 + public void mapReduceWithInlineAndMappedFilterQuery() { + + createMapReduceData(); + + template + .mapReduce(query(where("values").ne(new String[] { "a", "b" })), MappedFieldsValueObject.class, "jmr1", + ValueObject.class, mapFunction, reduceFunction, MapReduceOptions.options()) + .buffer(4).as(StepVerifier::create) // + .consumeNextWith(result -> { + assertThat(result).containsExactlyInAnyOrder(new ValueObject("b", 1), new ValueObject("c", 2), + new ValueObject("d", 1)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1890 + public void mapReduceWithInlineFilterQueryAndExtractedCollection() { + + createMapReduceData(); + + template + .mapReduce(query(where("values").ne(new String[] { "a", "b" })), MappedFieldsValueObject.class, + ValueObject.class, mapFunction, reduceFunction, MapReduceOptions.options()) + .buffer(4).as(StepVerifier::create) // + .consumeNextWith(result -> { + assertThat(result).containsExactlyInAnyOrder(new ValueObject("b", 1), new ValueObject("c", 2), + new ValueObject("d", 1)); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1890 + public void throwsExceptionWhenTryingToLoadFunctionsFromDisk() { + + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> template.mapReduce(new Query(), + Person.class, "foo", ValueObject.class, 
"classpath:map.js", "classpath:reduce.js", MapReduceOptions.options())) + .withMessageContaining("classpath:map.js"); + } + + private void createMapReduceData() { + + factory.getMongoDatabase() + .flatMapMany(db -> db.getCollection("jmr1", Document.class) + .insertMany(Arrays.asList(new Document("x", Arrays.asList("a", "b")), + new Document("x", Arrays.asList("b", "c")), new Document("x", Arrays.asList("c", "d"))))) + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @org.springframework.data.mongodb.core.mapping.Document("jmr1") + static class MappedFieldsValueObject { + + @Field("x") String[] values; + + public String[] getValues() { + return this.values; + } + + public void setValues(String[] values) { + this.values = values; + } + + public String toString() { + return "ReactiveMapReduceTests.MappedFieldsValueObject(values=" + Arrays.deepToString(this.getValues()) + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ValueObject.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ValueObject.java index b491d1246a..34753e2172 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ValueObject.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapreduce/ValueObject.java @@ -1,17 +1,50 @@ +/* + * Copyright 2011-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.springframework.data.mongodb.core.mapreduce; +import java.util.Objects; + +/** + * @author Mark Pollack + * @author Oliver Gierke + * @author Christoph Strobl + */ public class ValueObject { private String id; - public String getId() { - return id; + private float value; + + public ValueObject() {} + + public ValueObject(String id, float value) { + this.id = id; + this.value = value; } - private float value; + public String getId() { + return this.id; + } public float getValue() { - return value; + return this.value; + } + + public void setId(String id) { + this.id = id; } public void setValue(float value) { @@ -23,4 +56,20 @@ public String toString() { return "ValueObject [id=" + id + ", value=" + value + "]"; } + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ValueObject that = (ValueObject) o; + return Float.compare(that.value, value) == 0 && Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTaskUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTaskUnitTests.java new file mode 100644 index 0000000000..12b57ca47d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTaskUnitTests.java @@ -0,0 +1,148 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.messaging; + +import static org.mockito.Mockito.*; + +import java.util.UUID; + +import org.bson.BsonDocument; +import org.bson.BsonString; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +import com.mongodb.client.ChangeStreamIterable; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoCursor; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.changestream.ChangeStreamDocument; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; + +/** + * Unit tests for {@link ChangeStreamTask}. 
+ * + * @author Christoph Strobl + * @author Myroslav Kosinskyi + */ +@ExtendWith(MockitoExtension.class) +@SuppressWarnings({ "unchecked", "rawtypes" }) +class ChangeStreamTaskUnitTests { + + @Mock MongoTemplate template; + @Mock MongoDatabase mongoDatabase; + @Mock MongoCollection mongoCollection; + @Mock ChangeStreamIterable changeStreamIterable; + + MongoConverter converter; + + @BeforeEach + void setUp() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + + when(template.getConverter()).thenReturn(converter); + when(template.getDb()).thenReturn(mongoDatabase); + + when(mongoDatabase.getCollection(any())).thenReturn(mongoCollection); + when(mongoCollection.watch(eq(Document.class))).thenReturn(changeStreamIterable); + when(changeStreamIterable.fullDocument(any())).thenReturn(changeStreamIterable); + } + + @Test // DATAMONGO-2258 + void shouldNotBreakLovelaceBehavior() { + + BsonDocument resumeToken = new BsonDocument("token", new BsonString(UUID.randomUUID().toString())); + when(changeStreamIterable.resumeAfter(any())).thenReturn(changeStreamIterable); + + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("start-wars") // + .resumeToken(resumeToken) // + .publishTo(message -> {}) // + .build(); + + initTask(request, Document.class); + + verify(changeStreamIterable).resumeAfter(resumeToken); + } + + @Test // DATAMONGO-2258 + void shouldApplyResumeAfterToChangeStream() { + + when(changeStreamIterable.resumeAfter(any())).thenReturn(changeStreamIterable); + + BsonDocument resumeToken = new BsonDocument("token", new BsonString(UUID.randomUUID().toString())); + + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("start-wars") // + .resumeAfter(resumeToken) // + .publishTo(message -> {}) // + .build(); + + initTask(request, Document.class); + + verify(changeStreamIterable).resumeAfter(resumeToken); + } + + @Test 
// DATAMONGO-2258 + void shouldApplyStartAfterToChangeStream() { + + when(changeStreamIterable.startAfter(any())).thenReturn(changeStreamIterable); + + BsonDocument resumeToken = new BsonDocument("token", new BsonString(UUID.randomUUID().toString())); + + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("start-wars") // + .startAfter(resumeToken) // + .publishTo(message -> {}) // + .build(); + + initTask(request, Document.class); + + verify(changeStreamIterable).startAfter(resumeToken); + } + + @Test // GH-4495 + void shouldApplyFullDocumentBeforeChangeToChangeStream() { + + when(changeStreamIterable.fullDocumentBeforeChange(any())).thenReturn(changeStreamIterable); + + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("start-wars") // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.REQUIRED) // + .publishTo(message -> {}) // + .build(); + + initTask(request, Document.class); + + verify(changeStreamIterable).fullDocumentBeforeChange(FullDocumentBeforeChange.REQUIRED); + } + + private MongoCursor> initTask(ChangeStreamRequest request, Class targetType) { + + ChangeStreamTask task = new ChangeStreamTask(template, request, targetType, er -> {}); + return task.initCursor(template, request.getRequestOptions(), targetType); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTests.java index 9408dbc219..53d093897e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/ChangeStreamTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,9 +21,10 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import lombok.Data; - +import java.time.Duration; +import java.time.Instant; import java.util.List; +import java.util.Objects; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; @@ -31,27 +32,33 @@ import org.bson.BsonDocument; import org.bson.Document; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.rules.TestRule; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junitpioneer.jupiter.RepeatFailedTest; import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.ChangeStreamOptions; -import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.CollectionOptions; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.messaging.ChangeStreamRequest.ChangeStreamRequestOptions; +import org.springframework.data.mongodb.core.messaging.ChangeStreamTask.ChangeStreamEventMessage; import 
org.springframework.data.mongodb.core.messaging.Message.MessageProperties; -import org.springframework.data.mongodb.core.messaging.SubscriptionUtils.*; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Update; -import org.springframework.data.mongodb.test.util.ReplicaSet; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.MongoVersion; +import org.springframework.data.mongodb.test.util.Template; -import com.mongodb.MongoClient; import com.mongodb.client.model.changestream.ChangeStreamDocument; import com.mongodb.client.model.changestream.FullDocument; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; /** * Integration test for subscribing to a {@link com.mongodb.operation.ChangeStreamBatchCursor} inside the @@ -59,28 +66,31 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Myroslav Kosinskyi */ -public class ChangeStreamTests { +@ExtendWith({ MongoTemplateExtension.class }) +@EnableIfReplicaSetAvailable +class ChangeStreamTests { + + private static ThreadPoolExecutor executor; - public static @ClassRule TestRule replSet = ReplicaSet.required(); + @Template(initialEntitySet = User.class, replicaSet = true) // + private static MongoTestTemplate template; - static ThreadPoolExecutor executor; - MongoTemplate template; - MessageListenerContainer container; + private MessageListenerContainer container; - User jellyBelly; - User huffyFluffy; - User sugarSplashy; + private User jellyBelly; + private User huffyFluffy; + private User sugarSplashy; - @BeforeClass - public static void beforeClass() { + @BeforeAll + static void beforeClass() { executor = new ThreadPoolExecutor(2, 
2, 1, TimeUnit.SECONDS, new LinkedBlockingDeque<>()); } - @Before - public void setUp() { + @BeforeEach + void setUp() { - template = new MongoTemplate(new MongoClient(), "change-stream-tests"); template.dropCollection(User.class); container = new DefaultMessageListenerContainer(template, executor); @@ -102,21 +112,22 @@ public void setUp() { sugarSplashy.age = 5; } - @After - public void tearDown() { + @AfterEach + void tearDown() { container.stop(); } - @AfterClass - public static void afterClass() { + @AfterAll + static void afterClass() { executor.shutdown(); } @Test // DATAMONGO-1803 - public void readsPlainDocumentMessageCorrectly() throws InterruptedException { + void readsPlainDocumentMessageCorrectly() throws InterruptedException { CollectingMessageListener, Document> messageListener = new CollectingMessageListener<>(); - ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, () -> "user"); + ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, + new ChangeStreamRequestOptions(null, "user", Duration.ofMillis(10), ChangeStreamOptions.builder().build())); Subscription subscription = container.register(request, Document.class); awaitSubscription(subscription); @@ -135,12 +146,13 @@ public void readsPlainDocumentMessageCorrectly() throws InterruptedException { } @Test // DATAMONGO-1803 - public void useSimpleAggregationToFilterMessages() throws InterruptedException { + void useSimpleAggregationToFilterMessages() throws InterruptedException { CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); ChangeStreamRequest request = ChangeStreamRequest.builder(messageListener) // .collection("user") // .filter(newAggregation(match(where("age").is(7)))) // + .maxAwaitTime(Duration.ofMillis(10)) // .build(); Subscription subscription = container.register(request, User.class); @@ -159,13 +171,15 @@ public void useSimpleAggregationToFilterMessages() throws InterruptedException { } @Test // DATAMONGO-1803 
- public void useAggregationToFilterMessages() throws InterruptedException { + @MongoVersion(asOf = "4.0") + void useAggregationToFilterMessages() throws InterruptedException { CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); ChangeStreamRequest request = ChangeStreamRequest.builder(messageListener) // .collection("user") // .filter(newAggregation(match( new Criteria().orOperator(where("user_name").is("huffyFluffy"), where("user_name").is("jellyBelly"))))) // + .maxAwaitTime(Duration.ofMillis(10)) // .build(); Subscription subscription = container.register(request, User.class); @@ -183,8 +197,8 @@ public void useAggregationToFilterMessages() throws InterruptedException { assertThat(messageBodies).hasSize(2).doesNotContain(sugarSplashy); } - @Test // DATAMONGO-1803 - public void mapsTypedAggregationToFilterMessages() throws InterruptedException { + @RepeatFailedTest(3) // DATAMONGO-1803 + void mapsTypedAggregationToFilterMessages() throws InterruptedException { CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); ChangeStreamRequest request = ChangeStreamRequest.builder() // @@ -192,6 +206,7 @@ public void mapsTypedAggregationToFilterMessages() throws InterruptedException { .publishTo(messageListener) // .filter(newAggregation(User.class, match(new Criteria().orOperator(where("userName").is("huffyFluffy"), where("userName").is("jellyBelly"))))) // + .maxAwaitTime(Duration.ofMillis(10)) // .build(); Subscription subscription = container.register(request, User.class); @@ -201,7 +216,7 @@ public void mapsTypedAggregationToFilterMessages() throws InterruptedException { template.save(sugarSplashy); template.save(huffyFluffy); - awaitMessages(messageListener); + awaitMessages(messageListener, 2); List messageBodies = messageListener.getMessages().stream().map(Message::getBody) .collect(Collectors.toList()); @@ -210,13 +225,14 @@ public void mapsTypedAggregationToFilterMessages() throws 
InterruptedException { } @Test // DATAMONGO-1803 - public void mapsReservedWordsCorrectly() throws InterruptedException { + void mapsReservedWordsCorrectly() throws InterruptedException { CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); ChangeStreamRequest request = ChangeStreamRequest.builder() // .collection("user") // .publishTo(messageListener) // .filter(newAggregation(User.class, match(where("operationType").is("replace")))) // + .maxAwaitTime(Duration.ofMillis(10)) // .build(); Subscription subscription = container.register(request, User.class); @@ -232,7 +248,7 @@ public void mapsReservedWordsCorrectly() throws InterruptedException { template.save(replacement); - awaitMessages(messageListener); + awaitMessages(messageListener, 1); List messageBodies = messageListener.getMessages().stream().map(Message::getBody) .collect(Collectors.toList()); @@ -241,13 +257,14 @@ public void mapsReservedWordsCorrectly() throws InterruptedException { } @Test // DATAMONGO-1803 - public void plainAggregationPipelineToFilterMessages() throws InterruptedException { + void plainAggregationPipelineToFilterMessages() throws InterruptedException { CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); ChangeStreamRequest request = ChangeStreamRequest.builder() // .collection("user") // .publishTo(messageListener) // .filter(new Document("$match", new Document("fullDocument.user_name", "sugarSplashy"))) // + .maxAwaitTime(Duration.ofMillis(10)) // .build(); Subscription subscription = container.register(request, User.class); @@ -257,7 +274,7 @@ public void plainAggregationPipelineToFilterMessages() throws InterruptedExcepti template.save(sugarSplashy); template.save(huffyFluffy); - awaitMessages(messageListener); + awaitMessages(messageListener, 1); List messageBodies = messageListener.getMessages().stream().map(Message::getBody) .collect(Collectors.toList()); @@ -266,10 +283,12 @@ public void 
plainAggregationPipelineToFilterMessages() throws InterruptedExcepti } @Test // DATAMONGO-1803 - public void resumesCorrectly() throws InterruptedException { + void resumesCorrectly() throws InterruptedException { CollectingMessageListener, User> messageListener1 = new CollectingMessageListener<>(); - Subscription subscription1 = container.register(new ChangeStreamRequest<>(messageListener1, () -> "user"), + Subscription subscription1 = container.register( + new ChangeStreamRequest<>(messageListener1, + new ChangeStreamRequestOptions(null, "user", Duration.ofMillis(10), ChangeStreamOptions.builder().build())), User.class); awaitSubscription(subscription1); @@ -284,12 +303,12 @@ public void resumesCorrectly() throws InterruptedException { CollectingMessageListener, User> messageListener2 = new CollectingMessageListener<>(); ChangeStreamRequest subSequentRequest = ChangeStreamRequest.builder().collection("user") - .publishTo(messageListener2).resumeToken(resumeToken).build(); + .publishTo(messageListener2).resumeToken(resumeToken).maxAwaitTime(Duration.ofMillis(10)).build(); Subscription subscription2 = container.register(subSequentRequest, User.class); awaitSubscription(subscription2); - awaitMessages(messageListener2); + awaitMessages(messageListener2, 2); List messageBodies = messageListener2.getMessages().stream().map(Message::getBody) .collect(Collectors.toList()); @@ -298,10 +317,11 @@ public void resumesCorrectly() throws InterruptedException { } @Test // DATAMONGO-1803 - public void readsAndConvertsMessageBodyCorrectly() throws InterruptedException { + void readsAndConvertsMessageBodyCorrectly() throws InterruptedException { CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); - ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, () -> "user"); + ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, + new ChangeStreamRequestOptions(null, "user", Duration.ofMillis(10), 
ChangeStreamOptions.builder().build())); Subscription subscription = container.register(request, User.class); awaitSubscription(subscription); @@ -319,10 +339,11 @@ public void readsAndConvertsMessageBodyCorrectly() throws InterruptedException { } @Test // DATAMONGO-1803 - public void readsAndConvertsUpdateMessageBodyCorrectly() throws InterruptedException { + void readsAndConvertsUpdateMessageBodyCorrectly() throws InterruptedException { CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); - ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, () -> "user"); + ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, + new ChangeStreamRequestOptions(null, "user", Duration.ofMillis(10), ChangeStreamOptions.builder().build())); Subscription subscription = container.register(request, User.class); awaitSubscription(subscription); @@ -338,10 +359,11 @@ public void readsAndConvertsUpdateMessageBodyCorrectly() throws InterruptedExcep } @Test // DATAMONGO-1803 - public void readsOnlyDiffForUpdateWhenNotMappedToDomainType() throws InterruptedException { + void readsOnlyDiffForUpdateWhenNotMappedToDomainType() throws InterruptedException { CollectingMessageListener, Document> messageListener = new CollectingMessageListener<>(); - ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, () -> "user"); + ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, + new ChangeStreamRequestOptions(null, "user", Duration.ofMillis(10), ChangeStreamOptions.builder().build())); Subscription subscription = container.register(request, Document.class); awaitSubscription(subscription); @@ -358,11 +380,14 @@ public void readsOnlyDiffForUpdateWhenNotMappedToDomainType() throws Interrupted } @Test // DATAMONGO-1803 - public void readsOnlyDiffForUpdateWhenOptionsDeclareDefaultExplicitly() throws InterruptedException { + void readsOnlyDiffForUpdateWhenOptionsDeclareDefaultExplicitly() throws 
InterruptedException { CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); - ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, new ChangeStreamRequestOptions( - "user", ChangeStreamOptions.builder().fullDocumentLookup(FullDocument.DEFAULT).build())); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentLookup(FullDocument.DEFAULT) // + .maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); Subscription subscription = container.register(request, User.class); awaitSubscription(subscription); @@ -378,11 +403,15 @@ public void readsOnlyDiffForUpdateWhenOptionsDeclareDefaultExplicitly() throws I } @Test // DATAMONGO-1803 - public void readsFullDocumentForUpdateWhenNotMappedToDomainTypeButLookupSpecified() throws InterruptedException { + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + void readsFullDocumentForUpdateWhenNotMappedToDomainTypeButLookupSpecified() throws InterruptedException { CollectingMessageListener, Document> messageListener = new CollectingMessageListener<>(); - ChangeStreamRequest request = new ChangeStreamRequest<>(messageListener, - new ChangeStreamRequestOptions("user", ChangeStreamOptions.builder().returnFullDocumentOnUpdate().build())); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentLookup(FullDocument.UPDATE_LOOKUP) // + .maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); Subscription subscription = container.register(request, Document.class); awaitSubscription(subscription); @@ -399,11 +428,414 @@ public void readsFullDocumentForUpdateWhenNotMappedToDomainTypeButLookupSpecifie .append("user_name", "jellyBelly").append("age", 8).append("_class", User.class.getName())); } - @Data + @Test // DATAMONGO-2012, DATAMONGO-2113 + @MongoVersion(asOf = "4.0") + void resumeAtTimestampCorrectly() throws InterruptedException { + + 
CollectingMessageListener, User> messageListener1 = new CollectingMessageListener<>(); + Subscription subscription1 = container.register( + new ChangeStreamRequest<>(messageListener1, + new ChangeStreamRequestOptions(null, "user", Duration.ofMillis(10), ChangeStreamOptions.builder().build())), + User.class); + + awaitSubscription(subscription1); + + template.save(jellyBelly); + + Thread.sleep(1000); // cluster timestamp is in seconds, so we need to wait at least one. + + template.save(sugarSplashy); + + awaitMessages(messageListener1, 12); + + Instant resumeAt = ((ChangeStreamEventMessage) messageListener1.getLastMessage()).getTimestamp(); + + template.save(huffyFluffy); + + awaitMessages(messageListener1, 3); + + CollectingMessageListener, User> messageListener2 = new CollectingMessageListener<>(); + ChangeStreamRequest subSequentRequest = ChangeStreamRequest.builder() // + .collection("user") // + .resumeAt(resumeAt) // + .publishTo(messageListener2) // + .maxAwaitTime(Duration.ofMillis(10)) // + .build(); + + Subscription subscription2 = container.register(subSequentRequest, User.class); + awaitSubscription(subscription2); + + awaitMessages(messageListener2, 2); + + List messageBodies = messageListener2.getMessages().stream().map(Message::getBody) + .collect(Collectors.toList()); + + assertThat(messageBodies).hasSize(2).doesNotContain(jellyBelly); + } + + @Test // DATAMONGO-1996 + void filterOnNestedElementWorksCorrectly() throws InterruptedException { + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder(messageListener) // + .collection("user") // + .filter(newAggregation(User.class, match(where("address.street").is("flower street")))) // + .maxAwaitTime(Duration.ofMillis(10)) // + .build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + jellyBelly.address = new Address(); + jellyBelly.address.street = 
"candy ave"; + + huffyFluffy.address = new Address(); + huffyFluffy.address.street = "flower street"; + + template.save(jellyBelly); + template.save(sugarSplashy); + template.save(huffyFluffy); + + awaitMessages(messageListener); + + List messageBodies = messageListener.getMessages().stream().map(Message::getBody) + .collect(Collectors.toList()); + + assertThat(messageBodies).hasSize(1).contains(huffyFluffy); + } + + @Test // DATAMONGO-1996 + void filterOnUpdateDescriptionElement() throws InterruptedException { + + template.save(jellyBelly); + template.save(sugarSplashy); + template.save(huffyFluffy); + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder(messageListener) // + .collection("user") // + .filter(newAggregation(User.class, match(where("updateDescription.updatedFields.address").exists(true)))) // + .maxAwaitTime(Duration.ofMillis(10)) // + .fullDocumentLookup(FullDocument.UPDATE_LOOKUP).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))) + .apply(Update.update("address", new Address("candy ave"))).first(); + + template.update(User.class).matching(query(where("id").is(sugarSplashy.id))).apply(new Update().inc("age", 1)) + .first(); + + template.update(User.class).matching(query(where("id").is(huffyFluffy.id))) + .apply(Update.update("address", new Address("flower street"))).first(); + + awaitMessages(messageListener); + + List messageBodies = messageListener.getMessages().stream().map(Message::getBody) + .collect(Collectors.toList()); + + assertThat(messageBodies).hasSize(2); + } + + @Test // GH-4187 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + void readsFullDocumentBeforeChangeWhenOptionDeclaredWhenAvailable() throws InterruptedException { + + 
createUserCollectionWithChangeStreamPreAndPostImagesEnabled(); + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentLookup(FullDocument.WHEN_AVAILABLE) // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.WHEN_AVAILABLE) // + .maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getFirstMessage().getBody()).isEqualTo(jellyBelly); + + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isEqualTo(jellyBelly); + assertThat(messageListener.getLastMessage().getBody()).isEqualTo(jellyBelly.withAge(8)); + } + + @Test // GH-4187 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + void readsFullDocumentBeforeChangeWhenOptionDeclaredRequired() throws InterruptedException { + + createUserCollectionWithChangeStreamPreAndPostImagesEnabled(); + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentLookup(FullDocument.WHEN_AVAILABLE) // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.REQUIRED) // + .maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + 
awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getFirstMessage().getBody()).isEqualTo(jellyBelly); + + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isEqualTo(jellyBelly); + assertThat(messageListener.getLastMessage().getBody()).isEqualTo(jellyBelly.withAge(8)); + } + + @Test // GH-4187 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + void readsFullDocumentBeforeChangeWhenOptionIsNotDeclared() throws InterruptedException { + + createUserCollectionWithChangeStreamPreAndPostImagesEnabled(); + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isNull(); + } + + @Test // GH-4187 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + void readsFullDocumentBeforeChangeWhenOptionDeclaredDefault() throws InterruptedException { + + createUserCollectionWithChangeStreamPreAndPostImagesEnabled(); + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.DEFAULT).maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + 
awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isNull(); + } + + @Test // GH-4187 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + void readsFullDocumentBeforeChangeWhenOptionDeclaredOff() throws InterruptedException { + + createUserCollectionWithChangeStreamPreAndPostImagesEnabled(); + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.OFF).maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isNull(); + } + + @Test // GH-4187 + @EnableIfMongoServerVersion(isGreaterThanEqual = "6.0") + void readsFullDocumentBeforeChangeWhenOptionDeclaredWhenAvailableAndChangeStreamPreAndPostImagesDisabled() + throws InterruptedException { + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.WHEN_AVAILABLE).maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = 
container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isNull(); + } + + @Test // GH-4187 + @Disabled("Flakey test failing occasionally due to timing issues") + void readsFullDocumentBeforeChangeWhenOptionDeclaredRequiredAndMongoVersionIsLessThan6() throws InterruptedException { + + CollectingMessageListener, User> messageListener = new CollectingMessageListener<>(); + ChangeStreamRequest request = ChangeStreamRequest.builder() // + .collection("user") // + .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.REQUIRED).maxAwaitTime(Duration.ofMillis(10)) // + .publishTo(messageListener).build(); + + Subscription subscription = container.register(request, User.class); + awaitSubscription(subscription); + + template.save(jellyBelly); + + template.update(User.class).matching(query(where("id").is(jellyBelly.id))).apply(Update.update("age", 8)).first(); + + awaitMessages(messageListener, 2); + + assertThat(messageListener.getFirstMessage().getBodyBeforeChange()).isNull(); + assertThat(messageListener.getLastMessage().getBodyBeforeChange()).isNull(); + } + + private void createUserCollectionWithChangeStreamPreAndPostImagesEnabled() { + template.createCollection(User.class, CollectionOptions.emitChangedRevisions()); + } + static class User { @Id String id; @Field("user_name") String userName; int age; + + Address address; + + User withAge(int age) { + + User user = new User(); + user.id = id; + user.userName = userName; + user.age = age; + + return user; + } + + public String getId() { + return this.id; + } + + public String getUserName() { + return this.userName; + } + + public int getAge() { + return this.age; + 
} + + public Address getAddress() { + return this.address; + } + + public void setId(String id) { + this.id = id; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public void setAge(int age) { + this.age = age; + } + + public void setAddress(Address address) { + this.address = address; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + User user = (User) o; + return age == user.age && Objects.equals(id, user.id) && Objects.equals(userName, user.userName) + && Objects.equals(address, user.address); + } + + @Override + public int hashCode() { + return Objects.hash(id, userName, age, address); + } + + public String toString() { + return "ChangeStreamTests.User(id=" + this.getId() + ", userName=" + this.getUserName() + ", age=" + this.getAge() + + ", address=" + this.getAddress() + ")"; + } + } + + static class Address { + + @Field("s") String street; + + public Address(String street) { + this.street = street; + } + + public Address() {} + + public String getStreet() { + return this.street; + } + + public void setStreet(String street) { + this.street = street; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Address address = (Address) o; + return Objects.equals(street, address.street); + } + + @Override + public int hashCode() { + return Objects.hash(street); + } + + public String toString() { + return "ChangeStreamTests.Address(street=" + this.getStreet() + ")"; + } } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/CursorReadingTaskUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/CursorReadingTaskUnitTests.java index a36b0cf6a7..5e9acbdcda 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/CursorReadingTaskUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/CursorReadingTaskUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,11 +24,16 @@ import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.data.mongodb.core.MongoExceptionTranslator; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; import org.springframework.data.mongodb.core.messaging.Task.State; @@ -44,7 +49,8 @@ * * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class CursorReadingTaskUnitTests { @Mock MongoDatabase db; @@ -57,13 +63,14 @@ public class CursorReadingTaskUnitTests { ValueCapturingTaskStub task; - @Before + @BeforeEach public void 
setUp() { when(request.getRequestOptions()).thenReturn(options); when(request.getMessageListener()).thenReturn(listener); when(options.getCollectionName()).thenReturn("collection-name"); when(template.getDb()).thenReturn(db); + when(template.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); when(db.getName()).thenReturn("mock-db"); task = new ValueCapturingTaskStub(template, request, Object.class, cursor, errorHandler); @@ -93,6 +100,29 @@ public void stopTaskWhileEmittingMessages() throws Throwable { verify(listener, times(task.getValues().size())).onMessage(any()); } + @Test // DATAMONGO-2173, DATAMONGO-2366 + public void writesErrorOnStartToErrorHandler() { + + ArgumentCaptor errorCaptor = ArgumentCaptor.forClass(Throwable.class); + Task task = new ErrorOnInitCursorTaskStub(template, request, Object.class, errorHandler); + + task.run(); + verify(errorHandler).handleError(errorCaptor.capture()); + assertThat(errorCaptor.getValue()).hasMessageStartingWith("let's get it started (ha)"); + } + + @Test // DATAMONGO-2366 + public void errorOnNextNotifiesErrorHandlerOnlyOnce() { + + ArgumentCaptor errorCaptor = ArgumentCaptor.forClass(Throwable.class); + when(cursor.getServerCursor()).thenReturn(new ServerCursor(10, new ServerAddress("mock"))); + when(cursor.tryNext()).thenThrow(new IllegalStateException()); + + task.run(); + verify(errorHandler).handleError(errorCaptor.capture()); + assertThat(errorCaptor.getValue()).isInstanceOf(IllegalStateException.class); + } + private static class MultithreadedStopRunningWhileEmittingMessages extends MultithreadedTestCase { CursorReadingTask task; @@ -222,4 +252,17 @@ public List getValues() { return values; } } + + static class ErrorOnInitCursorTaskStub extends CursorReadingTask { + + public ErrorOnInitCursorTaskStub(MongoTemplate template, SubscriptionRequest request, Class targetType, + ErrorHandler errorHandler) { + super(template, request, targetType, errorHandler); + } + + @Override + protected 
MongoCursor initCursor(MongoTemplate template, RequestOptions options, Class targetType) { + throw new RuntimeException("let's get it started (ha), let's get it started in here..."); + } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java index 4ed2423426..9373845a89 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,31 +16,37 @@ package org.springframework.data.mongodb.core.messaging; import static org.assertj.core.api.Assertions.*; -import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; import static org.springframework.data.mongodb.core.messaging.SubscriptionUtils.*; -import lombok.Data; - import java.time.Duration; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import org.bson.Document; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TestRule; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.core.task.SimpleAsyncTaskExecutor; import org.springframework.dao.DataAccessException; import org.springframework.data.annotation.Id; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.ChangeStreamOptions; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; -import org.springframework.data.mongodb.test.util.ReplicaSet; -import org.springframework.test.annotation.IfProfileValue; +import org.springframework.data.mongodb.core.messaging.ChangeStreamRequest.ChangeStreamRequestOptions; +import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import 
org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoServerCondition; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.data.mongodb.test.util.Template; import org.springframework.util.ErrorHandler; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; import com.mongodb.client.model.CreateCollectionOptions; import com.mongodb.client.model.changestream.ChangeStreamDocument; @@ -50,66 +56,105 @@ * * @author Christoph Strobl */ +@ExtendWith({ MongoTemplateExtension.class, MongoServerCondition.class }) public class DefaultMessageListenerContainerTests { - public static final String DATABASE_NAME = "change-stream-events"; - public static final String COLLECTION_NAME = "collection-1"; - MongoDbFactory dbFactory; + static final String DATABASE_NAME = "change-stream-events"; + static final String COLLECTION_NAME = "collection-1"; + static final String COLLECTION_2_NAME = "collection-2"; + static final String COLLECTION_3_NAME = "collection-3"; - MongoCollection collection; - private CollectingMessageListener messageListener; - private MongoTemplate template; + static final Duration TIMEOUT = Duration.ofSeconds(2); + + @Client static MongoClient client; + + @Template(database = DATABASE_NAME, initialEntitySet = Person.class) // + static MongoTemplate template; - public @Rule TestRule replSet = ReplicaSet.none(); + MongoDatabaseFactory dbFactory = template.getMongoDatabaseFactory(); - @Before - public void setUp() { + MongoCollection collection = template.getCollection(COLLECTION_NAME); + MongoCollection collection2 = template.getCollection(COLLECTION_2_NAME); - dbFactory = new SimpleMongoDbFactory(new MongoClient(), DATABASE_NAME); - template = new MongoTemplate(dbFactory); + private CollectingMessageListener 
messageListener; + + @BeforeEach + void beforeEach() throws InterruptedException { - template.dropCollection(COLLECTION_NAME); - collection = template.getCollection(COLLECTION_NAME); + MongoTestUtils.dropCollectionNow(DATABASE_NAME, COLLECTION_NAME, client); + MongoTestUtils.dropCollectionNow(DATABASE_NAME, COLLECTION_2_NAME, client); + MongoTestUtils.dropCollectionNow(DATABASE_NAME, COLLECTION_3_NAME, client); + + Thread.sleep(100); messageListener = new CollectingMessageListener<>(); } @Test // DATAMONGO-1803 - @IfProfileValue(name = "replSet", value = "true") + @EnableIfReplicaSetAvailable + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") public void shouldCollectMappedChangeStreamMessagesCorrectly() throws InterruptedException { MessageListenerContainer container = new DefaultMessageListenerContainer(template); - Subscription subscription = container.register(new ChangeStreamRequest(messageListener, () -> COLLECTION_NAME), - Person.class); + Subscription subscription = container.register(new ChangeStreamRequest(messageListener, options()), Person.class); container.start(); - awaitSubscription(subscription, Duration.ofMillis(500)); + awaitSubscription(subscription, TIMEOUT); collection.insertOne(new Document("_id", "id-1").append("firstname", "foo")); collection.insertOne(new Document("_id", "id-2").append("firstname", "bar")); - awaitMessages(messageListener, 2, Duration.ofMillis(500)); + awaitMessages(messageListener, 2, TIMEOUT); assertThat(messageListener.getMessages().stream().map(Message::getBody).collect(Collectors.toList())) .containsExactly(new Person("id-1", "foo"), new Person("id-2", "bar")); + } + + @Test // DATAMONGO-2322 + @EnableIfReplicaSetAvailable + public void shouldNotifyErrorHandlerOnErrorInListener() throws InterruptedException { + + ErrorHandler errorHandler = mock(ErrorHandler.class); + MessageListenerContainer container = new DefaultMessageListenerContainer(template); + AtomicBoolean thrownException = new AtomicBoolean(); + 
Subscription subscription = container.register(new ChangeStreamRequest(message -> { + + try { + if (thrownException.compareAndSet(false, true)) { + throw new IllegalStateException("Boom"); + } + } finally { + messageListener.onMessage(message); + } + + }, options()), Person.class, errorHandler); + container.start(); + + awaitSubscription(subscription, TIMEOUT); + + collection.insertOne(new Document("_id", "id-1").append("firstname", "foo")); + collection.insertOne(new Document("_id", "id-2").append("firstname", "bar")); + awaitMessages(messageListener, 2, TIMEOUT); + + verify(errorHandler, atLeast(1)).handleError(any(IllegalStateException.class)); + assertThat(messageListener.getTotalNumberMessagesReceived()).isEqualTo(2); } @Test // DATAMONGO-1803 - @IfProfileValue(name = "replSet", value = "true") - public void shouldNoLongerReceiveMessagesWhenConainerStopped() throws InterruptedException { + @EnableIfReplicaSetAvailable + public void shouldNoLongerReceiveMessagesWhenContainerStopped() throws InterruptedException { MessageListenerContainer container = new DefaultMessageListenerContainer(template); - Subscription subscription = container.register(new ChangeStreamRequest(messageListener, () -> COLLECTION_NAME), - Document.class); + Subscription subscription = container.register(new ChangeStreamRequest(messageListener, options()), Document.class); container.start(); - awaitSubscription(subscription, Duration.ofMillis(500)); + awaitSubscription(subscription, TIMEOUT); collection.insertOne(new Document("_id", "id-1").append("value", "foo")); collection.insertOne(new Document("_id", "id-2").append("value", "bar")); - awaitMessages(messageListener, 2, Duration.ofMillis(500)); + awaitMessages(messageListener, 2, TIMEOUT); container.stop(); @@ -121,7 +166,7 @@ public void shouldNoLongerReceiveMessagesWhenConainerStopped() throws Interrupte } @Test // DATAMONGO-1803 - @IfProfileValue(name = "replSet", value = "true") + @EnableIfReplicaSetAvailable public void 
shouldReceiveMessagesWhenAddingRequestToAlreadyStartedContainer() throws InterruptedException { MessageListenerContainer container = new DefaultMessageListenerContainer(template); @@ -130,15 +175,14 @@ public void shouldReceiveMessagesWhenAddingRequestToAlreadyStartedContainer() th Document unexpected = new Document("_id", "id-1").append("value", "foo"); collection.insertOne(unexpected); - Subscription subscription = container.register(new ChangeStreamRequest(messageListener, () -> COLLECTION_NAME), - Document.class); + Subscription subscription = container.register(new ChangeStreamRequest(messageListener, options()), Document.class); - awaitSubscription(subscription, Duration.ofMillis(500)); + awaitSubscription(subscription, TIMEOUT); Document expected = new Document("_id", "id-2").append("value", "bar"); collection.insertOne(expected); - awaitMessages(messageListener, 1, Duration.ofMillis(500)); + awaitMessages(messageListener, 1, TIMEOUT); container.stop(); assertThat(messageListener.getMessages().stream().map(Message::getBody).collect(Collectors.toList())) @@ -146,12 +190,11 @@ public void shouldReceiveMessagesWhenAddingRequestToAlreadyStartedContainer() th } @Test // DATAMONGO-1803 - @IfProfileValue(name = "replSet", value = "true") + @EnableIfReplicaSetAvailable public void shouldStartReceivingMessagesWhenContainerStarts() throws InterruptedException { MessageListenerContainer container = new DefaultMessageListenerContainer(template); - Subscription subscription = container.register(new ChangeStreamRequest(messageListener, () -> COLLECTION_NAME), - Document.class); + Subscription subscription = container.register(new ChangeStreamRequest(messageListener, options()), Document.class); collection.insertOne(new Document("_id", "id-1").append("value", "foo")); @@ -175,7 +218,7 @@ public void shouldStartReceivingMessagesWhenContainerStarts() throws Interrupted @Test // DATAMONGO-1803 public void tailableCursor() throws InterruptedException { - 
dbFactory.getDb().createCollection(COLLECTION_NAME, + dbFactory.getMongoDatabase().createCollection(COLLECTION_NAME, new CreateCollectionOptions().capped(true).maxDocuments(10000).sizeInBytes(10000)); collection.insertOne(new Document("_id", "id-1").append("value", "foo")); @@ -183,13 +226,12 @@ public void tailableCursor() throws InterruptedException { MessageListenerContainer container = new DefaultMessageListenerContainer(template); container.start(); - awaitSubscription( - container.register(new TailableCursorRequest(messageListener, () -> COLLECTION_NAME), Document.class), - Duration.ofMillis(500)); + awaitSubscription(container.register(new TailableCursorRequest(messageListener, options()), Document.class), + TIMEOUT); collection.insertOne(new Document("_id", "id-2").append("value", "bar")); - awaitMessages(messageListener, 2, Duration.ofSeconds(2)); + awaitMessages(messageListener, 2, TIMEOUT); container.stop(); assertThat(messageListener.getTotalNumberMessagesReceived()).isEqualTo(2); @@ -198,20 +240,19 @@ public void tailableCursor() throws InterruptedException { @Test // DATAMONGO-1803 public void tailableCursorOnEmptyCollection() throws InterruptedException { - dbFactory.getDb().createCollection(COLLECTION_NAME, + dbFactory.getMongoDatabase().createCollection(COLLECTION_NAME, new CreateCollectionOptions().capped(true).maxDocuments(10000).sizeInBytes(10000)); MessageListenerContainer container = new DefaultMessageListenerContainer(template); container.start(); - awaitSubscription( - container.register(new TailableCursorRequest(messageListener, () -> COLLECTION_NAME), Document.class), - Duration.ofMillis(500)); + awaitSubscription(container.register(new TailableCursorRequest(messageListener, options()), Document.class), + TIMEOUT); collection.insertOne(new Document("_id", "id-1").append("value", "foo")); collection.insertOne(new Document("_id", "id-2").append("value", "bar")); - awaitMessages(messageListener, 2, Duration.ofSeconds(2)); + 
awaitMessages(messageListener, 2, TIMEOUT); container.stop(); assertThat(messageListener.getTotalNumberMessagesReceived()).isEqualTo(2); @@ -220,7 +261,7 @@ public void tailableCursorOnEmptyCollection() throws InterruptedException { @Test // DATAMONGO-1803 public void abortsSubscriptionOnError() throws InterruptedException { - dbFactory.getDb().createCollection(COLLECTION_NAME, + dbFactory.getMongoDatabase().createCollection(COLLECTION_NAME, new CreateCollectionOptions().capped(true).maxDocuments(10000).sizeInBytes(10000)); MessageListenerContainer container = new DefaultMessageListenerContainer(template); @@ -228,7 +269,7 @@ public void abortsSubscriptionOnError() throws InterruptedException { collection.insertOne(new Document("_id", "id-1").append("value", "foo")); - Subscription subscription = container.register(new TailableCursorRequest(messageListener, () -> COLLECTION_NAME), + Subscription subscription = container.register(new TailableCursorRequest(messageListener, options()), Document.class); awaitSubscription(subscription); @@ -248,7 +289,7 @@ public void abortsSubscriptionOnError() throws InterruptedException { @Test // DATAMONGO-1803 public void callsDefaultErrorHandlerOnError() throws InterruptedException { - dbFactory.getDb().createCollection(COLLECTION_NAME, + dbFactory.getMongoDatabase().createCollection(COLLECTION_3_NAME, new CreateCollectionOptions().capped(true).maxDocuments(10000).sizeInBytes(10000)); collection.insertOne(new Document("_id", "id-1").append("value", "foo")); @@ -261,14 +302,11 @@ public void callsDefaultErrorHandlerOnError() throws InterruptedException { try { container.start(); - Subscription subscription = container.register(new TailableCursorRequest(messageListener, () -> COLLECTION_NAME), + Subscription subscription = container.register(new TailableCursorRequest(messageListener, options()), Document.class); SubscriptionUtils.awaitSubscription(subscription); - - template.dropCollection(COLLECTION_NAME); - - Thread.sleep(20); + 
dbFactory.getMongoDatabase().drop(); verify(errorHandler, atLeast(1)).handleError(any(DataAccessException.class)); } finally { @@ -277,22 +315,22 @@ public void callsDefaultErrorHandlerOnError() throws InterruptedException { } @Test // DATAMONGO-1803 - @IfProfileValue(name = "replSet", value = "true") + @EnableIfReplicaSetAvailable public void runsMoreThanOneTaskAtOnce() throws InterruptedException { - dbFactory.getDb().createCollection(COLLECTION_NAME, + dbFactory.getMongoDatabase().createCollection(COLLECTION_NAME, new CreateCollectionOptions().capped(true).maxDocuments(10000).sizeInBytes(10000)); MessageListenerContainer container = new DefaultMessageListenerContainer(template); container.start(); CollectingMessageListener tailableListener = new CollectingMessageListener<>(); - Subscription tailableSubscription = container - .register(new TailableCursorRequest(tailableListener, () -> COLLECTION_NAME), Document.class); + Subscription tailableSubscription = container.register(new TailableCursorRequest(tailableListener, options()), + Document.class); CollectingMessageListener, Document> changeStreamListener = new CollectingMessageListener<>(); - Subscription changeStreamSubscription = container - .register(new ChangeStreamRequest(changeStreamListener, () -> COLLECTION_NAME), Document.class); + Subscription changeStreamSubscription = container.register(new ChangeStreamRequest(changeStreamListener, options()), + Document.class); awaitSubscriptions(tailableSubscription, changeStreamSubscription); @@ -308,8 +346,34 @@ public void runsMoreThanOneTaskAtOnce() throws InterruptedException { assertThat(changeStreamListener.getFirstMessage().getRaw()).isInstanceOf(ChangeStreamDocument.class); } - @Data + @Test // DATAMONGO-2012 + @EnableIfReplicaSetAvailable + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + public void databaseLevelWatch() throws InterruptedException { + + MessageListenerContainer container = new DefaultMessageListenerContainer(template); + 
Subscription subscription = container.register(new ChangeStreamRequest(messageListener, RequestOptions.none()), + Person.class); + + container.start(); + + awaitSubscription(subscription, TIMEOUT); + + collection.insertOne(new Document("_id", "col-1-id-1").append("firstname", "foo")); + collection.insertOne(new Document("_id", "col-1-id-2").append("firstname", "bar")); + + collection2.insertOne(new Document("_id", "col-2-id-1").append("firstname", "bar")); + collection2.insertOne(new Document("_id", "col-2-id-2").append("firstname", "foo")); + + awaitMessages(messageListener, 4, TIMEOUT); + + assertThat(messageListener.getMessages().stream().map(Message::getBody).collect(Collectors.toList())) + .containsExactly(new Person("col-1-id-1", "foo"), new Person("col-1-id-2", "bar"), + new Person("col-2-id-1", "bar"), new Person("col-2-id-2", "foo")); + } + static class Person { + @Id String id; private String firstname; private String lastname; @@ -320,5 +384,57 @@ public Person(String id, String firstname) { this.id = id; this.firstname = firstname; } + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(firstname, person.firstname) + && Objects.equals(lastname, person.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname); + } + + public String toString() { + return "DefaultMessageListenerContainerTests.Person(id=" + this.getId() + ", firstname=" + 
this.getFirstname() + + ", lastname=" + this.getLastname() + ")"; + } + } + + static ChangeStreamRequestOptions options() { + return new ChangeStreamRequestOptions(DATABASE_NAME, COLLECTION_NAME, Duration.ofMillis(10), + ChangeStreamOptions.builder().build()); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerUnitTests.java index 6cc596a2c7..4df47b1c51 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,11 +22,12 @@ import java.time.Duration; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.dao.DataAccessResourceFailureException; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.util.ErrorHandler; @@ -36,46 +37,46 @@ * * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) -public class DefaultMessageListenerContainerUnitTests { +@ExtendWith(MockitoExtension.class) +class DefaultMessageListenerContainerUnitTests { @Mock MongoTemplate template; @Mock ErrorHandler errorHandler; - DefaultMessageListenerContainer container; + private DefaultMessageListenerContainer container; - @Before - public void setUp() { + @BeforeEach + void setUp() { container = new DefaultMessageListenerContainer(template); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1803 - public void throwsErrorOnNullTemplate() { - new DefaultMessageListenerContainer(null); + @Test // DATAMONGO-1803 + void throwsErrorOnNullTemplate() { + assertThatIllegalArgumentException().isThrownBy(() -> new DefaultMessageListenerContainer(null)); } @Test // DATAMONGO-1803 - public void startStopContainer() throws Throwable { + void startStopContainer() throws Throwable { runOnce(new MultithreadedStartStopContainer(container)); } @Test // DATAMONGO-1803 - public void subscribeToContainerBeforeStartup() throws Throwable { + void subscribeToContainerBeforeStartup() 
throws Throwable { runOnce(new MultithreadedSubscribeBeforeStartup(container)); } @Test // DATAMONGO-1803 - public void subscribeToContainerAfterStartup() throws Throwable { + void subscribeToContainerAfterStartup() throws Throwable { runOnce(new MultithreadedSubscribeAfterStartup(container)); } @Test // DATAMONGO-1803 - public void stopSubscriptionWhileRunning() throws Throwable { + void stopSubscriptionWhileRunning() throws Throwable { runOnce(new StopSubscriptionWhileRunning(container)); } @Test // DATAMONGO-1803 - public void removeSubscriptionWhileRunning() throws Throwable { + void removeSubscriptionWhileRunning() throws Throwable { runOnce(new RemoveSubscriptionWhileRunning(container)); } @@ -84,7 +85,7 @@ private static class RemoveSubscriptionWhileRunning extends MultithreadedTestCas DefaultMessageListenerContainer container; Subscription subscription; - public RemoveSubscriptionWhileRunning(DefaultMessageListenerContainer container) { + RemoveSubscriptionWhileRunning(DefaultMessageListenerContainer container) { this.container = container; subscription = container.register(new MockSubscriptionRequest(), new MockTask()); } @@ -114,7 +115,7 @@ private static class StopSubscriptionWhileRunning extends MultithreadedTestCase DefaultMessageListenerContainer container; Subscription subscription; - public StopSubscriptionWhileRunning(DefaultMessageListenerContainer container) { + StopSubscriptionWhileRunning(DefaultMessageListenerContainer container) { this.container = container; subscription = container.register(new MockSubscriptionRequest(), new MockTask()); } @@ -144,7 +145,7 @@ private static class MultithreadedSubscribeAfterStartup extends MultithreadedTes DefaultMessageListenerContainer container; - public MultithreadedSubscribeAfterStartup(DefaultMessageListenerContainer container) { + MultithreadedSubscribeAfterStartup(DefaultMessageListenerContainer container) { this.container = container; } @@ -174,7 +175,7 @@ private static class 
MultithreadedSubscribeBeforeStartup extends MultithreadedTe DefaultMessageListenerContainer container; - public MultithreadedSubscribeBeforeStartup(DefaultMessageListenerContainer container) { + MultithreadedSubscribeBeforeStartup(DefaultMessageListenerContainer container) { this.container = container; } @@ -207,7 +208,7 @@ private static class MultithreadedStartStopContainer extends MultithreadedTestCa DefaultMessageListenerContainer container; - public MultithreadedStartStopContainer(DefaultMessageListenerContainer container) { + MultithreadedStartStopContainer(DefaultMessageListenerContainer container) { this.container = container; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/SubscriptionUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/SubscriptionUtils.java index 13e653d40c..7cfe859e8e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/SubscriptionUtils.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/SubscriptionUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -25,10 +25,11 @@ * Utilities for testing long running asnyc message retrieval. 
* * @author Christoph Strobl + * @author Mark Paluch */ class SubscriptionUtils { - static final Duration DEFAULT_TIMEOUT = Duration.ofSeconds(1); + static final Duration DEFAULT_TIMEOUT = Duration.ofMillis(1500); /** * Wait for {@link Subscription#isActive() to become active} but not longer than {@link #DEFAULT_TIMEOUT}. @@ -44,7 +45,7 @@ static void awaitSubscription(Subscription subscription) throws InterruptedExcep * Wait for all {@link Subscription Subscriptions} to {@link Subscription#isActive() become active} but not longer * than {@link #DEFAULT_TIMEOUT}. * - * @param subscription + * @param subscriptions * @throws InterruptedException */ static void awaitSubscriptions(Subscription... subscriptions) throws InterruptedException { @@ -130,7 +131,8 @@ static void awaitMessages(CollectingMessageListener listener, int nrMessages, Du /** * {@link MessageListener} implementation collecting received {@link Message messages}. * - * @param + * @param source message type. + * @param target message type. */ static class CollectingMessageListener implements MessageListener { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequestUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequestUnitTests.java index 4c14a5b6f3..f9d4c71eda 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequestUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorRequestUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,7 +19,8 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; + import org.springframework.data.mongodb.core.messaging.DefaultMessageListenerContainerTests.Person; import org.springframework.data.mongodb.core.query.Query; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorTests.java index 3a7b90ca15..60d9153212 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TailableCursorTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,26 +20,28 @@ import static org.springframework.data.mongodb.core.query.Query.*; import static org.springframework.data.mongodb.test.util.Assertions.*; -import lombok.Data; - +import java.util.Objects; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.bson.Document; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.CollectionOptions; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.messaging.Message.MessageProperties; import org.springframework.data.mongodb.core.messaging.TailableCursorRequest.TailableCursorRequestOptions; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * Integration test for subscribing to a capped {@link com.mongodb.client.MongoCollection} inside the @@ -48,10 +50,12 @@ * @author Christoph Strobl * @author Mark Paluch */ +@ExtendWith({ MongoClientExtension.class }) public class TailableCursorTests { static final String COLLECTION_NAME = "user"; + static @Client MongoClient 
mongoClient; static ThreadPoolExecutor executor; MongoTemplate template; MessageListenerContainer container; @@ -60,15 +64,15 @@ public class TailableCursorTests { User huffyFluffy; User sugarSplashy; - @BeforeClass + @BeforeAll public static void beforeClass() { executor = new ThreadPoolExecutor(2, 2, 1, TimeUnit.SECONDS, new LinkedBlockingDeque<>()); } - @Before + @BeforeEach public void setUp() { - template = new MongoTemplate(new MongoClient(), "tailable-cursor-tests"); + template = new MongoTemplate(mongoClient, "tailable-cursor-tests"); template.dropCollection(User.class); template.createCollection(User.class, CollectionOptions.empty().capped().maxDocuments(10000).size(10000)); @@ -92,12 +96,12 @@ public void setUp() { sugarSplashy.age = 5; } - @After + @AfterEach public void tearDown() { container.stop(); } - @AfterClass + @AfterAll public static void afterClass() { executor.shutdown(); } @@ -157,7 +161,7 @@ public void filtersMessagesCorrectly() throws InterruptedException { template.save(sugarSplashy); template.save(huffyFluffy); - awaitMessages(messageListener); + awaitMessages(messageListener, 2); assertThat(messageListener.getMessages().stream().map(Message::getBody)).hasSize(2).doesNotContain(sugarSplashy); } @@ -178,7 +182,7 @@ public void mapsFilterToDomainType() throws InterruptedException { template.save(sugarSplashy); template.save(huffyFluffy); - awaitMessages(messageListener); + awaitMessages(messageListener, 1); assertThat(messageListener.getMessages().stream().map(Message::getBody)).hasSize(1).containsExactly(sugarSplashy); } @@ -196,17 +200,62 @@ public void emitsFromStart() throws InterruptedException { template.save(sugarSplashy); - awaitMessages(messageListener); + awaitMessages(messageListener, 3); assertThat(messageListener.getMessages().stream().map(Message::getBody)).hasSize(3).containsExactly(jellyBelly, huffyFluffy, sugarSplashy); } - @Data static class User { @Id String id; @Field("user_name") String userName; int age; + + public 
String getId() { + return this.id; + } + + public String getUserName() { + return this.userName; + } + + public int getAge() { + return this.age; + } + + public void setId(String id) { + this.id = id; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public void setAge(int age) { + this.age = age; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + User user = (User) o; + return age == user.age && Objects.equals(id, user.id) && Objects.equals(userName, user.userName); + } + + @Override + public int hashCode() { + return Objects.hash(id, userName, age); + } + + public String toString() { + return "TailableCursorTests.User(id=" + this.getId() + ", userName=" + this.getUserName() + ", age=" + + this.getAge() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TaskFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TaskFactoryUnitTests.java index 1fbeeee905..6888f9101c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TaskFactoryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/TaskFactoryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,12 +18,13 @@ import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.messaging.ChangeStreamRequest.ChangeStreamRequestOptions; @@ -35,30 +36,30 @@ * * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) -public class TaskFactoryUnitTests { +@ExtendWith(MockitoExtension.class) +class TaskFactoryUnitTests { @Mock MongoConverter converter; @Mock MongoTemplate template; @Mock MessageListener messageListener; @Mock ErrorHandler errorHandler; - TaskFactory factory; - - @Before - public void setUp() { + private TaskFactory factory; - when(template.getConverter()).thenReturn(converter); + @BeforeEach + void setUp() { factory = new TaskFactory(template); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1803 - public void requestMustNotBeNull() { - factory.forRequest(null, Object.class, errorHandler); + @Test // DATAMONGO-1803 + void requestMustNotBeNull() { + assertThatIllegalArgumentException().isThrownBy(() -> factory.forRequest(null, Object.class, errorHandler)); } @Test // DATAMONGO-1803 - public void createsChangeStreamRequestCorrectly() { + void createsChangeStreamRequestCorrectly() { + + 
when(template.getConverter()).thenReturn(converter); ChangeStreamRequestOptions options = Mockito.mock(ChangeStreamRequestOptions.class); Task task = factory.forRequest(new ChangeStreamRequest(messageListener, options), Object.class, errorHandler); @@ -67,7 +68,9 @@ public void createsChangeStreamRequestCorrectly() { } @Test // DATAMONGO-1803 - public void createsTailableRequestCorrectly() { + void createsTailableRequestCorrectly() { + + when(template.getConverter()).thenReturn(converter); RequestOptions options = Mockito.mock(RequestOptions.class); when(options.getCollectionName()).thenReturn("collection-1"); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java index 4334fac133..70bd6dc3d9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,20 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; - -import org.bson.Document; -import org.junit.Test; -import org.springframework.data.domain.Sort.Direction; import nl.jqno.equalsverifier.EqualsVerifier; import nl.jqno.equalsverifier.Warning; +import org.bson.Document; +import org.junit.jupiter.api.Test; + +import org.junit.jupiter.api.condition.DisabledForJreRange; +import org.junit.jupiter.api.condition.JRE; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; + /** * Unit tests for {@link BasicQuery}. 
* @@ -40,7 +42,7 @@ public class BasicQueryUnitTests { public void createsQueryFromPlainJson() { Query q = new BasicQuery("{ \"name\" : \"Thomas\"}"); Document reference = new Document("name", "Thomas"); - assertThat(q.getQueryObject(), is(reference)); + assertThat(q.getQueryObject()).isEqualTo(reference); } @Test @@ -48,7 +50,7 @@ public void addsCriteriaCorrectly() { Query q = new BasicQuery("{ \"name\" : \"Thomas\"}").addCriteria(where("age").lt(80)); Document reference = new Document("name", "Thomas"); reference.put("age", new Document("$lt", 80)); - assertThat(q.getQueryObject(), is(reference)); + assertThat(q.getQueryObject()).isEqualTo(reference); } @Test @@ -56,14 +58,15 @@ public void overridesSortCorrectly() { BasicQuery query = new BasicQuery("{}"); query.setSortObject(new Document("name", -1)); - query.with(new org.springframework.data.domain.Sort(Direction.ASC, "lastname")); + query.with(Sort.by(Direction.ASC, "lastname")); Document sortReference = new Document("name", -1); sortReference.put("lastname", 1); - assertThat(query.getSortObject(), is(sortReference)); + assertThat(query.getSortObject()).isEqualTo(sortReference); } @Test // DATAMONGO-1093 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "EqualsVerifier uses reflection on Optional") public void equalsContract() { BasicQuery query1 = new BasicQuery("{ \"name\" : \"Thomas\"}", "{\"name\":1, \"age\":1}"); @@ -90,9 +93,9 @@ public void handlesEqualsAndHashCodeCorrectlyForExactCopies() { BasicQuery query2 = new BasicQuery(qry, fields); query2.setSortObject(new Document("name", -1)); - assertThat(query1, is(equalTo(query1))); - assertThat(query1, is(equalTo(query2))); - assertThat(query1.hashCode(), is(query2.hashCode())); + assertThat(query1).isEqualTo(query1); + assertThat(query1).isEqualTo(query2); + assertThat(query1.hashCode()).isEqualTo(query2.hashCode()); } @Test // DATAMONGO-1093 @@ -107,8 +110,8 @@ public void handlesEqualsAndHashCodeCorrectlyWhenBasicQuerySettingsDiffer() { 
BasicQuery query2 = new BasicQuery(qry, fields); query2.setSortObject(new Document("name", 1)); - assertThat(query1, is(not(equalTo(query2)))); - assertThat(query1.hashCode(), is(not(query2.hashCode()))); + assertThat(query1).isNotEqualTo(query2); + assertThat(query1.hashCode()).isNotEqualTo(query2.hashCode()); } @Test // DATAMONGO-1093 @@ -123,8 +126,8 @@ public void handlesEqualsAndHashCodeCorrectlyWhenQuerySettingsDiffer() { BasicQuery query2 = new BasicQuery(qry, fields); query2.getMeta().setComment("bar"); - assertThat(query1, is(not(equalTo(query2)))); - assertThat(query1.hashCode(), is(not(query2.hashCode()))); + assertThat(query1).isNotEqualTo(query2); + assertThat(query1.hashCode()).isNotEqualTo(query2.hashCode()); } @Test // DATAMONGO-1387 @@ -135,7 +138,7 @@ public void returnsFieldsCorrectly() { BasicQuery query1 = new BasicQuery(qry, fields); - assertThat(query1.getFieldsObject(), isBsonObject().containing("name").containing("age")); + assertThat(query1.getFieldsObject()).containsKeys("name", "age"); } @Test // DATAMONGO-1387 @@ -146,7 +149,7 @@ public void handlesFieldsIncludeCorrectly() { BasicQuery query1 = new BasicQuery(qry); query1.fields().include("name"); - assertThat(query1.getFieldsObject(), isBsonObject().containing("name")); + assertThat(query1.getFieldsObject()).containsKey("name"); } @Test // DATAMONGO-1387 @@ -158,7 +161,6 @@ public void combinesFieldsIncludeCorrectly() { BasicQuery query1 = new BasicQuery(qry, fields); query1.fields().include("gender"); - assertThat(query1.getFieldsObject(), isBsonObject().containing("name").containing("age").containing("gender")); + assertThat(query1.getFieldsObject()).containsKeys("name", "age", "gender"); } - } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicUpdateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicUpdateUnitTests.java new file mode 100644 index 0000000000..dacc270230 --- /dev/null +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicUpdateUnitTests.java @@ -0,0 +1,114 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; +import java.util.stream.Stream; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.data.mongodb.core.query.Update.Position; + +/** + * Unit tests for {@link BasicUpdate}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +class BasicUpdateUnitTests { + + @Test // GH-4918 + void setOperationValueShouldAppendsOpsCorrectly() { + + BasicUpdate basicUpdate = new BasicUpdate("{}"); + basicUpdate.setOperationValue("$set", "key1", "alt"); + basicUpdate.setOperationValue("$set", "key2", "nps"); + basicUpdate.setOperationValue("$unset", "key3", "x"); + + assertThat(basicUpdate.getUpdateObject()) + .isEqualTo("{ '$set' : { 'key1' : 'alt', 'key2' : 'nps' }, '$unset' : { 'key3' : 'x' } }"); + } + + @Test // GH-4918 + void setOperationErrorsOnNonMapType() { + + BasicUpdate basicUpdate = new BasicUpdate("{ '$set' : 1 }"); + assertThatExceptionOfType(IllegalStateException.class) + .isThrownBy(() -> basicUpdate.setOperationValue("$set", "k", "v")); + } + + @ParameterizedTest // GH-4918 + @CsvSource({ // + "{ }, k1, false", // + "{ '$set' : { 'k1' : 'v1' } }, k1, true", // + "{ '$set' : { 'k1' : 'v1' } }, k2, false", // + "{ '$set' : { 'k1.k2' : 'v1' } }, k1, false", // + "{ '$set' : { 'k1.k2' : 'v1' } }, k1.k2, true", // + "{ '$set' : { 'k1' : 'v1' } }, '', false", // + "{ '$inc' : { 'k1' : 1 } }, k1, true" }) + void modifiesLooksUpKeyCorrectly(String source, String key, boolean modified) { + + BasicUpdate basicUpdate = new BasicUpdate(source); + assertThat(basicUpdate.modifies(key)).isEqualTo(modified); + } + + @ParameterizedTest // GH-4918 + @MethodSource("updateOpArgs") + void updateOpsShouldNotOverrideExistingValues(String operator, Function updateFunction) { + + Document source = Document.parse("{ '%s' : { 'key-1' : 'value-1' } }".formatted(operator)); + Update update = updateFunction.apply(new BasicUpdate(source)); + + assertThat(update.getUpdateObject()).containsEntry("%s.key-1".formatted(operator), "value-1") + .containsKey("%s.key-2".formatted(operator)); + } + + @Test // GH-4918 + void shouldNotOverridePullAll() { + + Document source = Document.parse("{ '$pullAll' : { 'key-1' : ['value-1'] } }"); + Update update = new 
BasicUpdate(source).pullAll("key-1", new String[] { "value-2" }).pullAll("key-2", + new String[] { "value-3" }); + + assertThat(update.getUpdateObject()).containsEntry("$pullAll.key-1", Arrays.asList("value-1", "value-2")) + .containsEntry("$pullAll.key-2", List.of("value-3")); + } + + static Stream updateOpArgs() { + return Stream.of( // + Arguments.of("$set", (Function) update -> update.set("key-2", "value-2")), + Arguments.of("$unset", (Function) update -> update.unset("key-2")), + Arguments.of("$inc", (Function) update -> update.inc("key-2", 1)), + Arguments.of("$push", (Function) update -> update.push("key-2", "value-2")), + Arguments.of("$addToSet", (Function) update -> update.addToSet("key-2", "value-2")), + Arguments.of("$pop", (Function) update -> update.pop("key-2", Position.FIRST)), + Arguments.of("$pull", (Function) update -> update.pull("key-2", "value-2")), + Arguments.of("$pullAll", + (Function) update -> update.pullAll("key-2", new String[] { "value-2" })), + Arguments.of("$rename", (Function) update -> update.rename("key-2", "value-2"))); + }; +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaTests.java index 5429112613..72f42db9b5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,225 +15,211 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.test.util.IsBsonObject.*; +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; -import java.util.Collections; +import java.util.Arrays; +import java.util.Base64; -import org.bson.Document; -import org.junit.Test; -import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; -import org.springframework.data.mongodb.core.geo.GeoJsonLineString; -import org.springframework.data.mongodb.core.geo.GeoJsonPoint; -import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.bson.types.Binary; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; /** - * @author Oliver Gierke - * @author Thomas Darimont + * Integration tests for {@link Criteria} usage as part of a {@link Query}. 
+ * * @author Christoph Strobl + * @author Andreas Zink */ -public class CriteriaTests { +@ExtendWith(MongoTemplateExtension.class) +class CriteriaTests { - @Test - public void testSimpleCriteria() { - Criteria c = new Criteria("name").is("Bubba"); - assertEquals(Document.parse("{ \"name\" : \"Bubba\"}"), c.getCriteriaObject()); - } + @Template(initialEntitySet = { DocumentWithBitmask.class }) // + static MongoTestTemplate ops; - @Test - public void testNotEqualCriteria() { - Criteria c = new Criteria("name").ne("Bubba"); - assertEquals(Document.parse("{ \"name\" : { \"$ne\" : \"Bubba\"}}"), c.getCriteriaObject()); - } + static final DocumentWithBitmask FIFTY_FOUR/*00110110*/ = new DocumentWithBitmask("1", Integer.valueOf(54), + Integer.toBinaryString(54)); + static final DocumentWithBitmask TWENTY_INT/*00010100*/ = new DocumentWithBitmask("2", Integer.valueOf(20), + Integer.toBinaryString(20)); + static final DocumentWithBitmask TWENTY_FLOAT/*00010100*/ = new DocumentWithBitmask("3", Float.valueOf(20), + Integer.toBinaryString(20)); + static final DocumentWithBitmask ONE_HUNDRED_TWO/*01100110*/ = new DocumentWithBitmask("4", + new Binary(Base64.getDecoder().decode("Zg==")), "01100110"); - @Test - public void buildsIsNullCriteriaCorrectly() { + @BeforeEach + void beforeEach() { - Document reference = new Document("name", null); + ops.flush(); - Criteria criteria = new Criteria("name").is(null); - assertThat(criteria.getCriteriaObject(), is(reference)); + ops.insert(FIFTY_FOUR); + ops.insert(TWENTY_INT); + ops.insert(TWENTY_FLOAT); + ops.insert(ONE_HUNDRED_TWO); } - @Test - public void testChainedCriteria() { - Criteria c = new Criteria("name").is("Bubba").and("age").lt(21); - assertEquals(Document.parse("{ \"name\" : \"Bubba\" , \"age\" : { \"$lt\" : 21}}"), c.getCriteriaObject()); - } + @Test // DATAMONGO-1808 + void bitsAllClearWithBitPositions() { - @Test(expected = InvalidMongoDbApiUsageException.class) - public void 
testCriteriaWithMultipleConditionsForSameKey() { - Criteria c = new Criteria("name").gte("M").and("name").ne("A"); - c.getCriteriaObject(); + assertThat(ops.find(query(where("value").bits().allClear(Arrays.asList(1, 5))), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(TWENTY_INT, TWENTY_FLOAT); } - @Test - public void equalIfCriteriaMatches() { + @Test // DATAMONGO-1808 + void bitsAllClearWithNumericBitmask() { - Criteria left = new Criteria("name").is("Foo").and("lastname").is("Bar"); - Criteria right = new Criteria("name").is("Bar").and("lastname").is("Bar"); - - assertThat(left, is(not(right))); - assertThat(right, is(not(left))); + assertThat(ops.find(query(where("value").bits().allClear(35)), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(TWENTY_INT, TWENTY_FLOAT); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-507 - public void shouldThrowExceptionWhenTryingToNegateAndOperation() { + @Test // DATAMONGO-1808 + void bitsAllClearWithStringBitmask() { - new Criteria() // - .not() // - .andOperator(Criteria.where("delete").is(true).and("_id").is(42)); // + assertThat(ops.find(query(where("value").bits().allClear("ID==")), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(TWENTY_INT, TWENTY_FLOAT); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-507 - public void shouldThrowExceptionWhenTryingToNegateOrOperation() { + @Test // DATAMONGO-1808 + void bitsAllSetWithBitPositions() { - new Criteria() // - .not() // - .orOperator(Criteria.where("delete").is(true).and("_id").is(42)); // + assertThat(ops.find(query(where("value").bits().allSet(Arrays.asList(1, 5))), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(FIFTY_FOUR, ONE_HUNDRED_TWO); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-507 - public void shouldThrowExceptionWhenTryingToNegateNorOperation() { + @Test // DATAMONGO-1808 + void bitsAllSetWithNumericBitmask() { - new Criteria() // - .not() // - 
.norOperator(Criteria.where("delete").is(true).and("_id").is(42)); // + assertThat(ops.find(query(where("value").bits().allSet(50)), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(FIFTY_FOUR); } - @Test // DATAMONGO-507 - public void shouldNegateFollowingSimpleExpression() { - - Criteria c = Criteria.where("age").not().gt(18).and("status").is("student"); - Document co = c.getCriteriaObject(); + @Test // DATAMONGO-1808 + void bitsAllSetWithStringBitmask() { - assertThat(co, is(notNullValue())); - assertThat(co, is(Document.parse("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}"))); + assertThat(ops.find(query(where("value").bits().allSet("MC==")), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(FIFTY_FOUR); } - @Test // DATAMONGO-1068 - public void getCriteriaObjectShouldReturnEmptyDocumentWhenNoCriteriaSpecified() { + @Test // DATAMONGO-1808 + void bitsAnyClearWithBitPositions() { - Document document = new Criteria().getCriteriaObject(); - - assertThat(document, equalTo(new Document())); + assertThat(ops.find(query(where("value").bits().anyClear(Arrays.asList(1, 5))), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(TWENTY_INT, TWENTY_FLOAT); } - @Test // DATAMONGO-1068 - public void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresent() { - - Document document = new Criteria().lt("foo").getCriteriaObject(); + @Test // DATAMONGO-1808 + void bitsAnyClearWithNumericBitmask() { - assertThat(document, equalTo(new Document().append("$lt", "foo"))); + assertThat(ops.find(query(where("value").bits().anyClear(35)), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(FIFTY_FOUR, TWENTY_INT, TWENTY_FLOAT, ONE_HUNDRED_TWO); } - @Test // DATAMONGO-1068 - public void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresentButMultipleCriteriasPresent() { + @Test // DATAMONGO-1808 + void bitsAnyClearWithStringBitmask() { - Document document = new Criteria().lt("foo").gt("bar").getCriteriaObject(); - - 
assertThat(document, equalTo(new Document().append("$lt", "foo").append("$gt", "bar"))); + assertThat(ops.find(query(where("value").bits().anyClear("MC==")), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(TWENTY_INT, TWENTY_FLOAT, ONE_HUNDRED_TWO); } - @Test // DATAMONGO-1068 - public void getCriteriaObjectShouldRespectNotWhenNoKeyPresent() { - - Document document = new Criteria().lt("foo").not().getCriteriaObject(); + @Test // DATAMONGO-1808 + void bitsAnySetWithBitPositions() { - assertThat(document, equalTo(new Document().append("$not", new Document("$lt", "foo")))); + assertThat(ops.find(query(where("value").bits().anySet(Arrays.asList(1, 5))), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(FIFTY_FOUR, ONE_HUNDRED_TWO); } - @Test // DATAMONGO-1135 - public void geoJsonTypesShouldBeWrappedInGeometry() { + @Test // DATAMONGO-1808 + void bitsAnySetWithNumericBitmask() { - Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).getCriteriaObject(); - - assertThat(document, isBsonObject().containing("foo.$near.$geometry", new GeoJsonPoint(100, 200))); + assertThat(ops.find(query(where("value").bits().anySet(35)), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(FIFTY_FOUR, ONE_HUNDRED_TWO); } - @Test // DATAMONGO-1135 - public void legacyCoordinateTypesShouldNotBeWrappedInGeometry() { - - Document document = new Criteria("foo").near(new Point(100, 200)).getCriteriaObject(); + @Test // DATAMONGO-1808 + void bitsAnySetWithStringBitmask() { - assertThat(document, isBsonObject().notContaining("foo.$near.$geometry")); + assertThat(ops.find(query(where("value").bits().anySet("MC==")), DocumentWithBitmask.class)) + .containsExactlyInAnyOrder(FIFTY_FOUR, TWENTY_INT, TWENTY_FLOAT, ONE_HUNDRED_TWO); } - @Test // DATAMONGO-1135 - public void maxDistanceShouldBeMappedInsideNearWhenUsedAlongWithGeoJsonType() { + static class DocumentWithBitmask { - Document document = new Criteria("foo").near(new GeoJsonPoint(100, 
200)).maxDistance(50D).getCriteriaObject(); + @Id String id; + Object value; + String binaryValue; - assertThat(document, isBsonObject().containing("foo.$near.$maxDistance", 50D)); - } - - @Test // DATAMONGO-1135 - public void maxDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() { + public DocumentWithBitmask(String id, Object value, String binaryValue) { - Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).maxDistance(50D).getCriteriaObject(); + this.id = id; + this.value = value; + this.binaryValue = binaryValue; + } - assertThat(document, isBsonObject().containing("foo.$nearSphere.$maxDistance", 50D)); - } + public String getId() { + return this.id; + } - @Test // DATAMONGO-1110 - public void minDistanceShouldBeMappedInsideNearWhenUsedAlongWithGeoJsonType() { + public Object getValue() { + return this.value; + } - Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).minDistance(50D).getCriteriaObject(); + public String getBinaryValue() { + return this.binaryValue; + } - assertThat(document, isBsonObject().containing("foo.$near.$minDistance", 50D)); - } + public void setId(String id) { + this.id = id; + } - @Test // DATAMONGO-1110 - public void minDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() { + public void setValue(Object value) { + this.value = value; + } - Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).minDistance(50D).getCriteriaObject(); + public void setBinaryValue(String binaryValue) { + this.binaryValue = binaryValue; + } - assertThat(document, isBsonObject().containing("foo.$nearSphere.$minDistance", 50D)); - } - - @Test // DATAMONGO-1110 - public void minAndMaxDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() { - - Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).minDistance(50D).maxDistance(100D) - .getCriteriaObject(); - - assertThat(document, 
isBsonObject().containing("foo.$nearSphere.$minDistance", 50D)); - assertThat(document, isBsonObject().containing("foo.$nearSphere.$maxDistance", 100D)); - } - - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1134 - public void intersectsShouldThrowExceptionWhenCalledWihtNullValue() { - new Criteria("foo").intersects(null); - } - - @Test // DATAMONGO-1134 - public void intersectsShouldWrapGeoJsonTypeInGeometryCorrectly() { - - GeoJsonLineString lineString = new GeoJsonLineString(new Point(0, 0), new Point(10, 10)); - Document document = new Criteria("foo").intersects(lineString).getCriteriaObject(); - - assertThat(document, isBsonObject().containing("foo.$geoIntersects.$geometry", lineString)); - } - - @Test // DATAMONGO-1835 - public void extractsJsonSchemaInChainCorrectly() { - - MongoJsonSchema schema = MongoJsonSchema.builder().required("name").build(); - Criteria critera = Criteria.where("foo").is("bar").andDocumentStructureMatches(schema); - - assertThat(critera.getCriteriaObject(), is(equalTo(new Document("foo", "bar").append("$jsonSchema", - new Document("type", "object").append("required", Collections.singletonList("name")))))); - } + public String toString() { + return "CriteriaTests.DocumentWithBitmask(id=" + this.getId() + ", value=" + this.getValue() + ", binaryValue=" + + this.getBinaryValue() + ")"; + } - @Test // DATAMONGO-1835 - public void extractsJsonSchemaFromFactoryMethodCorrectly() { + public boolean equals(final Object o) { + if (o == this) + return true; + if (!(o instanceof DocumentWithBitmask)) + return false; + final DocumentWithBitmask other = (DocumentWithBitmask) o; + if (!other.canEqual((Object) this)) + return false; + final Object this$id = this.getId(); + final Object other$id = other.getId(); + if (this$id == null ? 
other$id != null : !this$id.equals(other$id)) + return false; + final Object this$binaryValue = this.getBinaryValue(); + final Object other$binaryValue = other.getBinaryValue(); + if (this$binaryValue == null ? other$binaryValue != null : !this$binaryValue.equals(other$binaryValue)) + return false; + return true; + } - MongoJsonSchema schema = MongoJsonSchema.builder().required("name").build(); - Criteria critera = Criteria.matchingDocumentStructure(schema); + protected boolean canEqual(final Object other) { + return other instanceof DocumentWithBitmask; + } - assertThat(critera.getCriteriaObject(), is(equalTo(new Document("$jsonSchema", - new Document("type", "object").append("required", Collections.singletonList("name")))))); + public int hashCode() { + final int PRIME = 59; + int result = 1; + final Object $id = this.getId(); + result = result * PRIME + ($id == null ? 43 : $id.hashCode()); + final Object $binaryValue = this.getBinaryValue(); + result = result * PRIME + ($binaryValue == null ? 43 : $binaryValue.hashCode()); + return result; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java new file mode 100644 index 0000000000..e734dd6aba --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java @@ -0,0 +1,479 @@ +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.regex.Pattern; + +import org.bson.BsonRegularExpression; +import org.bson.Document; +import org.junit.jupiter.api.Test; + +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; +import org.springframework.data.mongodb.core.geo.GeoJsonLineString; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; + +/** + * Unit tests for {@link Criteria}. + * + * @author Oliver Gierke + * @author Thomas Darimont + * @author Christoph Strobl + * @author Andreas Zink + * @author Ziemowit Stolarczyk + * @author Clément Petit + * @author Mark Paluch + * @author James McNee + */ +class CriteriaUnitTests { + + @Test + void testSimpleCriteria() { + Criteria c = new Criteria("name").is("Bubba"); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"name\" : \"Bubba\"}"); + } + + @Test // GH-4850 + void testCombiningSimpleCriteria() { + + Document expected = Document.parse("{ name : { $eq : 123, $type : ['long'] } }"); + + Criteria c = Criteria.where("name") // + .is(123) // + .type(Type.INT_64); + + assertThat(c.getCriteriaObject()).isEqualTo(expected); + + c = Criteria.where("name") // + .type(Type.INT_64).is(123); + + assertThat(c.getCriteriaObject()).isEqualTo(expected); + } + + @Test // GH-4850 + void testCombiningBsonRegexCriteria() { + + Criteria c = Criteria.where("name").regex(new BsonRegularExpression("^spring$")).type(Type.INT_64); + + assertThat(c.getCriteriaObject()) + .isEqualTo(Document.parse("{ name : { $regex : 
RegExp('^spring$'), $type : ['long'] } }")); + } + + @Test // GH-4850 + void testCombiningRegexCriteria() { + + Criteria c = Criteria.where("name").regex("^spring$").type(Type.INT_64); + + assertThat(c.getCriteriaObject()).hasEntrySatisfying("name.$regex", + it -> assertThat(it).isInstanceOf(Pattern.class)); + } + + @Test + void testNotEqualCriteria() { + Criteria c = new Criteria("name").ne("Bubba"); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"name\" : { \"$ne\" : \"Bubba\"}}"); + } + + @Test + void buildsIsNullCriteriaCorrectly() { + + Document reference = new Document("name", null); + + Criteria criteria = new Criteria("name").is(null); + assertThat(criteria.getCriteriaObject()).isEqualTo(reference); + } + + @Test + void testChainedCriteria() { + Criteria c = new Criteria("name").is("Bubba").and("age").lt(21); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"name\" : \"Bubba\" , \"age\" : { \"$lt\" : 21}}"); + } + + @Test + void testCriteriaWithMultipleConditionsForSameKey() { + Criteria c = new Criteria("name").gte("M").and("name").ne("A"); + + assertThatExceptionOfType(InvalidMongoDbApiUsageException.class).isThrownBy(c::getCriteriaObject); + } + + @Test + void equalIfCriteriaMatches() { + + Criteria left = new Criteria("name").is("Foo").and("lastname").is("Bar"); + Criteria right = new Criteria("name").is("Bar").and("lastname").is("Bar"); + + assertThat(left).isNotEqualTo(right); + assertThat(right).isNotEqualTo(left); + } + + @Test // GH-3286 + void shouldBuildCorrectAndOperator() { + + Collection operatorCriteria = Arrays.asList(Criteria.where("x").is(true), Criteria.where("y").is(42), + Criteria.where("z").is("value")); + + Criteria criteria = Criteria.where("foo").is("bar").andOperator(operatorCriteria); + + assertThat(criteria.getCriteriaObject()) + .isEqualTo("{\"$and\":[{\"x\":true}, {\"y\":42}, {\"z\":\"value\"}], \"foo\":\"bar\"}"); + } + + @Test // GH-3286 + void shouldBuildCorrectOrOperator() { + + Collection operatorCriteria = 
Arrays.asList(Criteria.where("x").is(true), Criteria.where("y").is(42), + Criteria.where("z").is("value")); + + Criteria criteria = Criteria.where("foo").is("bar").orOperator(operatorCriteria); + + assertThat(criteria.getCriteriaObject()) + .isEqualTo("{\"$or\":[{\"x\":true}, {\"y\":42}, {\"z\":\"value\"}], \"foo\":\"bar\"}"); + } + + @Test // GH-3286 + void shouldBuildCorrectNorOperator() { + + Collection operatorCriteria = Arrays.asList(Criteria.where("x").is(true), Criteria.where("y").is(42), + Criteria.where("z").is("value")); + + Criteria criteria = Criteria.where("foo").is("bar").norOperator(operatorCriteria); + + assertThat(criteria.getCriteriaObject()) + .isEqualTo("{\"$nor\":[{\"x\":true}, {\"y\":42}, {\"z\":\"value\"}], \"foo\":\"bar\"}"); + } + + @Test // DATAMONGO-507 + void shouldThrowExceptionWhenTryingToNegateAndOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria() // + .not() // + .andOperator(Criteria.where("delete").is(true).and("_id").is(42))); + } + + @Test // DATAMONGO-507 + void shouldThrowExceptionWhenTryingToNegateOrOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria() // + .not() // + .orOperator(Criteria.where("delete").is(true).and("_id").is(42))); + } + + @Test // DATAMONGO-507 + void shouldThrowExceptionWhenTryingToNegateNorOperation() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria() // + .not() // + .norOperator(Criteria.where("delete").is(true).and("_id").is(42))); + } + + @Test // DATAMONGO-507 + void shouldNegateFollowingSimpleExpression() { + + Criteria c = Criteria.where("age").not().gt(18).and("status").is("student"); + Document co = c.getCriteriaObject(); + + assertThat(co).isNotNull(); + assertThat(co).isEqualTo("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}"); + } + + @Test // GH-3726 + void shouldBuildCorrectSampleRateOperation() { + Criteria c = new Criteria().sampleRate(0.4); + 
assertThat(c.getCriteriaObject()).isEqualTo("{ \"$sampleRate\" : 0.4 }"); + } + + @Test // GH-3726 + void shouldThrowExceptionWhenSampleRateIsNegative() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria().sampleRate(-1)); + } + + @Test // GH-3726 + void shouldThrowExceptionWhenSampleRateIsGreatedThanOne() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria().sampleRate(1.01)); + } + + @Test // DATAMONGO-1068 + void getCriteriaObjectShouldReturnEmptyDocumentWhenNoCriteriaSpecified() { + + Document document = new Criteria().getCriteriaObject(); + + assertThat(document).isEqualTo(new Document()); + } + + @Test // DATAMONGO-1068 + void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresent() { + + Document document = new Criteria().lt("foo").getCriteriaObject(); + + assertThat(document).isEqualTo(new Document().append("$lt", "foo")); + } + + @Test // DATAMONGO-1068 + void getCriteriaObjectShouldUseCritieraValuesWhenNoKeyIsPresentButMultipleCriteriasPresent() { + + Document document = new Criteria().lt("foo").gt("bar").getCriteriaObject(); + + assertThat(document).isEqualTo(new Document().append("$lt", "foo").append("$gt", "bar")); + } + + @Test // DATAMONGO-1068 + void getCriteriaObjectShouldRespectNotWhenNoKeyPresent() { + + Document document = new Criteria().lt("foo").not().getCriteriaObject(); + + assertThat(document).isEqualTo(new Document().append("$not", new Document("$lt", "foo"))); + } + + @Test // GH-4220 + void usesCorrectBsonType() { + + Document document = new Criteria("foo").type(Type.BOOLEAN).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$type", Collections.singletonList("bool")); + } + + @Test // DATAMONGO-1135 + void geoJsonTypesShouldBeWrappedInGeometry() { + + Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$near.$geometry", new GeoJsonPoint(100, 200)); + } + + @Test // DATAMONGO-1135 + void 
legacyCoordinateTypesShouldNotBeWrappedInGeometry() { + + Document document = new Criteria("foo").near(new Point(100, 200)).getCriteriaObject(); + + assertThat(document).doesNotContainKey("foo.$near.$geometry"); + } + + @Test // DATAMONGO-1135 + void maxDistanceShouldBeMappedInsideNearWhenUsedAlongWithGeoJsonType() { + + Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).maxDistance(50D).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$near.$maxDistance", 50D); + } + + @Test // DATAMONGO-1135 + void maxDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() { + + Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).maxDistance(50D).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$nearSphere.$maxDistance", 50D); + } + + @Test // DATAMONGO-1110 + void minDistanceShouldBeMappedInsideNearWhenUsedAlongWithGeoJsonType() { + + Document document = new Criteria("foo").near(new GeoJsonPoint(100, 200)).minDistance(50D).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$near.$minDistance", 50D); + } + + @Test // DATAMONGO-1110 + void minDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() { + + Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).minDistance(50D).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$nearSphere.$minDistance", 50D); + } + + @Test // DATAMONGO-1110 + void minAndMaxDistanceShouldBeMappedInsideNearSphereWhenUsedAlongWithGeoJsonType() { + + Document document = new Criteria("foo").nearSphere(new GeoJsonPoint(100, 200)).minDistance(50D).maxDistance(100D) + .getCriteriaObject(); + + assertThat(document).containsEntry("foo.$nearSphere.$minDistance", 50D); + assertThat(document).containsEntry("foo.$nearSphere.$maxDistance", 100D); + } + + @Test // DATAMONGO-1134 + void intersectsShouldThrowExceptionWhenCalledWihtNullValue() { + assertThatIllegalArgumentException().isThrownBy(() -> new 
Criteria("foo").intersects(null)); + } + + @Test // DATAMONGO-1134 + void intersectsShouldWrapGeoJsonTypeInGeometryCorrectly() { + + GeoJsonLineString lineString = new GeoJsonLineString(new Point(0, 0), new Point(10, 10)); + Document document = new Criteria("foo").intersects(lineString).getCriteriaObject(); + + assertThat(document).containsEntry("foo.$geoIntersects.$geometry", lineString); + } + + @Test // DATAMONGO-1835 + void extractsJsonSchemaInChainCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder().required("name").build(); + Criteria criteria = Criteria.where("foo").is("bar").andDocumentStructureMatches(schema); + + assertThat(criteria.getCriteriaObject()).isEqualTo(new Document("foo", "bar").append("$jsonSchema", + new Document("type", "object").append("required", Collections.singletonList("name")))); + } + + @Test // DATAMONGO-1835 + void extractsJsonSchemaFromFactoryMethodCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder().required("name").build(); + Criteria criteria = Criteria.matchingDocumentStructure(schema); + + assertThat(criteria.getCriteriaObject()).isEqualTo(new Document("$jsonSchema", + new Document("type", "object").append("required", Collections.singletonList("name")))); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAllClearWithIntBitmaskCorrectly() { + + Criteria numericBitmaskCriteria = new Criteria("field").bits().allClear(0b101); + + assertThat(numericBitmaskCriteria.getCriteriaObject()).isEqualTo("{ \"field\" : { \"$bitsAllClear\" : 5} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAllClearWithPositionListCorrectly() { + + Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().allClear(Arrays.asList(0, 2)); + + assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) + .isEqualTo("{ \"field\" : { \"$bitsAllClear\" : [ 0, 2 ]} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAllSetWithIntBitmaskCorrectly() { + + Criteria numericBitmaskCriteria = new 
Criteria("field").bits().allSet(0b101); + + assertThat(numericBitmaskCriteria.getCriteriaObject()).isEqualTo("{ \"field\" : { \"$bitsAllSet\" : 5} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAllSetWithPositionListCorrectly() { + + Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().allSet(Arrays.asList(0, 2)); + + assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) + .isEqualTo("{ \"field\" : { \"$bitsAllSet\" : [ 0, 2 ]} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAnyClearWithIntBitmaskCorrectly() { + + Criteria numericBitmaskCriteria = new Criteria("field").bits().anyClear(0b101); + + assertThat(numericBitmaskCriteria.getCriteriaObject()).isEqualTo("{ \"field\" : { \"$bitsAnyClear\" : 5} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAnyClearWithPositionListCorrectly() { + + Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().anyClear(Arrays.asList(0, 2)); + + assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) + .isEqualTo("{ \"field\" : { \"$bitsAnyClear\" : [ 0, 2 ]} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAnySetWithIntBitmaskCorrectly() { + + Criteria numericBitmaskCriteria = new Criteria("field").bits().anySet(0b101); + + assertThat(numericBitmaskCriteria.getCriteriaObject()).isEqualTo("{ \"field\" : { \"$bitsAnySet\" : 5} }"); + } + + @Test // DATAMONGO-1808 + void shouldAppendBitsAnySetWithPositionListCorrectly() { + + Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().anySet(Arrays.asList(0, 2)); + + assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) + .isEqualTo("{ \"field\" : { \"$bitsAnySet\" : [ 0, 2 ]} }"); + } + + @Test // DATAMONGO-2002 + void shouldEqualForSamePattern() { + + Criteria left = new Criteria("field").regex("foo"); + Criteria right = new Criteria("field").regex("foo"); + + assertThat(left).isEqualTo(right); + } + + @Test // DATAMONGO-2002 + void shouldEqualForDocument() { + + assertThat(new 
Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null))) + .isEqualTo(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null))); + + assertThat(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null))) + .isNotEqualTo(new Criteria("field").is(new Document("one", 1).append("two", "two"))); + + assertThat(new Criteria("field").is(new Document("one", 1).append("two", "two"))) + .isNotEqualTo(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null))); + + assertThat(new Criteria("field").is(new Document("one", 1).append("null", null).append("two", "two"))) + .isNotEqualTo(new Criteria("field").is(new Document("one", 1).append("two", "two").append("null", null))); + + assertThat(new Criteria("field").is(new Document())).isNotEqualTo(new Criteria("field").is("foo")); + assertThat(new Criteria("field").is("foo")).isNotEqualTo(new Criteria("field").is(new Document())); + } + + @Test // DATAMONGO-2002 + void shouldEqualForCollection() { + + assertThat(new Criteria("field").is(Arrays.asList("foo", "bar"))) + .isEqualTo(new Criteria("field").is(Arrays.asList("foo", "bar"))); + + assertThat(new Criteria("field").is(Arrays.asList("foo", 1))) + .isNotEqualTo(new Criteria("field").is(Arrays.asList("foo", "bar"))); + + assertThat(new Criteria("field").is(Collections.singletonList("foo"))) + .isNotEqualTo(new Criteria("field").is(Arrays.asList("foo", "bar"))); + + assertThat(new Criteria("field").is(Arrays.asList("foo", "bar"))) + .isNotEqualTo(new Criteria("field").is(Collections.singletonList("foo"))); + + assertThat(new Criteria("field").is(Arrays.asList("foo", "bar"))).isNotEqualTo(new Criteria("field").is("foo")); + + assertThat(new Criteria("field").is("foo")).isNotEqualTo(new Criteria("field").is(Arrays.asList("foo", "bar"))); + } + + @Test // GH-3414 + void shouldEqualForSamePatternAndFlags() { + + Criteria left = new 
Criteria("field").regex("foo", "iu"); + Criteria right = new Criteria("field").regex("foo"); + + assertThat(left).isNotEqualTo(right); + } + + @Test // GH-3414 + void shouldEqualForNestedPattern() { + + Criteria left = new Criteria("a").orOperator(new Criteria("foo").regex("value", "i"), + new Criteria("bar").regex("value")); + Criteria right = new Criteria("a").orOperator(new Criteria("foo").regex("value", "i"), + new Criteria("bar").regex("value")); + + assertThat(left).isEqualTo(right); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/FieldUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/FieldUnitTests.java index 5ac7be266a..5299ac08d6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/FieldUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/FieldUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,74 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import java.util.List; /** - * Unit tests for {@link DocumentField}. + * Unit tests for {@link Field}. 
* * @author Oliver Gierke + * @author Owen Q + * @author Mark Paluch + * @author Kirill Egorov */ -public class FieldUnitTests { +class FieldUnitTests { @Test - public void sameObjectSetupCreatesEqualField() { + void sameObjectSetupCreatesEqualField() { Field left = new Field().elemMatch("key", Criteria.where("foo").is("bar")); Field right = new Field().elemMatch("key", Criteria.where("foo").is("bar")); - assertThat(left, is(right)); - assertThat(right, is(left)); + assertThat(left).isEqualTo(right); + assertThat(right).isEqualTo(left); + assertThat(left.getFieldsObject()).isEqualTo("{key: { $elemMatch: {foo:\"bar\"}}}"); + } + + @Test // DATAMONGO-2294 + void rendersInclusionCorrectly() { + + Field fields = new Field().include("foo", "bar").include("baz"); + + assertThat(fields.getFieldsObject()).isEqualTo("{foo:1, bar:1, baz:1}"); } @Test - public void differentObjectSetupCreatesEqualField() { + void differentObjectSetupCreatesEqualField() { Field left = new Field().elemMatch("key", Criteria.where("foo").is("bar")); Field right = new Field().elemMatch("key", Criteria.where("foo").is("foo")); - assertThat(left, is(not(right))); - assertThat(right, is(not(left))); + assertThat(left).isNotEqualTo(right); + assertThat(right).isNotEqualTo(left); + } + + @Test // DATAMONGO-2294 + void rendersExclusionCorrectly() { + + Field fields = new Field().exclude("foo", "bar").exclude("baz"); + + assertThat(fields.getFieldsObject()).isEqualTo("{foo:0, bar:0, baz:0}"); + } + + @Test // GH-4625 + void overriddenInclusionMethodsCreateEqualFields() { + + Field left = new Field().include("foo", "bar"); + Field right = new Field().include(List.of("foo", "bar")); + + assertThat(left).isEqualTo(right); + } + + @Test // GH-4625 + void overriddenExclusionMethodsCreateEqualFields() { + + Field left = new Field().exclude("foo", "bar"); + Field right = new Field().exclude(List.of("foo", "bar")); + + assertThat(left).isEqualTo(right); } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IndexUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IndexUnitTests.java index ae69e7913c..156b5b23c6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IndexUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IndexUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,15 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; + import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; import org.springframework.data.mongodb.core.index.GeospatialIndex; import org.springframework.data.mongodb.core.index.Index; -import org.springframework.data.mongodb.core.index.Index.Duplicates; /** * Unit tests for {@link Index}. 
@@ -37,44 +36,44 @@ public class IndexUnitTests { @Test public void testWithAscendingIndex() { Index i = new Index().on("name", Direction.ASC); - assertEquals(Document.parse("{ \"name\" : 1}"), i.getIndexKeys()); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"name\" : 1}")); } @Test public void testWithDescendingIndex() { Index i = new Index().on("name", Direction.DESC); - assertEquals(Document.parse("{ \"name\" : -1}"), i.getIndexKeys()); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"name\" : -1}")); } @Test public void testNamedMultiFieldUniqueIndex() { Index i = new Index().on("name", Direction.ASC).on("age", Direction.DESC); i.named("test").unique(); - assertEquals(Document.parse("{ \"name\" : 1 , \"age\" : -1}"), i.getIndexKeys()); - assertEquals(Document.parse("{ \"name\" : \"test\" , \"unique\" : true}"), i.getIndexOptions()); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"name\" : 1 , \"age\" : -1}")); + assertThat(i.getIndexOptions()).isEqualTo(Document.parse("{ \"name\" : \"test\" , \"unique\" : true}")); } @Test public void testWithSparse() { Index i = new Index().on("name", Direction.ASC); i.sparse().unique(); - assertEquals(Document.parse("{ \"name\" : 1}"), i.getIndexKeys()); - assertEquals(Document.parse("{ \"unique\" : true , \"sparse\" : true}"), i.getIndexOptions()); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"name\" : 1}")); + assertThat(i.getIndexOptions()).isEqualTo(Document.parse("{ \"unique\" : true , \"sparse\" : true}")); } @Test public void testGeospatialIndex() { GeospatialIndex i = new GeospatialIndex("location").withMin(0); - assertEquals(Document.parse("{ \"location\" : \"2d\"}"), i.getIndexKeys()); - assertEquals(Document.parse("{ \"min\" : 0}"), i.getIndexOptions()); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"location\" : \"2d\"}")); + assertThat(i.getIndexOptions()).isEqualTo(Document.parse("{ \"min\" : 0}")); } @Test // DATAMONGO-778 public void 
testGeospatialIndex2DSphere() { GeospatialIndex i = new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_2DSPHERE); - assertEquals(Document.parse("{ \"location\" : \"2dsphere\"}"), i.getIndexKeys()); - assertEquals(Document.parse("{ }"), i.getIndexOptions()); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"location\" : \"2dsphere\"}")); + assertThat(i.getIndexOptions()).isEqualTo(Document.parse("{ }")); } @Test // DATAMONGO-778 @@ -82,14 +81,14 @@ public void testGeospatialIndexGeoHaystack() { GeospatialIndex i = new GeospatialIndex("location").typed(GeoSpatialIndexType.GEO_HAYSTACK) .withAdditionalField("name").withBucketSize(40); - assertEquals(Document.parse("{ \"location\" : \"geoHaystack\" , \"name\" : 1}"), i.getIndexKeys()); - assertEquals(Document.parse("{ \"bucketSize\" : 40.0}"), i.getIndexOptions()); + assertThat(i.getIndexKeys()).isEqualTo(Document.parse("{ \"location\" : \"geoHaystack\" , \"name\" : 1}")); + assertThat(i.getIndexOptions()).isEqualTo(Document.parse("{ \"bucketSize\" : 40.0}")); } @Test public void ensuresPropertyOrder() { Index on = new Index("foo", Direction.ASC).on("bar", Direction.ASC); - assertThat(on.getIndexKeys(), is(Document.parse("{ \"foo\" : 1 , \"bar\" : 1}"))); + assertThat(on.getIndexKeys()).isEqualTo(Document.parse("{ \"foo\" : 1 , \"bar\" : 1}")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IsQuery.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IsQuery.java deleted file mode 100644 index f8ed0784c6..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IsQuery.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright 2014-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.query; - -import org.bson.Document; -import org.hamcrest.Description; -import org.hamcrest.TypeSafeMatcher; -import org.hamcrest.core.IsEqual; -import org.springframework.data.domain.Sort.Direction; -import org.springframework.util.StringUtils; - -/** - * A {@link TypeSafeMatcher} that tests whether a given {@link Query} matches a query specification. - * - * @author Christoph Strobl - * @author Mark Paluch - * @param - */ -public class IsQuery extends TypeSafeMatcher { - - protected Document query; - protected Document sort; - protected Document fields; - - private long skip; - private int limit; - private String hint; - - protected IsQuery() { - query = new Document(); - sort = new Document(); - fields = new Document(); - } - - public static IsQuery isQuery() { - return new IsQuery(); - } - - public IsQuery limitingTo(int limit) { - this.limit = limit; - return this; - } - - public IsQuery skippig(long skip) { - this.skip = skip; - return this; - } - - public IsQuery providingHint(String hint) { - this.hint = hint; - return this; - } - - public IsQuery includingField(String fieldname) { - - if (fields == null) { - fields = new Document(); - } - fields.put(fieldname, 1); - - return this; - } - - public IsQuery excludingField(String fieldname) { - - if (fields == null) { - fields = new Document(); - } - fields.put(fieldname, -1); - - return this; - } - - public IsQuery sortingBy(String fieldname, Direction direction) { - - sort.put(fieldname, Direction.ASC.equals(direction) ? 
1 : -1); - - return this; - } - - public IsQuery where(Criteria criteria) { - - this.query.putAll(criteria.getCriteriaObject()); - return this; - } - - @Override - public void describeTo(Description description) { - - BasicQuery expected = new BasicQuery(this.query, this.fields); - expected.setSortObject(sort); - expected.skip(this.skip); - expected.limit(this.limit); - - if (StringUtils.hasText(this.hint)) { - expected.withHint(this.hint); - } - - description.appendValue(expected); - } - - @Override - protected boolean matchesSafely(T item) { - - if (item == null) { - return false; - } - - if (!new IsEqual(query).matches(item.getQueryObject())) { - return false; - } - - if ((item.getSortObject() == null || item.getSortObject().isEmpty()) && !sort.isEmpty()) { - if (!new IsEqual(sort).matches(item.getSortObject())) { - return false; - } - } - - if (!new IsEqual(fields).matches(item.getFieldsObject())) { - return false; - } - - if (!new IsEqual(this.hint).matches(item.getHint())) { - return false; - } - - if (!new IsEqual(this.skip).matches(item.getSkip())) { - return false; - } - - if (!new IsEqual(this.limit).matches(item.getLimit())) { - return false; - } - - return true; - } - -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IsTextQuery.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IsTextQuery.java deleted file mode 100644 index 5fe5ea37e6..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/IsTextQuery.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright 2014-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.query; - -import org.bson.Document; -import org.hamcrest.TypeSafeMatcher; -import org.springframework.util.StringUtils; - -/** - * A {@link TypeSafeMatcher} that tests whether a given {@link TextQuery} matches a query specification. - * - * @author Christoph Strobl - * @param - */ -public class IsTextQuery extends IsQuery { - - private final String SCORE_DEFAULT_FIELDNAME = "score"; - private final Document META_TEXT_SCORE = new Document("$meta", "textScore"); - - private String scoreFieldName = SCORE_DEFAULT_FIELDNAME; - - private IsTextQuery() { - super(); - } - - public static IsTextQuery isTextQuery() { - return new IsTextQuery(); - } - - public IsTextQuery searchingFor(String term) { - appendTerm(term); - return this; - } - - public IsTextQuery inLanguage(String language) { - appendLanguage(language); - return this; - } - - public IsTextQuery returningScore() { - - if (fields == null) { - fields = new Document(); - } - fields.put(scoreFieldName, META_TEXT_SCORE); - - return this; - } - - public IsTextQuery returningScoreAs(String fieldname) { - - this.scoreFieldName = fieldname != null ? 
fieldname : SCORE_DEFAULT_FIELDNAME; - - return this.returningScore(); - } - - public IsTextQuery sortingByScore() { - - sort.put(scoreFieldName, META_TEXT_SCORE); - - return this; - } - - @Override - public IsTextQuery where(Criteria criteria) { - - super.where(criteria); - return this; - } - - @Override - public IsTextQuery excludingField(String fieldname) { - - super.excludingField(fieldname); - return this; - } - - @Override - public IsTextQuery includingField(String fieldname) { - - super.includingField(fieldname); - return this; - } - - @Override - public IsTextQuery limitingTo(int limit) { - - super.limitingTo(limit); - return this; - } - - @Override - public IsQuery skippig(long skip) { - - super.skippig(skip); - return this; - } - - private void appendLanguage(String language) { - - Document document = getOrCreateTextDocument(); - document.put("$language", language); - } - - private Document getOrCreateTextDocument() { - - Document document = (Document) query.get("$text"); - if (document == null) { - document = new Document(); - } - - return document; - } - - private void appendTerm(String term) { - - Document document = getOrCreateTextDocument(); - String searchString = (String) document.get("$search"); - if (StringUtils.hasText(searchString)) { - searchString += (" " + term); - } else { - searchString = term; - } - document.put("$search", searchString); - query.put("$text", document); - } - -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/MetricConversionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/MetricConversionUnitTests.java new file mode 100644 index 0000000000..bbdad047f2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/MetricConversionUnitTests.java @@ -0,0 +1,82 @@ +/* + * Copyright 2016-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.springframework.data.mongodb.core.query; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.data.Offset.offset; + +import org.junit.jupiter.api.Test; + +import org.springframework.data.geo.Distance; +import org.springframework.data.geo.Metrics; + +/** + * Unit tests for {@link MetricConversion}. + * + * @author Mark Paluch + */ +public class MetricConversionUnitTests { + + @Test // DATAMONGO-1348 + public void shouldConvertMilesToMeters() { + + Distance distance = new Distance(1, Metrics.MILES); + double distanceInMeters = MetricConversion.getDistanceInMeters(distance); + + assertThat(distanceInMeters).isCloseTo(1609.3438343d, offset(0.000000001)); + } + + @Test // DATAMONGO-1348 + public void shouldConvertKilometersToMeters() { + + Distance distance = new Distance(1, Metrics.KILOMETERS); + double distanceInMeters = MetricConversion.getDistanceInMeters(distance); + + assertThat(distanceInMeters).isCloseTo(1000, offset(0.000000001)); + } + + @Test // DATAMONGO-1348 + public void shouldCalculateMetersToKilometersMultiplier() { + + double multiplier = MetricConversion.getMetersToMetricMultiplier(Metrics.KILOMETERS); + + assertThat(multiplier).isCloseTo(0.001, offset(0.000000001)); + } + + @Test // DATAMONGO-1348 + public void shouldCalculateMetersToMilesMultiplier() { + + double multiplier = MetricConversion.getMetersToMetricMultiplier(Metrics.MILES); + + 
assertThat(multiplier).isCloseTo(0.00062137, offset(0.000000001)); + } + + @Test // GH-4004 + void shouldConvertMetersToRadians/* on an earth like sphere with r=6378.137km */() { + assertThat(MetricConversion.metersToRadians(1000)).isCloseTo(0.000156785594d, offset(0.000000001)); + } + + @Test // GH-4004 + void shouldConvertKilometersToRadians/* on an earth like sphere with r=6378.137km */() { + assertThat(MetricConversion.toRadians(new Distance(1, Metrics.KILOMETERS))).isCloseTo(0.000156785594d, offset(0.000000001)); + } + + @Test // GH-4004 + void shouldConvertMilesToRadians/* on an earth like sphere with r=6378.137km */() { + assertThat(MetricConversion.toRadians(new Distance(1, Metrics.MILES))).isCloseTo(0.000252321328d, offset(0.000000001)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/MongoRegexCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/MongoRegexCreatorUnitTests.java index 27178f7d2b..d6bceea5d0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/MongoRegexCreatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/MongoRegexCreatorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -66,13 +66,23 @@ public void testSpecialCases() { parameter.check(); } - @lombok.RequiredArgsConstructor(staticName = "test") static class TestParameter { private final String source; private final MatchMode mode; private final String expectedResult, comment; + private TestParameter(String source, MatchMode mode, String expectedResult, String comment) { + this.source = source; + this.mode = mode; + this.expectedResult = expectedResult; + this.comment = comment; + } + + public static TestParameter test(String source, MatchMode mode, String expectedResult, String comment) { + return new TestParameter(source, mode, expectedResult, comment); + } + void check() { assertThat(MongoRegexCreator.INSTANCE.toRegularExpression(source, mode))// @@ -80,10 +90,6 @@ void check() { .isEqualTo(expectedResult); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return String.format("Mode: %s, %s", mode, comment); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/NearQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/NearQueryUnitTests.java index 449ee24cb1..f4e3d26eb1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/NearQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/NearQueryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,12 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; -import org.junit.Test; +import java.math.BigDecimal; +import java.math.RoundingMode; + +import org.junit.jupiter.api.Test; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.data.geo.Distance; @@ -26,6 +28,11 @@ import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.geo.GeoJsonPoint; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; /** * Unit tests for {@link NearQuery}. 
@@ -39,9 +46,9 @@ public class NearQueryUnitTests { private static final Distance ONE_FIFTY_KILOMETERS = new Distance(150, Metrics.KILOMETERS); - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullPoint() { - NearQuery.near(null); + assertThatIllegalArgumentException().isThrownBy(() -> NearQuery.near(null)); } @Test @@ -49,9 +56,9 @@ public void settingUpNearWithMetricRecalculatesDistance() { NearQuery query = NearQuery.near(2.5, 2.5, Metrics.KILOMETERS).maxDistance(150); - assertThat(query.getMaxDistance(), is(ONE_FIFTY_KILOMETERS)); - assertThat(query.getMetric(), is((Metric) Metrics.KILOMETERS)); - assertThat(query.isSpherical(), is(true)); + assertThat(query.getMaxDistance()).isEqualTo(ONE_FIFTY_KILOMETERS); + assertThat(query.getMetric()).isEqualTo((Metric) Metrics.KILOMETERS); + assertThat(query.isSpherical()).isTrue(); } @Test @@ -61,37 +68,37 @@ public void settingMetricRecalculatesMaxDistance() { query.inMiles(); - assertThat(query.getMetric(), is((Metric) Metrics.MILES)); + assertThat(query.getMetric()).isEqualTo((Metric) Metrics.MILES); } @Test public void configuresResultMetricCorrectly() { NearQuery query = NearQuery.near(2.5, 2.1); - assertThat(query.getMetric(), is((Metric) Metrics.NEUTRAL)); + assertThat(query.getMetric()).isEqualTo((Metric) Metrics.NEUTRAL); query = query.maxDistance(ONE_FIFTY_KILOMETERS); - assertThat(query.getMetric(), is((Metric) Metrics.KILOMETERS)); - assertThat(query.getMaxDistance(), is(ONE_FIFTY_KILOMETERS)); - assertThat(query.isSpherical(), is(true)); + assertThat(query.getMetric()).isEqualTo((Metric) Metrics.KILOMETERS); + assertThat(query.getMaxDistance()).isEqualTo(ONE_FIFTY_KILOMETERS); + assertThat(query.isSpherical()).isTrue(); query = query.in(Metrics.MILES); - assertThat(query.getMetric(), is((Metric) Metrics.MILES)); - assertThat(query.getMaxDistance(), is(ONE_FIFTY_KILOMETERS)); - assertThat(query.isSpherical(), is(true)); + assertThat(query.getMetric()).isEqualTo((Metric) 
Metrics.MILES); + assertThat(query.getMaxDistance()).isEqualTo(ONE_FIFTY_KILOMETERS); + assertThat(query.isSpherical()).isTrue(); query = query.maxDistance(new Distance(200, Metrics.KILOMETERS)); - assertThat(query.getMetric(), is((Metric) Metrics.MILES)); + assertThat(query.getMetric()).isEqualTo((Metric) Metrics.MILES); } - @Test // DATAMONGO-445 + @Test // DATAMONGO-445, DATAMONGO-2264 public void shouldTakeSkipAndLimitSettingsFromGivenPageable() { Pageable pageable = PageRequest.of(3, 5); NearQuery query = NearQuery.near(new Point(1, 1)).with(pageable); - assertThat(query.getSkip(), is((long)pageable.getPageNumber() * pageable.getPageSize())); - assertThat((Long) query.toDocument().get("num"), is((long)(pageable.getPageNumber() + 1) * pageable.getPageSize())); + assertThat(query.getSkip()).isEqualTo((long) pageable.getPageNumber() * pageable.getPageSize()); + assertThat(query.toDocument().get("num")).isEqualTo((long) pageable.getPageSize()); } @Test // DATAMONGO-445 @@ -102,11 +109,11 @@ public void shouldTakeSkipAndLimitSettingsFromGivenQuery() { NearQuery query = NearQuery.near(new Point(1, 1)) .query(Query.query(Criteria.where("foo").is("bar")).limit(limit).skip(skip)); - assertThat(query.getSkip(), is(skip)); - assertThat((Long) query.toDocument().get("num"), is((long)limit)); + assertThat(query.getSkip()).isEqualTo(skip); + assertThat((Long) query.toDocument().get("num")).isEqualTo((long) limit); } - @Test // DATAMONGO-445 + @Test // DATAMONGO-445, DATAMONGO-2264 public void shouldTakeSkipAndLimitSettingsFromPageableEvenIfItWasSpecifiedOnQuery() { int limit = 10; @@ -115,20 +122,20 @@ public void shouldTakeSkipAndLimitSettingsFromPageableEvenIfItWasSpecifiedOnQuer NearQuery query = NearQuery.near(new Point(1, 1)) .query(Query.query(Criteria.where("foo").is("bar")).limit(limit).skip(skip)).with(pageable); - assertThat(query.getSkip(), is((long)pageable.getPageNumber() * pageable.getPageSize())); - assertThat((Long) query.toDocument().get("num"), 
is((long)(pageable.getPageNumber() + 1) * pageable.getPageSize())); + assertThat(query.getSkip()).isEqualTo((long) pageable.getPageNumber() * pageable.getPageSize()); + assertThat(query.toDocument().get("num")).isEqualTo((long) pageable.getPageSize()); } @Test // DATAMONGO-829 public void nearQueryShouldInoreZeroLimitFromQuery() { NearQuery query = NearQuery.near(new Point(1, 2)).query(Query.query(Criteria.where("foo").is("bar"))); - assertThat(query.toDocument().get("num"), nullValue()); + assertThat(query.toDocument().get("num")).isNull(); } - @Test(expected = IllegalArgumentException.class) // DATAMONOGO-829 + @Test // DATAMONOGO-829 public void nearQueryShouldThrowExceptionWhenGivenANullQuery() { - NearQuery.near(new Point(1, 2)).query(null); + assertThatIllegalArgumentException().isThrownBy(() -> NearQuery.near(new Point(1, 2)).query(null)); } @Test // DATAMONGO-829 @@ -136,9 +143,148 @@ public void numShouldNotBeAlteredByQueryWithoutPageable() { long num = 100; NearQuery query = NearQuery.near(new Point(1, 2)); - query.num(num); + query.limit(num); query.query(Query.query(Criteria.where("foo").is("bar"))); - assertThat(DocumentTestUtils.getTypedValue(query.toDocument(), "num", Long.class), is(num)); + assertThat(DocumentTestUtils.getTypedValue(query.toDocument(), "num", Long.class)).isEqualTo(num); + } + + @Test // DATAMONGO-1348 + public void shouldNotUseSphericalForLegacyPoint() { + + NearQuery query = NearQuery.near(new Point(27.987901, 86.9165379)); + + assertThat(query.toDocument()).containsEntry("spherical", false); + } + + @Test // DATAMONGO-1348 + public void shouldUseSphericalForLegacyPointIfSet() { + + NearQuery query = NearQuery.near(new Point(27.987901, 86.9165379)); + query.spherical(true); + + assertThat(query.toDocument()).containsEntry("spherical", true); + } + + @Test // DATAMONGO-1348 + public void shouldUseSphericalForGeoJsonData() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + + 
assertThat(query.toDocument()).containsEntry("spherical", true); + } + + @Test // DATAMONGO-1348 + public void shouldUseSphericalForGeoJsonDataIfSphericalIsFalse() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + query.spherical(false); + + assertThat(query.toDocument()).containsEntry("spherical", true); + } + + @Test // DATAMONGO-1348 + public void shouldUseMetersForGeoJsonData() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + query.maxDistance(1); + + double meterToRadianMultiplier = BigDecimal.valueOf(1 / Metrics.KILOMETERS.getMultiplier() / 1000).// + setScale(8, RoundingMode.HALF_UP).// + doubleValue(); + assertThat(query.toDocument()).containsEntry("maxDistance", Metrics.KILOMETERS.getMultiplier() * 1000) + .containsEntry("distanceMultiplier", meterToRadianMultiplier); + } + + @Test // DATAMONGO-1348 + public void shouldUseMetersForGeoJsonDataWhenDistanceInKilometers() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + query.maxDistance(new Distance(1, Metrics.KILOMETERS)); + + assertThat(query.toDocument()).containsEntry("maxDistance", 1000D).containsEntry("distanceMultiplier", 0.001D); + } + + @Test // DATAMONGO-1348 + public void shouldUseMetersForGeoJsonDataWhenDistanceInMiles() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + query.maxDistance(new Distance(1, Metrics.MILES)); + + assertThat(query.toDocument()).containsEntry("maxDistance", 1609.3438343D).containsEntry("distanceMultiplier", + 0.00062137D); + } + + @Test // DATAMONGO-1348 + public void shouldUseKilometersForDistanceWhenMaxDistanceInMiles() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + query.maxDistance(new Distance(1, Metrics.MILES)).in(Metrics.KILOMETERS); + + assertThat(query.toDocument()).containsEntry("maxDistance", 1609.3438343D).containsEntry("distanceMultiplier", + 0.001D); + } + + @Test // DATAMONGO-1348 + public 
void shouldUseMilesForDistanceWhenMaxDistanceInKilometers() { + + NearQuery query = NearQuery.near(new GeoJsonPoint(27.987901, 86.9165379)); + query.maxDistance(new Distance(1, Metrics.KILOMETERS)).in(Metrics.MILES); + + assertThat(query.toDocument()).containsEntry("maxDistance", 1000D).containsEntry("distanceMultiplier", 0.00062137D); + } + + @Test // GH-4277 + void fetchesReadPreferenceFromUnderlyingQueryObject() { + + NearQuery nearQuery = NearQuery.near(new Point(0, 0)) + .query(new Query().withReadPreference(ReadPreference.nearest())); + + assertThat(nearQuery.getReadPreference()).isEqualTo(ReadPreference.nearest()); + } + + @Test // GH-4277 + void fetchesReadConcernFromUnderlyingQueryObject() { + + NearQuery nearQuery = NearQuery.near(new Point(0, 0)).query(new Query().withReadConcern(ReadConcern.SNAPSHOT)); + + assertThat(nearQuery.getReadConcern()).isEqualTo(ReadConcern.SNAPSHOT); + } + + @Test // GH-4277 + void usesReadPreferenceFromNearQueryIfUnderlyingQueryDoesNotDefineAny() { + + NearQuery nearQuery = NearQuery.near(new Point(0, 0)).withReadPreference(ReadPreference.nearest()) + .query(new Query()); + + assertThat(((Query) ReflectionTestUtils.getField(nearQuery, "query")).getReadPreference()).isNull(); + assertThat(nearQuery.getReadPreference()).isEqualTo(ReadPreference.nearest()); + } + + @Test // GH-4277 + void usesReadConcernFromNearQueryIfUnderlyingQueryDoesNotDefineAny() { + + NearQuery nearQuery = NearQuery.near(new Point(0, 0)).withReadConcern(ReadConcern.SNAPSHOT).query(new Query()); + + assertThat(((Query) ReflectionTestUtils.getField(nearQuery, "query")).getReadConcern()).isNull(); + assertThat(nearQuery.getReadConcern()).isEqualTo(ReadConcern.SNAPSHOT); + } + + @Test // GH-4277 + void readPreferenceFromUnderlyingQueryOverridesNearQueryOne() { + + NearQuery nearQuery = NearQuery.near(new Point(0, 0)).withReadPreference(ReadPreference.nearest()) + .query(new Query().withReadPreference(ReadPreference.primary())); + + 
assertThat(nearQuery.getReadPreference()).isEqualTo(ReadPreference.primary()); + } + + @Test // GH-4277 + void readConcernFromUnderlyingQueryOverridesNearQueryOne() { + + NearQuery nearQuery = NearQuery.near(new Point(0, 0)).withReadConcern(ReadConcern.SNAPSHOT) + .query(new Query().withReadConcern(ReadConcern.MAJORITY)); + + assertThat(nearQuery.getReadConcern()).isEqualTo(ReadConcern.MAJORITY); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java index 0688286c77..fa7a8516ca 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,20 +15,18 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import java.util.Arrays; - import org.bson.Document; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.data.domain.Limit; +import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.domain.Sort.Order; import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; import org.springframework.data.mongodb.core.SpecialDoc; @@ -42,190 +40,356 @@ * @author Christoph Strobl * @author Mark Paluch */ -public class QueryTests { - - @Rule public ExpectedException exception = ExpectedException.none(); +class QueryTests { @Test - public void testSimpleQuery() { + void testSimpleQuery() { + Query q = new Query(where("name").is("Thomas").and("age").lt(80)); - Document expected = Document.parse("{ \"name\" : \"Thomas\" , \"age\" : { \"$lt\" : 80}}"); - Assert.assertEquals(expected, q.getQueryObject()); + assertThat(q.getQueryObject()).isEqualTo(Document.parse("{ \"name\" : \"Thomas\" , \"age\" : { \"$lt\" : 80}}")); } @Test - public void testQueryWithNot() { + void testQueryWithNot() { + Query q = new 
Query(where("name").is("Thomas").and("age").not().mod(10, 0)); - Document expected = Document.parse("{ \"name\" : \"Thomas\" , \"age\" : { \"$not\" : { \"$mod\" : [ 10 , 0]}}}"); - Assert.assertEquals(expected, q.getQueryObject()); + assertThat(q.getQueryObject()) + .isEqualTo(Document.parse("{ \"name\" : \"Thomas\" , \"age\" : { \"$not\" : { \"$mod\" : [ 10 , 0]}}}")); } @Test - public void testInvalidQueryWithNotIs() { - try { - new Query(where("name").not().is("Thomas")); - Assert.fail("This should have caused an InvalidDocumentStoreApiUsageException"); - } catch (InvalidMongoDbApiUsageException e) {} + void testInvalidQueryWithNotIs() { + + assertThatExceptionOfType(InvalidMongoDbApiUsageException.class) + .isThrownBy(() -> new Query(where("name").not().is("Thomas"))); } @Test - public void testOrQuery() { + void testOrQuery() { + Query q = new Query(new Criteria().orOperator(where("name").is("Sven").and("age").lt(50), where("age").lt(50), where("name").is("Thomas"))); - Document expected = Document.parse( - "{ \"$or\" : [ { \"name\" : \"Sven\" , \"age\" : { \"$lt\" : 50}} , { \"age\" : { \"$lt\" : 50}} , { \"name\" : \"Thomas\"}]}"); - Assert.assertEquals(expected, q.getQueryObject()); + assertThat(q.getQueryObject()).isEqualTo(Document.parse( + "{ \"$or\" : [ { \"name\" : \"Sven\" , \"age\" : { \"$lt\" : 50}} , { \"age\" : { \"$lt\" : 50}} , { \"name\" : \"Thomas\"}]}")); } @Test - public void testAndQuery() { + void testAndQuery() { + Query q = new Query(new Criteria().andOperator(where("name").is("Sven"), where("age").lt(50))); Document expected = Document.parse("{ \"$and\" : [ { \"name\" : \"Sven\"} , { \"age\" : { \"$lt\" : 50}}]}"); - Assert.assertEquals(expected, q.getQueryObject()); + assertThat(q.getQueryObject()).isEqualTo(expected); } @Test - public void testNorQuery() { + void testNorQuery() { + Query q = new Query( new Criteria().norOperator(where("name").is("Sven"), where("age").lt(50), where("name").is("Thomas"))); - Document expected = Document 
- .parse("{ \"$nor\" : [ { \"name\" : \"Sven\"} , { \"age\" : { \"$lt\" : 50}} , { \"name\" : \"Thomas\"}]}"); - Assert.assertEquals(expected, q.getQueryObject()); + assertThat(q.getQueryObject()).isEqualTo(Document + .parse("{ \"$nor\" : [ { \"name\" : \"Sven\"} , { \"age\" : { \"$lt\" : 50}} , { \"name\" : \"Thomas\"}]}")); } - @Test - public void testQueryWithLimit() { + @Test // GH-4584 + void testQueryWithLimit() { + Query q = new Query(where("name").gte("M").lte("T").and("age").not().gt(22)); q.limit(50); - Document expected = Document - .parse("{ \"name\" : { \"$gte\" : \"M\" , \"$lte\" : \"T\"} , \"age\" : { \"$not\" : { \"$gt\" : 22}}}"); - Assert.assertEquals(expected, q.getQueryObject()); - Assert.assertEquals(50, q.getLimit()); + + assertThat(q.getQueryObject()).isEqualTo(Document + .parse("{ \"name\" : { \"$gte\" : \"M\" , \"$lte\" : \"T\"} , \"age\" : { \"$not\" : { \"$gt\" : 22}}}")); + assertThat(q.getLimit()).isEqualTo(50); + + q.limit(Limit.unlimited()); + assertThat(q.getLimit()).isZero(); + assertThat(q.isLimited()).isFalse(); + + q.limit(Limit.of(10)); + assertThat(q.getLimit()).isEqualTo(10); + assertThat(q.isLimited()).isTrue(); + + q.limit(Limit.of(-1)); + assertThat(q.getLimit()).isZero(); + assertThat(q.isLimited()).isFalse(); + + Query other = new Query(where("name").gte("M")).limit(Limit.of(10)); + assertThat(new Query(where("name").gte("M")).limit(10)).isEqualTo(other).hasSameHashCodeAs(other); } @Test - public void testQueryWithFieldsAndSlice() { + void testQueryWithFieldsAndSlice() { + Query q = new Query(where("name").gte("M").lte("T").and("age").not().gt(22)); q.fields().exclude("address").include("name").slice("orders", 10); - Document expected = Document - .parse("{ \"name\" : { \"$gte\" : \"M\" , \"$lte\" : \"T\"} , \"age\" : { \"$not\" : { \"$gt\" : 22}}}"); - Assert.assertEquals(expected, q.getQueryObject()); - Document expectedFields = Document.parse("{ \"address\" : 0 , \"name\" : 1 , \"orders\" : { \"$slice\" : 10}}"); - 
Assert.assertEquals(expectedFields, q.getFieldsObject()); + assertThat(q.getQueryObject()).isEqualTo(Document + .parse("{ \"name\" : { \"$gte\" : \"M\" , \"$lte\" : \"T\"} , \"age\" : { \"$not\" : { \"$gt\" : 22}}}")); + + assertThat(q.getFieldsObject()) + .isEqualTo(Document.parse("{ \"address\" : 0 , \"name\" : 1 , \"orders\" : { \"$slice\" : 10}}")); } @Test // DATAMONGO-652 - public void testQueryWithFieldsElemMatchAndPositionalOperator() { + void testQueryWithFieldsElemMatchAndPositionalOperator() { Query query = query(where("name").gte("M").lte("T").and("age").not().gt(22)); query.fields().elemMatch("products", where("name").is("milk")).position("comments", 2); - Document expected = Document - .parse("{ \"name\" : { \"$gte\" : \"M\" , \"$lte\" : \"T\"} , \"age\" : { \"$not\" : { \"$gt\" : 22}}}"); - assertThat(query.getQueryObject(), is(expected)); - Document expectedFields = Document - .parse("{ \"products\" : { \"$elemMatch\" : { \"name\" : \"milk\"}} , \"comments.$\" : 2}"); - assertThat(query.getFieldsObject(), is(expectedFields)); + assertThat(query.getQueryObject()).isEqualTo(Document + .parse("{ \"name\" : { \"$gte\" : \"M\" , \"$lte\" : \"T\"} , \"age\" : { \"$not\" : { \"$gt\" : 22}}}")); + assertThat(query.getFieldsObject()) + .isEqualTo(Document.parse("{ \"products\" : { \"$elemMatch\" : { \"name\" : \"milk\"}} , \"comments.$\" : 2}")); } @Test - public void testSimpleQueryWithChainedCriteria() { + void testSimpleQueryWithChainedCriteria() { + Query q = new Query(where("name").is("Thomas").and("age").lt(80)); - Document expected = Document.parse("{ \"name\" : \"Thomas\" , \"age\" : { \"$lt\" : 80}}"); - Assert.assertEquals(expected, q.getQueryObject()); + assertThat(q.getQueryObject()).isEqualTo(Document.parse("{ \"name\" : \"Thomas\" , \"age\" : { \"$lt\" : 80}}")); } @Test - public void testComplexQueryWithMultipleChainedCriteria() { + void testComplexQueryWithMultipleChainedCriteria() { + Query q = new Query( 
where("name").regex("^T.*").and("age").gt(20).lt(80).and("city").in("Stockholm", "London", "New York")); - Document expected = Document - .parse("{ \"name\" : { \"$regex\" : \"^T.*\", \"$options\" : \"\" } , \"age\" : { \"$gt\" : 20 , \"$lt\" : 80} , " - + "\"city\" : { \"$in\" : [ \"Stockholm\" , \"London\" , \"New York\"]}}"); - - Assert.assertEquals(expected.toJson(), q.getQueryObject().toJson()); + assertThat(q.getQueryObject().toJson()).isEqualTo(Document.parse( + "{ \"name\" : { \"$regex\" : \"^T.*\", \"$options\" : \"\" } , \"age\" : { \"$gt\" : 20 , \"$lt\" : 80} , " + + "\"city\" : { \"$in\" : [ \"Stockholm\" , \"London\" , \"New York\"]}}") + .toJson()); } @Test - public void testAddCriteriaWithComplexQueryWithMultipleChainedCriteria() { + void testAddCriteriaWithComplexQueryWithMultipleChainedCriteria() { + Query q1 = new Query( where("name").regex("^T.*").and("age").gt(20).lt(80).and("city").in("Stockholm", "London", "New York")); Query q2 = new Query(where("name").regex("^T.*").and("age").gt(20).lt(80)) .addCriteria(where("city").in("Stockholm", "London", "New York")); - Assert.assertEquals(q1.getQueryObject().toString(), q2.getQueryObject().toString()); + + assertThat(q1.getQueryObject()).hasToString(q2.getQueryObject().toString()); + Query q3 = new Query(where("name").regex("^T.*")).addCriteria(where("age").gt(20).lt(80)) .addCriteria(where("city").in("Stockholm", "London", "New York")); - Assert.assertEquals(q1.getQueryObject().toString(), q3.getQueryObject().toString()); + assertThat(q1.getQueryObject()).hasToString(q3.getQueryObject().toString()); } @Test - public void testQueryWithElemMatch() { + void testQueryWithElemMatch() { + Query q = new Query(where("openingHours").elemMatch(where("dayOfWeek").is("Monday").and("open").lte("1800"))); - Document expected = Document.parse( - "{ \"openingHours\" : { \"$elemMatch\" : { \"dayOfWeek\" : \"Monday\" , \"open\" : { \"$lte\" : \"1800\"}}}}"); - Assert.assertEquals(expected, q.getQueryObject()); + 
assertThat(q.getQueryObject()).isEqualTo(Document.parse( + "{ \"openingHours\" : { \"$elemMatch\" : { \"dayOfWeek\" : \"Monday\" , \"open\" : { \"$lte\" : \"1800\"}}}}")); } @Test - public void testQueryWithIn() { + void testQueryWithIn() { + Query q = new Query(where("state").in("NY", "NJ", "PA")); - Document expected = Document.parse("{ \"state\" : { \"$in\" : [ \"NY\" , \"NJ\" , \"PA\"]}}"); - Assert.assertEquals(expected, q.getQueryObject()); + assertThat(q.getQueryObject()).isEqualTo(Document.parse("{ \"state\" : { \"$in\" : [ \"NY\" , \"NJ\" , \"PA\"]}}")); } @Test - public void testQueryWithRegex() { + void testQueryWithRegex() { + Query q = new Query(where("name").regex("b.*")); - Document expected = Document.parse("{ \"name\" : { \"$regex\" : \"b.*\", \"$options\" : \"\" }}"); - Assert.assertEquals(expected.toJson(), q.getQueryObject().toJson()); + assertThat(q.getQueryObject().toJson()) + .isEqualTo(Document.parse("{ \"name\" : { \"$regex\" : \"b.*\", \"$options\" : \"\" }}").toJson()); } @Test - public void testQueryWithRegexAndOption() { + void testQueryWithRegexAndOption() { Query q = new Query(where("name").regex("b.*", "i")); - Document expected = Document.parse("{ \"name\" : { \"$regex\" : \"b.*\" , \"$options\" : \"i\"}}"); - Assert.assertEquals(expected.toJson(), q.getQueryObject().toJson()); + assertThat(q.getQueryObject().toJson()) + .isEqualTo(Document.parse("{ \"name\" : { \"$regex\" : \"b.*\" , \"$options\" : \"i\"}}").toJson()); } @Test // DATAMONGO-538 - public void addsSortCorrectly() { + void addsSortCorrectly() { Query query = new Query().with(Sort.by(Direction.DESC, "foo")); - assertThat(query.getSortObject(), is(Document.parse("{ \"foo\" : -1}"))); + assertThat(query.getSortObject()).isEqualTo(Document.parse("{ \"foo\" : -1}")); } @Test - public void rejectsOrderWithIgnoreCase() { - - exception.expect(IllegalArgumentException.class); - exception.expectMessage("foo"); + void rejectsOrderWithIgnoreCase() { - new Query().with(Sort.by(new 
Sort.Order("foo").ignoreCase())); + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> new Query().with(Sort.by(Order.asc("foo").ignoreCase()))); } - @Test // DATAMONGO-709, DATAMONGO-1735 - @SuppressWarnings("unchecked") - public void shouldReturnClassHierarchyOfRestrictedTypes() { + @Test // DATAMONGO-709, DATAMONGO-1735, // DATAMONGO-2198 + void shouldReturnClassHierarchyOfRestrictedTypes() { Query query = new Query(where("name").is("foo")).restrict(SpecialDoc.class); - assertThat(query.toString(), is( - "Query: { \"name\" : \"foo\", \"_$RESTRICTED_TYPES\" : [ { \"$java\" : class org.springframework.data.mongodb.core.SpecialDoc } ] }, Fields: { }, Sort: { }")); - assertThat(query.getRestrictedTypes(), is(notNullValue())); - assertThat(query.getRestrictedTypes().size(), is(1)); - assertThat(query.getRestrictedTypes(), hasItems(Arrays.asList(SpecialDoc.class).toArray(new Class[0]))); + + assertThat(query.getRestrictedTypes()).containsExactly(SpecialDoc.class); } @Test // DATAMONGO-1421 - public void addCriteriaForSamePropertyMultipleTimesShouldThrowAndSafelySerializeErrorMessage() { + void addCriteriaForSamePropertyMultipleTimesShouldThrowAndSafelySerializeErrorMessage() { + + assertThatExceptionOfType(InvalidMongoDbApiUsageException.class).isThrownBy(() -> { + + Query query = new Query(); + query.addCriteria(where("value").is(EnumType.VAL_1)); + query.addCriteria(where("value").is(EnumType.VAL_2)); + }).withMessageContaining("second 'value' criteria") + .withMessageContaining("already contains '{ \"value\" : { \"$java\" : VAL_1 } }'"); + } + + @Test // DATAMONGO-1783 + void queryOfShouldCreateNewQueryWithEqualBehaviour() { - exception.expect(InvalidMongoDbApiUsageException.class); - exception.expectMessage("second 'value' criteria"); - exception.expectMessage("already contains '{ \"value\" : { \"$java\" : VAL_1 } }'"); + Query source = new Query(); + source.addCriteria(where("This you must ken").is(EnumType.VAL_1)); + + 
compareQueries(Query.of(source), source); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldNotDependOnCriteriaFromSource() { + + Query source = new Query(); + source.addCriteria(where("From one make ten").is("and two let be.")); + Query target = Query.of(source); + + assertThat(target.getQueryObject()).containsAllEntriesOf(new Document("From one make ten", "and two let be.")) + .isNotSameAs(source.getQueryObject()); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldAppendCriteria() { + + Query source = new Query(); + source.addCriteria(where("Skip o'er the four").is("From five and six")); + Query target = Query.of(source); + + compareQueries(target, source); + target.addCriteria(where("the Witch's tricks").is("make seven and eight")); + + assertThat(target.getQueryObject()).isEqualTo( + new Document("Skip o'er the four", "From five and six").append("the Witch's tricks", "make seven and eight")); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldNotDependOnCollationFromSource() { + + Query source = new Query().collation(Collation.simple()); + Query target = Query.of(source); + + compareQueries(target, source); + source.collation(Collation.of("Tis finished straight")); + + assertThat(target.getCollation()).contains(Collation.simple()).isNotEqualTo(source.getCollation()); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldNotDependOnSortFromSource() { + + Query source = new Query().with(Sort.by("And nine is one")); + Query target = Query.of(source); + + compareQueries(target, source); + source.with(Sort.by("And ten is none")); + + assertThat(target.getSortObject()).isEqualTo(new Document("And nine is one", 1)) + .isNotEqualTo(source.getSortObject()); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldNotDependOnFieldsFromSource() { + + Query source = new Query(); + source.fields().include("That is the witch's one-time-one"); + Query target = Query.of(source); + + compareQueries(target, source); + source.fields().exclude("Goethe"); + + 
assertThat(target.getFieldsObject()).isEqualTo(new Document("That is the witch's one-time-one", 1)) + .isNotEqualTo(source.getFieldsObject()); + } + + @Test // DATAMONGO-1783, DATAMONGO-2572 + void clonedQueryShouldNotDependOnMetaFromSource() { + + Query source = new Query().maxTimeMsec(100); + Query target = Query.of(source); + + compareQueries(target, source); + source.allowSecondaryReads(); + + Meta meta = new Meta(); + meta.setMaxTimeMsec(100); + assertThat(target.getMeta()).isEqualTo(meta).isNotEqualTo(source.getMeta()); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldNotDependOnRestrictedTypesFromSource() { + + Query source = new Query(); + source.restrict(EnumType.class); + Query target = Query.of(source); + + compareQueries(target, source); + source.restrict(Query.class); + + assertThat(target.getRestrictedTypes()).containsExactly(EnumType.class).isNotEqualTo(source.getRestrictedTypes()); + } + + @Test // DATAMONGO-1783 + void clonedQueryShouldApplyRestrictionsFromBasicQuery() { + + BasicQuery source = new BasicQuery("{ 'foo' : 'bar'}"); + Query target = Query.of(source); + + compareQueries(target, source); + + target.addCriteria(where("one").is("10")); + assertThat(target.getQueryObject()).isEqualTo(new Document("foo", "bar").append("one", "10")) + .isNotEqualTo(source.getQueryObject()); + } + + @Test // DATAMONGO-2478 + void queryOfShouldWorkOnProxiedObjects() { + + BasicQuery source = new BasicQuery("{ 'foo' : 'bar'}", "{ '_id' : -1, 'foo' : 1 }"); + source.withHint("the hint"); + source.limit(10); + source.setSortObject(new Document("_id", 1)); + + ProxyFactory proxyFactory = new ProxyFactory(source); + proxyFactory.setInterfaces(new Class[0]); + + Query target = Query.of((Query) proxyFactory.getProxy()); + + compareQueries(target, source); + } + + @Test // GH-4771 + void appliesSortOfUnpagedPageable() { Query query = new Query(); - query.addCriteria(where("value").is(EnumType.VAL_1)); - query.addCriteria(where("value").is(EnumType.VAL_2)); + 
query.with(Pageable.unpaged(Sort.by("sortMe"))); + + assertThat(query.isSorted()).isTrue(); + } + + private void compareQueries(Query actual, Query expected) { + + assertThat(actual.getCollation()).isEqualTo(expected.getCollation()); + assertThat(actual.getSortObject()).hasSameSizeAs(expected.getSortObject()) + .containsAllEntriesOf(expected.getSortObject()); + assertThat(actual.getFieldsObject()).hasSameSizeAs(expected.getFieldsObject()) + .containsAllEntriesOf(expected.getFieldsObject()); + assertThat(actual.getQueryObject()).hasSameSizeAs(expected.getQueryObject()) + .containsAllEntriesOf(expected.getQueryObject()); + assertThat(actual.getHint()).isEqualTo(expected.getHint()); + assertThat(actual.getLimit()).isEqualTo(expected.getLimit()); + assertThat(actual.getSkip()).isEqualTo(expected.getSkip()); + assertThat(actual.getMeta()).isEqualTo(expected.getMeta()); + assertThat(actual.getRestrictedTypes()).isEqualTo(expected.getRestrictedTypes()); } enum EnumType { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/SortTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/SortTests.java index 65a183af89..1cf4d8b027 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/SortTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/SortTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,11 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; + import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; @@ -35,20 +35,20 @@ public class SortTests { public void testWithSortAscending() { Query s = new Query().with(Sort.by(Direction.ASC, "name")); - assertEquals(Document.parse("{ \"name\" : 1}"), s.getSortObject()); + assertThat(s.getSortObject()).isEqualTo(Document.parse("{ \"name\" : 1}")); } @Test public void testWithSortDescending() { Query s = new Query().with(Sort.by(Direction.DESC, "name")); - assertEquals(Document.parse("{ \"name\" : -1}"), s.getSortObject()); + assertThat(s.getSortObject()).isEqualTo(Document.parse("{ \"name\" : -1}")); } @Test // DATADOC-177 public void preservesOrderKeysOnMultipleSorts() { Query sort = new Query().with(Sort.by(Direction.DESC, "foo").and(Sort.by(Direction.DESC, "bar"))); - assertThat(sort.getSortObject(), is(Document.parse("{ \"foo\" : -1 , \"bar\" : -1}"))); + assertThat(sort.getSortObject()).isEqualTo(Document.parse("{ \"foo\" : -1 , \"bar\" : -1}")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextCriteriaUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextCriteriaUnitTests.java index ad20da8499..b5da29f5e2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextCriteriaUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextCriteriaUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,94 +15,115 @@ */ package org.springframework.data.mongodb.core.query; +import static org.assertj.core.api.Assertions.*; + import org.bson.Document; -import org.hamcrest.core.IsEqual; -import org.junit.Assert; -import static org.hamcrest.core.IsEqual.*; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; -import org.junit.Test; import org.springframework.data.mongodb.core.DocumentTestUtils; /** * Unit tests for {@link TextCriteria}. 
* * @author Christoph Strobl + * @author Daniel Debray */ -public class TextCriteriaUnitTests { +class TextCriteriaUnitTests { @Test // DATAMONGO-850 - public void shouldNotHaveLanguageField() { + void shouldNotHaveLanguageField() { TextCriteria criteria = TextCriteria.forDefaultLanguage(); - assertThat(criteria.getCriteriaObject(), equalTo(searchObject("{ }"))); + + assertThat(criteria.getCriteriaObject()).isEqualTo(searchObject("{ }")); } @Test // DATAMONGO-850 - public void shouldNotHaveLanguageForNonDefaultLanguageField() { + void shouldNotHaveLanguageForNonDefaultLanguageField() { TextCriteria criteria = TextCriteria.forLanguage("spanish"); - assertThat(criteria.getCriteriaObject(), equalTo(searchObject("{ \"$language\" : \"spanish\" }"))); + + assertThat(criteria.getCriteriaObject()).isEqualTo(searchObject("{ \"$language\" : \"spanish\" }")); } @Test // DATAMONGO-850 - public void shouldCreateSearchFieldForSingleTermCorrectly() { + void shouldCreateSearchFieldForSingleTermCorrectly() { TextCriteria criteria = TextCriteria.forDefaultLanguage().matching("cake"); - assertThat(criteria.getCriteriaObject(), equalTo(searchObject("{ \"$search\" : \"cake\" }"))); + + assertThat(criteria.getCriteriaObject()).isEqualTo(searchObject("{ \"$search\" : \"cake\" }")); } @Test // DATAMONGO-850 - public void shouldCreateSearchFieldCorrectlyForMultipleTermsCorrectly() { + void shouldCreateSearchFieldCorrectlyForMultipleTermsCorrectly() { TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingAny("bake", "coffee", "cake"); - assertThat(criteria.getCriteriaObject(), equalTo(searchObject("{ \"$search\" : \"bake coffee cake\" }"))); + + assertThat(criteria.getCriteriaObject()).isEqualTo(searchObject("{ \"$search\" : \"bake coffee cake\" }")); } @Test // DATAMONGO-850 - public void shouldCreateSearchFieldForPhraseCorrectly() { + void shouldCreateSearchFieldForPhraseCorrectly() { TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingPhrase("coffee cake"); - 
Assert.assertThat(DocumentTestUtils.getAsDocument(criteria.getCriteriaObject(), "$text"), - IsEqual. equalTo(new Document("$search", "\"coffee cake\""))); + + assertThat(DocumentTestUtils.getAsDocument(criteria.getCriteriaObject(), "$text")) + .isEqualTo(new Document("$search", "\"coffee cake\"")); } @Test // DATAMONGO-850 - public void shouldCreateNotFieldCorrectly() { + void shouldCreateNotFieldCorrectly() { TextCriteria criteria = TextCriteria.forDefaultLanguage().notMatching("cake"); - assertThat(criteria.getCriteriaObject(), equalTo(searchObject("{ \"$search\" : \"-cake\" }"))); + + assertThat(criteria.getCriteriaObject()).isEqualTo(searchObject("{ \"$search\" : \"-cake\" }")); } @Test // DATAMONGO-850 - public void shouldCreateSearchFieldCorrectlyForNotMultipleTermsCorrectly() { + void shouldCreateSearchFieldCorrectlyForNotMultipleTermsCorrectly() { TextCriteria criteria = TextCriteria.forDefaultLanguage().notMatchingAny("bake", "coffee", "cake"); - assertThat(criteria.getCriteriaObject(), equalTo(searchObject("{ \"$search\" : \"-bake -coffee -cake\" }"))); + + assertThat(criteria.getCriteriaObject()).isEqualTo(searchObject("{ \"$search\" : \"-bake -coffee -cake\" }")); } @Test // DATAMONGO-850 - public void shouldCreateSearchFieldForNotPhraseCorrectly() { + void shouldCreateSearchFieldForNotPhraseCorrectly() { TextCriteria criteria = TextCriteria.forDefaultLanguage().notMatchingPhrase("coffee cake"); - Assert.assertThat(DocumentTestUtils.getAsDocument(criteria.getCriteriaObject(), "$text"), - IsEqual. 
equalTo(new Document("$search", "-\"coffee cake\""))); + + assertThat(DocumentTestUtils.getAsDocument(criteria.getCriteriaObject(), "$text")) + .isEqualTo(new Document("$search", "-\"coffee cake\"")); } @Test // DATAMONGO-1455 - public void caseSensitiveOperatorShouldBeSetCorrectly() { + void caseSensitiveOperatorShouldBeSetCorrectly() { TextCriteria criteria = TextCriteria.forDefaultLanguage().matching("coffee").caseSensitive(true); - assertThat(DocumentTestUtils.getAsDocument(criteria.getCriteriaObject(), "$text"), - equalTo(new Document("$search", "coffee").append("$caseSensitive", true))); + + assertThat(DocumentTestUtils.getAsDocument(criteria.getCriteriaObject(), "$text")) + .isEqualTo(new Document("$search", "coffee").append("$caseSensitive", true)); } @Test // DATAMONGO-1456 - public void diacriticSensitiveOperatorShouldBeSetCorrectly() { + void diacriticSensitiveOperatorShouldBeSetCorrectly() { TextCriteria criteria = TextCriteria.forDefaultLanguage().matching("coffee").diacriticSensitive(true); - assertThat(DocumentTestUtils.getAsDocument(criteria.getCriteriaObject(), "$text"), - equalTo(new Document("$search", "coffee").append("$diacriticSensitive", true))); + + assertThat(DocumentTestUtils.getAsDocument(criteria.getCriteriaObject(), "$text")) + .isEqualTo(new Document("$search", "coffee").append("$diacriticSensitive", true)); + } + + @Test // DATAMONGO-2504 + void twoIdenticalCriteriaShouldBeEqual() { + + TextCriteria criteriaOne = TextCriteria.forDefaultLanguage().matching("coffee"); + TextCriteria criteriaTwo = TextCriteria.forDefaultLanguage().matching("coffee"); + + assertThat(criteriaOne).isEqualTo(criteriaTwo); + assertThat(criteriaOne).hasSameHashCodeAs(criteriaTwo); + assertThat(criteriaOne).isNotEqualTo(criteriaTwo.diacriticSensitive(false)); + assertThat(criteriaOne.hashCode()).isNotEqualTo(criteriaTwo.diacriticSensitive(false).hashCode()); } private Document searchObject(String json) { diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryTests.java index bbdf7a3dfc..6ea0f5aa9c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,43 +15,34 @@ */ package org.springframework.data.mongodb.core.query; -import static org.hamcrest.collection.IsCollectionWithSize.*; -import static org.hamcrest.collection.IsEmptyCollection.*; -import static org.hamcrest.collection.IsIterableContainingInOrder.*; -import static org.hamcrest.core.AnyOf.*; -import static org.hamcrest.core.IsCollectionContaining.*; -import static org.hamcrest.core.IsEqual.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import java.util.List; -import lombok.ToString; import org.bson.Document; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.data.annotation.Id; import 
org.springframework.data.domain.PageRequest; -import org.springframework.data.mongodb.config.AbstractIntegrationTests; -import org.springframework.data.mongodb.core.index.IndexOperations; -import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.Language; import org.springframework.data.mongodb.core.mapping.TextScore; import org.springframework.data.mongodb.core.query.TextQueryTests.FullTextDoc.FullTextDocBuilder; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.Version; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; +import org.springframework.lang.Nullable; /** * @author Christoph Strobl * @author Mark Paluch */ -public class TextQueryTests extends AbstractIntegrationTests { - - public static @ClassRule MongoVersionRule version = MongoVersionRule.atLeast(new Version(2, 6)); +@ExtendWith(MongoTemplateExtension.class) +public class TextQueryTests { private static final FullTextDoc BAKE = new FullTextDocBuilder().headline("bake").build(); private static final FullTextDoc COFFEE = new FullTextDocBuilder().subHeadline("coffee").build(); @@ -62,11 +53,14 @@ public class TextQueryTests extends AbstractIntegrationTests { private static final FullTextDoc FRENCH_MILK = new FullTextDocBuilder().headline("leche").lanugage("french").build(); private static final FullTextDoc MILK_AND_SUGAR = new FullTextDocBuilder().headline("milk and sugar").build(); - private @Autowired MongoOperations template; + @Template(initialEntitySet = FullTextDoc.class) // + static MongoTestTemplate template; - @Before + @BeforeEach 
public void setUp() { + template.flush(); + IndexOperations indexOps = template.indexOps(FullTextDoc.class); indexOps.dropAllIndexes(); @@ -107,8 +101,8 @@ public void shouldOnlyFindDocumentsMatchingAnyWordOfGivenQuery() { initWithDefaultDocuments(); List result = template.find(new TextQuery("bake coffee cake"), FullTextDoc.class); - assertThat(result, hasSize(3)); - assertThat(result, hasItems(BAKE, COFFEE, CAKE)); + assertThat(result).hasSize(3); + assertThat(result).contains(BAKE, COFFEE, CAKE); } @Test // DATAMONGO-850 @@ -117,7 +111,7 @@ public void shouldNotFindDocumentsWhenQueryDoesNotMatchAnyDocumentInIndex() { initWithDefaultDocuments(); List result = template.find(new TextQuery("tasmanian devil"), FullTextDoc.class); - assertThat(result, hasSize(0)); + assertThat(result).hasSize(0); } @Test // DATAMONGO-850 @@ -128,11 +122,11 @@ public void shouldApplySortByScoreCorrectly() { template.insert(coffee2); List result = template.find(new TextQuery("bake coffee cake").sortByScore(), FullTextDoc.class); - assertThat(result, hasSize(4)); - assertThat(result.get(0), anyOf(equalTo(BAKE), equalTo(coffee2))); - assertThat(result.get(1), anyOf(equalTo(BAKE), equalTo(coffee2))); - assertThat(result.get(2), equalTo(COFFEE)); - assertThat(result.get(3), equalTo(CAKE)); + assertThat(result).hasSize(4); + assertThat(result.get(0)).isIn(BAKE, coffee2); + assertThat(result.get(1)).isIn(BAKE, coffee2); + assertThat(result.get(2)).isEqualTo(COFFEE); + assertThat(result.get(3)).isEqualTo(CAKE); } @Test // DATAMONGO-850 @@ -140,8 +134,8 @@ public void shouldFindTextInAnyLanguage() { initWithDefaultDocuments(); List result = template.find(new TextQuery("leche"), FullTextDoc.class); - assertThat(result, hasSize(2)); - assertThat(result, hasItems(SPANISH_MILK, FRENCH_MILK)); + assertThat(result).hasSize(2); + assertThat(result).contains(SPANISH_MILK, FRENCH_MILK); } @Test // DATAMONGO-850 @@ -150,8 +144,8 @@ public void shouldOnlyFindTextInSpecificLanguage() { 
initWithDefaultDocuments(); List result = template.find(new TextQuery("leche").addCriteria(where("language").is("spanish")), FullTextDoc.class); - assertThat(result, hasSize(1)); - assertThat(result.get(0), equalTo(SPANISH_MILK)); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isEqualTo(SPANISH_MILK); } @Test // DATAMONGO-850 @@ -160,8 +154,8 @@ public void shouldNotFindDocumentsWithNegatedTerms() { initWithDefaultDocuments(); List result = template.find(new TextQuery("bake coffee -cake"), FullTextDoc.class); - assertThat(result, hasSize(2)); - assertThat(result, hasItems(BAKE, COFFEE)); + assertThat(result).hasSize(2); + assertThat(result).contains(BAKE, COFFEE); } @Test // DATAMONGO-976 @@ -172,9 +166,9 @@ public void shouldInlcudeScoreCorreclty() { List result = template.find(new TextQuery("bake coffee -cake").includeScore().sortByScore(), FullTextDoc.class); - assertThat(result, hasSize(2)); + assertThat(result).hasSize(2); for (FullTextDoc scoredDoc : result) { - assertTrue(scoredDoc.score > 0F); + assertThat(scoredDoc.score > 0F).isTrue(); } } @@ -186,8 +180,8 @@ public void shouldApplyPhraseCorrectly() { TextQuery query = TextQuery.queryText(TextCriteria.forDefaultLanguage().matchingPhrase("milk and sugar")); List result = template.find(query, FullTextDoc.class); - assertThat(result, hasSize(1)); - assertThat(result, contains(MILK_AND_SUGAR)); + assertThat(result).hasSize(1); + assertThat(result).containsExactly(MILK_AND_SUGAR); } @Test // DATAMONGO-850 @@ -198,7 +192,7 @@ public void shouldReturnEmptyListWhenNoDocumentsMatchGivenPhrase() { TextQuery query = TextQuery.queryText(TextCriteria.forDefaultLanguage().matchingPhrase("milk no sugar")); List result = template.find(query, FullTextDoc.class); - assertThat(result, empty()); + assertThat(result).isEmpty(); } @Test // DATAMONGO-850 @@ -207,16 +201,16 @@ public void shouldApplyPaginationCorrectly() { initWithDefaultDocuments(); // page 1 - List result = template - .find(new TextQuery("bake 
coffee cake").sortByScore().with(PageRequest.of(0, 2)), FullTextDoc.class); - assertThat(result, hasSize(2)); - assertThat(result, contains(BAKE, COFFEE)); + List result = template.find(new TextQuery("bake coffee cake").sortByScore().with(PageRequest.of(0, 2)), + FullTextDoc.class); + assertThat(result).hasSize(2); + assertThat(result).containsExactly(BAKE, COFFEE); // page 2 result = template.find(new TextQuery("bake coffee cake").sortByScore().with(PageRequest.of(1, 2)), FullTextDoc.class); - assertThat(result, hasSize(1)); - assertThat(result, contains(CAKE)); + assertThat(result).hasSize(1); + assertThat(result).containsExactly(CAKE); } private void initWithDefaultDocuments() { @@ -230,7 +224,6 @@ private void initWithDefaultDocuments() { } @org.springframework.data.mongodb.core.mapping.Document(collection = "fullTextDoc") - @ToString static class FullTextDoc { @Id String id; @@ -256,7 +249,7 @@ public int hashCode() { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } @@ -305,6 +298,10 @@ public boolean equals(Object obj) { return true; } + public String toString() { + return "TextQueryTests.FullTextDoc(id=" + this.id + ", language=" + this.language + ", headline=" + this.headline + ", subheadline=" + this.subheadline + ", body=" + this.body + ", score=" + this.score + ")"; + } + static class FullTextDocBuilder { private FullTextDoc instance; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryUnitTests.java index fb812ad209..155fcd3f99 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/TextQueryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. 
+ * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,9 @@ */ package org.springframework.data.mongodb.core.query; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.query.IsTextQuery.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; @@ -35,17 +34,21 @@ public class TextQueryUnitTests { @Test // DATAMONGO-850 public void shouldCreateQueryObjectCorrectly() { - assertThat(new TextQuery(QUERY), isTextQuery().searchingFor(QUERY)); + assertThat(new TextQuery(QUERY).getQueryObject()).containsEntry("$text.$search", QUERY); } @Test // DATAMONGO-850 public void shouldIncludeLanguageInQueryObjectWhenNotNull() { - assertThat(new TextQuery(QUERY, LANGUAGE_SPANISH), isTextQuery().searchingFor(QUERY).inLanguage(LANGUAGE_SPANISH)); + assertThat(new TextQuery(QUERY, LANGUAGE_SPANISH).getQueryObject()).containsEntry("$text.$search", QUERY) + .containsEntry("$text.$language", LANGUAGE_SPANISH); } @Test // DATAMONGO-850 public void shouldIncludeScoreFieldCorrectly() { - assertThat(new TextQuery(QUERY).includeScore(), isTextQuery().searchingFor(QUERY).returningScore()); + + TextQuery textQuery = new TextQuery(QUERY).includeScore(); + assertThat(textQuery.getQueryObject()).containsEntry("$text.$search", QUERY); + assertThat(textQuery.getFieldsObject()).containsKey("score"); } @Test // DATAMONGO-850 @@ -54,12 +57,18 @@ public void 
shouldNotOverrideExistingProjections() { TextQuery query = new TextQuery(TextCriteria.forDefaultLanguage().matching(QUERY)).includeScore(); query.fields().include("foo"); - assertThat(query, isTextQuery().searchingFor(QUERY).returningScore().includingField("foo")); + assertThat(query.getQueryObject()).containsEntry("$text.$search", QUERY); + assertThat(query.getFieldsObject()).containsKeys("score", "foo"); } @Test // DATAMONGO-850 public void shouldIncludeSortingByScoreCorrectly() { - assertThat(new TextQuery(QUERY).sortByScore(), isTextQuery().searchingFor(QUERY).returningScore().sortingByScore()); + + TextQuery textQuery = new TextQuery(QUERY).sortByScore(); + + assertThat(textQuery.getQueryObject()).containsEntry("$text.$search", QUERY); + assertThat(textQuery.getFieldsObject()).containsKey("score"); + assertThat(textQuery.getSortObject()).containsKey("score"); } @Test // DATAMONGO-850 @@ -69,15 +78,43 @@ public void shouldNotOverrideExistingSort() { query.with(Sort.by(Direction.DESC, "foo")); query.sortByScore(); - assertThat(query, - isTextQuery().searchingFor(QUERY).returningScore().sortingByScore().sortingBy("foo", Direction.DESC)); + assertThat(query.getQueryObject()).containsEntry("$text.$search", QUERY); + assertThat(query.getFieldsObject()).containsKeys("score"); + assertThat(query.getSortObject()).containsEntry("foo", -1).containsKey("score"); } @Test // DATAMONGO-850 public void shouldUseCustomFieldnameForScoring() { + TextQuery query = new TextQuery(QUERY).includeScore("customFieldForScore").sortByScore(); - assertThat(query, isTextQuery().searchingFor(QUERY).returningScoreAs("customFieldForScore").sortingByScore()); + assertThat(query.getQueryObject()).containsEntry("$text.$search", QUERY); + assertThat(query.getFieldsObject()).containsKeys("customFieldForScore"); + assertThat(query.getSortObject()).containsKey("customFieldForScore"); + } + + @Test // GH-3896 + public void retainsSortOrderWhenUsingScore() { + + TextQuery query = new TextQuery(QUERY); 
+ query.with(Sort.by(Direction.DESC, "one")); + query.sortByScore(); + query.with(Sort.by(Direction.DESC, "two")); + + assertThat(query.getSortObject().keySet().stream()).containsExactly("one", "score", "two"); + + query = new TextQuery(QUERY); + query.with(Sort.by(Direction.DESC, "one")); + query.sortByScore(); + + assertThat(query.getSortObject().keySet().stream()).containsExactly("one", "score"); + + query = new TextQuery(QUERY); + query.sortByScore(); + query.with(Sort.by(Direction.DESC, "one")); + query.with(Sort.by(Direction.DESC, "two")); + + assertThat(query.getSortObject().keySet().stream()).containsExactly("score", "one", "two"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcherUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcherUnitTests.java index 02ae13f55e..91a0e43ee9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcherUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UntypedExampleMatcherUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,10 +16,10 @@ package org.springframework.data.mongodb.core.query; import static org.assertj.core.api.Assertions.*; -import static org.hamcrest.Matchers.*; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + import org.springframework.data.domain.ExampleMatcher; import org.springframework.data.domain.ExampleMatcher.NullHandler; import org.springframework.data.domain.ExampleMatcher.StringMatcher; @@ -31,8 +31,8 @@ public class UntypedExampleMatcherUnitTests { ExampleMatcher matcher; - @Before - public void setUp() throws Exception { + @BeforeEach + public void setUp() { matcher = UntypedExampleMatcher.matching(); } @@ -56,9 +56,10 @@ public void nullHandlerShouldReturnIgnoreByDefault() { assertThat(matcher.getNullHandler()).isEqualTo(NullHandler.IGNORE); } - @Test(expected = UnsupportedOperationException.class) // DATAMONGO-1768 - public void ignoredPathsIsNotModifiable() throws Exception { - matcher.getIgnoredPaths().add("¯\\_(ツ)_/¯"); + @Test // DATAMONGO-1768 + public void ignoredPathsIsNotModifiable() { + assertThatExceptionOfType(UnsupportedOperationException.class) + .isThrownBy(() -> matcher.getIgnoredPaths().add("¯\\_(ツ)_/¯")); } @Test // DATAMONGO-1768 @@ -125,7 +126,7 @@ public void withCreatesNewInstance() { matcher = UntypedExampleMatcher.matching().withIgnorePaths("foo", "bar", "foo"); ExampleMatcher configuredExampleSpec = matcher.withIgnoreCase(); - assertThat(matcher).isNotEqualTo(sameInstance(configuredExampleSpec)); + assertThat(matcher).isNotSameAs(configuredExampleSpec); assertThat(matcher.getIgnoredPaths()).hasSize(2); assertThat(matcher.isIgnoreCaseEnabled()).isFalse(); diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UpdateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UpdateTests.java index 1340e83247..f5b8684687 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UpdateTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/UpdateTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,8 +22,7 @@ import java.util.Map; import org.bson.Document; -import org.joda.time.DateTime; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.mongodb.core.query.Update.Position; @@ -91,30 +90,6 @@ public void testPush() { assertThat(u.getUpdateObject()).isEqualTo(Document.parse("{ \"$push\" : { \"authors\" : { \"name\" : \"Sven\"}}}")); } - @Test - public void testPushAll() { - - Map m1 = Collections.singletonMap("name", "Sven"); - Map m2 = Collections.singletonMap("name", "Maria"); - - Update u = new Update().pushAll("authors", new Object[] { m1, m2 }); - assertThat(u.getUpdateObject()).isEqualTo( - Document.parse("{ \"$pushAll\" : { \"authors\" : [ { \"name\" : \"Sven\"} , { \"name\" : \"Maria\"}]}}")); - } - - @Test // DATAMONGO-354 - public void testMultiplePushAllShouldBePossibleWhenUsingDifferentFields() { - - Map m1 = Collections.singletonMap("name", "Sven"); - Map m2 = 
Collections.singletonMap("name", "Maria"); - - Update u = new Update().pushAll("authors", new Object[] { m1, m2 }); - u.pushAll("books", new Object[] { "Spring in Action" }); - - assertThat(u.getUpdateObject()).isEqualTo(Document.parse( - "{ \"$pushAll\" : { \"authors\" : [ { \"name\" : \"Sven\"} , { \"name\" : \"Maria\"}] , \"books\" : [ \"Spring in Action\"]}}")); - } - @Test public void testAddToSet() { @@ -238,24 +213,25 @@ public void testUpdateAffectsFieldShouldReturnFalseWhenUpdateWithoutKeyCreatedFr assertThat(clone.modifies("oof")).isFalse(); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-853 + @Test // DATAMONGO-853 public void testAddingMultiFieldOperationThrowsExceptionWhenCalledWithNullKey() { - new Update().addMultiFieldOperation("$op", null, "exprected to throw IllegalArgumentException."); + assertThatIllegalArgumentException().isThrownBy( + () -> new Update().addMultiFieldOperation("$op", null, "expected to throw IllegalArgumentException.")); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-853 + @Test // DATAMONGO-853 public void testAddingSingleFieldOperationThrowsExceptionWhenCalledWithNullKey() { - new Update().addFieldOperation("$op", null, "exprected to throw IllegalArgumentException."); + assertThatIllegalArgumentException().isThrownBy( + () -> new Update().addFieldOperation("$op", null, "expected to throw IllegalArgumentException.")); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-853 + @Test // DATAMONGO-853 public void testCreatingUpdateWithNullKeyThrowsException() { - Update.update(null, "value"); + assertThatIllegalArgumentException().isThrownBy(() -> Update.update(null, "value")); } @Test // DATAMONGO-953 public void testEquality() { - Update actualUpdate = new Update() // .inc("size", 1) // .set("nl", null) // @@ -346,13 +322,13 @@ public void
toStringWorksForUpdateWithComplexObject() { - Update update = new Update().addToSet("key", new DateTime()); + Update update = new Update().addToSet("key", new Date()); assertThat(update.toString()).isNotNull(); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1097 + @Test // DATAMONGO-1097 public void multiplyShouldThrowExceptionForNullMultiplier() { - new Update().multiply("key", null); + assertThatIllegalArgumentException().isThrownBy(() -> new Update().multiply("key", null)); } @Test // DATAMONGO-1097 @@ -390,11 +366,6 @@ public void getUpdateObjectShouldReturnCorrectRepresentationForBitwiseXor() { .isEqualTo(new Document().append("$bit", new Document("key", new Document("xor", 10L)))); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-943 - public void pushShouldThrowExceptionWhenGivenNegativePosition() { - new Update().push("foo").atPosition(-1).each("booh"); - } - @Test // DATAMONGO-1346 public void registersMultiplePullAllClauses() { @@ -410,14 +381,14 @@ public void registersMultiplePullAllClauses() { assertThat(pullAll.get("field2")).isNotNull(); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1404 + @Test // DATAMONGO-1404 public void maxShouldThrowExceptionForNullMultiplier() { - new Update().max("key", null); + assertThatIllegalArgumentException().isThrownBy(() -> new Update().max("key", null)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1404 + @Test // DATAMONGO-1404 public void minShouldThrowExceptionForNullMultiplier() { - new Update().min("key", null); + assertThatIllegalArgumentException().isThrownBy(() -> new Update().min("key", null)); } @Test // DATAMONGO-1404 @@ -472,18 +443,17 @@ public void getUpdateObjectShouldReturnCorrectDateRepresentationForMin() { assertThat(update.getUpdateObject()).isEqualTo(new Document("$min", new Document("key", date))); } - @Test // DATAMONGO-1777 + @Test // DATAMONGO-1777, DATAMONGO-2199 public void toStringShouldPrettyPrintModifiers() { 
assertThat(new Update().push("key").atPosition(Position.FIRST).value("Arya").toString()).isEqualTo( - "{ \"$push\" : { \"key\" : { \"$java\" : { \"$position\" : { \"$java\" : { \"$position\" : 0} }, \"$each\" : { \"$java\" : { \"$each\" : [ \"Arya\"]} } } } } }"); + "{ \"$push\" : { \"key\" : { \"$java\" : { \"$position\" : { \"$java\" : { \"$position\" : 0} }, \"$each\" : { \"$java\" : { \"$each\" : [ \"Arya\" ] } } } } } }"); } - @Test // DATAMONGO-1777 + @Test // DATAMONGO-1777, DATAMONGO-2198 public void toStringConsidersIsolated() { - assertThat(new Update().set("key", "value").isolated().toString()) - .isEqualTo("{ \"$set\" : { \"key\" : \"value\" }, \"$isolated\" : 1 }"); + assertThat(new Update().set("key", "value").isolated().toString()).contains("\"$isolated\""); } @Test // DATAMONGO-1778 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaObjectUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaObjectUnitTests.java index 2912302a59..0c411dcb4f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaObjectUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaObjectUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,13 +18,20 @@ import static org.springframework.data.domain.Range.from; import static org.springframework.data.domain.Range.Bound.*; import static org.springframework.data.mongodb.core.schema.JsonSchemaObject.*; +import static org.springframework.data.mongodb.core.schema.JsonSchemaObject.array; import static org.springframework.data.mongodb.core.schema.JsonSchemaObject.of; import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Set; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.domain.Range; import org.springframework.data.domain.Range.*; @@ -33,22 +40,65 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Michał Kurcius */ -public class JsonSchemaObjectUnitTests { +class JsonSchemaObjectUnitTests { + + // ----------------- + // type from class + // ----------------- + + @Test // DATAMONGO-1849 + void primitiveType() { + + assertThat(JsonSchemaObject.of(boolean.class).getTypes()).containsExactly(Type.booleanType()); + assertThat(JsonSchemaObject.of(int.class).getTypes()).containsExactly(Type.intType()); + assertThat(JsonSchemaObject.of(long.class).getTypes()).containsExactly(Type.longType()); + assertThat(JsonSchemaObject.of(float.class).getTypes()).containsExactly(Type.doubleType()); + assertThat(JsonSchemaObject.of(double.class).getTypes()).containsExactly(Type.doubleType()); + assertThat(JsonSchemaObject.of(short.class).getTypes()).containsExactly(Type.numberType()); + } + + @Test // DATAMONGO-1849 + void 
objectType() { + + assertThat(JsonSchemaObject.of(Object.class).getTypes()).containsExactly(Type.objectType()); + assertThat(JsonSchemaObject.of(Map.class).getTypes()).containsExactly(Type.objectType()); + assertThat(JsonSchemaObject.of(Document.class).getTypes()).containsExactly(Type.objectType()); + } + + @Test // DATAMONGO-1849 + void binaryData() { + assertThat(JsonSchemaObject.of(byte[].class).getTypes()).containsExactly(Type.binaryType()); + } + + @Test // DATAMONGO-1849 + void collectionType() { + + assertThat(JsonSchemaObject.of(Object[].class).getTypes()).containsExactly(Type.arrayType()); + assertThat(JsonSchemaObject.of(Collection.class).getTypes()).containsExactly(Type.arrayType()); + assertThat(JsonSchemaObject.of(List.class).getTypes()).containsExactly(Type.arrayType()); + assertThat(JsonSchemaObject.of(Set.class).getTypes()).containsExactly(Type.arrayType()); + } + + @Test // DATAMONGO-1849 + void dateType() { + assertThat(JsonSchemaObject.of(Date.class).getTypes()).containsExactly(Type.dateType()); + } // ----------------- // type : 'object' // ----------------- @Test // DATAMONGO-1835 - public void objectObjectShouldRenderTypeCorrectly() { + void objectObjectShouldRenderTypeCorrectly() { assertThat(object().generatedDescription().toDocument()) .isEqualTo(new Document("type", "object").append("description", "Must be an object.")); } @Test // DATAMONGO-1835 - public void objectObjectShouldRenderNrPropertiesCorrectly() { + void objectObjectShouldRenderNrPropertiesCorrectly() { assertThat(object().propertiesCount(from(inclusive(10)).to(inclusive(20))).generatedDescription().toDocument()) .isEqualTo(new Document("type", "object").append("description", "Must be an object with [10-20] properties.") @@ -56,16 +106,16 @@ public void objectObjectShouldRenderNrPropertiesCorrectly() { } @Test // DATAMONGO-1835 - public void objectObjectShouldRenderRequiredPropertiesCorrectly() { + void objectObjectShouldRenderRequiredPropertiesCorrectly() { - 
assertThat(object().required("spring", "data", "mongodb").generatedDescription().toDocument()).isEqualTo( - new Document("type", "object") - .append("description", "Must be an object where spring, data, mongodb are mandatory.").append("required", - Arrays.asList("spring", "data", "mongodb"))); + assertThat(object().required("spring", "data", "mongodb").generatedDescription().toDocument()) + .isEqualTo(new Document("type", "object") + .append("description", "Must be an object where spring, data, mongodb are mandatory.") + .append("required", Arrays.asList("spring", "data", "mongodb"))); } @Test // DATAMONGO-1835 - public void objectObjectShouldRenderAdditionalPropertiesCorrectlyWhenBoolean() { + void objectObjectShouldRenderAdditionalPropertiesCorrectlyWhenBoolean() { assertThat(object().additionalProperties(true).generatedDescription().toDocument()).isEqualTo( new Document("type", "object").append("description", "Must be an object allowing additional properties.") @@ -77,7 +127,7 @@ public void objectObjectShouldRenderAdditionalPropertiesCorrectlyWhenBoolean() { } @Test // DATAMONGO-1835 - public void objectObjectShouldRenderPropertiesCorrectly() { + void objectObjectShouldRenderPropertiesCorrectly() { Document expected = new Document("type", "object") .append("description", "Must be an object defining restrictions for name, active.").append("properties", @@ -92,7 +142,7 @@ public void objectObjectShouldRenderPropertiesCorrectly() { } @Test // DATAMONGO-1835 - public void objectObjectShouldRenderNestedObjectPropertiesCorrectly() { + void objectObjectShouldRenderNestedObjectPropertiesCorrectly() { Document expected = new Document("type", "object") .append("description", "Must be an object defining restrictions for address.") @@ -101,7 +151,6 @@ public void objectObjectShouldRenderNestedObjectPropertiesCorrectly() { .append("properties", new Document("city", new Document("type", "string") .append("description", "Must be a string with length 
[3-unbounded.").append("minLength", 3))))); - assertThat(object() .properties(JsonSchemaProperty.object("address") .properties(JsonSchemaProperty.string("city").minLength(3).generatedDescription()).generatedDescription()) @@ -109,7 +158,7 @@ public void objectObjectShouldRenderNestedObjectPropertiesCorrectly() { } @Test // DATAMONGO-1835 - public void objectObjectShouldRenderPatternPropertiesCorrectly() { + void objectObjectShouldRenderPatternPropertiesCorrectly() { Document expected = new Document("type", "object") .append("description", "Must be an object defining restrictions for patterns na.*.") @@ -120,26 +169,37 @@ public void objectObjectShouldRenderPatternPropertiesCorrectly() { .generatedDescription().toDocument()).isEqualTo(expected); } + @Test // DATAMONGO-1849 + void objectShouldIncludeRequiredNestedCorrectly() { + + assertThat(object() // + .properties( // + JsonSchemaProperty.required(JsonSchemaProperty.string("lastname")) // + ).toDocument()) + .isEqualTo(new Document("type", "object").append("required", Collections.singletonList("lastname")) + .append("properties", new Document("lastname", new Document("type", "string")))); + } + // ----------------- // type : 'string' // ----------------- @Test // DATAMONGO-1835 - public void stringObjectShouldRenderTypeCorrectly() { + void stringObjectShouldRenderTypeCorrectly() { assertThat(string().generatedDescription().toDocument()) .isEqualTo(new Document("type", "string").append("description", "Must be a string.")); } @Test // DATAMONGO-1835 - public void stringObjectShouldRenderDescriptionCorrectly() { + void stringObjectShouldRenderDescriptionCorrectly() { assertThat(string().description("error msg").toDocument()) .isEqualTo(new Document("type", "string").append("description", "error msg")); } @Test // DATAMONGO-1835 - public void stringObjectShouldRenderRangeCorrectly() { + void stringObjectShouldRenderRangeCorrectly() { 
assertThat(string().length(from(inclusive(10)).to(inclusive(20))).generatedDescription().toDocument()) .isEqualTo(new Document("type", "string").append("description", "Must be a string with length [10-20].") @@ -147,7 +207,7 @@ public void stringObjectShouldRenderRangeCorrectly() { } @Test // DATAMONGO-1835 - public void stringObjectShouldRenderPatternCorrectly() { + void stringObjectShouldRenderPatternCorrectly() { assertThat(string().matching("^spring$").generatedDescription().toDocument()) .isEqualTo(new Document("type", "string").append("description", "Must be a string matching ^spring$.") @@ -159,7 +219,7 @@ public void stringObjectShouldRenderPatternCorrectly() { // ----------------- @Test // DATAMONGO-1835 - public void numberObjectShouldRenderMultipleOfCorrectly() { + void numberObjectShouldRenderMultipleOfCorrectly() { assertThat(number().multipleOf(3.141592F).generatedDescription().toDocument()) .isEqualTo(new Document("type", "number").append("description", "Must be a numeric value multiple of 3.141592.") @@ -167,7 +227,7 @@ public void numberObjectShouldRenderMultipleOfCorrectly() { } @Test // DATAMONGO-1835 - public void numberObjectShouldRenderMaximumCorrectly() { + void numberObjectShouldRenderMaximumCorrectly() { assertThat( number().within(Range.of(Bound.unbounded(), Bound.inclusive(3.141592F))).generatedDescription().toDocument()) @@ -183,7 +243,7 @@ public void numberObjectShouldRenderMaximumCorrectly() { } @Test // DATAMONGO-1835 - public void numberObjectShouldRenderMinimumCorrectly() { + void numberObjectShouldRenderMinimumCorrectly() { assertThat( number().within(Range.of(Bound.inclusive(3.141592F), Bound.unbounded())).generatedDescription().toDocument()) @@ -203,35 +263,42 @@ public void numberObjectShouldRenderMinimumCorrectly() { // ----------------- @Test // DATAMONGO-1835 - public void arrayObjectShouldRenderItemsCorrectly() { + void arrayObjectShouldRenderItemsCorrectly() { assertThat(array().items(Arrays.asList(string(), 
bool())).toDocument()).isEqualTo(new Document("type", "array") .append("items", Arrays.asList(new Document("type", "string"), new Document("type", "boolean")))); } + @Test // DATAMONGO-2613 + void arrayObjectShouldRenderItemsCorrectlyAsObjectIfContainsOnlyOneElement() { + + assertThat(array().items(Collections.singletonList(string())).toDocument()) + .isEqualTo(new Document("type", "array").append("items", new Document("type", "string"))); + } + @Test // DATAMONGO-1835 - public void arrayObjectShouldRenderMaxItemsCorrectly() { + void arrayObjectShouldRenderMaxItemsCorrectly() { assertThat(array().maxItems(5).generatedDescription().toDocument()).isEqualTo(new Document("type", "array") .append("description", "Must be an array having size unbounded-5].").append("maxItems", 5)); } @Test // DATAMONGO-1835 - public void arrayObjectShouldRenderMinItemsCorrectly() { + void arrayObjectShouldRenderMinItemsCorrectly() { assertThat(array().minItems(5).generatedDescription().toDocument()).isEqualTo(new Document("type", "array") .append("description", "Must be an array having size [5-unbounded.").append("minItems", 5)); } @Test // DATAMONGO-1835 - public void arrayObjectShouldRenderUniqueItemsCorrectly() { + void arrayObjectShouldRenderUniqueItemsCorrectly() { assertThat(array().uniqueItems(true).generatedDescription().toDocument()).isEqualTo(new Document("type", "array") .append("description", "Must be an array of unique values.").append("uniqueItems", true)); } @Test // DATAMONGO-1835 - public void arrayObjectShouldRenderAdditionalItemsItemsCorrectly() { + void arrayObjectShouldRenderAdditionalItemsItemsCorrectly() { assertThat(array().additionalItems(true).generatedDescription().toDocument()) .isEqualTo(new Document("type", "array").append("description", "Must be an array with additional items.") @@ -246,10 +313,10 @@ public void arrayObjectShouldRenderAdditionalItemsItemsCorrectly() { // ----------------- @Test // DATAMONGO-1835 - public void booleanShouldRenderCorrectly() { 
+ void booleanShouldRenderCorrectly() { assertThat(bool().generatedDescription().toDocument()) - .isEqualTo(new Document("type", "boolean").append("description", "Must be a boolean.")); + .isEqualTo(new Document("type", "boolean").append("description", "Must be a boolean")); } // ----------------- @@ -257,10 +324,10 @@ public void booleanShouldRenderCorrectly() { // ----------------- @Test // DATAMONGO-1835 - public void nullShouldRenderCorrectly() { + void nullShouldRenderCorrectly() { assertThat(nil().generatedDescription().toDocument()) - .isEqualTo(new Document("type", "null").append("description", "Must be null.")); + .isEqualTo(new Document("type", "null").append("description", "Must be null")); } // ----------------- @@ -268,10 +335,10 @@ public void nullShouldRenderCorrectly() { // ----------------- @Test // DATAMONGO-1877 - public void dateShouldRenderCorrectly() { + void dateShouldRenderCorrectly() { assertThat(date().generatedDescription().toDocument()) - .isEqualTo(new Document("bsonType", "date").append("description", "Must be a date.")); + .isEqualTo(new Document("bsonType", "date").append("description", "Must be a date")); } // ----------------- @@ -279,10 +346,10 @@ public void dateShouldRenderCorrectly() { // ----------------- @Test // DATAMONGO-1877 - public void timestampShouldRenderCorrectly() { + void timestampShouldRenderCorrectly() { assertThat(timestamp().generatedDescription().toDocument()) - .isEqualTo(new Document("bsonType", "timestamp").append("description", "Must be a timestamp.")); + .isEqualTo(new Document("bsonType", "timestamp").append("description", "Must be a timestamp")); } // ----------------- @@ -290,35 +357,35 @@ public void timestampShouldRenderCorrectly() { // ----------------- @Test // DATAMONGO-1835 - public void typedObjectShouldRenderEnumCorrectly() { + void typedObjectShouldRenderEnumCorrectly() { assertThat(of(String.class).possibleValues(Arrays.asList("one", "two")).toDocument()) .isEqualTo(new Document("type", 
"string").append("enum", Arrays.asList("one", "two"))); } @Test // DATAMONGO-1835 - public void typedObjectShouldRenderAllOfCorrectly() { + void typedObjectShouldRenderAllOfCorrectly() { assertThat(of(Object.class).allOf(Arrays.asList(string())).toDocument()) .isEqualTo(new Document("type", "object").append("allOf", Arrays.asList(new Document("type", "string")))); } @Test // DATAMONGO-1835 - public void typedObjectShouldRenderAnyOfCorrectly() { + void typedObjectShouldRenderAnyOfCorrectly() { assertThat(of(String.class).anyOf(Arrays.asList(string())).toDocument()) .isEqualTo(new Document("type", "string").append("anyOf", Arrays.asList(new Document("type", "string")))); } @Test // DATAMONGO-1835 - public void typedObjectShouldRenderOneOfCorrectly() { + void typedObjectShouldRenderOneOfCorrectly() { assertThat(of(String.class).oneOf(Arrays.asList(string())).toDocument()) .isEqualTo(new Document("type", "string").append("oneOf", Arrays.asList(new Document("type", "string")))); } @Test // DATAMONGO-1835 - public void typedObjectShouldRenderNotCorrectly() { + void typedObjectShouldRenderNotCorrectly() { assertThat(untyped().notMatch(string()).toDocument()) .isEqualTo(new Document("not", new Document("type", "string"))); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaPropertyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaPropertyUnitTests.java index 361ed155d0..c9f6934d9b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaPropertyUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaPropertyUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,7 +17,7 @@ import static org.springframework.data.mongodb.test.util.Assertions.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; /** @@ -63,4 +63,9 @@ public void shouldRenderDateCorrectly() { public void shouldRenderTimestampCorrectly() { assertThat(JsonSchemaProperty.timestamp("foo").toDocument()).containsEntry("foo.bsonType", "timestamp"); } + + @Test // DATAMONGO-2282 + public void objectIdShouldBeRenderedCorrectly() { + assertThat(JsonSchemaProperty.objectId("_id").toDocument()).containsEntry("_id.bsonType", "objectId"); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaTests.java index ff8b9ff795..a7cf75366f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,30 +17,27 @@ import static org.springframework.data.mongodb.test.util.Assertions.*; -import lombok.Data; - import java.util.Collections; import java.util.List; import org.bson.Document; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; -import org.springframework.data.mongodb.config.AbstractMongoConfiguration; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; import org.springframework.data.mongodb.core.CollectionOptions; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.validation.Validator; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.Version; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoDatabase; import com.mongodb.client.model.CreateCollectionOptions; import com.mongodb.client.model.ValidationAction; 
@@ -52,29 +49,30 @@ * * @author Christoph Strobl */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) @ContextConfiguration public class MongoJsonSchemaTests { - public static @ClassRule MongoVersionRule REQUIRES_AT_LEAST_3_6_0 = MongoVersionRule.atLeast(Version.parse("3.6.0")); + static @Client MongoClient mongoClient; @Configuration - static class Config extends AbstractMongoConfiguration { + static class Config extends AbstractMongoClientConfiguration { @Override public MongoClient mongoClient() { - return new MongoClient(); + return mongoClient; } @Override protected String getDatabaseName() { return "json-schema-tests"; } + } @Autowired MongoTemplate template; - @Before + @BeforeEach public void setUp() { template.dropCollection(Person.class); @@ -181,7 +179,6 @@ Document readSchemaFromDatabase(String collectionName) { return collectionInfo.get("options", Document.class).get("validator", Document.class); } - @Data @org.springframework.data.mongodb.core.mapping.Document(collection = "persons") static class Person { @@ -189,6 +186,34 @@ static class Person { String lastname; Address address; + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public Address getAddress() { + return this.address; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAddress(Address address) { + this.address = address; + } + + public String toString() { + return "MongoJsonSchemaTests.Person(firstname=" + this.getFirstname() + ", lastname=" + this.getLastname() + + ", address=" + this.getAddress() + ")"; + } } static class Address { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java index 054c5d311f..1691305617 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,20 @@ */ package org.springframework.data.mongodb.core.schema; +import static org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty.*; +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.*; +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.encrypted; import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.UUID; import org.bson.Document; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; /** * Unit tests for {@link MongoJsonSchema}. 
@@ -30,11 +36,11 @@ * @author Christoph Strobl * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) -public class MongoJsonSchemaUnitTests { +@ExtendWith(MockitoExtension.class) +class MongoJsonSchemaUnitTests { @Test // DATAMONGO-1835 - public void toDocumentRendersSchemaCorrectly() { + void toDocumentRendersSchemaCorrectly() { MongoJsonSchema schema = MongoJsonSchema.builder() // .required("firstname", "lastname") // @@ -45,7 +51,7 @@ public void toDocumentRendersSchemaCorrectly() { } @Test // DATAMONGO-1835 - public void rendersDocumentBasedSchemaCorrectly() { + void rendersDocumentBasedSchemaCorrectly() { Document document = MongoJsonSchema.builder() // .required("firstname", "lastname") // @@ -57,13 +63,89 @@ public void rendersDocumentBasedSchemaCorrectly() { new Document("type", "object").append("required", Arrays.asList("firstname", "lastname")))); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1835 - public void throwsExceptionOnNullRoot() { - MongoJsonSchema.of((JsonSchemaObject) null); + @Test // DATAMONGO-1849 + void rendersRequiredPropertiesCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder() // + .required("firstname") // + .properties( // + JsonSchemaProperty.required(JsonSchemaProperty.string("lastname")) // + ).build(); + + assertThat(schema.toDocument()).isEqualTo(new Document("$jsonSchema", + new Document("type", "object").append("required", Arrays.asList("firstname", "lastname")).append("properties", + new Document("lastname", new Document("type", "string"))))); + } + + @Test // DATAMONGO-2306 + void rendersEncryptedPropertyCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder().properties( // + encrypted(string("ssn")) // + .aead_aes_256_cbc_hmac_sha_512_deterministic() // + .keyId("*key0_id") // + ).build(); + + assertThat(schema.toDocument()).isEqualTo(new Document("$jsonSchema", + new Document("type", "object").append("properties", + new Document("ssn", new Document("encrypt", new 
Document("keyId", "*key0_id") + .append("algorithm", "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic").append("bsonType", "string")))))); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1835 - public void throwsExceptionOnNullDocument() { - MongoJsonSchema.of((Document) null); + @Test // DATAMONGO-2306 + void rendersEncryptedPropertyWithKeyIdCorrectly() { + + UUID uuid = UUID.randomUUID(); + MongoJsonSchema schema = MongoJsonSchema.builder().properties( // + encrypted(string("ssn")) // + .aead_aes_256_cbc_hmac_sha_512_deterministic() // + .keys(uuid) // + ).build(); + + assertThat(schema.toDocument()).isEqualTo(new Document("$jsonSchema", + new Document("type", "object").append("properties", + new Document("ssn", new Document("encrypt", new Document("keyId", Collections.singletonList(uuid)) + .append("algorithm", "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic").append("bsonType", "string")))))); + } + + @Test // GH-4185 + void rendersQueryablePropertyCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder().properties( // + queryable(rangeEncrypted(number("ssn")), + List.of(QueryCharacteristics.range().contention(0).trimFactor(1).sparsity(1).min(0).max(200)))) + .build(); + + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo(""" + { + "type": "object", + "properties": { + "ssn": { + "encrypt": { + "bsonType": "long", + "algorithm": "Range", + "queries": [{ + "queryType": "range", + "contention": {$numberLong: "0"}, + "trimFactor": 1, + "sparsity": {$numberLong: "1"}, + "min": 0, + "max": 200 + }] + } + } + } + } + """); + } + + @Test // DATAMONGO-1835 + void throwsExceptionOnNullRoot() { + assertThatIllegalArgumentException().isThrownBy(() -> MongoJsonSchema.of((JsonSchemaObject) null)); + } + + @Test // DATAMONGO-1835 + void throwsExceptionOnNullDocument() { + assertThatIllegalArgumentException().isThrownBy(() -> MongoJsonSchema.of((Document) null)); } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/ReactiveMongoJsonSchemaTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/ReactiveMongoJsonSchemaTests.java index bf2d11a177..4615568d10 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/ReactiveMongoJsonSchemaTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/ReactiveMongoJsonSchemaTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,63 +17,67 @@ import static org.springframework.data.mongodb.test.util.Assertions.*; -import lombok.Data; import reactor.test.StepVerifier; import java.time.Duration; +import java.util.Collections; import java.util.List; +import java.util.Set; import org.bson.Document; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.dao.DataRetrievalFailureException; -import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration; import org.springframework.data.mongodb.core.CollectionOptions; import org.springframework.data.mongodb.core.ReactiveMongoTemplate; import 
org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper; import org.springframework.data.mongodb.core.mapping.Field; -import org.springframework.data.mongodb.test.util.MongoVersionRule; -import org.springframework.data.util.Version; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.ReactiveMongoClientClosingTestConfiguration; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; import com.mongodb.reactivestreams.client.MongoClient; -import com.mongodb.reactivestreams.client.MongoClients; /** * Integration tests for {@link MongoJsonSchema} using reactive infrastructure. * * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) @ContextConfiguration public class ReactiveMongoJsonSchemaTests { - public static @ClassRule MongoVersionRule REQUIRES_AT_LEAST_3_6_0 = MongoVersionRule.atLeast(Version.parse("3.6.0")); + static @Client MongoClient mongoClient; @Configuration - static class Config extends AbstractReactiveMongoConfiguration { + static class Config extends ReactiveMongoClientClosingTestConfiguration { @Override public MongoClient reactiveMongoClient() { - return MongoClients.create(); + return mongoClient; } @Override protected String getDatabaseName() { return "json-schema-tests"; } + + @Override + protected Set> getInitialEntitySet() { + return Collections.emptySet(); + } } @Autowired ReactiveMongoTemplate template; - @Before + @BeforeEach public void setUp() { - StepVerifier.create(template.dropCollection(Person.class)).verifyComplete(); + template.dropCollection(Person.class).as(StepVerifier::create).verifyComplete(); } @Test // DATAMONGO-1835 @@ -88,7 +92,7 @@ public void 
writeSchemaViaTemplate() { ).build(); - StepVerifier.create(template.createCollection(Person.class, CollectionOptions.empty().schema(schema))) + template.createCollection(Person.class, CollectionOptions.empty().schema(schema)).as(StepVerifier::create) .expectNextCount(1).verifyComplete(); Document $jsonSchema = new MongoJsonSchemaMapper(template.getConverter()).mapSchema(schema.toDocument(), @@ -120,7 +124,6 @@ Document readSchemaFromDatabase(String collectionName) { return collectionInfo.get("options", Document.class).get("validator", Document.class); } - @Data @org.springframework.data.mongodb.core.mapping.Document(collection = "persons") static class Person { @@ -128,6 +131,34 @@ static class Person { String lastname; Address address; + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public Address getAddress() { + return this.address; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAddress(Address address) { + this.address = address; + } + + public String toString() { + return "ReactiveMongoJsonSchemaTests.Person(firstname=" + this.getFirstname() + ", lastname=" + this.getLastname() + + ", address=" + this.getAddress() + ")"; + } } static class Address { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/TypeUnifyingMergeFunctionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/TypeUnifyingMergeFunctionUnitTests.java new file mode 100644 index 0000000000..6db26a1250 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/TypeUnifyingMergeFunctionUnitTests.java @@ -0,0 +1,138 @@ +/* + * Copyright 2022-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import static org.mockito.Mockito.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.ConflictResolutionFunction.Resolution; + +/** + * Unit tests for {@link TypeUnifyingMergeFunction}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MockitoExtension.class) +public class TypeUnifyingMergeFunctionUnitTests { + + @Mock ConflictResolutionFunction crf; + + TypeUnifyingMergeFunction mergeFunction; + + @BeforeEach + void beforeEach() { + mergeFunction = new TypeUnifyingMergeFunction(crf); + } + + @Test // GH-3870 + void nonOverlapping() { + + Map a = new LinkedHashMap<>(); + a.put("a", "a-value"); + Map b = new LinkedHashMap<>(); + b.put("b", "b-value"); + + Document target = mergeFunction.apply(a, b); + assertThat(target).containsEntry("a", "a-value").containsEntry("b", "b-value"); + } + + @Test // GH-3870 + void resolvesNonConflictingTypeKeys/* type vs bsonType */() { + + Map a = new LinkedHashMap<>(); + a.put("type", "string"); + Map b = new LinkedHashMap<>(); + b.put("bsonType", "string"); + + Document target = mergeFunction.apply(a, b); + assertThat(target).containsEntry("type", "string").doesNotContainKey("bsonType"); + } + + @Test // GH-3870 + void nonOverlappingNestedMap() { + + Map a = new LinkedHashMap<>(); + a.put("a", Collections.singletonMap("nested", "value")); + Map b = new LinkedHashMap<>(); + b.put("b", "b-value"); + + Document target = mergeFunction.apply(a, b); + assertThat(target).containsEntry("a", Collections.singletonMap("nested", "value")).containsEntry("b", "b-value"); + } + + @Test // GH-3870 + void nonOverlappingNestedMaps() { + + Map a = new LinkedHashMap<>(); + a.put("nested", Collections.singletonMap("a", "a-value")); + Map b = new LinkedHashMap<>(); + b.put("nested", Collections.singletonMap("b", "b-value")); + + Document target = mergeFunction.apply(a, b); + assertThat(target).containsEntry("nested.a", "a-value").containsEntry("nested.b", "b-value"); + } + + @Test // GH-3870 + void delegatesConflictToResolutionFunction() { + + ArgumentCaptor aValueCaptor = ArgumentCaptor.forClass(Object.class); + ArgumentCaptor bValueCaptor = ArgumentCaptor.forClass(Object.class); + + when(crf.resolveConflict(any(), 
aValueCaptor.capture(), bValueCaptor.capture())).thenReturn(Resolution.ofValue("nested", "from-function")); + + Map a = new LinkedHashMap<>(); + a.put("nested", Collections.singletonMap("a", "a-value")); + Map b = new LinkedHashMap<>(); + b.put("nested", "b-value"); + + Document target = mergeFunction.apply(a, b); + assertThat(target).containsEntry("nested", "from-function") // + .doesNotContainKey("nested.a"); + + assertThat(aValueCaptor.getValue()).isEqualTo(a); + assertThat(bValueCaptor.getValue()).isEqualTo(b); + } + + @Test // GH-3870 + void skipsConflictItemsWhenAdvised() { + + ArgumentCaptor aValueCaptor = ArgumentCaptor.forClass(Object.class); + ArgumentCaptor bValueCaptor = ArgumentCaptor.forClass(Object.class); + + when(crf.resolveConflict(any(), aValueCaptor.capture(), bValueCaptor.capture())).thenReturn(Resolution.SKIP); + + Map a = new LinkedHashMap<>(); + a.put("nested", Collections.singletonMap("a", "a-value")); + a.put("some", "value"); + Map b = new LinkedHashMap<>(); + b.put("nested", "b-value"); + + Document target = mergeFunction.apply(a, b); + assertThat(target).hasSize(1).containsEntry("some", "value"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/ExecutableMongoScriptUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/ExecutableMongoScriptUnitTests.java index 022fa0e512..c2f31b829f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/ExecutableMongoScriptUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/ExecutableMongoScriptUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,58 +15,34 @@ */ package org.springframework.data.mongodb.core.script; -import static org.hamcrest.core.IsEqual.*; -import static org.hamcrest.core.IsNull.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.springframework.util.ObjectUtils; +import org.junit.jupiter.api.Test; /** * @author Christoph Strobl */ -public class ExecutableMongoScriptUnitTests { - - public @Rule ExpectedException expectedException = ExpectedException.none(); +class ExecutableMongoScriptUnitTests { @Test // DATAMONGO-479 - public void constructorShouldThrowExceptionWhenRawScriptIsNull() { - - expectException(IllegalArgumentException.class, "must not be", "null"); - - new ExecutableMongoScript(null); + void constructorShouldThrowExceptionWhenRawScriptIsNull() { + assertThatIllegalArgumentException().isThrownBy(() -> new ExecutableMongoScript(null)) + .withMessageContaining("must not be").withMessageContaining("null"); } @Test // DATAMONGO-479 - public void constructorShouldThrowExceptionWhenRawScriptIsEmpty() { - - expectException(IllegalArgumentException.class, "must not be", "empty"); - - new ExecutableMongoScript(""); + void constructorShouldThrowExceptionWhenRawScriptIsEmpty() { + assertThatIllegalArgumentException().isThrownBy(() -> new ExecutableMongoScript("")) + .withMessageContaining("must not be").withMessageContaining("empty"); } @Test // DATAMONGO-479 - public void getCodeShouldReturnCodeRepresentationOfRawScript() { + void getCodeShouldReturnCodeRepresentationOfRawScript() { String jsFunction = "function(x) { return x; }"; ExecutableMongoScript script = new 
ExecutableMongoScript(jsFunction); - assertThat(script.getCode(), notNullValue()); - assertThat(script.getCode().toString(), equalTo(jsFunction)); + assertThat(script.getCode()).isNotNull().hasToString(jsFunction); } - - private void expectException(Class type, String... messageFragments) { - - expectedException.expect(IllegalArgumentException.class); - - if (!ObjectUtils.isEmpty(messageFragments)) { - for (String fragment : messageFragments) { - expectedException.expectMessage(fragment); - } - } - } - } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/NamedMongoScriptUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/NamedMongoScriptUnitTests.java index 50598dcd0a..3eca8e90b5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/NamedMongoScriptUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/script/NamedMongoScriptUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,9 @@ */ package org.springframework.data.mongodb.core.script; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link NamedMongoScript}. 
@@ -29,19 +28,19 @@ */ public class NamedMongoScriptUnitTests { - @Test(expected = IllegalArgumentException.class) // DATAMONGO-479 + @Test // DATAMONGO-479 public void shouldThrowExceptionWhenScriptNameIsNull() { - new NamedMongoScript(null, "return 1;"); + assertThatIllegalArgumentException().isThrownBy(() -> new NamedMongoScript(null, "return 1;")); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-479 + @Test // DATAMONGO-479 public void shouldThrowExceptionWhenScriptNameIsEmptyString() { - new NamedMongoScript("", "return 1"); + assertThatIllegalArgumentException().isThrownBy(() -> new NamedMongoScript("", "return 1")); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-479 + @Test // DATAMONGO-479 public void shouldThrowExceptionWhenRawScriptIsEmptyString() { - new NamedMongoScript("foo", ""); + assertThatIllegalArgumentException().isThrownBy(() -> new NamedMongoScript("foo", "")); } @Test // DATAMONGO-479 @@ -49,6 +48,6 @@ public void getCodeShouldReturnCodeRepresentationOfRawScript() { String jsFunction = "function(x) { return x; }"; - assertThat(new NamedMongoScript("echo", jsFunction).getCode(), is(jsFunction)); + assertThat(new NamedMongoScript("echo", jsFunction).getCode()).isEqualTo(jsFunction); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/spel/ExpressionNodeUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/spel/ExpressionNodeUnitTests.java index 837e456236..b9b2f18c73 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/spel/ExpressionNodeUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/spel/ExpressionNodeUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,17 @@ */ package org.springframework.data.mongodb.core.spel; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Arrays; import java.util.Collection; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.expression.spel.ExpressionState; import org.springframework.expression.spel.SpelNode; import org.springframework.expression.spel.ast.OpDivide; @@ -38,8 +38,8 @@ * * @author Oliver Gierke */ -@RunWith(MockitoJUnitRunner.class) -public class ExpressionNodeUnitTests { +@ExtendWith(MockitoExtension.class) +class ExpressionNodeUnitTests { @Mock ExpressionState state; @@ -48,18 +48,18 @@ public class ExpressionNodeUnitTests { @Mock OpDivide divide; @Mock OpMultiply multiply; - Collection operators; + private Collection operators; - @Before - public void setUp() { + @BeforeEach + void setUp() { this.operators = Arrays.asList(minus, plus, divide, multiply); } @Test // DATAMONGO-774 - public void createsOperatorNodeForOperations() { + void createsOperatorNodeForOperations() { for (SpelNode operator : operators) { - assertThat(ExpressionNode.from(operator, state), is(instanceOf(OperatorNode.class))); + assertThat(ExpressionNode.from(operator, 
state)).isInstanceOf(OperatorNode.class); } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/validation/CriteriaValidatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/validation/CriteriaValidatorUnitTests.java index e3d59fe25c..f044646037 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/validation/CriteriaValidatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/validation/CriteriaValidatorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,7 +18,7 @@ import static org.assertj.core.api.Assertions.*; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.query.Criteria; /** @@ -40,8 +40,8 @@ public void testSimpleCriteria() { .isEqualTo(new Document("$type", 16).append("$gte", 0).append("$lte", 122)); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1322 + @Test // DATAMONGO-1322 public void testFailOnNull() { - CriteriaValidator.of(null); + assertThatIllegalArgumentException().isThrownBy(() -> CriteriaValidator.of(null)); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/MongoApplication.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/MongoApplication.java new file mode 100644 index 0000000000..c68dc5018f --- /dev/null +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/MongoApplication.java @@ -0,0 +1,38 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// tag::file[] +package org.springframework.data.mongodb.example; + +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.MongoTemplate; + +import com.mongodb.client.MongoClients; + +public class MongoApplication { + + public static void main(String[] args) throws Exception { + + MongoOperations mongoOps = new MongoTemplate(MongoClients.create(), "database"); + mongoOps.insert(new Person("Joe", 34)); + + System.out.println(mongoOps.query(Person.class).matching(where("name").is("Joe")).firstValue()); + + mongoOps.dropCollection("person"); + } +} +//end::file[] diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/Person.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/Person.java new file mode 100644 index 0000000000..8c1e9e4d81 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/Person.java @@ -0,0 +1,49 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// tag::file[] +package org.springframework.data.mongodb.example; + +// tag::class[] +public class Person { + + private String id; + private String name; + private int age; + + public Person(String name, int age) { + this.name = name; + this.age = age; + } + + public String getId() { + return id; + } + + public String getName() { + return name; + } + + public int getAge() { + return age; + } + + @Override + public String toString() { + return "Person [id=" + id + ", name=" + name + ", age=" + age + "]"; + } +} +// end::class[] +// end::file[] diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/ReactiveMongoApplication.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/ReactiveMongoApplication.java new file mode 100644 index 0000000000..8022f45ab7 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example/ReactiveMongoApplication.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// tag::file[] +package org.springframework.data.mongodb.example; + +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; + +import com.mongodb.reactivestreams.client.MongoClients; + +public class ReactiveMongoApplication { + + public static void main(String[] args) throws Exception { + + ReactiveMongoOperations mongoOps = new ReactiveMongoTemplate(MongoClients.create(), "database"); + + mongoOps.insert(new Person("Joe", 34)) + .then(mongoOps.query(Person.class).matching(where("name").is("Joe")).first()) + .doOnNext(System.out::println) + .block(); + + mongoOps.dropCollection("person").block(); + } +} +// end::file[] diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/AntPathUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/AntPathUnitTests.java index bd42c9ac2c..516a1890bb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/AntPathUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/AntPathUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,11 @@ */ package org.springframework.data.mongodb.gridfs; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.regex.Pattern; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Unit tests for {@link AntPath}. @@ -35,8 +34,8 @@ public void buildRegexCorrectly() { AntPath path = new AntPath("**/foo/*-bar.xml"); String regex = path.toRegex(); - assertThat(Pattern.matches(regex, "foo/bar/foo/foo-bar.xml"), is(true)); - assertThat(Pattern.matches(regex, "foo/bar/foo/bar/foo-bar.xml"), is(false)); - assertThat(regex, is(".*\\Q/foo/\\E[^/]*\\Q-bar.xml\\E")); + assertThat(Pattern.matches(regex, "foo/bar/foo/foo-bar.xml")).isTrue(); + assertThat(Pattern.matches(regex, "foo/bar/foo/bar/foo-bar.xml")).isFalse(); + assertThat(regex).isEqualTo(".*\\Q/foo/\\E[^/]*\\Q-bar.xml\\E"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsResourceUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsResourceUnitTests.java index a159ca7da8..708ec4967c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsResourceUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsResourceUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,11 +17,12 @@ import static org.assertj.core.api.Assertions.*; +import java.io.FileNotFoundException; import java.util.Date; import org.bson.BsonObjectId; import org.bson.Document; -import org.junit.Test; +import org.junit.jupiter.api.Test; import com.mongodb.MongoGridFSException; import com.mongodb.client.gridfs.model.GridFSFile; @@ -38,16 +39,25 @@ public class GridFsResourceUnitTests { public void shouldReadContentTypeCorrectly() { Document metadata = new Document(GridFsResource.CONTENT_TYPE_FIELD, "text/plain"); - GridFSFile file = new GridFSFile(new BsonObjectId(), "foo", 0, 0, new Date(), "foo", metadata); + GridFSFile file = new GridFSFile(new BsonObjectId(), "foo", 0, 0, new Date(), metadata); GridFsResource resource = new GridFsResource(file); assertThat(resource.getContentType()).isEqualTo("text/plain"); } + @Test // DATAMONGO-2240 + public void shouldReturnGridFSFile() { + + GridFSFile file = new GridFSFile(new BsonObjectId(), "foo", 0, 0, new Date(), new Document()); + GridFsResource resource = new GridFsResource(file); + + assertThat(resource.getGridFSFile()).isSameAs(file); + } + @Test // DATAMONGO-1850 public void shouldThrowExceptionOnEmptyContentType() { - GridFSFile file = new GridFSFile(new BsonObjectId(), "foo", 0, 0, new Date(), "foo", null); + GridFSFile file = new GridFSFile(new BsonObjectId(), "foo", 0, 0, new Date(), null); GridFsResource resource = new GridFsResource(file); assertThatThrownBy(resource::getContentType).isInstanceOf(MongoGridFSException.class); @@ -56,9 +66,37 @@ public void shouldThrowExceptionOnEmptyContentType() { @Test // DATAMONGO-1850 public void shouldThrowExceptionOnEmptyContentTypeInMetadata() { - GridFSFile file = new GridFSFile(new 
BsonObjectId(), "foo", 0, 0, new Date(), "foo", new Document()); + GridFSFile file = new GridFSFile(new BsonObjectId(), "foo", 0, 0, new Date(), new Document()); GridFsResource resource = new GridFsResource(file); assertThatThrownBy(resource::getContentType).isInstanceOf(MongoGridFSException.class); } + + @Test // DATAMONGO-1914 + public void gettersThrowExceptionForAbsentResource() { + + GridFsResource absent = GridFsResource.absent("foo"); + + assertThat(absent.exists()).isFalse(); + assertThat(absent.getDescription()).contains("GridFs resource [foo]"); + + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(absent::getContentType); + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(absent::getId); + + assertThatExceptionOfType(FileNotFoundException.class).isThrownBy(absent::contentLength); + assertThatExceptionOfType(FileNotFoundException.class).isThrownBy(absent::getInputStream); + assertThatExceptionOfType(FileNotFoundException.class).isThrownBy(absent::lastModified); + assertThatExceptionOfType(FileNotFoundException.class).isThrownBy(absent::getURI); + assertThatExceptionOfType(FileNotFoundException.class).isThrownBy(absent::getURL); + } + + @Test // DATAMONGO-1914 + public void shouldReturnFilenameForAbsentResource() { + + GridFsResource absent = GridFsResource.absent("foo"); + + assertThat(absent.exists()).isFalse(); + assertThat(absent.getDescription()).contains("GridFs resource [foo]"); + assertThat(absent.getFilename()).isEqualTo("foo"); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateIntegrationTests.java index f499e636c1..f6c96e44a4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateIntegrationTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,16 +16,21 @@ package org.springframework.data.mongodb.gridfs; import static org.assertj.core.api.Assertions.*; -import static org.springframework.data.mongodb.core.query.Criteria.where; +import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; import static org.springframework.data.mongodb.gridfs.GridFsCriteria.*; +import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.ArrayList; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Stream; -import org.assertj.core.api.Assertions; import org.bson.BsonObjectId; +import org.bson.BsonString; import org.bson.Document; import org.bson.types.ObjectId; import org.junit.Before; @@ -34,14 +39,18 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.Resource; +import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; import org.springframework.data.mongodb.core.query.Query; import 
org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.util.StreamUtils; import com.mongodb.MongoGridFSException; import com.mongodb.client.gridfs.GridFSFindIterable; +import com.mongodb.client.gridfs.model.GridFSFile; /** * Integration tests for {@link GridFsTemplate}. @@ -50,14 +59,18 @@ * @author Philipp Schneider * @author Thomas Darimont * @author Martin Baumgartner + * @author Hartmut Lang + * @author Mark Paluch + * @author Denis Zavedeev */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("classpath:gridfs/gridfs.xml") public class GridFsTemplateIntegrationTests { Resource resource = new ClassPathResource("gridfs/gridfs.xml"); @Autowired GridFsOperations operations; + @Autowired SimpleMongoClientDatabaseFactory mongoClient; @Before public void setUp() { @@ -69,24 +82,42 @@ public void storesAndFindsSimpleDocument() throws IOException { ObjectId reference = operations.store(resource.getInputStream(), "foo.xml"); - List files = new ArrayList(); + List files = new ArrayList<>(); GridFSFindIterable result = operations.find(query(where("_id").is(reference))); result.into(files); - assertThat(files.size()).isEqualTo(1); + assertThat(files).hasSize(1); assertThat(((BsonObjectId) files.get(0).getId()).getValue()).isEqualTo(reference); } + // @Test // DATAMONGO-2392 + // public void storesAndFindsByUUID() throws IOException { + // + // UUID uuid = UUID.randomUUID(); + // + // GridFSFile fs = new GridFSFile(new BsonObjectId(new ObjectId(uuid.to)) + // GridFSInputFile in = fs.createFile(resource.getInputStream(), "gridfs.xml"); + // + // in.put("_id", uuid); + // in.put("contentType", "application/octet-stream"); + // in.save(); + // + // GridFSFile file = operations.findOne(query(where("_id").is(uuid))); + // GridFsResource resource = operations.getResource(file); 
+ // + // assertThat(resource.exists()).isTrue(); + // } + @Test // DATAMONGO-6 public void writesMetadataCorrectly() throws IOException { Document metadata = new Document("key", "value"); ObjectId reference = operations.store(resource.getInputStream(), "foo.xml", metadata); - List files = new ArrayList(); + List files = new ArrayList<>(); GridFSFindIterable result = operations.find(query(whereMetaData("key").is("value"))); result.into(files); - assertThat(files.size()).isEqualTo(1); + assertThat(files).hasSize(1); assertThat(((BsonObjectId) files.get(0).getId()).getValue()).isEqualTo(reference); } @@ -98,11 +129,11 @@ public void marshalsComplexMetadata() throws IOException { ObjectId reference = operations.store(resource.getInputStream(), "foo.xml", metadata); - List files = new ArrayList(); + List files = new ArrayList<>(); GridFSFindIterable result = operations.find(query(whereFilename().is("foo.xml"))); result.into(files); - assertThat(files.size()).isEqualTo(1); + assertThat(files).hasSize(1); assertThat(((BsonObjectId) files.get(0).getId()).getValue()).isEqualTo(reference); } @@ -113,7 +144,7 @@ public void findsFilesByResourcePattern() throws IOException { GridFsResource[] resources = operations.getResources("*.xml"); - assertThat(resources.length).isEqualTo(1); + assertThat(resources).hasSize(1); assertThat(((BsonObjectId) resources[0].getId()).getValue()).isEqualTo(reference); assertThat(resources[0].contentLength()).isEqualTo(resource.contentLength()); } @@ -124,7 +155,7 @@ public void findsFilesByResourceLocation() throws IOException { ObjectId reference = operations.store(resource.getInputStream(), "foo.xml"); GridFsResource[] resources = operations.getResources("foo.xml"); - assertThat(resources.length).isEqualTo(1); + assertThat(resources).hasSize(1); assertThat(((BsonObjectId) resources[0].getId()).getValue()).isEqualTo(reference); assertThat(resources[0].contentLength()).isEqualTo(resource.contentLength()); } @@ -134,11 +165,11 @@ public void 
storesContentType() throws IOException { ObjectId reference = operations.store(resource.getInputStream(), "foo2.xml", "application/xml"); - List files = new ArrayList(); + List files = new ArrayList<>(); GridFSFindIterable result = operations.find(query(whereContentType().is("application/xml"))); result.into(files); - assertThat(files.size()).isEqualTo(1); + assertThat(files).hasSize(1); assertThat(((BsonObjectId) files.get(0).getId()).getValue()).isEqualTo(reference); } @@ -151,7 +182,7 @@ public void considersSortWhenQueryingFiles() throws IOException { Query query = new Query().with(Sort.by(Direction.ASC, "filename")); - List files = new ArrayList(); + List files = new ArrayList<>(); GridFSFindIterable result = operations.find(query); result.into(files); @@ -171,14 +202,14 @@ public void queryingWithEmptyQueryReturnsAllFiles() throws IOException { assertThat(files).hasSize(1).extracting(it -> ((BsonObjectId) it.getId()).getValue()).containsExactly(reference); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1762 + @Test // DATAMONGO-1762 public void queryingWithNullQueryThrowsException() { - operations.find(null); + assertThatIllegalArgumentException().isThrownBy(() -> operations.find(null)); } - @Test // DATAMONGO-813 - public void getResourceShouldReturnNullForNonExistingResource() { - assertThat(operations.getResource("doesnotexist")).isNull(); + @Test // DATAMONGO-813, DATAMONGO-1914 + public void getResourceShouldReturnAbsentResourceForNonExistingResource() { + assertThat(operations.getResource("doesnotexist")).isEqualTo(GridFsResource.absent("doesnotexist")); } @Test // DATAMONGO-809 @@ -187,7 +218,7 @@ public void storesAndFindsSimpleDocumentWithMetadataDocument() throws IOExceptio Document metadata = new Document("key", "value"); ObjectId reference = operations.store(resource.getInputStream(), "foobar", metadata); - List files = new ArrayList(); + List files = new ArrayList<>(); GridFSFindIterable result = 
operations.find(query(whereMetaData("key").is("value"))); result.into(files); @@ -201,7 +232,7 @@ public void storesAndFindsSimpleDocumentWithMetadataObject() throws IOException metadata.version = "1.0"; ObjectId reference = operations.store(resource.getInputStream(), "foobar", metadata); - List files = new ArrayList(); + List files = new ArrayList<>(); GridFSFindIterable result = operations.find(query(whereMetaData("version").is("1.0"))); result.into(files); @@ -222,7 +253,81 @@ public void failsOnNonExistingContentTypeRetrieval() throws IOException { operations.store(resource.getInputStream(), "no-content-type", (String) null); GridFsResource result = operations.getResource("no-content-type"); - assertThatThrownBy(() -> result.getContentType()).isInstanceOf(MongoGridFSException.class); + assertThatThrownBy(result::getContentType).isInstanceOf(MongoGridFSException.class); + } + + @Test // DATAMONGO-1813 + public void convertFileToResource() throws IOException { + + Document metadata = new Document("key", "value"); + ObjectId reference = operations.store(resource.getInputStream(), "foobar", metadata); + + GridFSFile file = operations.findOne(query(whereMetaData("key").is("value"))); + GridFsResource result = operations.getResource(file); + + assertThat(result.contentLength()).isEqualTo(resource.contentLength()); + assertThat(((BsonObjectId) result.getId()).getValue()).isEqualTo(reference); + } + + @Test // DATAMONGO-2021 + public void getResourceShouldRetrieveContentByIdentity() throws IOException { + + ClassPathResource secondResource = new ClassPathResource("gridfs/another-resource.xml"); + + ObjectId reference1 = operations.store(resource.getInputStream(), "foo.xml"); + ObjectId reference2 = operations.store(secondResource.getInputStream(), "foo.xml"); + + Map fixture = new LinkedHashMap<>(); + fixture.put(reference1, resource); + fixture.put(reference2, secondResource); + + for (Entry entry : fixture.entrySet()) { + + GridFsResource fsFile = 
operations.getResource(operations.findOne(query(where("_id").is(entry.getKey())))); + byte[] content = StreamUtils.copyToByteArray(fsFile.getInputStream()); + + assertThat(content).isEqualTo(StreamUtils.copyToByteArray(entry.getValue().getInputStream())); + } + } + + @Test // DATAMONGO-625 + public void storeSavesGridFsUploadWithGivenIdCorrectly() throws IOException { + + String id = "id-1"; + + GridFsUpload upload = GridFsUpload.fromStream(resource.getInputStream()) // + .id(id) // + .filename("gridFsUpload.xml") // + .contentType("xml") // + .build(); + + assertThat(operations.store(upload)).isEqualTo(id); + + GridFsResource fsFile = operations.getResource(operations.findOne(query(where("_id").is(id)))); + byte[] content = StreamUtils.copyToByteArray(fsFile.getInputStream()); + + assertThat(content).isEqualTo(StreamUtils.copyToByteArray(resource.getInputStream())); + assertThat(fsFile.getFilename()).isEqualTo("gridFsUpload.xml"); + assertThat(fsFile.getId()).isEqualTo(new BsonString(id)); + assertThat(fsFile.getFileId()).isEqualTo(id); + assertThat(fsFile.getContentType()).isEqualTo("xml"); + } + + @Test // DATAMONGO-765 + public void considersSkipLimitWhenQueryingFiles() { + + Stream.of("a", "aa", "aaa", // + "b", "bb", "bb", // + "c", "cc", "ccc", // + "d", "dd", "ddd") // + .forEach(filename -> operations.store(new ByteArrayInputStream(new byte[0]), filename)); + + PageRequest pageRequest = PageRequest.of(2, 3, Direction.ASC, "filename"); + List filenames = operations.find(new Query().with(pageRequest)) // + .map(GridFSFile::getFilename) // + .into(new ArrayList<>()); + + assertThat(filenames).containsExactly("c", "cc", "ccc"); } class Metadata { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateUnitTests.java new file mode 100644 index 0000000000..25693d10b7 --- /dev/null +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/GridFsTemplateUnitTests.java @@ -0,0 +1,79 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.gridfs; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MongoConverter; + +/** + * @author Christoph Strobl + */ +class GridFsTemplateUnitTests { + + private GridFsTemplateStub template; + + @BeforeEach + void beforeEach() { + template = new GridFsTemplateStub(); + } + + @Test // DATAMONGO-2574 + void contentMetadataDoesNotOverrideContentTypeIfSet() { + + template.onStoreReturn(new ObjectId()); + template.store(new ByteArrayInputStream(new byte[] {}), "filename", "json", new Document("meta", "data")); + + assertThat(template.capturedUpload().getOptions().getContentType()).isEqualTo("json"); + assertThat(template.capturedUpload().getOptions().getMetadata()).containsEntry("meta", "data"); + } + + private static class GridFsTemplateStub extends GridFsTemplate { + + private Object onStoreResult; + private GridFsObject 
capturedUpload; + + GridFsTemplateStub() { + super(mock(MongoDatabaseFactory.class), mock(MongoConverter.class)); + } + + @Override + public T store(GridFsObject upload) { + + this.capturedUpload = upload; + return (T) onStoreResult; + } + + GridFsTemplateStub onStoreReturn(Object result) { + + this.onStoreResult = result; + return this; + } + + GridFsObject capturedUpload() { + return capturedUpload; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsResourceUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsResourceUnitTests.java new file mode 100644 index 0000000000..ca408bb502 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsResourceUnitTests.java @@ -0,0 +1,98 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.gridfs; + +import static org.assertj.core.api.Assertions.*; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.nio.ByteBuffer; + +import org.junit.jupiter.api.Test; +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscriber; +import org.reactivestreams.Subscription; + +import com.mongodb.client.gridfs.model.GridFSFile; +import com.mongodb.reactivestreams.client.gridfs.GridFSDownloadPublisher; + +/** + * Unit tests for {@link ReactiveGridFsResource}. + * + * @author Christoph Strobl + */ +class ReactiveGridFsResourceUnitTests { + + @Test // DATAMONGO-2427 + void streamCanOnlyBeConsumedOnce() { + + ReactiveGridFsResource resource = new ReactiveGridFsResource("file.name", new StubGridFSDownloadPublisher()); + + assertThat(resource.exists()).isTrue(); + + resource.getInputStream().as(StepVerifier::create).verifyComplete(); + resource.getInputStream().as(StepVerifier::create).verifyError(IllegalStateException.class); + resource.getDownloadStream().as(StepVerifier::create).verifyError(IllegalStateException.class); + } + + @Test // DATAMONGO-2427 + void existReturnsFalseForNullPublisher() { + + ReactiveGridFsResource resource = new ReactiveGridFsResource("file.name", null); + + assertThat(resource.exists()).isFalse(); + } + + @Test // DATAMONGO-2427 + void nonExistingResourceProducesEmptyDownloadStream() { + + ReactiveGridFsResource resource = new ReactiveGridFsResource("file.name", null); + + resource.getInputStream().as(StepVerifier::create).verifyComplete(); + resource.getInputStream().as(StepVerifier::create).verifyComplete(); + resource.getDownloadStream().as(StepVerifier::create).verifyComplete(); + } + + private static class StubGridFSDownloadPublisher implements GridFSDownloadPublisher { + + @Override + public Publisher getGridFSFile() { + return Mono.empty(); + } + + @Override + public GridFSDownloadPublisher bufferSizeBytes(int bufferSizeBytes) { + return 
null; + } + + @Override + public void subscribe(Subscriber s) { + + s.onSubscribe(new Subscription() { + @Override + public void request(long n) { + s.onComplete(); + } + + @Override + public void cancel() { + + } + }); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplateTests.java new file mode 100644 index 0000000000..d0b87cf5fd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplateTests.java @@ -0,0 +1,336 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.gridfs; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.gridfs.GridFsCriteria.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import org.bson.BsonObjectId; +import org.bson.BsonString; +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.Resource; +import org.springframework.core.io.buffer.DataBuffer; +import org.springframework.core.io.buffer.DataBufferFactory; +import org.springframework.core.io.buffer.DataBufferUtils; +import org.springframework.core.io.buffer.DefaultDataBuffer; +import org.springframework.core.io.buffer.DefaultDataBufferFactory; +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.util.FileCopyUtils; +import org.springframework.util.StreamUtils; + +import com.mongodb.client.gridfs.model.GridFSFile; +import com.mongodb.internal.HexUtils; + +/** + * Integration tests for 
{@link ReactiveGridFsTemplate}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Nick Stolwijk + * @author Denis Zavedeev + */ +@RunWith(SpringRunner.class) +@ContextConfiguration("classpath:gridfs/reactive-gridfs.xml") +public class ReactiveGridFsTemplateTests { + + Resource resource = new ClassPathResource("gridfs/gridfs.xml"); + + @Autowired ReactiveGridFsOperations operations; + @Autowired SimpleMongoClientDatabaseFactory mongoClient; + @Autowired ReactiveMongoDatabaseFactory dbFactory; + @Autowired MongoConverter mongoConverter; + + @Before + public void setUp() { + + operations.delete(new Query()) // + .as(StepVerifier::create) // + .verifyComplete(); + } + + @Test // DATAMONGO-1855 + public void storesAndFindsSimpleDocument() { + + DefaultDataBufferFactory factory = new DefaultDataBufferFactory(); + DefaultDataBuffer first = factory.wrap("first".getBytes()); + DefaultDataBuffer second = factory.wrap("second".getBytes()); + + ObjectId reference = operations.store(Flux.just(first, second), "foo.xml").block(); + + operations.find(query(where("_id").is(reference))) // + .as(StepVerifier::create) // + .assertNext(actual -> { + assertThat(((BsonObjectId) actual.getId()).getValue()).isEqualTo(reference); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1855 + public void storesAndLoadsLargeFileCorrectly() { + + ByteBuffer buffer = ByteBuffer.allocate(1000 * 1000); // 1 mb + int i = 0; + while (buffer.remaining() != 0) { + buffer.put(HexUtils.toHex(new byte[] { (byte) (i++ % 16) }).getBytes()); + } + buffer.flip(); + + DefaultDataBufferFactory factory = new DefaultDataBufferFactory(); + + ObjectId reference = operations.store(Flux.just(factory.wrap(buffer)), "large.txt").block(); + + buffer.clear(); + + // default chunk size + operations.findOne(query(where("_id").is(reference))).flatMap(operations::getResource) + .flatMapMany(ReactiveGridFsResource::getDownloadStream) // + .transform(DataBufferUtils::join) // + .as(StepVerifier::create) 
// + .consumeNextWith(dataBuffer -> { + + assertThat(dataBuffer.readableByteCount()).isEqualTo(buffer.remaining()); + assertThat(dataBuffer.asByteBuffer()).isEqualTo(buffer); + }).verifyComplete(); + + // small chunk size + operations.findOne(query(where("_id").is(reference))).flatMap(operations::getResource) + .flatMapMany(reactiveGridFsResource -> reactiveGridFsResource.getDownloadStream(256)) // + .transform(DataBufferUtils::join) // + .as(StepVerifier::create) // + .consumeNextWith(dataBuffer -> { + + assertThat(dataBuffer.readableByteCount()).isEqualTo(buffer.remaining()); + assertThat(dataBuffer.asByteBuffer()).isEqualTo(buffer); + }).verifyComplete(); + } + + @Test // DATAMONGO-1855 + public void writesMetadataCorrectly() throws IOException { + + Document metadata = new Document("key", "value"); + + Flux source = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + ObjectId reference = operations.store(source, "foo.xml", "binary/octet-stream", metadata).block(); + + operations.find(query(whereMetaData("key").is("value"))) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.getObjectId()).isEqualTo(reference); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1855 + public void marshalsComplexMetadata() { + + Metadata metadata = new Metadata(); + metadata.version = "1.0"; + + Flux source = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + ObjectId reference = operations.store(source, "foo.xml", "binary/octet-stream", metadata).block(); + + operations.find(query(whereMetaData("version").is("1.0"))) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.getObjectId()).isEqualTo(reference); + assertThat(actual.getMetadata()).containsEntry("version", "1.0"); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1855 + public void getResourceShouldRetrieveContentByIdentity() throws IOException { + + byte[] content = 
StreamUtils.copyToByteArray(resource.getInputStream()); + Flux source = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + ObjectId reference = operations.store(source, "foo.xml", null, null).block(); + + operations.findOne(query(where("_id").is(reference))).flatMap(operations::getResource) + .flatMapMany(ReactiveGridFsResource::getDownloadStream) // + .transform(DataBufferUtils::join) // + .as(StepVerifier::create) // + .consumeNextWith(dataBuffer -> { + + byte[] actual = new byte[dataBuffer.readableByteCount()]; + dataBuffer.read(actual); + + assertThat(actual).isEqualTo(content); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-1855, DATAMONGO-2240 + public void shouldEmitFirstEntryWhenFindFirstRetrievesMoreThanOneResult() throws IOException { + + Flux upload1 = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + Flux upload2 = DataBufferUtils.read(new ClassPathResource("gridfs/another-resource.xml"), + new DefaultDataBufferFactory(), 256); + + operations.store(upload1, "foo.xml", null, null).block(); + operations.store(upload2, "foo2.xml", null, null).block(); + + operations.findFirst(query(where("filename").regex("foo*"))) // + .flatMap(operations::getResource) // + .as(StepVerifier::create) // + .assertNext(actual -> { + + assertThat(actual.getGridFSFile()).isNotNull(); + }).verifyComplete(); + } + + @Test // DATAMONGO-2240 + public void shouldReturnNoGridFsFileWhenAbsent() { + + operations.getResource("absent") // + .as(StepVerifier::create) // + .assertNext(actual -> { + + assertThat(actual.exists()).isFalse(); + assertThat(actual.getGridFSFile()).isEqualTo(Mono.empty()); + }).verifyComplete(); + } + + @Test // DATAMONGO-1855 + public void shouldEmitErrorWhenFindOneRetrievesMoreThanOneResult() throws IOException { + + Flux upload1 = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + Flux upload2 = DataBufferUtils.read(new ClassPathResource("gridfs/another-resource.xml"), + new 
DefaultDataBufferFactory(), 256); + + operations.store(upload1, "foo.xml", null, null).block(); + operations.store(upload2, "foo2.xml", null, null).block(); + + operations.findOne(query(where("filename").regex("foo*"))) // + .as(StepVerifier::create) // + .expectError(IncorrectResultSizeDataAccessException.class) // + .verify(); + } + + @Test // DATAMONGO-1855 + public void getResourcesByPattern() throws IOException { + + byte[] content = StreamUtils.copyToByteArray(resource.getInputStream()); + Flux upload = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + + operations.store(upload, "foo.xml", null, null).block(); + + operations.getResources("foo*") // + .flatMap(ReactiveGridFsResource::getDownloadStream) // + .transform(DataBufferUtils::join) // + .as(StepVerifier::create) // + .consumeNextWith(dataBuffer -> { + + byte[] actual = new byte[dataBuffer.readableByteCount()]; + dataBuffer.read(actual); + + assertThat(actual).isEqualTo(content); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-625 + public void storeSavesGridFsUploadWithGivenIdCorrectly() throws IOException { + + String id = "id-1"; + byte[] content = StreamUtils.copyToByteArray(resource.getInputStream()); + Flux data = DataBufferUtils.read(resource, new DefaultDataBufferFactory(), 256); + + ReactiveGridFsUpload upload = ReactiveGridFsUpload.fromPublisher(data) // + .id(id) // + .filename("gridFsUpload.xml") // + .contentType("xml") // + .build(); + + operations.store(upload).as(StepVerifier::create).expectNext(id).verifyComplete(); + + operations.findOne(query(where("_id").is(id))).flatMap(operations::getResource) + .flatMapMany(ReactiveGridFsResource::getDownloadStream) // + .transform(DataBufferUtils::join) // + .as(StepVerifier::create) // + .consumeNextWith(dataBuffer -> { + + byte[] actual = new byte[dataBuffer.readableByteCount()]; + dataBuffer.read(actual); + + assertThat(actual).isEqualTo(content); + }) // + .verifyComplete(); + + 
operations.findOne(query(where("_id").is(id))).as(StepVerifier::create).consumeNextWith(it -> { + assertThat(it.getFilename()).isEqualTo("gridFsUpload.xml"); + assertThat(it.getId()).isEqualTo(new BsonString(id)); + assertThat(it.getMetadata()).containsValue("xml"); + }).verifyComplete(); + } + + @Test // DATAMONGO-765 + public void considersSkipLimitWhenQueryingFiles() { + + DataBufferFactory bufferFactory = new DefaultDataBufferFactory(); + DataBuffer buffer = bufferFactory.allocateBuffer(0); + Flux.just("a", "aa", "aaa", // + "b", "bb", "bbb", // + "c", "cc", "ccc", // + "d", "dd", "ddd") // + .flatMap(fileName -> operations.store(Mono.just(buffer), fileName)) // + .as(StepVerifier::create) // + .expectNextCount(12) // + .verifyComplete(); + + PageRequest pageRequest = PageRequest.of(2, 3, Sort.Direction.ASC, "filename"); + operations.find(new Query().with(pageRequest)) // + .map(GridFSFile::getFilename) // + .as(StepVerifier::create) // + .expectNext("c", "cc", "ccc") // + .verifyComplete(); + } + + static class Metadata { + String version; + } + + public static String readToString(DataBuffer dataBuffer) { + try { + return FileCopyUtils.copyToString(new InputStreamReader(dataBuffer.asInputStream())); + } catch (IOException e) { + return e.getMessage(); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplateUnitTests.java new file mode 100644 index 0000000000..b8f0ab98a4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/gridfs/ReactiveGridFsTemplateUnitTests.java @@ -0,0 +1,81 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.gridfs; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.reactivestreams.Publisher; +import org.springframework.core.io.buffer.DataBuffer; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.MongoConverter; + +/** + * @author Christoph Strobl + */ +class ReactiveGridFsTemplateUnitTests { + + private ReactiveGridFsTemplateStub template; + + @BeforeEach + void beforeEach() { + template = new ReactiveGridFsTemplateStub(); + } + + @Test // DATAMONGO-2574 + void contentMetadataDoesNotOverrideContentTypeIfSet() { + + template.onStoreReturn(new ObjectId()); + template.store(Flux.empty(), "filename", "json", new Document("meta", "data")); + + assertThat(template.capturedUpload().getOptions().getContentType()).isEqualTo("json"); + assertThat(template.capturedUpload().getOptions().getMetadata()).containsEntry("meta", "data"); + } + + private static class ReactiveGridFsTemplateStub extends ReactiveGridFsTemplate { + + private Object onStoreResult; + private GridFsObject> capturedUpload; + + ReactiveGridFsTemplateStub() { + super(mock(ReactiveMongoDatabaseFactory.class), mock(MongoConverter.class)); + } + + @Override + public Mono store(GridFsObject> upload) { + + 
capturedUpload = upload; + return Mono.just((T) onStoreResult); + } + + ReactiveGridFsTemplateStub onStoreReturn(Object result) { + + this.onStoreResult = result; + return this; + } + + GridFsObject> capturedUpload() { + return capturedUpload; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests.java index af7e55ee93..e70b398f7f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,16 @@ */ package org.springframework.data.mongodb.monitor; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.net.UnknownHostException; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * This test class assumes that you are already running the MongoDB server. 
@@ -35,11 +33,10 @@ * @author Thomas Darimont * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") +@ExtendWith(MongoClientExtension.class) public class MongoMonitorIntegrationTests { - @Autowired MongoClient mongoClient; + static @Client MongoClient mongoClient; @Test public void serverInfo() { @@ -59,8 +56,8 @@ public void getHostNameShouldReturnServerNameReportedByMongo() throws UnknownHos throw e; } - assertThat(hostName, is(notNullValue())); - assertThat(hostName, is("127.0.0.1")); + assertThat(hostName).isNotNull(); + assertThat(hostName).isEqualTo("127.0.0.1:27017"); } @Test diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/Resumeable.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/Resumeable.java index 1289c5dc0a..1fdbb1f188 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/Resumeable.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/monitor/Resumeable.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/ImperativeIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/ImperativeIntegrationTests.java new file mode 100644 index 0000000000..c1b1b4851e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/ImperativeIntegrationTests.java @@ -0,0 +1,96 @@ +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.observability; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.tracing.exporter.FinishedSpan; +import io.micrometer.tracing.test.SampleTestRunner; + +import java.util.List; + +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.PersonRepository; +import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +/** + * Collection of tests that log metrics and tracing with an external tracing tool. + * + * @author Greg Turnquist + * @author Mark Paluch + */ +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = TestConfig.class) +public class ImperativeIntegrationTests extends SampleTestRunner { + + @Autowired PersonRepository repository; + + ImperativeIntegrationTests() { + super(SampleRunnerConfig.builder().build()); + } + + @Override + protected MeterRegistry createMeterRegistry() { + return TestConfig.METER_REGISTRY; + } + + @Override + protected ObservationRegistry createObservationRegistry() { + return TestConfig.OBSERVATION_REGISTRY; + } + + @Override + public SampleTestRunnerConsumer yourCode() { + + return (tracer, meterRegistry) -> { + + repository.deleteAll(); + repository.save(new Person("Dave", "Matthews", 42)); + List people = repository.findByLastname("Matthews"); + + assertThat(people).hasSize(1); + assertThat(people.get(0)).extracting("firstname", "lastname").containsExactly("Dave", "Matthews"); + + repository.deleteAll(); + + 
System.out.println(((SimpleMeterRegistry) meterRegistry).getMetersAsString()); + + assertThat(tracer.getFinishedSpans()).hasSize(5).extracting(FinishedSpan::getName).contains("person.delete", + "person.update", "person.find"); + + for (FinishedSpan span : tracer.getFinishedSpans()) { + + assertThat(span.getTags()).containsEntry("db.system", "mongodb").containsEntry("net.transport", "IP.TCP"); + + if (MongoClientVersion.isVersion5orNewer()) { + assertThat(span.getTags()).containsKeys("db.connection_string", "db.name", "db.operation", + "db.mongodb.collection", "net.peer.name", "net.peer.port"); + } else { + assertThat(span.getTags()).containsKeys("db.connection_string", "db.name", "db.operation", + "db.mongodb.collection", "net.peer.name", "net.peer.port", "net.sock.peer.addr", "net.sock.peer.port"); + } + } + }; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/MongoObservationCommandListenerTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/MongoObservationCommandListenerTests.java new file mode 100644 index 0000000000..5c2cb0b701 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/MongoObservationCommandListenerTests.java @@ -0,0 +1,251 @@ +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.observability; + +import static io.micrometer.core.tck.MeterRegistryAssert.*; +import static org.mockito.Mockito.*; + +import io.micrometer.common.KeyValues; +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.observation.DefaultMeterObservationHandler; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.observation.contextpropagation.ObservationThreadLocalAccessor; + +import org.bson.BsonDocument; +import org.bson.BsonString; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.observability.MongoObservation.LowCardinalityCommandKeyNames; + +import com.mongodb.ConnectionString; +import com.mongodb.RequestContext; +import com.mongodb.ServerAddress; +import com.mongodb.client.SynchronousContextProvider; +import com.mongodb.connection.ClusterId; +import com.mongodb.connection.ConnectionDescription; +import com.mongodb.connection.ServerId; +import com.mongodb.event.CommandFailedEvent; +import com.mongodb.event.CommandStartedEvent; +import com.mongodb.event.CommandSucceededEvent; + +/** + * Series of test cases exercising {@link MongoObservationCommandListener}. 
+ * + * @author Marcin Grzejszczak + * @author Greg Turnquist + * @author Mark Paluch + * @author François Kha + */ +class MongoObservationCommandListenerTests { + + ObservationRegistry observationRegistry; + MeterRegistry meterRegistry; + + MongoObservationCommandListener listener; + + @BeforeEach + void setup() { + + this.meterRegistry = new SimpleMeterRegistry(); + this.observationRegistry = ObservationRegistry.create(); + this.observationRegistry.observationConfig().observationHandler(new DefaultMeterObservationHandler(meterRegistry)); + + this.listener = new MongoObservationCommandListener(observationRegistry); + } + + @Test + void commandStartedShouldNotInstrumentWhenAdminDatabase() { + + // when + listener.commandStarted(new CommandStartedEvent(null, 0, 0, null, "admin", "", null)); + + // then + assertThat(meterRegistry).hasNoMetrics(); + } + + @Test + void commandStartedShouldNotInstrumentWhenNoRequestContext() { + + // when + listener.commandStarted(new CommandStartedEvent(null, 0, 0, null, "some name", "", null)); + + // then + assertThat(meterRegistry).hasNoMetrics(); + } + + @Test + void commandStartedShouldNotInstrumentWhenNoParentSampleInRequestContext() { + + // when + listener.commandStarted(new CommandStartedEvent(new MapRequestContext(), 0, 0, null, "some name", "", null)); + + // then + assertThat(meterRegistry).hasMeterWithName("spring.data.mongodb.command.active"); + } + + @Test + void successfullyCompletedCommandShouldCreateTimerWhenParentSampleInRequestContext() { + + // given + Observation parent = Observation.start("name", observationRegistry); + RequestContext traceRequestContext = getContext(); + + // when + listener.commandStarted(new CommandStartedEvent(traceRequestContext, 0, 0, // + new ConnectionDescription( // + new ServerId( // + new ClusterId("description"), // + new ServerAddress("localhost", 1234))), "database", "insert", // + new BsonDocument("collection", new BsonString("user")))); + listener.commandSucceeded(new 
CommandSucceededEvent(traceRequestContext, 0, 0, null, "insert", null, null, 0)); + + // then + assertThatTimerRegisteredWithTags(); + } + + @Test + void successfullyCompletedCommandWithCollectionHavingCommandNameShouldCreateTimerWhenParentSampleInRequestContext() { + + // given + Observation parent = Observation.start("name", observationRegistry); + RequestContext traceRequestContext = getContext(); + + // when + listener.commandStarted(new CommandStartedEvent(traceRequestContext, 0, 0, // + new ConnectionDescription( // + new ServerId( // + new ClusterId("description"), // + new ServerAddress("localhost", 1234))), // + "database", "aggregate", // + new BsonDocument("aggregate", new BsonString("user")))); + listener.commandSucceeded(new CommandSucceededEvent(traceRequestContext, 0, 0, null, "aggregate", null, null, 0)); + + // then + assertThatTimerRegisteredWithTags(); + } + + @Test + void successfullyCompletedCommandWithoutClusterInformationShouldCreateTimerWhenParentSampleInRequestContext() { + + // given + Observation parent = Observation.start("name", observationRegistry); + RequestContext traceRequestContext = getContext(); + + // when + listener.commandStarted(new CommandStartedEvent(traceRequestContext, 0, 0, null, "database", "insert", + new BsonDocument("collection", new BsonString("user")))); + listener.commandSucceeded(new CommandSucceededEvent(traceRequestContext, 0, 0, null, "insert", null, null, 0)); + + assertThat(meterRegistry).hasTimerWithNameAndTags(MongoObservation.MONGODB_COMMAND_OBSERVATION.getName(), + KeyValues.of(LowCardinalityCommandKeyNames.MONGODB_COLLECTION.withValue("user"), + LowCardinalityCommandKeyNames.DB_NAME.withValue("database"), + LowCardinalityCommandKeyNames.MONGODB_COMMAND.withValue("insert"), + LowCardinalityCommandKeyNames.DB_SYSTEM.withValue("mongodb")).and("error", "none")); + } + + @Test + void commandWithErrorShouldCreateTimerWhenParentSampleInRequestContext() { + + // given + Observation parent = 
Observation.start("name", observationRegistry); + RequestContext traceRequestContext = getContext(); + + // when + listener.commandStarted(new CommandStartedEvent(traceRequestContext, 0, 0, // + new ConnectionDescription( // + new ServerId( // + new ClusterId("description"), // + new ServerAddress("localhost", 1234))), // + "database", "insert", // + new BsonDocument("collection", new BsonString("user")))); + listener.commandFailed( // + new CommandFailedEvent(traceRequestContext, 0, 0, null, "db", "insert", 0, new IllegalAccessException())); + + // then + assertThatTimerRegisteredWithTags(); + } + + @Test // GH-4481 + void completionShouldIgnoreIncompatibleObservationContext() { + + // given + RequestContext traceRequestContext = getContext(); + + Observation observation = mock(Observation.class); + traceRequestContext.put(ObservationThreadLocalAccessor.KEY, observation); + + // when + listener.commandSucceeded(new CommandSucceededEvent(traceRequestContext, 0, 0, null, "insert", null, null, 0)); + + verify(observation).getContext(); + verifyNoMoreInteractions(observation); + } + + @Test // GH-4481 + void failureShouldIgnoreIncompatibleObservationContext() { + + // given + RequestContext traceRequestContext = getContext(); + + Observation observation = mock(Observation.class); + traceRequestContext.put(ObservationThreadLocalAccessor.KEY, observation); + + // when + listener.commandFailed(new CommandFailedEvent(traceRequestContext, 0, 0, null, "db", "insert", 0, null)); + + verify(observation).getContext(); + verifyNoMoreInteractions(observation); + } + + @Test // GH-4321 + void shouldUseObservationConvention() { + + // given + MongoHandlerObservationConvention customObservationConvention = new MongoHandlerObservationConvention() { + @Override + public boolean supportsContext(Observation.Context context) { + return MongoHandlerObservationConvention.super.supportsContext(context); + } + + @Override + public String getName() { + return "custom.name"; + } + }; + 
this.listener = new MongoObservationCommandListener(observationRegistry, mock(ConnectionString.class), + customObservationConvention); + + // when + listener.commandStarted(new CommandStartedEvent(new MapRequestContext(), 0, 0, null, "some name", "", null)); + + // then + assertThat(meterRegistry).hasMeterWithName("custom.name.active"); + } + + private RequestContext getContext() { + return ((SynchronousContextProvider) ContextProviderFactory.create(observationRegistry)).getContext(); + } + + private void assertThatTimerRegisteredWithTags() { + + assertThat(meterRegistry) // + .hasTimerWithNameAndTags(MongoObservation.MONGODB_COMMAND_OBSERVATION.getName(), + KeyValues.of(LowCardinalityCommandKeyNames.MONGODB_COLLECTION.withValue("user"))); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/ReactiveIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/ReactiveIntegrationTests.java new file mode 100644 index 0000000000..9bfe5a8ce2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/ReactiveIntegrationTests.java @@ -0,0 +1,89 @@ +/* + * Copyright 2013-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.observability; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.ReactivePersonRepository; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import io.micrometer.observation.Observation; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.observation.contextpropagation.ObservationThreadLocalAccessor; +import io.micrometer.tracing.exporter.FinishedSpan; +import io.micrometer.tracing.test.SampleTestRunner; +import reactor.test.StepVerifier; +import reactor.util.context.Context; + +/** + * Collection of tests that log metrics and tracing with an external tracing tool. 
+ * + * @author Mark Paluch + */ +@ExtendWith(SpringExtension.class) +@ContextConfiguration(classes = TestConfig.class) +public class ReactiveIntegrationTests extends SampleTestRunner { + + @Autowired ReactivePersonRepository repository; + + ReactiveIntegrationTests() { + super(SampleRunnerConfig.builder().build()); + } + + @Override + protected MeterRegistry createMeterRegistry() { + return TestConfig.METER_REGISTRY; + } + + @Override + protected ObservationRegistry createObservationRegistry() { + return TestConfig.OBSERVATION_REGISTRY; + } + + @Override + public SampleTestRunnerConsumer yourCode() { + + return (tracer, meterRegistry) -> { + + Observation intermediate = Observation.start("intermediate", createObservationRegistry()); + + repository.deleteAll() // + .then(repository.save(new Person("Dave", "Matthews", 42))) // + .contextWrite(Context.of(ObservationThreadLocalAccessor.KEY, intermediate)) // + .as(StepVerifier::create).expectNextCount(1)// + .verifyComplete(); + + repository.findByLastname("Matthews") // + .contextWrite(Context.of(ObservationThreadLocalAccessor.KEY, intermediate)) // + .as(StepVerifier::create).assertNext(actual -> { + + assertThat(actual).extracting("firstname", "lastname").containsExactly("Dave", "Matthews"); + }).verifyComplete(); + + intermediate.stop(); + System.out.println(((SimpleMeterRegistry) meterRegistry).getMetersAsString()); + + assertThat(tracer.getFinishedSpans()).hasSize(5).extracting(FinishedSpan::getName).contains("person.delete", + "person.update", "person.find"); + }; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/TestConfig.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/TestConfig.java new file mode 100644 index 0000000000..7e7e2c636c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/observability/TestConfig.java @@ -0,0 +1,165 @@ +/* + * Copyright 2022-2025 the original author or 
authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.observability; + +import java.util.Properties; + +import org.springframework.beans.factory.config.PropertiesFactoryBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ClassPathResource; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.PersonRepository; +import org.springframework.data.mongodb.repository.ReactivePersonRepository; +import 
org.springframework.data.mongodb.repository.SampleEvaluationContextExtension; +import org.springframework.data.mongodb.repository.support.MongoRepositoryFactoryBean; +import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactoryBean; +import org.springframework.data.repository.core.support.PropertiesBasedNamedQueries; + +import com.mongodb.ConnectionString; +import com.mongodb.MongoClientSettings; +import com.mongodb.client.MongoClients; + +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.observation.DefaultMeterObservationHandler; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import io.micrometer.observation.ObservationRegistry; +import io.micrometer.tracing.Tracer; +import io.micrometer.tracing.test.simple.SimpleTracer; + +/** + * @author Mark Paluch + */ +@Configuration +class TestConfig { + + static final MeterRegistry METER_REGISTRY = new SimpleMeterRegistry(); + static final ObservationRegistry OBSERVATION_REGISTRY = ObservationRegistry.create(); + + static { + OBSERVATION_REGISTRY.observationConfig().observationHandler(new DefaultMeterObservationHandler(METER_REGISTRY)); + } + + @Bean + MongoDatabaseFactory mongoDatabaseFactory(MongoClientSettings settings) { + return new SimpleMongoClientDatabaseFactory(MongoClients.create(settings), "observable"); + } + + @Bean + ReactiveMongoDatabaseFactory reactiveMongoDatabaseFactory(MongoClientSettings settings) { + return new SimpleReactiveMongoDatabaseFactory(com.mongodb.reactivestreams.client.MongoClients.create(settings), + "observable"); + } + + @Bean + MongoClientSettings mongoClientSettings(ObservationRegistry observationRegistry) { + + ConnectionString connectionString = new ConnectionString( + String.format("mongodb://%s:%s/?w=majority&uuidrepresentation=javaLegacy", "127.0.0.1", 27017)); + + MongoClientSettings settings = MongoClientSettings.builder() // + .addCommandListener(new 
MongoObservationCommandListener(observationRegistry, connectionString)) // + .contextProvider(ContextProviderFactory.create(observationRegistry)) // + .applyConnectionString(connectionString) // + .build(); + + return settings; + } + + @Bean + MappingMongoConverter mongoConverter(MongoMappingContext mappingContext, MongoDatabaseFactory factory) { + return new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext); + } + + @Bean + MongoMappingContext mappingContext() { + return new MongoMappingContext(); + } + + @Bean + MongoTemplate mongoTemplate(MongoDatabaseFactory mongoDatabaseFactory, MongoConverter mongoConverter) { + + MongoTemplate template = new MongoTemplate(mongoDatabaseFactory, mongoConverter); + return template; + } + + @Bean + ReactiveMongoTemplate reactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, + MongoConverter mongoConverter) { + + ReactiveMongoTemplate template = new ReactiveMongoTemplate(mongoDatabaseFactory, mongoConverter); + return template; + } + + @Bean + public PropertiesFactoryBean namedQueriesProperties() { + + PropertiesFactoryBean bean = new PropertiesFactoryBean(); + bean.setLocation(new ClassPathResource("META-INF/mongo-named-queries.properties")); + return bean; + } + + @Bean + MongoRepositoryFactoryBean personRepositoryFactoryBean(MongoOperations operations, + Properties namedQueriesProperties) { + + MongoRepositoryFactoryBean factoryBean = new MongoRepositoryFactoryBean<>( + PersonRepository.class); + factoryBean.setNamedQueries(new PropertiesBasedNamedQueries(namedQueriesProperties)); + factoryBean.setMongoOperations(operations); + factoryBean.setCreateIndexesForQueryMethods(true); + return factoryBean; + } + + @Bean + ReactiveMongoRepositoryFactoryBean reactivePersonRepositoryFactoryBean( + ReactiveMongoOperations operations, Properties namedQueriesProperties) { + + ReactiveMongoRepositoryFactoryBean factoryBean = new ReactiveMongoRepositoryFactoryBean<>( + ReactivePersonRepository.class); 
+ factoryBean.setNamedQueries(new PropertiesBasedNamedQueries(namedQueriesProperties)); + factoryBean.setReactiveMongoOperations(operations); + factoryBean.setCreateIndexesForQueryMethods(true); + return factoryBean; + } + + @Bean + SampleEvaluationContextExtension contextExtension() { + return new SampleEvaluationContextExtension(); + } + + @Bean + ObservationRegistry registry() { + return OBSERVATION_REGISTRY; + } + + @Bean + Tracer tracer() { + return new SimpleTracer(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/PerformanceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/PerformanceTests.java index 40a87d092a..e815cc6e7c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/PerformanceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/PerformanceTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,28 +19,18 @@ import static org.springframework.data.mongodb.core.query.Query.*; import java.text.DecimalFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; +import java.util.*; import java.util.regex.Pattern; import java.util.stream.Collectors; import org.bson.Document; import org.bson.types.ObjectId; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.core.Constants; import org.springframework.data.annotation.PersistenceConstructor; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; @@ -51,14 +41,13 @@ import org.springframework.util.StopWatch; import org.springframework.util.StringUtils; -import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; -import com.mongodb.DBObject; -import com.mongodb.MongoClient; import com.mongodb.WriteConcern; +import com.mongodb.client.FindIterable; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import 
com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CreateCollectionOptions; /** * Test class to execute performance tests for plain MongoDB driver usage, {@link MongoTemplate} and the repositories @@ -76,7 +65,7 @@ public class PerformanceTests { private static final StopWatch watch = new StopWatch(); private static final Collection IGNORED_WRITE_CONCERNS = Arrays.asList("MAJORITY", "REPLICAS_SAFE", "FSYNC_SAFE", "FSYNCED", "JOURNAL_SAFE", "JOURNALED", "REPLICA_ACKNOWLEDGED", "W2", "W3"); - private static final int COLLECTION_SIZE = 1024-2018 * 1024-2018 * 256; // 256 MB + private static final int COLLECTION_SIZE = 1024 - 2018 * 1024 - 2018 * 256; // 256 MB private static final Collection COLLECTION_NAMES = Arrays.asList("template", "driver", "person"); MongoClient mongo; @@ -84,19 +73,19 @@ public class PerformanceTests { PersonRepository repository; MongoConverter converter; - @Before + @BeforeEach public void setUp() throws Exception { - this.mongo = new MongoClient(); + this.mongo = MongoClients.create(); - SimpleMongoDbFactory mongoDbFactory = new SimpleMongoDbFactory(this.mongo, DATABASE_NAME); + SimpleMongoClientDatabaseFactory mongoDbFactory = new SimpleMongoClientDatabaseFactory(this.mongo, DATABASE_NAME); MongoMappingContext context = new MongoMappingContext(); context.setInitialEntitySet(Collections.singleton(Person.class)); context.afterPropertiesSet(); this.converter = new MappingMongoConverter(new DefaultDbRefResolver(mongoDbFactory), context); - this.operations = new MongoTemplate(new SimpleMongoDbFactory(this.mongo, DATABASE_NAME), converter); + this.operations = new MongoTemplate(new SimpleMongoClientDatabaseFactory(this.mongo, DATABASE_NAME), converter); MongoRepositoryFactoryBean factory = new MongoRepositoryFactoryBean( PersonRepository.class); @@ -104,7 +93,10 @@ public void setUp() throws Exception { factory.afterPropertiesSet(); this.repository = factory.getObject(); + } + void 
afterEach() { + mongo.close(); } @Test @@ -113,11 +105,11 @@ public void writeWithWriteConcerns() { public void doWithWriteConcern(String constantName, WriteConcern concern) { writeHeadline("WriteConcern: " + constantName); System.out.println(String.format("Writing %s objects using plain driver took %sms", NUMBER_OF_PERSONS, - writingObjectsUsingPlainDriver(NUMBER_OF_PERSONS))); + writingObjectsUsingPlainDriver(NUMBER_OF_PERSONS, concern))); System.out.println(String.format("Writing %s objects using template took %sms", NUMBER_OF_PERSONS, - writingObjectsUsingMongoTemplate(NUMBER_OF_PERSONS))); + writingObjectsUsingMongoTemplate(NUMBER_OF_PERSONS, concern))); System.out.println(String.format("Writing %s objects using repository took %sms", NUMBER_OF_PERSONS, - writingObjectsUsingRepositories(NUMBER_OF_PERSONS))); + writingObjectsUsingRepositories(NUMBER_OF_PERSONS, concern))); writeFooter(); } }); @@ -149,7 +141,7 @@ public List doInWatch() { List persons = new ArrayList(); for (Document document : documents) { - persons.add(Person.from(new BasicDBObject(document))); + persons.add(Person.from(document)); } return persons; @@ -181,13 +173,10 @@ public List doInWatch() { @Test public void writeAndRead() throws Exception { - - mongo.setWriteConcern(WriteConcern.SAFE); - - readsAndWrites(NUMBER_OF_PERSONS, ITERATIONS); + readsAndWrites(NUMBER_OF_PERSONS, ITERATIONS, WriteConcern.ACKNOWLEDGED); } - private void readsAndWrites(int numberOfPersons, int iterations) { + private void readsAndWrites(int numberOfPersons, int iterations, WriteConcern writeConcern) { Statistics statistics = new Statistics("Reading " + numberOfPersons + " - After %s iterations"); @@ -195,9 +184,11 @@ private void readsAndWrites(int numberOfPersons, int iterations) { setupCollections(); - statistics.registerTime(Api.DRIVER, Mode.WRITE, writingObjectsUsingPlainDriver(numberOfPersons)); - statistics.registerTime(Api.TEMPLATE, Mode.WRITE, writingObjectsUsingMongoTemplate(numberOfPersons)); - 
statistics.registerTime(Api.REPOSITORY, Mode.WRITE, writingObjectsUsingRepositories(numberOfPersons)); + statistics.registerTime(Api.DRIVER, Mode.WRITE, writingObjectsUsingPlainDriver(numberOfPersons, writeConcern)); + statistics.registerTime(Api.TEMPLATE, Mode.WRITE, + writingObjectsUsingMongoTemplate(numberOfPersons, writeConcern)); + statistics.registerTime(Api.REPOSITORY, Mode.WRITE, + writingObjectsUsingRepositories(numberOfPersons, writeConcern)); statistics.registerTime(Api.DRIVER, Mode.READ, readingUsingPlainDriver()); statistics.registerTime(Api.TEMPLATE, Mode.READ, readingUsingTemplate()); @@ -247,8 +238,6 @@ private void executeWithWriteConcerns(WriteConcernCallback callback) { } WriteConcern writeConcern = (WriteConcern) constants.asObject(constantName); - mongo.setWriteConcern(writeConcern); - setupCollections(); callback.doWithWriteConcern(constantName, writeConcern); @@ -257,36 +246,49 @@ private void executeWithWriteConcerns(WriteConcernCallback callback) { private void setupCollections() { - DB db = this.mongo.getDB(DATABASE_NAME); + MongoDatabase db = this.mongo.getDatabase(DATABASE_NAME); for (String collectionName : COLLECTION_NAMES) { - DBCollection collection = db.getCollection(collectionName); + + MongoCollection collection = db.getCollection(collectionName); collection.drop(); - collection.getDB().command(getCreateCollectionCommand(collectionName)); - collection.createIndex(new BasicDBObject("firstname", -1)); - collection.createIndex(new BasicDBObject("lastname", -1)); + + CreateCollectionOptions collectionOptions = new CreateCollectionOptions(); + collectionOptions.capped(false); + collectionOptions.sizeInBytes(COLLECTION_SIZE); + + db.createCollection(collectionName, collectionOptions); + + collection.createIndex(new Document("firstname", -1)); + collection.createIndex(new Document("lastname", -1)); } } - private DBObject getCreateCollectionCommand(String name) { - DBObject document = new BasicDBObject(); + private Document 
getCreateCollectionCommand(String name) { + Document document = new Document(); document.put("createCollection", name); document.put("capped", false); document.put("size", COLLECTION_SIZE); return document; } - private long writingObjectsUsingPlainDriver(int numberOfPersons) { + private long writingObjectsUsingPlainDriver(int numberOfPersons, WriteConcern writeConcern) { - DBCollection collection = mongo.getDB(DATABASE_NAME).getCollection("driver"); + MongoCollection collection = mongo.getDatabase(DATABASE_NAME).getCollection("driver") + .withWriteConcern(writeConcern); + ; List persons = getPersonObjects(numberOfPersons); - executeWatched(() -> persons.stream().map(it -> collection.save(new BasicDBObject(it.toDocument())))); + executeWatched(() -> persons.stream().map(Person::toDocument).map(it -> { + + collection.insertOne(it); + return true; + })); return watch.getLastTaskTimeMillis(); } - private long writingObjectsUsingRepositories(int numberOfPersons) { + private long writingObjectsUsingRepositories(int numberOfPersons, WriteConcern writeConcern) { List persons = getPersonObjects(numberOfPersons); @@ -295,9 +297,10 @@ private long writingObjectsUsingRepositories(int numberOfPersons) { return watch.getLastTaskTimeMillis(); } - private long writingObjectsUsingMongoTemplate(int numberOfPersons) { + private long writingObjectsUsingMongoTemplate(int numberOfPersons, WriteConcern writeConcern) { List persons = getPersonObjects(numberOfPersons); + operations.setWriteConcern(writeConcern); executeWatched(() -> persons.stream()// .peek(it -> operations.save(it, "template"))// @@ -308,7 +311,7 @@ private long writingObjectsUsingMongoTemplate(int numberOfPersons) { private long readingUsingPlainDriver() { - executeWatched(() -> toPersons(mongo.getDB(DATABASE_NAME).getCollection("driver").find())); + executeWatched(() -> toPersons(mongo.getDatabase(DATABASE_NAME).getCollection("driver").find())); return watch.getLastTaskTimeMillis(); } @@ -329,10 +332,10 @@ private long 
queryUsingPlainDriver() { executeWatched(() -> { - DBCollection collection = mongo.getDB(DATABASE_NAME).getCollection("driver"); + MongoCollection collection = mongo.getDatabase(DATABASE_NAME).getCollection("driver"); - BasicDBObject regex = new BasicDBObject("$regex", Pattern.compile(".*1.*")); - BasicDBObject query = new BasicDBObject("addresses.zipCode", regex); + Document regex = new Document("$regex", Pattern.compile(".*1.*")); + Document query = new Document("addresses.zipCode", regex); return toPersons(collection.find(query)); }); @@ -385,12 +388,13 @@ private T executeWatched(WatchCallback callback) { } } - private static List toPersons(DBCursor cursor) { + private static List toPersons(FindIterable cursor) { List persons = new ArrayList(); - while (cursor.hasNext()) { - persons.add(Person.from(cursor.next())); + Iterator it = cursor.iterator(); + while (it.hasNext()) { + persons.add(Person.from(it.next())); } return persons; @@ -410,15 +414,15 @@ public Person(String firstname, String lastname, List
          addresses) { this.orders = new HashSet(); } - public static Person from(DBObject source) { + public static Person from(Document source) { - BasicDBList addressesSource = (BasicDBList) source.get("addresses"); + List addressesSource = (List) source.get("addresses"); List
          addresses = new ArrayList
          (addressesSource.size()); for (Object addressSource : addressesSource) { addresses.add(Address.from((Document) addressSource)); } - BasicDBList ordersSource = (BasicDBList) source.get("orders"); + List ordersSource = (List) source.get("orders"); Set orders = new HashSet(ordersSource.size()); for (Object orderSource : ordersSource) { orders.add(Order.from((Document) orderSource)); @@ -460,7 +464,7 @@ public Address(String zipCode, String city, Set types) { public static Address from(Document source) { String zipCode = (String) source.get("zipCode"); String city = (String) source.get("city"); - BasicDBList types = (BasicDBList) source.get("types"); + List types = (List) source.get("types"); return new Address(zipCode, city, new HashSet(readFromBasicDBList(types, AddressType.class))); } @@ -474,7 +478,7 @@ public Document toDocument() { } } - private static > List readFromBasicDBList(BasicDBList source, Class type) { + private static > List readFromBasicDBList(List source, Class type) { List result = new ArrayList(source.size()); for (Object object : source) { @@ -483,8 +487,8 @@ private static > List readFromBasicDBList(BasicDBList sourc return result; } - private static > BasicDBList toBasicDBList(Collection enums) { - BasicDBList result = new BasicDBList(); + private static > List toBasicDBList(Collection enums) { + List result = new ArrayList<>(); for (T element : enums) { result.add(element.toString()); } @@ -517,7 +521,7 @@ public Order(List lineItems, Date createdAt, Status status) { public static Order from(Document source) { - BasicDBList lineItemsSource = (BasicDBList) source.get("lineItems"); + List lineItemsSource = (List) source.get("lineItems"); List lineItems = new ArrayList(lineItemsSource.size()); for (Object lineItemSource : lineItemsSource) { lineItems.add(LineItem.from((Document) lineItemSource)); @@ -583,7 +587,7 @@ public Document toDocument() { private static List pickRandomNumerOfItemsFrom(List source) { - 
Assert.isTrue(!source.isEmpty(), "Source must not be empty!"); + Assert.isTrue(!source.isEmpty(), "Source must not be empty"); Random random = new Random(); int numberOfItems = random.nextInt(source.size()); @@ -623,8 +627,8 @@ private interface Convertible { Document toDocument(); } - private static BasicDBList writeAll(Collection convertibles) { - BasicDBList result = new BasicDBList(); + private static List writeAll(Collection convertibles) { + List result = new ArrayList<>(); for (Convertible convertible : convertibles) { result.add(convertible.toDocument()); } @@ -797,10 +801,6 @@ public String print(double referenceAverage, double referenceMedian) { String.format(" %s%%", DEVIATION_FORMAT.format(getMediaDeviationFrom(referenceMedian)))) + '\n'; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return times.isEmpty() ? "" @@ -856,10 +856,6 @@ public String print() { return builder.toString(); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java index 37951e21b8..edda1aad01 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -28,8 +28,9 @@ import org.bson.Document; import org.bson.types.ObjectId; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.core.Constants; import org.springframework.data.annotation.PersistenceConstructor; import org.springframework.data.mongodb.core.ReactiveMongoOperations; @@ -40,12 +41,14 @@ import org.springframework.data.mongodb.core.convert.DbRefResolverCallback; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.ReferenceLookupDelegate; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.ReactiveMongoRepository; import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StopWatch; import org.springframework.util.StringUtils; @@ -74,7 +77,7 @@ public class ReactivePerformanceTests { private static final StopWatch watch = new StopWatch(); private static final Collection IGNORED_WRITE_CONCERNS = Arrays.asList("MAJORITY", "REPLICAS_SAFE", "FSYNC_SAFE", "FSYNCED", "JOURNAL_SAFE", "JOURNALED", "REPLICA_ACKNOWLEDGED"); - private static final int COLLECTION_SIZE = 1024-2018 * 
1024-2018 * 256; // 256 MB + private static final int COLLECTION_SIZE = 1024 - 2018 * 1024 - 2018 * 256; // 256 MB private static final Collection COLLECTION_NAMES = Arrays.asList("template", "driver", "person"); MongoClient mongo; @@ -82,7 +85,7 @@ public class ReactivePerformanceTests { ReactivePersonRepository repository; MongoConverter converter; - @Before + @BeforeEach public void setUp() throws Exception { mongo = MongoClients.create(); @@ -95,9 +98,17 @@ public void setUp() throws Exception { context.afterPropertiesSet(); converter = new MappingMongoConverter(new DbRefResolver() { + + @Nullable + @Override + public Object resolveReference(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) { + return null; + } + @Override - public Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, - DbRefResolverCallback callback, DbRefProxyHandler proxyHandler) { + public Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback, + DbRefProxyHandler proxyHandler) { return null; } @@ -116,6 +127,7 @@ public Document fetch(DBRef dbRef) { public List bulkFetch(List dbRefs) { return null; } + }, context); operations = new ReactiveMongoTemplate(mongoDbFactory, converter); @@ -123,6 +135,11 @@ public List bulkFetch(List dbRefs) { repository = factory.getRepository(ReactivePersonRepository.class); } + @AfterEach + void afterEach() { + mongo.close(); + } + @Test // DATAMONGO-1444 public void writeWithWriteConcerns() { executeWithWriteConcerns((constantName, concern) -> { @@ -196,7 +213,7 @@ private long convertUsingConverter(final List dbObjects) { @Test // DATAMONGO-1444 public void writeAndRead() throws Exception { - readsAndWrites(NUMBER_OF_PERSONS, ITERATIONS, WriteConcern.SAFE); + readsAndWrites(NUMBER_OF_PERSONS, ITERATIONS, WriteConcern.ACKNOWLEDGED); } private void readsAndWrites(int numberOfPersons, int iterations, WriteConcern concern) { 
@@ -629,7 +646,7 @@ public Document toDocument() { private static List pickRandomNumerOfItemsFrom(List source) { - Assert.isTrue(!source.isEmpty(), "Source must not be empty!"); + Assert.isTrue(!source.isEmpty(), "Source must not be empty"); Random random = new Random(); int numberOfItems = random.nextInt(source.size()); @@ -843,10 +860,6 @@ public String print(double referenceAverage, double referenceMedian) { String.format(" %s%%", DEVIATION_FORMAT.format(getMediaDeviationFrom(referenceMedian)))) + '\n'; } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return times.isEmpty() ? "" @@ -902,10 +915,6 @@ public String print() { return builder.toString(); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java index f81c05100a..3f2e60f4c4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,35 +16,33 @@ package org.springframework.data.mongodb.repository; import static java.util.Arrays.*; -import static org.assertj.core.api.Assertions.assertThat; -import static org.hamcrest.Matchers.*; -import static org.hamcrest.Matchers.not; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.Assumptions.*; import static org.springframework.data.geo.Metrics.*; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; +import java.util.Date; import java.util.HashSet; import java.util.List; -import java.util.Optional; +import java.util.Set; +import java.util.UUID; +import java.util.regex.Pattern; import java.util.stream.Collectors; +import java.util.stream.IntStream; import java.util.stream.Stream; -import org.hamcrest.Matchers; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; +import org.assertj.core.api.Assertions; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DuplicateKeyException; import org.springframework.dao.IncorrectResultSizeDataAccessException; -import org.springframework.data.domain.Example; -import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageRequest; -import org.springframework.data.domain.Range; -import org.springframework.data.domain.Slice; -import org.springframework.data.domain.Sort; +import org.springframework.data.domain.*; 
+import org.springframework.data.domain.ExampleMatcher.GenericPropertyMatcher; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.geo.Box; import org.springframework.data.geo.Circle; @@ -55,13 +53,22 @@ import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; import org.springframework.data.geo.Polygon; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.mongodb.repository.Person.Sex; import org.springframework.data.mongodb.repository.SampleEvaluationContextExtension.SampleSecurityContextHolder; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension.DirtiesState; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension.ProvidesState; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; import org.springframework.data.querydsl.QSort; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; import org.springframework.test.util.ReflectionTestUtils; /** @@ -74,10 +81,9 @@ * @author Fırat KÜÇÜK * @author Edward Prentice */ -@RunWith(SpringJUnit4ClassRunner.class) -public abstract class AbstractPersonRepositoryIntegrationTests { - - public @Rule ExpectedException expectedException = ExpectedException.none(); +@ExtendWith({ SpringExtension.class, DirtiesStateExtension.class }) 
+@TestInstance(TestInstance.Lifecycle.PER_CLASS) +public abstract class AbstractPersonRepositoryIntegrationTests implements DirtiesStateExtension.StateFunctions { @Autowired protected PersonRepository repository; @@ -88,8 +94,11 @@ public abstract class AbstractPersonRepositoryIntegrationTests { List all; - @Before - public void setUp() throws InterruptedException { + public void clear() { + repository.deleteAll(); + } + + public void setupState() { repository.deleteAll(); @@ -97,225 +106,256 @@ public void setUp() throws InterruptedException { oliver = new Person("Oliver August", "Matthews", 4); carter = new Person("Carter", "Beauford", 49); carter.setSkills(Arrays.asList("Drums", "percussion", "vocals")); - Thread.sleep(10); + boyd = new Person("Boyd", "Tinsley", 45); boyd.setSkills(Arrays.asList("Violin", "Electric Violin", "Viola", "Mandolin", "Vocals", "Guitar")); stefan = new Person("Stefan", "Lessard", 34); leroi = new Person("Leroi", "Moore", 41); - alicia = new Person("Alicia", "Keys", 30, Sex.FEMALE); - person = new QPerson("person"); - all = repository.saveAll(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)); + Arrays.asList(boyd, stefan, leroi, alicia).forEach(it -> { + it.createdAt = new Date(dave.createdAt.getTime() + 1000L); + }); + + List toSave = asList(oliver, dave, carter, boyd, stefan, leroi, alicia); + toSave.forEach(it -> it.setId(null)); + + all = repository.saveAll(toSave); } @Test - public void findsPersonById() throws Exception { + void findsPersonById() { - assertThat(repository.findById(dave.getId().toString()), is(Optional.of(dave))); + assertThat(repository.findById(dave.getId())).contains(dave); } @Test - public void findsAllMusicians() throws Exception { + void findsAllMusicians() { List result = repository.findAll(); - assertThat(result.size(), is(all.size())); - assertThat(result.containsAll(all), is(true)); + assertThat(result).hasSameSizeAs(all).containsAll(all); } @Test - public void findsAllWithGivenIds() { + 
void findsAllWithGivenIds() { - Iterable result = repository.findAllById(Arrays.asList(dave.id, boyd.id)); - assertThat(result, hasItems(dave, boyd)); - assertThat(result, not(hasItems(oliver, carter, stefan, leroi, alicia))); + Iterable result = repository.findAllById(asList(dave.id, boyd.id)); + assertThat(result).contains(dave, boyd).doesNotContain(oliver, carter, stefan, leroi, alicia); } @Test - public void deletesPersonCorrectly() throws Exception { + @DirtiesState + void deletesPersonCorrectly() { repository.delete(dave); List result = repository.findAll(); - assertThat(result.size(), is(all.size() - 1)); - assertThat(result, not(hasItem(dave))); + assertThat(result).hasSize(all.size() - 1).doesNotContain(dave); } @Test - public void deletesPersonByIdCorrectly() { + @DirtiesState + void deletesPersonByIdCorrectly() { - repository.deleteById(dave.getId().toString()); + repository.deleteById(dave.getId()); List result = repository.findAll(); - assertThat(result.size(), is(all.size() - 1)); - assertThat(result, not(hasItem(dave))); + assertThat(result).hasSize(all.size() - 1).doesNotContain(dave); } @Test - public void findsPersonsByLastname() throws Exception { + void findsPersonsByLastname() { List result = repository.findByLastname("Beauford"); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(carter)); + assertThat(result).hasSize(1).contains(carter); } @Test - public void findsPersonsByFirstname() { + void findsPersonsByFirstname() { List result = repository.findByThePersonsFirstname("Leroi"); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(leroi)); - assertThat(result.get(0).getAge(), is(nullValue())); + assertThat(result).hasSize(1).contains(leroi); + assertThat(result.get(0).getAge()).isNull(); } @Test - public void findsPersonsByFirstnameLike() throws Exception { + void findsPersonsByFirstnameLike() { List result = repository.findByFirstnameLike("Bo*"); - assertThat(result.size(), is(1)); - assertThat(result, 
hasItem(boyd)); + assertThat(result).hasSize(1).contains(boyd); } @Test // DATAMONGO-1608 - public void findByFirstnameLikeWithNull() { + void findByFirstnameLikeWithNull() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("property 'firstname'"); - - repository.findByFirstnameLike(null); + assertThatIllegalArgumentException().isThrownBy(() -> repository.findByFirstnameLike(null)); } @Test - public void findsPagedPersons() throws Exception { + void findsPagedPersons() { Page result = repository.findAll(PageRequest.of(1, 2, Direction.ASC, "lastname", "firstname")); - assertThat(result.isFirst(), is(false)); - assertThat(result.isLast(), is(false)); - assertThat(result, hasItems(dave, stefan)); + assertThat(result.isFirst()).isFalse(); + assertThat(result.isLast()).isFalse(); + assertThat(result).contains(dave, stefan); + } + + @Test // GH-4308 + void appliesScrollPositionCorrectly() { + + Window page = repository.findTop2ByLastnameLikeOrderByLastnameAscFirstnameAsc("*a*", + ScrollPosition.keyset()); + + assertThat(page.isLast()).isFalse(); + assertThat(page.size()).isEqualTo(2); + assertThat(page).contains(carter); + } + + @Test // GH-4397 + void appliesLimitToScrollingCorrectly() { + + Window page = repository.findByLastnameLikeOrderByLastnameAscFirstnameAsc("*a*", + ScrollPosition.keyset(), Limit.of(2)); + + assertThat(page.isLast()).isFalse(); + assertThat(page.size()).isEqualTo(2); + assertThat(page).contains(carter); + } + + @Test // GH-4308 + void appliesScrollPositionWithProjectionCorrectly() { + + Window page = repository.findCursorProjectionByLastnameLike("*a*", + PageRequest.of(0, 2, Sort.by(Direction.ASC, "lastname", "firstname"))); + + assertThat(page.isLast()).isFalse(); + assertThat(page.size()).isEqualTo(2); + + assertThat(page).element(0).isEqualTo(new PersonSummaryDto(carter.getFirstname(), carter.getLastname())); } @Test - public void executesPagedFinderCorrectly() throws Exception { + void 
executesPagedFinderCorrectly() { Page page = repository.findByLastnameLike("*a*", PageRequest.of(0, 2, Direction.ASC, "lastname", "firstname")); - assertThat(page.isFirst(), is(true)); - assertThat(page.isLast(), is(false)); - assertThat(page.getNumberOfElements(), is(2)); - assertThat(page, hasItems(carter, stefan)); + assertThat(page.isFirst()).isTrue(); + assertThat(page.isLast()).isFalse(); + assertThat(page.getNumberOfElements()).isEqualTo(2); + assertThat(page).contains(carter, stefan); + } + + @Test // GH-4397 + void executesFinderCorrectlyWithSortAndLimit() { + + List page = repository.findByLastnameLike("*a*", Sort.by(Direction.ASC, "lastname", "firstname"), Limit.of(2)); + + assertThat(page).containsExactly(carter, stefan); } @Test - public void executesPagedFinderWithAnnotatedQueryCorrectly() throws Exception { + void executesPagedFinderWithAnnotatedQueryCorrectly() { Page page = repository.findByLastnameLikeWithPageable(".*a.*", PageRequest.of(0, 2, Direction.ASC, "lastname", "firstname")); - assertThat(page.isFirst(), is(true)); - assertThat(page.isLast(), is(false)); - assertThat(page.getNumberOfElements(), is(2)); - assertThat(page, hasItems(carter, stefan)); + assertThat(page.isFirst()).isTrue(); + assertThat(page.isLast()).isFalse(); + assertThat(page.getNumberOfElements()).isEqualTo(2); + assertThat(page).contains(carter, stefan); } @Test - public void findsPersonInAgeRangeCorrectly() throws Exception { + void findsPersonInAgeRangeCorrectly() { List result = repository.findByAgeBetween(40, 45); - assertThat(result.size(), is(2)); - assertThat(result, hasItems(dave, leroi)); + assertThat(result).hasSize(2).contains(dave, leroi); } @Test - public void findsPersonByShippingAddressesCorrectly() throws Exception { + void findsPersonByShippingAddressesCorrectly() { Address address = new Address("Foo Street 1", "C0123", "Bar"); dave.setShippingAddresses(new HashSet
          (asList(address))); repository.save(dave); - assertThat(repository.findByShippingAddresses(address), is(dave)); + assertThat(repository.findByShippingAddresses(address)).isEqualTo(dave); } @Test - public void findsPersonByAddressCorrectly() throws Exception { + void findsPersonByAddressCorrectly() { Address address = new Address("Foo Street 1", "C0123", "Bar"); dave.setAddress(address); repository.save(dave); List result = repository.findByAddress(address); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test - public void findsPeopleByZipCode() throws Exception { + void findsPeopleByZipCode() { Address address = new Address("Foo Street 1", "C0123", "Bar"); dave.setAddress(address); repository.save(dave); List result = repository.findByAddressZipCode(address.getZipCode()); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test - public void findsPeopleByQueryDslLastnameSpec() throws Exception { + void findsPeopleByQueryDslLastnameSpec() { Iterable result = repository.findAll(person.lastname.eq("Matthews")); - assertThat(result, hasItem(dave)); - assertThat(result, not(hasItems(carter, boyd, stefan, leroi, alicia))); + assertThat(result).contains(dave).doesNotContain(carter, boyd, stefan, leroi, alicia); } @Test - public void findsPeopleByzipCodePredicate() throws Exception { + void findsPeopleByzipCodePredicate() { Address address = new Address("Foo Street 1", "C0123", "Bar"); dave.setAddress(address); repository.save(dave); Iterable result = repository.findAll(person.address.zipCode.eq("C0123")); - assertThat(result, hasItem(dave)); - assertThat(result, not(hasItems(carter, boyd, stefan, leroi, alicia))); + assertThat(result).contains(dave).doesNotContain(carter, boyd, stefan, leroi, alicia); } @Test - public void findsPeopleByLocationNear() { + void findsPeopleByLocationNear() { Point point = new 
Point(-73.99171, 40.738868); dave.setLocation(point); repository.save(dave); List result = repository.findByLocationNear(point); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test // DATAMONGO-1588 - public void findsPeopleByLocationNearUsingGeoJsonType() { + void findsPeopleByLocationNearUsingGeoJsonType() { GeoJsonPoint point = new GeoJsonPoint(-73.99171, 40.738868); dave.setLocation(point); repository.save(dave); List result = repository.findByLocationNear(point); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test - public void findsPeopleByLocationWithinCircle() { + void findsPeopleByLocationWithinCircle() { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); repository.save(dave); List result = repository.findByLocationWithin(new Circle(-78.99171, 45.738868, 170)); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test - public void findsPeopleByLocationWithinBox() { + void findsPeopleByLocationWithinBox() { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); repository.save(dave); @@ -323,12 +363,11 @@ public void findsPeopleByLocationWithinBox() { Box box = new Box(new Point(-78.99171, 35.738868), new Point(-68.99171, 45.738868)); List result = repository.findByLocationWithin(box); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test - public void findsPeopleByLocationWithinPolygon() { + void findsPeopleByLocationWithinPolygon() { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); @@ -340,81 +379,79 @@ public void findsPeopleByLocationWithinPolygon() { Point fourth = new Point(-68.99171, 35.738868); List result = repository.findByLocationWithin(new Polygon(first, second, third, fourth)); - 
assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test - public void findsPagedPeopleByPredicate() throws Exception { + void findsPagedPeopleByPredicate() { Page page = repository.findAll(person.lastname.contains("a"), PageRequest.of(0, 2, Direction.ASC, "lastname")); - assertThat(page.isFirst(), is(true)); - assertThat(page.isLast(), is(false)); - assertThat(page.getNumberOfElements(), is(2)); - assertThat(page.getTotalElements(), is(4L)); - assertThat(page, hasItems(carter, stefan)); + assertThat(page.isFirst()).isTrue(); + assertThat(page.isLast()).isFalse(); + assertThat(page.getNumberOfElements()).isEqualTo(2); + assertThat(page.getTotalElements()).isEqualTo(4L); + assertThat(page).contains(carter, stefan); } @Test // DATADOC-136 - public void findsPeopleBySexCorrectly() { + void findsPeopleBySexCorrectly() { List females = repository.findBySex(Sex.FEMALE); - assertThat(females.size(), is(1)); - assertThat(females.get(0), is(alicia)); + assertThat(females).hasSize(1); + assertThat(females.get(0)).isEqualTo(alicia); } @Test // DATAMONGO-446 - public void findsPeopleBySexPaginated() { + void findsPeopleBySexPaginated() { List males = repository.findBySex(Sex.MALE, PageRequest.of(0, 2)); - assertThat(males.size(), is(2)); + assertThat(males).hasSize(2); } @Test - public void findsPeopleByNamedQuery() { + void findsPeopleByNamedQuery() { List result = repository.findByNamedQuery("Dave"); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test // DATADOC-190 - public void existsWorksCorrectly() { - assertThat(repository.existsById(dave.getId()), is(true)); + void existsWorksCorrectly() { + assertThat(repository.existsById(dave.getId())).isTrue(); } - @Test(expected = DuplicateKeyException.class) - public void rejectsDuplicateEmailAddressOnSave() { + @Test + void rejectsDuplicateEmailAddressOnSave() { - 
assertThat(dave.getEmail(), is("dave@dmband.com")); + assumeThat(repository.findById(dave.getId()).map(Person::getEmail)).contains("dave@dmband.com"); Person daveSyer = new Person("Dave", "Syer"); - assertThat(daveSyer.getEmail(), is("dave@dmband.com")); + assertThat(daveSyer.getEmail()).isEqualTo("dave@dmband.com"); - repository.save(daveSyer); + Assertions.assertThatExceptionOfType(DuplicateKeyException.class).isThrownBy(() -> repository.save(daveSyer)); } @Test // DATADOC-236 - public void findsPeopleByLastnameAndOrdersCorrectly() { + void findsPeopleByLastnameAndOrdersCorrectly() { List result = repository.findByLastnameOrderByFirstnameAsc("Matthews"); - assertThat(result.size(), is(2)); - assertThat(result.get(0), is(dave)); - assertThat(result.get(1), is(oliver)); + assertThat(result).hasSize(2); + assertThat(result.get(0)).isEqualTo(dave); + assertThat(result.get(1)).isEqualTo(oliver); } @Test // DATADOC-236 - public void appliesStaticAndDynamicSorting() { + void appliesStaticAndDynamicSorting() { List result = repository.findByFirstnameLikeOrderByLastnameAsc("*e*", Sort.by("age")); - assertThat(result.size(), is(5)); - assertThat(result.get(0), is(carter)); - assertThat(result.get(1), is(stefan)); - assertThat(result.get(2), is(oliver)); - assertThat(result.get(3), is(dave)); - assertThat(result.get(4), is(leroi)); + assertThat(result).hasSize(5); + assertThat(result.get(0)).isEqualTo(carter); + assertThat(result.get(1)).isEqualTo(stefan); + assertThat(result.get(2)).isEqualTo(oliver); + assertThat(result.get(3)).isEqualTo(dave); + assertThat(result.get(4)).isEqualTo(leroi); } @Test - public void executesGeoNearQueryForResultsCorrectly() { + void executesGeoNearQueryForResultsCorrectly() { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); @@ -422,11 +459,11 @@ public void executesGeoNearQueryForResultsCorrectly() { GeoResults results = repository.findByLocationNear(new Point(-73.99, 40.73), new Distance(2000, Metrics.KILOMETERS)); - 
assertThat(results.getContent().isEmpty(), is(false)); + assertThat(results.getContent()).isNotEmpty(); } @Test - public void executesGeoPageQueryForResultsCorrectly() { + void executesGeoPageQueryForResultsCorrectly() { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); @@ -434,31 +471,31 @@ public void executesGeoPageQueryForResultsCorrectly() { GeoPage results = repository.findByLocationNear(new Point(-73.99, 40.73), new Distance(2000, Metrics.KILOMETERS), PageRequest.of(0, 20)); - assertThat(results.getContent().isEmpty(), is(false)); + assertThat(results.getContent()).isNotEmpty(); // DATAMONGO-607 - assertThat(results.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS)); + assertThat(results.getAverageDistance().getMetric()).isEqualTo((Metric) Metrics.KILOMETERS); } @Test // DATAMONGO-323 - public void considersSortForAnnotatedQuery() { + void considersSortForAnnotatedQuery() { List result = repository.findByAgeLessThan(60, Sort.by("firstname")); - assertThat(result.size(), is(7)); - assertThat(result.get(0), is(alicia)); - assertThat(result.get(1), is(boyd)); - assertThat(result.get(2), is(carter)); - assertThat(result.get(3), is(dave)); - assertThat(result.get(4), is(leroi)); - assertThat(result.get(5), is(oliver)); - assertThat(result.get(6), is(stefan)); + assertThat(result).hasSize(7); + assertThat(result.get(0)).isEqualTo(alicia); + assertThat(result.get(1)).isEqualTo(boyd); + assertThat(result.get(2)).isEqualTo(carter); + assertThat(result.get(3)).isEqualTo(dave); + assertThat(result.get(4)).isEqualTo(leroi); + assertThat(result.get(5)).isEqualTo(oliver); + assertThat(result.get(6)).isEqualTo(stefan); } @Test // DATAMONGO-347 - public void executesQueryWithDBRefReferenceCorrectly() { + void executesQueryWithDBRefReferenceCorrectly() { - operations.remove(new org.springframework.data.mongodb.core.query.Query(), User.class); + operations.remove(new Query(), User.class); User user = new User(); user.username = "Oliver"; 
@@ -469,73 +506,65 @@ public void executesQueryWithDBRefReferenceCorrectly() { repository.save(dave); List result = repository.findByCreator(user); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test // DATAMONGO-425 - public void bindsDateParameterForLessThanPredicateCorrectly() { + void bindsDateParameterForLessThanPredicateCorrectly() { List result = repository.findByCreatedAtLessThan(boyd.createdAt); - assertThat(result.size(), is(3)); - assertThat(result, hasItems(dave, oliver, carter)); + assertThat(result).hasSize(3).contains(dave, oliver, carter); } @Test // DATAMONGO-425 - public void bindsDateParameterForGreaterThanPredicateCorrectly() { + void bindsDateParameterForGreaterThanPredicateCorrectly() { List result = repository.findByCreatedAtGreaterThan(carter.createdAt); - assertThat(result.size(), is(4)); - assertThat(result, hasItems(boyd, stefan, leroi, alicia)); + assertThat(result).hasSize(4).contains(boyd, stefan, leroi, alicia); } @Test // DATAMONGO-427 - public void bindsDateParameterToBeforePredicateCorrectly() { + void bindsDateParameterToBeforePredicateCorrectly() { List result = repository.findByCreatedAtBefore(boyd.createdAt); - assertThat(result.size(), is(3)); - assertThat(result, hasItems(dave, oliver, carter)); + assertThat(result).hasSize(3).contains(dave, oliver, carter); } @Test // DATAMONGO-427 - public void bindsDateParameterForAfterPredicateCorrectly() { + void bindsDateParameterForAfterPredicateCorrectly() { List result = repository.findByCreatedAtAfter(carter.createdAt); - assertThat(result.size(), is(4)); - assertThat(result, hasItems(boyd, stefan, leroi, alicia)); + assertThat(result).hasSize(4).contains(boyd, stefan, leroi, alicia); } @Test // DATAMONGO-425 - public void bindsDateParameterForManuallyDefinedQueryCorrectly() { + void bindsDateParameterForManuallyDefinedQueryCorrectly() { List result = 
repository.findByCreatedAtLessThanManually(boyd.createdAt); - assertThat(result.isEmpty(), is(false)); + assertThat(result).isNotEmpty(); } @Test // DATAMONGO-472 - public void findsPeopleUsingNotPredicate() { + void findsPeopleUsingNotPredicate() { List result = repository.findByLastnameNot("Matthews"); - assertThat(result, not(hasItem(dave))); - assertThat(result, hasSize(5)); + assertThat(result).doesNotContain(dave).hasSize(5); } @Test // DATAMONGO-521 - public void executesAndQueryCorrectly() { + void executesAndQueryCorrectly() { List result = repository.findByFirstnameAndLastname("Dave", "Matthews"); - assertThat(result, hasSize(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); result = repository.findByFirstnameAndLastname("Oliver August", "Matthews"); - assertThat(result, hasSize(1)); - assertThat(result, hasItem(oliver)); + assertThat(result).hasSize(1).contains(oliver); } @Test // DATAMONGO-600 - public void readsDocumentsWithNestedPolymorphismCorrectly() { + void readsDocumentsWithNestedPolymorphismCorrectly() { UsernameAndPassword usernameAndPassword = new UsernameAndPassword(); usernameAndPassword.username = "dave"; @@ -546,55 +575,52 @@ public void readsDocumentsWithNestedPolymorphismCorrectly() { repository.save(dave); List result = repository.findByCredentials(usernameAndPassword); - assertThat(result, hasSize(1)); - assertThat(result, hasItem(dave)); + assertThat(result).hasSize(1).contains(dave); } @Test // DATAMONGO-636 - public void executesDerivedCountProjection() { - assertThat(repository.countByLastname("Matthews"), is(2L)); + void executesDerivedCountProjection() { + assertThat(repository.countByLastname("Matthews")).isEqualTo(2L); } @Test // DATAMONGO-636 - public void executesDerivedCountProjectionToInt() { - assertThat(repository.countByFirstname("Oliver August"), is(1)); + void executesDerivedCountProjectionToInt() { + assertThat(repository.countByFirstname("Oliver August")).isEqualTo(1); } @Test 
// DATAMONGO-636 - public void executesAnnotatedCountProjection() { - assertThat(repository.someCountQuery("Matthews"), is(2L)); + void executesAnnotatedCountProjection() { + assertThat(repository.someCountQuery("Matthews")).isEqualTo(2L); } @Test // DATAMONGO-1454 - public void executesDerivedExistsProjectionToBoolean() { + void executesDerivedExistsProjectionToBoolean() { - assertThat(repository.existsByFirstname("Oliver August"), is(true)); - assertThat(repository.existsByFirstname("Hans Peter"), is(false)); + assertThat(repository.existsByFirstname("Oliver August")).isTrue(); + assertThat(repository.existsByFirstname("Hans Peter")).isFalse(); } @Test // DATAMONGO-1454 - public void executesAnnotatedExistProjection() { - assertThat(repository.someExistQuery("Matthews"), is(true)); + void executesAnnotatedExistProjection() { + assertThat(repository.someExistQuery("Matthews")).isTrue(); } @Test // DATAMONGO-701 - public void executesDerivedStartsWithQueryCorrectly() { + void executesDerivedStartsWithQueryCorrectly() { List result = repository.findByLastnameStartsWith("Matt"); - assertThat(result, hasSize(2)); - assertThat(result, hasItems(dave, oliver)); + assertThat(result).hasSize(2).contains(dave, oliver); } @Test // DATAMONGO-701 - public void executesDerivedEndsWithQueryCorrectly() { + void executesDerivedEndsWithQueryCorrectly() { List result = repository.findByLastnameEndsWith("thews"); - assertThat(result, hasSize(2)); - assertThat(result, hasItems(dave, oliver)); + assertThat(result).hasSize(2).contains(dave, oliver); } @Test // DATAMONGO-445 - public void executesGeoPageQueryForWithPageRequestForPageInBetween() { + void executesGeoPageQueryForWithPageRequestForPageInBetween() { Point farAway = new Point(-73.9, 40.7); Point here = new Point(-73.99, 40.73); @@ -610,16 +636,16 @@ public void executesGeoPageQueryForWithPageRequestForPageInBetween() { GeoPage results = repository.findByLocationNear(new Point(-73.99, 40.73), new Distance(2000, 
Metrics.KILOMETERS), PageRequest.of(1, 2)); - assertThat(results.getContent().isEmpty(), is(false)); - assertThat(results.getNumberOfElements(), is(2)); - assertThat(results.isFirst(), is(false)); - assertThat(results.isLast(), is(false)); - assertThat(results.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS)); - assertThat(results.getAverageDistance().getNormalizedValue(), is(0.0)); + assertThat(results.getContent()).isNotEmpty(); + assertThat(results.getNumberOfElements()).isEqualTo(2); + assertThat(results.isFirst()).isFalse(); + assertThat(results.isLast()).isFalse(); + assertThat(results.getAverageDistance().getMetric()).isEqualTo((Metric) Metrics.KILOMETERS); + assertThat(results.getAverageDistance().getNormalizedValue()).isEqualTo(0.0); } @Test // DATAMONGO-445 - public void executesGeoPageQueryForWithPageRequestForPageAtTheEnd() { + void executesGeoPageQueryForWithPageRequestForPageAtTheEnd() { Point point = new Point(-73.99171, 40.738868); @@ -631,15 +657,15 @@ public void executesGeoPageQueryForWithPageRequestForPageAtTheEnd() { GeoPage results = repository.findByLocationNear(new Point(-73.99, 40.73), new Distance(2000, Metrics.KILOMETERS), PageRequest.of(1, 2)); - assertThat(results.getContent().isEmpty(), is(false)); - assertThat(results.getNumberOfElements(), is(1)); - assertThat(results.isFirst(), is(false)); - assertThat(results.isLast(), is(true)); - assertThat(results.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS)); + assertThat(results.getContent()).isNotEmpty(); + assertThat(results.getNumberOfElements()).isEqualTo(1); + assertThat(results.isFirst()).isFalse(); + assertThat(results.isLast()).isTrue(); + assertThat(results.getAverageDistance().getMetric()).isEqualTo((Metric) Metrics.KILOMETERS); } @Test // DATAMONGO-445 - public void executesGeoPageQueryForWithPageRequestForJustOneElement() { + void executesGeoPageQueryForWithPageRequestForJustOneElement() { Point point = new Point(-73.99171, 40.738868); 
dave.setLocation(point); @@ -648,15 +674,15 @@ public void executesGeoPageQueryForWithPageRequestForJustOneElement() { GeoPage results = repository.findByLocationNear(new Point(-73.99, 40.73), new Distance(2000, Metrics.KILOMETERS), PageRequest.of(0, 2)); - assertThat(results.getContent().isEmpty(), is(false)); - assertThat(results.getNumberOfElements(), is(1)); - assertThat(results.isFirst(), is(true)); - assertThat(results.isLast(), is(true)); - assertThat(results.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS)); + assertThat(results.getContent()).isNotEmpty(); + assertThat(results.getNumberOfElements()).isEqualTo(1); + assertThat(results.isFirst()).isTrue(); + assertThat(results.isLast()).isTrue(); + assertThat(results.getAverageDistance().getMetric()).isEqualTo((Metric) Metrics.KILOMETERS); } @Test // DATAMONGO-445 - public void executesGeoPageQueryForWithPageRequestForJustOneElementEmptyPage() { + void executesGeoPageQueryForWithPageRequestForJustOneElementEmptyPage() { dave.setLocation(new Point(-73.99171, 40.738868)); repository.save(dave); @@ -664,83 +690,79 @@ public void executesGeoPageQueryForWithPageRequestForJustOneElementEmptyPage() { GeoPage results = repository.findByLocationNear(new Point(-73.99, 40.73), new Distance(2000, Metrics.KILOMETERS), PageRequest.of(1, 2)); - assertThat(results.getContent().isEmpty(), is(true)); - assertThat(results.getNumberOfElements(), is(0)); - assertThat(results.isFirst(), is(false)); - assertThat(results.isLast(), is(true)); - assertThat(results.getAverageDistance().getMetric(), is((Metric) Metrics.KILOMETERS)); + assertThat(results.getContent()).isEmpty(); + assertThat(results.getNumberOfElements()).isEqualTo(0); + assertThat(results.isFirst()).isFalse(); + assertThat(results.isLast()).isTrue(); + assertThat(results.getAverageDistance().getMetric()).isEqualTo((Metric) Metrics.KILOMETERS); } @Test // DATAMONGO-1608 - public void findByFirstNameIgnoreCaseWithNull() { + void 
findByFirstNameIgnoreCaseWithNull() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("property 'firstname'"); - - repository.findByFirstnameIgnoreCase(null); + assertThatIllegalArgumentException().isThrownBy(() -> repository.findByFirstnameIgnoreCase(null)); } @Test // DATAMONGO-770 - public void findByFirstNameIgnoreCase() { + void findByFirstNameIgnoreCase() { List result = repository.findByFirstnameIgnoreCase("dave"); - assertThat(result.size(), is(1)); - assertThat(result.get(0), is(dave)); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isEqualTo(dave); } @Test // DATAMONGO-770 - public void findByFirstnameNotIgnoreCase() { + void findByFirstnameNotIgnoreCase() { List result = repository.findByFirstnameNotIgnoreCase("dave"); - assertThat(result.size(), is(6)); - assertThat(result, not(hasItem(dave))); + assertThat(result).hasSize(6).doesNotContain(dave); } @Test // DATAMONGO-770 - public void findByFirstnameStartingWithIgnoreCase() { + void findByFirstnameStartingWithIgnoreCase() { List result = repository.findByFirstnameStartingWithIgnoreCase("da"); - assertThat(result.size(), is(1)); - assertThat(result.get(0), is(dave)); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isEqualTo(dave); } @Test // DATAMONGO-770 - public void findByFirstnameEndingWithIgnoreCase() { + void findByFirstnameEndingWithIgnoreCase() { List result = repository.findByFirstnameEndingWithIgnoreCase("VE"); - assertThat(result.size(), is(1)); - assertThat(result.get(0), is(dave)); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isEqualTo(dave); } @Test // DATAMONGO-770 - public void findByFirstnameContainingIgnoreCase() { + void findByFirstnameContainingIgnoreCase() { List result = repository.findByFirstnameContainingIgnoreCase("AV"); - assertThat(result.size(), is(1)); - assertThat(result.get(0), is(dave)); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isEqualTo(dave); } @Test // 
DATAMONGO-870 - public void findsSliceOfPersons() { + void findsSliceOfPersons() { Slice result = repository.findByAgeGreaterThan(40, PageRequest.of(0, 2, Direction.DESC, "firstname")); - assertThat(result.hasNext(), is(true)); + assertThat(result.hasNext()).isTrue(); } @Test // DATAMONGO-871 - public void findsPersonsByFirstnameAsArray() { + void findsPersonsByFirstnameAsArray() { Person[] result = repository.findByThePersonsFirstnameAsArray("Leroi"); - assertThat(result, is(arrayWithSize(1))); - assertThat(result, is(arrayContaining(leroi))); + assertThat(result).hasSize(1).containsExactly(leroi); } @Test // DATAMONGO-821 - public void findUsingAnnotatedQueryOnDBRef() { + @DirtiesState + void findUsingAnnotatedQueryOnDBRef() { operations.remove(new org.springframework.data.mongodb.core.query.Query(), User.class); @@ -753,66 +775,81 @@ public void findUsingAnnotatedQueryOnDBRef() { Page result = repository.findByHavingCreator(PageRequest.of(0, 100)); - assertThat(result.getNumberOfElements(), is(1)); - assertThat(result.getContent().get(0), is(alicia)); + assertThat(result.getNumberOfElements()).isEqualTo(1); + assertThat(result.getContent().get(0)).isEqualTo(alicia); } @Test // DATAMONGO-566 - public void deleteByShouldReturnListOfDeletedElementsWhenRetunTypeIsCollectionLike() { + @DirtiesState + void deleteByShouldReturnListOfDeletedElementsWhenRetunTypeIsCollectionLike() { List result = repository.deleteByLastname("Beauford"); - assertThat(result, hasItem(carter)); - assertThat(result, hasSize(1)); + assertThat(result).contains(carter).hasSize(1); } @Test // DATAMONGO-566 - public void deleteByShouldRemoveElementsMatchingDerivedQuery() { + @DirtiesState + void deleteByShouldRemoveElementsMatchingDerivedQuery() { repository.deleteByLastname("Beauford"); - assertThat(operations.count(new BasicQuery("{'lastname':'Beauford'}"), Person.class), is(0L)); + assertThat(operations.count(new BasicQuery("{'lastname':'Beauford'}"), Person.class)).isEqualTo(0L); } @Test // 
DATAMONGO-566 - public void deleteByShouldReturnNumberOfDocumentsRemovedIfReturnTypeIsLong() { - assertThat(repository.deletePersonByLastname("Beauford"), is(1L)); + @DirtiesState + void deleteByShouldReturnNumberOfDocumentsRemovedIfReturnTypeIsLong() { + assertThat(repository.deletePersonByLastname("Beauford")).isEqualTo(1L); + } + + @Test // DATAMONGO-1997 + @DirtiesState + void deleteByShouldResultWrappedInOptionalCorrectly() { + + assertThat(repository.deleteOptionalByLastname("Beauford")).isPresent(); + assertThat(repository.deleteOptionalByLastname("dorfuaeB")).isNotPresent(); } @Test // DATAMONGO-566 - public void deleteByShouldReturnZeroInCaseNoDocumentHasBeenRemovedAndReturnTypeIsNumber() { - assertThat(repository.deletePersonByLastname("dorfuaeB"), is(0L)); + @DirtiesState + void deleteByShouldReturnZeroInCaseNoDocumentHasBeenRemovedAndReturnTypeIsNumber() { + assertThat(repository.deletePersonByLastname("dorfuaeB")).isEqualTo(0L); } @Test // DATAMONGO-566 - public void deleteByShouldReturnEmptyListInCaseNoDocumentHasBeenRemovedAndReturnTypeIsCollectionLike() { - assertThat(repository.deleteByLastname("dorfuaeB"), empty()); + @DirtiesState + void deleteByShouldReturnEmptyListInCaseNoDocumentHasBeenRemovedAndReturnTypeIsCollectionLike() { + assertThat(repository.deleteByLastname("dorfuaeB")).isEmpty(); } @Test // DATAMONGO-566 - public void deleteByUsingAnnotatedQueryShouldReturnListOfDeletedElementsWhenRetunTypeIsCollectionLike() { + @DirtiesState + void deleteByUsingAnnotatedQueryShouldReturnListOfDeletedElementsWhenRetunTypeIsCollectionLike() { List result = repository.removeByLastnameUsingAnnotatedQuery("Beauford"); - assertThat(result, hasItem(carter)); - assertThat(result, hasSize(1)); + assertThat(result).contains(carter).hasSize(1); } @Test // DATAMONGO-566 - public void deleteByUsingAnnotatedQueryShouldRemoveElementsMatchingDerivedQuery() { + @DirtiesState + void deleteByUsingAnnotatedQueryShouldRemoveElementsMatchingDerivedQuery() { 
repository.removeByLastnameUsingAnnotatedQuery("Beauford"); - assertThat(operations.count(new BasicQuery("{'lastname':'Beauford'}"), Person.class), is(0L)); + assertThat(operations.count(new BasicQuery("{'lastname':'Beauford'}"), Person.class)).isEqualTo(0L); } @Test // DATAMONGO-566 - public void deleteByUsingAnnotatedQueryShouldReturnNumberOfDocumentsRemovedIfReturnTypeIsLong() { - assertThat(repository.removePersonByLastnameUsingAnnotatedQuery("Beauford"), is(1L)); + @DirtiesState + void deleteByUsingAnnotatedQueryShouldReturnNumberOfDocumentsRemovedIfReturnTypeIsLong() { + assertThat(repository.removePersonByLastnameUsingAnnotatedQuery("Beauford")).isEqualTo(1L); } @Test // DATAMONGO-893 - public void findByNestedPropertyInCollectionShouldFindMatchingDocuments() { + @ProvidesState + void findByNestedPropertyInCollectionShouldFindMatchingDocuments() { Person p = new Person("Mary", "Poppins"); Address adr = new Address("some", "2", "where"); @@ -822,11 +859,12 @@ public void findByNestedPropertyInCollectionShouldFindMatchingDocuments() { Page result = repository.findByAddressIn(Arrays.asList(adr), PageRequest.of(0, 10)); - assertThat(result.getContent(), hasSize(1)); + assertThat(result.getContent()).hasSize(1); } @Test // DATAMONGO-745 - public void findByCustomQueryFirstnamesInListAndLastname() { + @ProvidesState + void findByCustomQueryFirstnamesInListAndLastname() { repository.save(new Person("foo", "bar")); repository.save(new Person("bar", "bar")); @@ -836,13 +874,14 @@ public void findByCustomQueryFirstnamesInListAndLastname() { Page result = repository.findByCustomQueryFirstnamesAndLastname(Arrays.asList("bar", "foo", "fuu"), "bar", PageRequest.of(0, 2)); - assertThat(result.getContent(), hasSize(2)); - assertThat(result.getTotalPages(), is(2)); - assertThat(result.getTotalElements(), is(3L)); + assertThat(result.getContent()).hasSize(2); + assertThat(result.getTotalPages()).isEqualTo(2); + assertThat(result.getTotalElements()).isEqualTo(3L); } @Test // 
DATAMONGO-745 - public void findByCustomQueryLastnameAndStreetInList() { + @ProvidesState + void findByCustomQueryLastnameAndStreetInList() { repository.save(new Person("foo", "bar").withAddress(new Address("street1", "1", "SB"))); repository.save(new Person("bar", "bar").withAddress(new Address("street2", "1", "SB"))); @@ -852,63 +891,68 @@ public void findByCustomQueryLastnameAndStreetInList() { Page result = repository.findByCustomQueryLastnameAndAddressStreetInList("bar", Arrays.asList("street1", "street2"), PageRequest.of(0, 2)); - assertThat(result.getContent(), hasSize(2)); - assertThat(result.getTotalPages(), is(2)); - assertThat(result.getTotalElements(), is(3L)); + assertThat(result.getContent()).hasSize(2); + assertThat(result.getTotalPages()).isEqualTo(2); + assertThat(result.getTotalElements()).isEqualTo(3L); } @Test // DATAMONGO-950 - public void shouldLimitCollectionQueryToMaxResultsWhenPresent() { + @ProvidesState + void shouldLimitCollectionQueryToMaxResultsWhenPresent() { repository.saveAll(Arrays.asList(new Person("Bob-1", "Dylan"), new Person("Bob-2", "Dylan"), new Person("Bob-3", "Dylan"), new Person("Bob-4", "Dylan"), new Person("Bob-5", "Dylan"))); List result = repository.findTop3ByLastnameStartingWith("Dylan"); - assertThat(result.size(), is(3)); + assertThat(result).hasSize(3); } @Test // DATAMONGO-950, DATAMONGO-1464 - public void shouldNotLimitPagedQueryWhenPageRequestWithinBounds() { + @ProvidesState + void shouldNotLimitPagedQueryWhenPageRequestWithinBounds() { repository.saveAll(Arrays.asList(new Person("Bob-1", "Dylan"), new Person("Bob-2", "Dylan"), new Person("Bob-3", "Dylan"), new Person("Bob-4", "Dylan"), new Person("Bob-5", "Dylan"))); Page result = repository.findTop3ByLastnameStartingWith("Dylan", PageRequest.of(0, 2)); - assertThat(result.getContent().size(), is(2)); - assertThat(result.getTotalElements(), is(3L)); + assertThat(result.getContent()).hasSize(2); + assertThat(result.getTotalElements()).isEqualTo(3L); } @Test // 
DATAMONGO-950 - public void shouldLimitPagedQueryWhenPageRequestExceedsUpperBoundary() { + @ProvidesState + void shouldLimitPagedQueryWhenPageRequestExceedsUpperBoundary() { repository.saveAll(Arrays.asList(new Person("Bob-1", "Dylan"), new Person("Bob-2", "Dylan"), new Person("Bob-3", "Dylan"), new Person("Bob-4", "Dylan"), new Person("Bob-5", "Dylan"))); Page result = repository.findTop3ByLastnameStartingWith("Dylan", PageRequest.of(1, 2)); - assertThat(result.getContent().size(), is(1)); + assertThat(result.getContent()).hasSize(1); } @Test // DATAMONGO-950, DATAMONGO-1464 - public void shouldReturnEmptyWhenPageRequestedPageIsTotallyOutOfScopeForLimit() { + @ProvidesState + void shouldReturnEmptyWhenPageRequestedPageIsTotallyOutOfScopeForLimit() { repository.saveAll(Arrays.asList(new Person("Bob-1", "Dylan"), new Person("Bob-2", "Dylan"), new Person("Bob-3", "Dylan"), new Person("Bob-4", "Dylan"), new Person("Bob-5", "Dylan"))); Page result = repository.findTop3ByLastnameStartingWith("Dylan", PageRequest.of(100, 2)); - assertThat(result.getContent().size(), is(0)); - assertThat(result.getTotalElements(), is(3L)); + assertThat(result.getContent()).isEmpty(); + assertThat(result.getTotalElements()).isEqualTo(3L); } @Test // DATAMONGO-996, DATAMONGO-950, DATAMONGO-1464 - public void gettingNonFirstPageWorksWithoutLimitBeingSet() { + void gettingNonFirstPageWorksWithoutLimitBeingSet() { Page slice = repository.findByLastnameLike("Matthews", PageRequest.of(1, 1)); - assertThat(slice.getContent(), hasSize(1)); - assertThat(slice.hasPrevious(), is(true)); - assertThat(slice.hasNext(), is(false)); - assertThat(slice.getTotalElements(), is(2L)); + assertThat(slice.getContent()).hasSize(1); + assertThat(slice.hasPrevious()).isTrue(); + assertThat(slice.hasNext()).isFalse(); + assertThat(slice.getTotalElements()).isEqualTo(2L); } @Test // DATAMONGO-972 - public void shouldExecuteFindOnDbRefCorrectly() { + @DirtiesState + void shouldExecuteFindOnDbRefCorrectly() { 
operations.remove(new org.springframework.data.mongodb.core.query.Query(), User.class); @@ -920,28 +964,42 @@ public void shouldExecuteFindOnDbRefCorrectly() { dave.setCreator(user); operations.save(dave); - assertThat(repository.findOne(QPerson.person.creator.eq(user)).get(), is(dave)); + assertThat(repository.findOne(QPerson.person.creator.eq(user)).get()).isEqualTo(dave); + } + + @Test // DATAMONGO-969 + void shouldFindPersonsWhenUsingQueryDslPerdicatedOnIdProperty() { + assertThat(repository.findAll(person.id.in(Arrays.asList(dave.id, carter.id)))).contains(dave, carter); } @Test // DATAMONGO-969 - public void shouldFindPersonsWhenUsingQueryDslPerdicatedOnIdProperty() { - assertThat(repository.findAll(person.id.in(Arrays.asList(dave.id, carter.id))), containsInAnyOrder(dave, carter)); + void shouldScrollPersonsWhenUsingQueryDslPerdicatedOnIdProperty() { + + Window scroll = repository.findBy(person.id.in(asList(dave.id, carter.id, boyd.id)), // + q -> q.limit(2).sortBy(Sort.by("firstname")).scroll(ScrollPosition.keyset())); + + assertThat(scroll).containsExactly(boyd, carter); + + ScrollPosition resumeFrom = scroll.positionAt(scroll.size() - 1); + scroll = repository.findBy(person.id.in(asList(dave.id, carter.id, boyd.id)), // + q -> q.limit(2).sortBy(Sort.by("firstname")).scroll(resumeFrom)); + + assertThat(scroll).containsOnly(dave); } @Test // DATAMONGO-1030 - public void executesSingleEntityQueryWithProjectionCorrectly() { + void executesSingleEntityQueryWithProjectionCorrectly() { PersonSummaryDto result = repository.findSummaryByLastname("Beauford"); - assertThat(result, is(notNullValue())); - assertThat(result.firstname, is("Carter")); - assertThat(result.lastname, is("Beauford")); + assertThat(result).isNotNull(); + assertThat(result.firstname).isEqualTo("Carter"); + assertThat(result.lastname).isEqualTo("Beauford"); } @Test // DATAMONGO-1057 - public void sliceShouldTraverseElementsWithoutSkippingOnes() { - - repository.deleteAll(); + @ProvidesState + 
void sliceShouldTraverseElementsWithoutSkippingOnes() { List persons = new ArrayList(100); for (int i = 0; i < 100; i++) { @@ -949,34 +1007,34 @@ public void sliceShouldTraverseElementsWithoutSkippingOnes() { persons.add(new Person(String.format("%03d", i), "ln" + 1, 100)); } - repository.saveAll(persons); + operations.bulkOps(BulkMode.UNORDERED, Person.class).insert(persons).execute(); Slice slice = repository.findByAgeGreaterThan(50, PageRequest.of(0, 20, Direction.ASC, "firstname")); - assertThat(slice, contains(persons.subList(0, 20).toArray())); + assertThat(slice).containsExactlyElementsOf(persons.subList(0, 20)); slice = repository.findByAgeGreaterThan(50, slice.nextPageable()); - assertThat(slice, contains(persons.subList(20, 40).toArray())); + assertThat(slice).containsExactlyElementsOf(persons.subList(20, 40)); } @Test // DATAMONGO-1072 - public void shouldBindPlaceholdersUsedAsKeysCorrectly() { + void shouldBindPlaceholdersUsedAsKeysCorrectly() { List persons = repository.findByKeyValue("firstname", alicia.getFirstname()); - assertThat(persons, hasSize(1)); - assertThat(persons, hasItem(alicia)); + assertThat(persons).hasSize(1).contains(alicia); } @Test // DATAMONGO-1105 - public void returnsOrderedResultsForQuerydslOrderSpecifier() { + void returnsOrderedResultsForQuerydslOrderSpecifier() { Iterable result = repository.findAll(person.firstname.asc()); - assertThat(result, contains(alicia, boyd, carter, dave, leroi, oliver, stefan)); + assertThat(result).containsExactly(alicia, boyd, carter, dave, leroi, oliver, stefan); } @Test // DATAMONGO-1085 - public void shouldSupportSortingByQueryDslOrderSpecifier() { + @ProvidesState + void shouldSupportSortingByQueryDslOrderSpecifier() { repository.deleteAll(); @@ -994,14 +1052,13 @@ public void shouldSupportSortingByQueryDslOrderSpecifier() { Iterable result = repository.findAll(person.firstname.isNotNull(), person.address.street.desc()); - assertThat(result, is(Matchers. 
iterableWithSize(persons.size()))); - assertThat(result.iterator().next().getFirstname(), is(persons.get(2).getFirstname())); + assertThat(result).hasSize(persons.size()); + assertThat(result.iterator().next().getFirstname()).isEqualTo(persons.get(2).getFirstname()); } @Test // DATAMONGO-1085 - public void shouldSupportSortingWithQSortByQueryDslOrderSpecifier() throws Exception { - - repository.deleteAll(); + @ProvidesState + void shouldSupportSortingWithQSortByQueryDslOrderSpecifier() { List persons = new ArrayList(); @@ -1016,14 +1073,13 @@ public void shouldSupportSortingWithQSortByQueryDslOrderSpecifier() throws Excep PageRequest pageRequest = PageRequest.of(0, 2, new QSort(person.address.street.desc())); Iterable result = repository.findAll(pageRequest); - assertThat(result, is(Matchers. iterableWithSize(2))); - assertThat(result.iterator().next().getFirstname(), is("Siggi 2")); + assertThat(result).hasSize(2); + assertThat(result.iterator().next().getFirstname()).isEqualTo("Siggi 2"); } @Test // DATAMONGO-1085 - public void shouldSupportSortingWithQSort() throws Exception { - - repository.deleteAll(); + @ProvidesState + void shouldSupportSortingWithQSort() { List persons = new ArrayList(); @@ -1037,24 +1093,25 @@ public void shouldSupportSortingWithQSort() throws Exception { Iterable result = repository.findAll(new QSort(person.address.street.desc())); - assertThat(result, is(Matchers. iterableWithSize(persons.size()))); - assertThat(result.iterator().next().getFirstname(), is("Siggi 2")); + assertThat(result).hasSize(persons.size()); + assertThat(result.iterator().next().getFirstname()).isEqualTo("Siggi 2"); } @Test // DATAMONGO-1165 - public void shouldAllowReturningJava8StreamInCustomQuery() throws Exception { + void shouldAllowReturningJava8StreamInCustomQuery() { Stream result = repository.findByCustomQueryWithStreamingCursorByFirstnames(Arrays.asList("Dave")); try { - assertThat(result.collect(Collectors. 
toList()), hasItems(dave)); + assertThat(result.collect(Collectors. toList())).contains(dave); } finally { result.close(); } } @Test // DATAMONGO-1110 - public void executesGeoNearQueryForResultsCorrectlyWhenGivenMinAndMaxDistance() { + @DirtiesState + void executesGeoNearQueryForResultsCorrectlyWhenGivenMinAndMaxDistance() { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); @@ -1063,39 +1120,51 @@ public void executesGeoNearQueryForResultsCorrectlyWhenGivenMinAndMaxDistance() Range range = Distance.between(new Distance(0.01, KILOMETERS), new Distance(2000, KILOMETERS)); GeoResults results = repository.findPersonByLocationNear(new Point(-73.99, 40.73), range); - assertThat(results.getContent().isEmpty(), is(false)); + assertThat(results.getContent()).isNotEmpty(); } @Test // DATAMONGO-990 - public void shouldFindByFirstnameForSpELExpressionWithParameterIndexOnly() { + void shouldFindByFirstnameForSpELExpressionWithParameterIndexOnly() { List users = repository.findWithSpelByFirstnameForSpELExpressionWithParameterIndexOnly("Dave"); - assertThat(users, hasSize(1)); - assertThat(users.get(0), is(dave)); + assertThat(users).hasSize(1); + assertThat(users.get(0)).isEqualTo(dave); } @Test // DATAMONGO-990 - public void shouldFindByFirstnameAndCurrentUserWithCustomQuery() { + void shouldFindByFirstnameAndCurrentUserWithCustomQuery() { SampleSecurityContextHolder.getCurrent().setPrincipal(dave); List users = repository.findWithSpelByFirstnameAndCurrentUserWithCustomQuery("Dave"); - assertThat(users, hasSize(1)); - assertThat(users.get(0), is(dave)); + assertThat(users).hasSize(1); + assertThat(users.get(0)).isEqualTo(dave); } @Test // DATAMONGO-990 - public void shouldFindByFirstnameForSpELExpressionWithParameterVariableOnly() { + void shouldFindByFirstnameForSpELExpressionWithParameterVariableOnly() { List users = repository.findWithSpelByFirstnameForSpELExpressionWithParameterVariableOnly("Dave"); - assertThat(users, hasSize(1)); - 
assertThat(users.get(0), is(dave)); + assertThat(users).hasSize(1); + assertThat(users.get(0)).isEqualTo(dave); + } + + @Test // DATAMONGO-1911 + @DirtiesState + void findByUUIDShouldReturnCorrectResult() { + + dave.setUniqueId(UUID.randomUUID()); + repository.save(dave); + + Person dave = repository.findByUniqueId(this.dave.getUniqueId()); + + assertThat(dave).isEqualTo(dave); } @Test // DATAMONGO-1245 - public void findByExampleShouldResolveStuffCorrectly() { + void findByExampleShouldResolveStuffCorrectly() { Person sample = new Person(); sample.setLastname("Matthews"); @@ -1106,11 +1175,11 @@ public void findByExampleShouldResolveStuffCorrectly() { ReflectionTestUtils.setField(sample, "email", null); Page result = repository.findAll(Example.of(sample), PageRequest.of(0, 10)); - assertThat(result.getNumberOfElements(), is(2)); + assertThat(result.getNumberOfElements()).isEqualTo(2); } @Test // DATAMONGO-1245 - public void findAllByExampleShouldResolveStuffCorrectly() { + void findAllByExampleShouldResolveStuffCorrectly() { Person sample = new Person(); sample.setLastname("Matthews"); @@ -1121,76 +1190,498 @@ public void findAllByExampleShouldResolveStuffCorrectly() { ReflectionTestUtils.setField(sample, "email", null); List result = repository.findAll(Example.of(sample)); - assertThat(result.size(), is(2)); + assertThat(result).hasSize(2); + } + + @Test // GH-4308 + void scrollByExampleShouldReturnCorrectResult() { + + Person sample = new Person(); + sample.setLastname("M"); + + // needed to tweak stuff a bit since some field are automatically set - so we need to undo this + ReflectionTestUtils.setField(sample, "id", null); + ReflectionTestUtils.setField(sample, "createdAt", null); + ReflectionTestUtils.setField(sample, "email", null); + + Window result = repository.findBy( + Example.of(sample, ExampleMatcher.matching().withMatcher("lastname", GenericPropertyMatcher::startsWith)), + q -> q.limit(2).sortBy(Sort.by("firstname")).scroll(ScrollPosition.keyset())); + 
+ assertThat(result).containsOnly(dave, leroi); + assertThat(result.hasNext()).isTrue(); + + ScrollPosition position = result.positionAt(result.size() - 1); + result = repository.findBy( + Example.of(sample, ExampleMatcher.matching().withMatcher("lastname", GenericPropertyMatcher::startsWith)), + q -> q.limit(2).sortBy(Sort.by("firstname")).scroll(position)); + + assertThat(result).containsOnly(oliver); + assertThat(result.hasNext()).isFalse(); } @Test // DATAMONGO-1425 - public void findsPersonsByFirstnameNotContains() throws Exception { + void findsPersonsByFirstnameNotContains() { List result = repository.findByFirstnameNotContains("Boyd"); - assertThat(result.size(), is((int) (repository.count() - 1))); - assertThat(result, not(hasItem(boyd))); + assertThat(result).hasSize((int) (repository.count() - 1)); + assertThat(result).doesNotContain(boyd); } @Test // DATAMONGO-1425 - public void findBySkillsContains() throws Exception { + void findBySkillsContains() { - List result = repository.findBySkillsContains(Arrays.asList("Drums")); - assertThat(result.size(), is(1)); - assertThat(result, hasItem(carter)); + List result = repository.findBySkillsContains(asList("Drums")); + assertThat(result).hasSize(1).contains(carter); } @Test // DATAMONGO-1425 - public void findBySkillsNotContains() throws Exception { + void findBySkillsNotContains() { List result = repository.findBySkillsNotContains(Arrays.asList("Drums")); - assertThat(result.size(), is((int) (repository.count() - 1))); - assertThat(result, not(hasItem(carter))); + assertThat(result).hasSize((int) (repository.count() - 1)); + assertThat(result).doesNotContain(carter); } @Test // DATAMONGO-1424 - public void findsPersonsByFirstnameNotLike() throws Exception { + void findsPersonsByFirstnameNotLike() { List result = repository.findByFirstnameNotLike("Bo*"); - assertThat(result.size(), is((int) (repository.count() - 1))); - assertThat(result, not(hasItem(boyd))); + assertThat(result).hasSize((int) 
(repository.count() - 1)); + assertThat(result).doesNotContain(boyd); } @Test // DATAMONGO-1539 - public void countsPersonsByFirstname() { - assertThat(repository.countByThePersonsFirstname("Dave"), is(1L)); + void countsPersonsByFirstname() { + assertThat(repository.countByThePersonsFirstname("Dave")).isEqualTo(1L); } @Test // DATAMONGO-1539 - public void deletesPersonsByFirstname() { + @DirtiesState + void deletesPersonsByFirstname() { repository.deleteByThePersonsFirstname("Dave"); - assertThat(repository.countByThePersonsFirstname("Dave"), is(0L)); + assertThat(repository.countByThePersonsFirstname("Dave")).isEqualTo(0L); } @Test // DATAMONGO-1752 - public void readsOpenProjection() { + void readsOpenProjection() { assertThat(repository.findOpenProjectionBy()).isNotEmpty(); } @Test // DATAMONGO-1752 - public void readsClosedProjection() { + void readsClosedProjection() { assertThat(repository.findClosedProjectionBy()).isNotEmpty(); } @Test // DATAMONGO-1865 - public void findFirstEntityReturnsFirstResultEvenForNonUniqueMatches() { + void findFirstEntityReturnsFirstResultEvenForNonUniqueMatches() { assertThat(repository.findFirstBy()).isNotNull(); } - @Test(expected = IncorrectResultSizeDataAccessException.class) // DATAMONGO-1865 - public void findSingleEntityThrowsErrorWhenNotUnique() { - repository.findPersonByLastnameLike(dave.getLastname()); + @Test // DATAMONGO-1865 + void findSingleEntityThrowsErrorWhenNotUnique() { + assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class) + .isThrownBy(() -> repository.findPersonByLastnameLike(dave.getLastname())); + } + + @Test // DATAMONGO-1865 + void findOptionalSingleEntityThrowsErrorWhenNotUnique() { + assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class) + .isThrownBy(() -> repository.findOptionalPersonByLastnameLike(dave.getLastname())); + } + + @Test // DATAMONGO-1979 + void findAppliesAnnotatedSort() { + assertThat(repository.findByAgeGreaterThan(40)).containsExactly(carter, 
boyd, dave, leroi); + } + + @Test // DATAMONGO-1979 + void findWithSortOverwritesAnnotatedSort() { + assertThat(repository.findByAgeGreaterThan(40, Sort.by(Direction.ASC, "age"))).containsExactly(leroi, dave, boyd, + carter); + } + + @Test // DATAMONGO-2003 + void findByRegexWithPattern() { + assertThat(repository.findByFirstnameRegex(Pattern.compile(alicia.getFirstname()))).hasSize(1); + } + + @Test // DATAMONGO-2003 + void findByRegexWithPatternAndOptions() { + + String fn = alicia.getFirstname().toUpperCase(); + + assertThat(repository.findByFirstnameRegex(Pattern.compile(fn))).hasSize(0); + assertThat(repository.findByFirstnameRegex(Pattern.compile(fn, Pattern.CASE_INSENSITIVE))).hasSize(1); + } + + @Test // DATAMONGO-2149 + @DirtiesState + void annotatedQueryShouldAllowSliceInFieldsProjectionWithDbRef() { + + operations.remove(new Query(), User.class); + + List users = IntStream.range(0, 10).mapToObj(it -> { + + User user = new User(); + user.id = "id" + it; + user.username = "user" + it; + + return user; + }).collect(Collectors.toList()); + + users.forEach(operations::save); + + alicia.fans = new ArrayList<>(users); + operations.save(alicia); + + Person target = repository.findWithSliceInProjection(alicia.getId(), 0, 5); + assertThat(target.getFans()).hasSize(5); + } + + @Test // DATAMONGO-2149 + @DirtiesState + void annotatedQueryShouldAllowPositionalParameterInFieldsProjection() { + + Set
          addressList = IntStream.range(0, 10).mapToObj(it -> new Address("street-" + it, "zip", "lnz")) + .collect(Collectors.toSet()); + + alicia.setShippingAddresses(addressList); + operations.save(alicia); + + Person target = repository.findWithArrayPositionInProjection(1); + + assertThat(target).isNotNull(); + assertThat(target.getShippingAddresses()).hasSize(1); + } + + @Test // DATAMONGO-2149, DATAMONGO-2154, DATAMONGO-2199 + @DirtiesState + void annotatedQueryShouldAllowPositionalParameterInFieldsProjectionWithDbRef() { + + List userList = IntStream.range(0, 10).mapToObj(it -> { + + User user = new User(); + user.id = "" + it; + user.username = "user" + it; + + return user; + }).collect(Collectors.toList()); + + userList.forEach(operations::save); + + alicia.setFans(userList); + operations.save(alicia); + + Person target = repository.findWithArrayPositionInProjectionWithDbRef(1); + + assertThat(target).isNotNull(); + assertThat(target.getFans()).hasSize(1); + } + + @Test // DATAMONGO-2153 + void findListOfSingleValue() { + + assertThat(repository.findAllLastnames()).contains("Lessard", "Keys", "Tinsley", "Beauford", "Moore", "Matthews"); } - @Test(expected = IncorrectResultSizeDataAccessException.class) // DATAMONGO-1865 - public void findOptionalSingleEntityThrowsErrorWhenNotUnique() { - repository.findOptionalPersonByLastnameLike(dave.getLastname()); + @Test // GH-3543 + void findStreamOfSingleValue() { + + try (Stream lastnames = repository.findAllLastnamesAsStream()) { + assertThat(lastnames) // + .contains("Lessard", "Keys", "Tinsley", "Beauford", "Moore", "Matthews"); + } + } + + @Test // DATAMONGO-4841 + void annotatedAggregationStreamWithPlaceholderValue() { + + assertThat(repository.groupStreamByLastnameAnd("firstname")) + .contains(new PersonAggregate("Lessard", Collections.singletonList("Stefan"))) // + .contains(new PersonAggregate("Keys", Collections.singletonList("Alicia"))) // + .contains(new PersonAggregate("Tinsley", 
Collections.singletonList("Boyd"))) // + .contains(new PersonAggregate("Beauford", Collections.singletonList("Carter"))) // + .contains(new PersonAggregate("Moore", Collections.singletonList("Leroi"))) // + .contains(new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August"))); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithPlaceholderValue() { + + assertThat(repository.groupByLastnameAnd("firstname")) + .contains(new PersonAggregate("Lessard", Collections.singletonList("Stefan"))) // + .contains(new PersonAggregate("Keys", Collections.singletonList("Alicia"))) // + .contains(new PersonAggregate("Tinsley", Collections.singletonList("Boyd"))) // + .contains(new PersonAggregate("Beauford", Collections.singletonList("Carter"))) // + .contains(new PersonAggregate("Moore", Collections.singletonList("Leroi"))) // + .contains(new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August"))); + } + + @Test // GH-3543 + void annotatedAggregationWithPlaceholderAsSlice() { + + Slice slice = repository.groupByLastnameAndAsSlice("firstname", Pageable.ofSize(5)); + assertThat(slice).hasSize(5); + assertThat(slice.hasNext()).isTrue(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithSort() { + + assertThat(repository.groupByLastnameAnd("firstname", Sort.by("lastname"))) // + .containsSequence( // + new PersonAggregate("Beauford", Collections.singletonList("Carter")), // + new PersonAggregate("Keys", Collections.singletonList("Alicia")), // + new PersonAggregate("Lessard", Collections.singletonList("Stefan")), // + new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August")), // + new PersonAggregate("Moore", Collections.singletonList("Leroi")), // + new PersonAggregate("Tinsley", Collections.singletonList("Boyd"))); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithPageable() { + + assertThat(repository.groupByLastnameAnd("firstname", PageRequest.of(1, 2, Sort.by("lastname")))) // + .containsExactly( // + 
new PersonAggregate("Lessard", Collections.singletonList("Stefan")), // + new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August"))); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithSingleSimpleResult() { + assertThat(repository.sumAge()).isEqualTo(245); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithAggregationResultAsReturnType() { + + assertThat(repository.sumAgeAndReturnAggregationResultWrapper()) // + .isInstanceOf(AggregationResults.class) // + .containsExactly(new Document("_id", null).append("total", 245)); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithAggregationResultAsReturnTypeAndProjection() { + + assertThat(repository.sumAgeAndReturnAggregationResultWrapperWithConcreteType()) // + .isInstanceOf(AggregationResults.class) // + .containsExactly(new SumAge(245L)); + } + + @Test // GH-4839 + void annotatedAggregationWithAggregationResultAsClosedInterfaceProjection() { + + assertThat(repository.findAggregatedClosedInterfaceProjectionBy()).allSatisfy(it -> { + assertThat(it.getFirstname()).isIn(dave.getFirstname(), oliver.getFirstname()); + assertThat(it.getLastname()).isEqualTo(dave.getLastname()); + }); + } + + @Test // DATAMONGO-2374 + void findsWithNativeProjection() { + + assertThat(repository.findDocumentById(dave.getId()).get()).containsEntry("firstname", dave.getFirstname()) + .containsEntry("lastname", dave.getLastname()); + } + + @Test // DATAMONGO-1677 + @DirtiesState + void findWithMoreThan10Arguments() { + + alicia.setSkills(Arrays.asList("musician", "singer", "composer", "actress", "pianist")); + alicia.setAddress(new Address("street", "zipCode", "city")); + alicia.setUniqueId(UUID.randomUUID()); + UsernameAndPassword credentials = new UsernameAndPassword(); + credentials.password = "keys"; + credentials.username = "alicia"; + alicia.credentials = credentials; + + alicia = repository.save(this.alicia); + + assertThat(repository.findPersonByManyArguments(this.alicia.getFirstname(), 
this.alicia.getLastname(), + this.alicia.getEmail(), this.alicia.getAge(), Sex.FEMALE, this.alicia.createdAt, alicia.getSkills(), "street", + "zipCode", "city", alicia.getUniqueId(), credentials.username, credentials.password)).isNotNull(); + } + + @Test // DATAMONGO-1894 + void spelExpressionArgumentsGetReevaluatedOnEveryInvocation() { + + assertThat(repository.findWithSpelByFirstnameForSpELExpressionWithParameterIndexOnly("Dave")).containsExactly(dave); + assertThat(repository.findWithSpelByFirstnameForSpELExpressionWithParameterIndexOnly("Carter")) + .containsExactly(carter); + } + + @Test // DATAMONGO-1902 + @DirtiesState + void findByValueInsideUnwrapped() { + + Person bart = new Person("bart", "simpson"); + User user = new User(); + user.setUsername("bartman"); + user.setId("84r1m4n"); + bart.setUnwrappedUser(user); + + operations.save(bart); + + List result = repository.findByUnwrappedUserUsername(user.getUsername()); + + assertThat(result).hasSize(1); + assertThat(result.get(0).getId().equals(bart.getId())); + } + + @Test // DATAMONGO-1902 + @DirtiesState + void findByUnwrapped() { + + Person bart = new Person("bart", "simpson"); + User user = new User(); + user.setUsername("bartman"); + user.setId("84r1m4n"); + bart.setUnwrappedUser(user); + + operations.save(bart); + + List result = repository.findByUnwrappedUser(user); + + assertThat(result).hasSize(1); + assertThat(result.get(0).getId().equals(bart.getId())); + } + + @Test // GH-3395, GH-4404 + void caseInSensitiveInClause() { + + assertThat(repository.findByLastnameIgnoreCaseIn("bEAuFoRd", "maTTheWs")).hasSize(3); + + repository.save(new Person("the-first", "The First")); + repository.save(new Person("the-first-one", "The First One")); + repository.save(new Person("the-second", "The Second")); + + assertThat(repository.findByLastnameIgnoreCaseIn("tHE fIRsT")).hasSize(1); + } + + @Test // GH-3395 + void caseInSensitiveInClauseQuotesExpressions() { + 
assertThat(repository.findByLastnameIgnoreCaseIn(".*")).isEmpty(); + } + + @Test // GH-3395 + void caseSensitiveInClauseIgnoresExpressions() { + assertThat(repository.findByFirstnameIn(".*")).isEmpty(); + } + + @Test // GH-3583 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.4") + void annotatedQueryShouldAllowAggregationInProjection() { + + Person target = repository.findWithAggregationInProjection(alicia.getId()); + assertThat(target.getFirstname()).isEqualTo(alicia.getFirstname().toUpperCase()); + } + + @Test // GH-3633 + @DirtiesState + void annotatedQueryWithNullEqualityCheckShouldWork() { + + operations.updateFirst(Query.query(Criteria.where("id").is(dave.getId())), Update.update("age", null), + Person.class); + + Person byQueryWithNullEqualityCheck = repository.findByQueryWithNullEqualityCheck(); + assertThat(byQueryWithNullEqualityCheck.getId()).isEqualTo(dave.getId()); + } + + @Test // GH-3602 + @DirtiesState + void executesQueryWithDocumentReferenceCorrectly() { + + Person josh = new Person("Josh", "Long"); + User dave = new User(); + dave.id = "dave"; + + josh.setSpiritAnimal(dave); + + operations.save(josh); + + List result = repository.findBySpiritAnimal(dave); + assertThat(result).map(Person::getId).containsExactly(josh.getId()); + } + + @Test // GH-3656 + @DirtiesState + void resultProjectionWithOptionalIsExcecutedCorrectly() { + + carter.setAddress(new Address("batman", "robin", "gotham")); + repository.save(carter); + + PersonSummaryWithOptional result = repository.findSummaryWithOptionalByLastname("Beauford"); + + assertThat(result).isNotNull(); + assertThat(result.getAddress()).isPresent(); + assertThat(result.getFirstname()).contains("Carter"); + } + + @Test // GH-2107 + @DirtiesState + void shouldAllowToUpdateAllElements() { + assertThat(repository.findAndUpdateViaMethodArgAllByLastname("Matthews", new Update().inc("visits", 1337))) + .isEqualTo(2); + } + + @Test // GH-2107 + @DirtiesState + void annotatedUpdateIsAppliedCorrectly() { + + 
assertThat(repository.findAndIncrementVisitsByLastname("Matthews", 1337)).isEqualTo(2); + + assertThat(repository.findByLastname("Matthews")).extracting(Person::getVisits).allMatch(it -> it.equals(1337)); + } + + @Test // GH-2107 + @DirtiesState + void mixAnnotatedUpdateWithAnnotatedQuery() { + + assertThat(repository.updateAllByLastname("Matthews", 1337)).isEqualTo(2); + + assertThat(repository.findByLastname("Matthews")).extracting(Person::getVisits).allMatch(it -> it.equals(1337)); + } + + @Test // GH-2107 + @DirtiesState + void annotatedUpdateWithSpELIsAppliedCorrectly() { + + assertThat(repository.findAndIncrementVisitsUsingSpELByLastname("Matthews", 1337)).isEqualTo(2); + + assertThat(repository.findByLastname("Matthews")).extracting(Person::getVisits).allMatch(it -> it.equals(1337)); + } + + @Test // GH-2107 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + @DirtiesState + void annotatedAggregationUpdateIsAppliedCorrectly() { + + repository.findAndIncrementVisitsViaPipelineByLastname("Matthews", 1337); + + assertThat(repository.findByLastname("Matthews")).extracting(Person::getVisits).allMatch(it -> it.equals(1337)); + } + + @Test // GH-2107 + @DirtiesState + void shouldAllowToUpdateAllElementsWithVoidReturn() { + + repository.findAndUpdateViaMethodArgAllByLastname("Matthews", new Update().inc("visits", 1337)); + + assertThat(repository.findByLastname("Matthews")).extracting(Person::getVisits).allMatch(visits -> visits == 1337); + } + + @Test // GH-2107 + @DirtiesState + void allowsToUseComplexTypesInUpdate() { + + Address address = new Address("1007 Mountain Drive", "53540", "Gotham"); + + assertThat(repository.findAndPushShippingAddressByEmail(dave.getEmail(), address)).isEqualTo(1); + assertThat(repository.findById(dave.getId()).map(Person::getShippingAddresses)) + .contains(Collections.singleton(address)); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Address.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Address.java index 16a6168676..534f44c8fb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Address.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Address.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,9 @@ */ package org.springframework.data.mongodb.repository; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + import com.querydsl.core.annotations.QueryEmbeddable; /** @@ -32,9 +35,9 @@ protected Address() { } /** - * @param string - * @param string2 - * @param string3 + * @param street + * @param zipcode + * @param city */ public Address(String street, String zipcode, String city) { this.street = street; @@ -83,4 +86,28 @@ public String getCity() { public void setCity(String city) { this.city = city; } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + Address address = (Address) o; + + if (!ObjectUtils.nullSafeEquals(street, address.street)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(zipCode, address.zipCode)) { + return false; + } + return ObjectUtils.nullSafeEquals(city, address.city); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(street); + result = 31 * result + 
ObjectUtils.nullSafeHashCode(zipCode); + result = 31 * result + ObjectUtils.nullSafeHashCode(city); + return result; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ComplexIdRepositoryIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ComplexIdRepositoryIntegrationTests.java index 765c11910f..a4f533f0be 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ComplexIdRepositoryIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ComplexIdRepositoryIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,40 +15,43 @@ */ package org.springframework.data.mongodb.repository; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Optional; +import java.util.Set; -import org.hamcrest.Matchers; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Configuration; -import 
org.springframework.data.mongodb.config.AbstractMongoConfiguration; +import org.springframework.context.annotation.FilterType; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.context.junit.jupiter.SpringExtension; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * @author Christoph Strobl * @author Oliver Gierke * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) public class ComplexIdRepositoryIntegrationTests { + static @Client MongoClient mongoClient; + @Configuration - @EnableMongoRepositories - static class Config extends AbstractMongoConfiguration { + @EnableMongoRepositories(includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = UserWithComplexIdRepository.class)) + static class Config extends AbstractMongoClientConfiguration { @Override protected String getDatabaseName() { @@ -57,9 +60,13 @@ protected String getDatabaseName() { @Override public MongoClient mongoClient() { - return new MongoClient(); + return mongoClient; } + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.emptySet(); + } } @Autowired UserWithComplexIdRepository repo; @@ -68,7 +75,7 @@ public MongoClient mongoClient() { MyId id; UserWithComplexId userWithId; - @Before + @BeforeEach public void setUp() { repo.deleteAll(); @@ -87,7 +94,7 @@ public void annotatedFindQueryShouldWorkWhenUsingComplexId() { repo.save(userWithId); - 
assertThat(repo.getUserByComplexId(id), is(userWithId)); + assertThat(repo.getUserByComplexId(id)).isEqualTo(userWithId); } @Test // DATAMONGO-1078 @@ -97,8 +104,8 @@ public void annotatedFindQueryShouldWorkWhenUsingComplexIdWithinCollection() { List loaded = repo.findByUserIds(Collections.singleton(id)); - assertThat(loaded, hasSize(1)); - assertThat(loaded, contains(userWithId)); + assertThat(loaded).hasSize(1); + assertThat(loaded).containsExactly(userWithId); } @Test // DATAMONGO-1078 @@ -106,7 +113,7 @@ public void findOneShouldWorkWhenUsingComplexId() { repo.save(userWithId); - assertThat(repo.findById(id), is(Optional.of(userWithId))); + assertThat(repo.findById(id)).isEqualTo(Optional.of(userWithId)); } @Test // DATAMONGO-1078 @@ -116,8 +123,8 @@ public void findAllShouldWorkWhenUsingComplexId() { Iterable loaded = repo.findAllById(Collections.singleton(id)); - assertThat(loaded, is(Matchers. iterableWithSize(1))); - assertThat(loaded, contains(userWithId)); + assertThat(loaded).hasSize(1); + assertThat(loaded).containsExactly(userWithId); } @Test // DATAMONGO-1373 @@ -125,7 +132,7 @@ public void composedAnnotationFindQueryShouldWorkWhenUsingComplexId() { repo.save(userWithId); - assertThat(repo.getUserUsingComposedAnnotationByComplexId(id), is(userWithId)); + assertThat(repo.getUserUsingComposedAnnotationByComplexId(id)).isEqualTo(userWithId); } @Test // DATAMONGO-1373 @@ -133,7 +140,7 @@ public void composedAnnotationFindMetaShouldWorkWhenUsingComplexId() { repo.save(userWithId); - assertThat(repo.findUsersUsingComposedMetaAnnotationByUserIds(Arrays.asList(id)), hasSize(0)); + assertThat(repo.findUsersUsingComposedMetaAnnotationByUserIds(Arrays.asList(id))).hasSize(1); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Contact.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Contact.java index 9792fbc250..a3dbae74c7 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Contact.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Contact.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -27,8 +27,7 @@ @Document public abstract class Contact { - @Id - protected final String id; + @Id protected String id; public Contact() { this.id = new ObjectId().toString(); @@ -37,4 +36,8 @@ public Contact() { public String getId() { return id; } + + public void setId(String id) { + this.id = id; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepository.java index 359d0c4bdf..b9a0652d01 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,6 @@ */ package org.springframework.data.mongodb.repository; -import org.springframework.data.mongodb.repository.MongoRepository; - /** * Simple repository interface managing {@link Contact}s. * diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepositoryIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepositoryIntegrationTests.java index 62c620a5aa..5f502a22e5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepositoryIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ContactRepositoryIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,7 +23,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Example; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration tests for {@link ContactRepository}. Mostly related to mapping inheritance. 
@@ -32,7 +32,7 @@ * @author Mark Paluch * @author Christoph Strobl */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("config/MongoNamespaceIntegrationTests-context.xml") public class ContactRepositoryIntegrationTests { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ConvertingReactiveMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ConvertingReactiveMongoRepositoryTests.java index c4ad81113f..94a77f003a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ConvertingReactiveMongoRepositoryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ConvertingReactiveMongoRepositoryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,22 +15,18 @@ */ package org.springframework.data.mongodb.repository; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import io.reactivex.Flowable; -import io.reactivex.Maybe; -import io.reactivex.observers.TestObserver; -import lombok.Data; -import lombok.NoArgsConstructor; +import io.reactivex.rxjava3.core.Observable; +import io.reactivex.rxjava3.core.Single; +import io.reactivex.rxjava3.observers.TestObserver; +import io.reactivex.rxjava3.subscribers.TestSubscriber; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; -import rx.Observable; -import rx.Single; import java.util.Arrays; -import java.util.List; +import java.util.Objects; import org.junit.Before; import org.junit.Test; @@ -38,42 +34,46 @@ import org.reactivestreams.Publisher; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.ComponentScan.Filter; +import org.springframework.context.annotation.FilterType; import org.springframework.context.annotation.ImportResource; import org.springframework.data.annotation.Id; import org.springframework.data.domain.Sort; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.repository.config.EnableReactiveMongoRepositories; +import org.springframework.data.repository.reactive.ReactiveCrudRepository; import org.springframework.data.repository.reactive.ReactiveSortingRepository; -import org.springframework.data.repository.reactive.RxJava2SortingRepository; -import org.springframework.stereotype.Repository; +import 
org.springframework.data.repository.reactive.RxJava3CrudRepository; +import org.springframework.data.repository.reactive.RxJava3SortingRepository; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** * Test for {@link ReactiveMongoRepository} using reactive wrapper type conversion. * * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration(classes = ConvertingReactiveMongoRepositoryTests.Config.class) public class ConvertingReactiveMongoRepositoryTests { - @EnableReactiveMongoRepositories(includeFilters = @Filter(value = Repository.class), + @EnableReactiveMongoRepositories( + includeFilters = { @Filter(value = ReactivePersonRepostitory.class, type = FilterType.ASSIGNABLE_TYPE), + @Filter(value = RxJava3PersonRepostitory.class, type = FilterType.ASSIGNABLE_TYPE), + @Filter(value = MixedReactivePersonRepostitory.class, type = FilterType.ASSIGNABLE_TYPE) }, considerNestedRepositories = true) @ImportResource("classpath:reactive-infrastructure.xml") static class Config {} @Autowired MixedReactivePersonRepostitory reactiveRepository; @Autowired ReactivePersonRepostitory reactivePersonRepostitory; - @Autowired RxJava1PersonRepostitory rxJava1PersonRepostitory; - @Autowired RxJava2PersonRepostitory rxJava2PersonRepostitory; + @Autowired RxJava3PersonRepostitory rxJava3PersonRepostitory; ReactivePerson dave, oliver, carter, boyd, stefan, leroi, alicia; @Before public void setUp() { - StepVerifier.create(reactiveRepository.deleteAll()).verifyComplete(); + reactiveRepository.deleteAll().as(StepVerifier::create).verifyComplete(); dave = new ReactivePerson("Dave", "Matthews", 42); oliver = new ReactivePerson("Oliver August", "Matthews", 4); @@ -83,14 +83,15 @@ public void setUp() { leroi = new ReactivePerson("Leroi", "Moore", 41); alicia = new ReactivePerson("Alicia", 
"Keys", 30); - StepVerifier.create(reactiveRepository.saveAll(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia))) // + reactiveRepository.saveAll(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)) + .as(StepVerifier::create) // .expectNextCount(7) // .verifyComplete(); } @Test // DATAMONGO-1444 public void reactiveStreamsMethodsShouldWork() { - StepVerifier.create(reactivePersonRepostitory.existsById(dave.getId())).expectNext(true).verifyComplete(); + reactivePersonRepostitory.existsById(dave.getId()).as(StepVerifier::create).expectNext(true).verifyComplete(); } @Test // DATAMONGO-1444 @@ -98,160 +99,101 @@ public void reactiveStreamsQueryMethodsShouldWork() { StepVerifier.create(reactivePersonRepostitory.findByLastname(boyd.getLastname())).expectNext(boyd).verifyComplete(); } - @Test // DATAMONGO-1444 - public void simpleRxJava1MethodsShouldWork() throws Exception { - - rxJava1PersonRepostitory.existsById(dave.getId()) // - .test() // - .awaitTerminalEvent() // - .assertValue(true) // - .assertNoErrors() // - .assertCompleted(); - } - - @Test // DATAMONGO-1444 - public void existsWithSingleRxJava1IdMethodsShouldWork() throws Exception { - - rxJava1PersonRepostitory.existsById(Single.just(dave.getId())) // - .test() // - .awaitTerminalEvent() // - .assertValue(true) // - .assertNoErrors() // - .assertCompleted(); - } - - @Test // DATAMONGO-1444 - public void singleRxJava1QueryMethodShouldWork() throws Exception { - - rxJava1PersonRepostitory.findByFirstnameAndLastname(dave.getFirstname(), dave.getLastname()) // - .test() // - .awaitTerminalEvent() // - .assertValue(dave) // - .assertNoErrors() // - .assertCompleted(); - } - - @Test // DATAMONGO-1444 - public void singleProjectedRxJava1QueryMethodShouldWork() throws Exception { - - List people = rxJava1PersonRepostitory.findProjectedByLastname(carter.getLastname()) // - .test() // - .awaitTerminalEvent() // - .assertValueCount(1) // - .assertNoErrors() // - .assertCompleted() // - 
.getOnNextEvents(); - - ProjectedPerson projectedPerson = people.get(0); - assertThat(projectedPerson.getFirstname(), is(equalTo(carter.getFirstname()))); - } - - @Test // DATAMONGO-1444 - public void observableRxJava1QueryMethodShouldWork() throws Exception { - - rxJava1PersonRepostitory.findByLastname(boyd.getLastname()) // - .test() // - .awaitTerminalEvent() // - .assertValue(boyd) // - .assertNoErrors() // - .assertCompleted() // - .getOnNextEvents(); - } + @Test // DATAMONGO-2558 + public void simpleRxJava3MethodsShouldWork() throws InterruptedException { - @Test // DATAMONGO-1610 - public void simpleRxJava2MethodsShouldWork() throws Exception { + TestObserver testObserver = rxJava3PersonRepostitory.existsById(dave.getId()).test(); - TestObserver testObserver = rxJava2PersonRepostitory.existsById(dave.getId()).test(); - - testObserver.awaitTerminalEvent(); + testObserver.await(); testObserver.assertComplete(); testObserver.assertNoErrors(); testObserver.assertValue(true); } - @Test // DATAMONGO-1610 - public void existsWithSingleRxJava2IdMethodsShouldWork() throws Exception { + @Test // DATAMONGO-2558 + public void existsWithSingleRxJava3IdMethodsShouldWork() throws InterruptedException { - TestObserver testObserver = rxJava2PersonRepostitory.existsById(io.reactivex.Single.just(dave.getId())) + TestObserver testObserver = rxJava3PersonRepostitory.existsById(Single.just(dave.getId())) .test(); - testObserver.awaitTerminalEvent(); + testObserver.await(); testObserver.assertComplete(); testObserver.assertNoErrors(); testObserver.assertValue(true); } - @Test // DATAMONGO-1610 - public void flowableRxJava2QueryMethodShouldWork() throws Exception { + @Test // DATAMONGO-2558 + public void flowableRxJava3QueryMethodShouldWork() throws InterruptedException { - io.reactivex.subscribers.TestSubscriber testSubscriber = rxJava2PersonRepostitory + TestSubscriber testSubscriber = rxJava3PersonRepostitory .findByFirstnameAndLastname(dave.getFirstname(), 
dave.getLastname()).test(); - testSubscriber.awaitTerminalEvent(); + testSubscriber.await(); testSubscriber.assertComplete(); testSubscriber.assertNoErrors(); testSubscriber.assertValue(dave); } - @Test // DATAMONGO-1610 - public void singleProjectedRxJava2QueryMethodShouldWork() throws Exception { + @Test // DATAMONGO-2558 + public void singleProjectedRxJava3QueryMethodShouldWork() throws InterruptedException { - TestObserver testObserver = rxJava2PersonRepostitory - .findProjectedByLastname(Maybe.just(carter.getLastname())).test(); + io.reactivex.rxjava3.observers.TestObserver testObserver = rxJava3PersonRepostitory + .findProjectedByLastname(io.reactivex.rxjava3.core.Maybe.just(carter.getLastname())).test(); - testObserver.awaitTerminalEvent(); + testObserver.await(); testObserver.assertComplete(); testObserver.assertNoErrors(); testObserver.assertValue(actual -> { - assertThat(actual.getFirstname(), is(equalTo(carter.getFirstname()))); + assertThat(actual.getFirstname()).isEqualTo(carter.getFirstname()); return true; }); } - @Test // DATAMONGO-1610 - public void observableProjectedRxJava2QueryMethodShouldWork() throws Exception { + @Test // DATAMONGO-2558 + public void observableProjectedRxJava3QueryMethodShouldWork() throws InterruptedException { - TestObserver testObserver = rxJava2PersonRepostitory - .findProjectedByLastname(Single.just(carter.getLastname())).test(); + io.reactivex.rxjava3.observers.TestObserver testObserver = rxJava3PersonRepostitory + .findProjectedByLastname(io.reactivex.rxjava3.core.Single.just(carter.getLastname())).test(); - testObserver.awaitTerminalEvent(); + testObserver.await(); testObserver.assertComplete(); testObserver.assertNoErrors(); testObserver.assertValue(actual -> { - assertThat(actual.getFirstname(), is(equalTo(carter.getFirstname()))); + assertThat(actual.getFirstname()).isEqualTo(carter.getFirstname()); return true; }); } - @Test // DATAMONGO-1610 - public void maybeRxJava2QueryMethodShouldWork() throws Exception { + 
@Test // DATAMONGO-2558 + public void maybeRxJava3QueryMethodShouldWork() throws InterruptedException { - TestObserver testObserver = rxJava2PersonRepostitory.findByLastname(boyd.getLastname()).test(); + io.reactivex.rxjava3.observers.TestObserver testObserver = rxJava3PersonRepostitory + .findByLastname(boyd.getLastname()).test(); - testObserver.awaitTerminalEvent(); + testObserver.await(); testObserver.assertComplete(); testObserver.assertNoErrors(); testObserver.assertValue(boyd); } - @Test // DATAMONGO-1444 - public void mixedRepositoryShouldWork() { - - reactiveRepository.findByLastname(boyd.getLastname()) // - .test() // - .awaitTerminalEvent() // - .assertValue(boyd) // - .assertNoErrors() // - .assertCompleted() // - .getOnNextEvents(); - } +// @Test // DATAMONGO-1444 +// public void mixedRepositoryShouldWork() { +// +// reactiveRepository.findByLastname(boyd.getLastname()) // +// .test() // +// .awaitTerminalEvent() // +// .assertValue(boyd) // +// .assertNoErrors() // +// .assertCompleted() // +// .getOnNextEvents(); +// } @Test // DATAMONGO-1444 public void shouldFindOneBySingleOfLastName() { - StepVerifier.create(reactiveRepository.findByLastname(Single.just(carter.getLastname()))) // + reactiveRepository.findByLastname(Single.just(carter.getLastname())).as(StepVerifier::create) // .expectNext(carter) // .verifyComplete(); } @@ -259,58 +201,46 @@ public void shouldFindOneBySingleOfLastName() { @Test // DATAMONGO-1444 public void shouldFindByObservableOfLastNameIn() { - StepVerifier.create(reactiveRepository.findByLastnameIn(Observable.just(carter.getLastname(), dave.getLastname()))) // + reactiveRepository.findByLastnameIn(Observable.just(carter.getLastname(), dave.getLastname())) + .as(StepVerifier::create) // .expectNextCount(3) // .verifyComplete(); } - @Test // DATAMONGO-1444 - public void shouldFindByPublisherOfLastNameInAndAgeGreater() { - - List people = reactiveRepository - .findByLastnameInAndAgeGreaterThan(Flux.just(carter.getLastname(), 
dave.getLastname()), 41).test() // - .awaitTerminalEvent() // - .assertValueCount(2) // - .assertNoErrors() // - .assertCompleted() // - .getOnNextEvents(); - - assertThat(people, hasItems(carter, dave)); - } - - @Repository - interface ReactivePersonRepostitory extends ReactiveSortingRepository { +// @Test // DATAMONGO-1444 +// public void shouldFindByPublisherOfLastNameInAndAgeGreater() { +// +// List people = reactiveRepository +// .findByLastnameInAndAgeGreaterThan(Flux.just(carter.getLastname(), dave.getLastname()), 41).test() // +// .awaitTerminalEvent() // +// .assertValueCount(2) // +// .assertNoErrors() // +// .assertCompleted() // +// .getOnNextEvents(); +// +// assertThat(people).contains(carter, dave); +// } + +interface ReactivePersonRepostitory + extends ReactiveCrudRepository, ReactiveSortingRepository { Publisher findByLastname(String lastname); } - @Repository - interface RxJava1PersonRepostitory extends org.springframework.data.repository.Repository { - - Observable findByFirstnameAndLastname(String firstname, String lastname); - - Single findByLastname(String lastname); - - Single findProjectedByLastname(String lastname); - - Single existsById(String id); - - Single existsById(Single id); - } - - @Repository - interface RxJava2PersonRepostitory extends RxJava2SortingRepository { + interface RxJava3PersonRepostitory + extends RxJava3CrudRepository, RxJava3SortingRepository { - Flowable findByFirstnameAndLastname(String firstname, String lastname); + io.reactivex.rxjava3.core.Flowable findByFirstnameAndLastname(String firstname, String lastname); - Maybe findByLastname(String lastname); + io.reactivex.rxjava3.core.Maybe findByLastname(String lastname); - io.reactivex.Single findProjectedByLastname(Maybe lastname); + io.reactivex.rxjava3.core.Single findProjectedByLastname( + io.reactivex.rxjava3.core.Maybe lastname); - io.reactivex.Observable findProjectedByLastname(Single lastname); + io.reactivex.rxjava3.core.Observable findProjectedByLastname( + 
io.reactivex.rxjava3.core.Single lastname); } - @Repository interface MixedReactivePersonRepostitory extends ReactiveMongoRepository { Single findByLastname(String lastname); @@ -325,8 +255,6 @@ interface MixedReactivePersonRepostitory extends ReactiveMongoRepository result = repo.findAllBy(TextCriteria.forDefaultLanguage().matchingAny("stallone", "payne")); - assertThat(result, hasSize(2)); - assertThat(result, hasItems(PASSENGER_57, DEMOLITION_MAN)); + assertThat(result).hasSize(2); + assertThat(result).contains(PASSENGER_57, DEMOLITION_MAN); } @Test // DATAMONGO-973 - public void derivedFinderWithTextCriteriaReturnsCorrectResult() { + void derivedFinderWithTextCriteriaReturnsCorrectResult() { initRepoWithDefaultDocuments(); FullTextDocument blade = new FullTextDocument("4", "Blade", @@ -111,26 +98,26 @@ public void derivedFinderWithTextCriteriaReturnsCorrectResult() { List result = repo.findByNonTextIndexProperty("foo", TextCriteria.forDefaultLanguage().matching("snipes")); - assertThat(result, hasSize(1)); - assertThat(result, hasItems(blade)); + assertThat(result).hasSize(1); + assertThat(result).contains(blade); } @Test // DATAMONGO-973 - public void findByWithPaginationWorksCorrectlyWhenUsingTextCriteria() { + void findByWithPaginationWorksCorrectlyWhenUsingTextCriteria() { initRepoWithDefaultDocuments(); Page page = repo.findAllBy(TextCriteria.forDefaultLanguage().matching("film"), PageRequest.of(1, 1, Direction.ASC, "id")); - assertThat(page.hasNext(), is(true)); - assertThat(page.hasPrevious(), is(true)); - assertThat(page.getTotalElements(), is(3L)); - assertThat(page.getContent().get(0), equalTo(DEMOLITION_MAN)); + assertThat(page.hasNext()).isTrue(); + assertThat(page.hasPrevious()).isTrue(); + assertThat(page.getTotalElements()).isEqualTo(3L); + assertThat(page.getContent().get(0)).isEqualTo(DEMOLITION_MAN); } @Test // DATAMONGO-973 - public void findAllByTextCriteriaWithSortWorksCorrectly() { + void findAllByTextCriteriaWithSortWorksCorrectly() { 
initRepoWithDefaultDocuments(); FullTextDocument snipes = new FullTextDocument("4", "Snipes", "Wesley Trent Snipes is an actor and film producer."); @@ -139,12 +126,12 @@ public void findAllByTextCriteriaWithSortWorksCorrectly() { List result = repo.findAllBy(TextCriteria.forDefaultLanguage().matching("snipes"), Sort.by("score")); - assertThat(result.size(), is(4)); - assertThat(result.get(0), equalTo(snipes)); + assertThat(result.size()).isEqualTo(4); + assertThat(result.get(0)).isEqualTo(snipes); } @Test // DATAMONGO-973 - public void findByWithSortByScoreViaPageRequestTriggersSortingCorrectly() { + void findByWithSortByScoreViaPageRequestTriggersSortingCorrectly() { initRepoWithDefaultDocuments(); FullTextDocument snipes = new FullTextDocument("4", "Snipes", "Wesley Trent Snipes is an actor and film producer."); @@ -153,12 +140,12 @@ public void findByWithSortByScoreViaPageRequestTriggersSortingCorrectly() { Page page = repo.findAllBy(TextCriteria.forDefaultLanguage().matching("snipes"), PageRequest.of(0, 10, Direction.ASC, "score")); - assertThat(page.getTotalElements(), is(4L)); - assertThat(page.getContent().get(0), equalTo(snipes)); + assertThat(page.getTotalElements()).isEqualTo(4L); + assertThat(page.getContent().get(0)).isEqualTo(snipes); } @Test // DATAMONGO-973 - public void findByWithSortViaPageRequestIgnoresTextScoreWhenSortedByOtherProperty() { + void findByWithSortViaPageRequestIgnoresTextScoreWhenSortedByOtherProperty() { initRepoWithDefaultDocuments(); FullTextDocument snipes = new FullTextDocument("4", "Snipes", "Wesley Trent Snipes is an actor and film producer."); @@ -167,12 +154,12 @@ public void findByWithSortViaPageRequestIgnoresTextScoreWhenSortedByOtherPropert Page page = repo.findAllBy(TextCriteria.forDefaultLanguage().matching("snipes"), PageRequest.of(0, 10, Direction.ASC, "id")); - assertThat(page.getTotalElements(), is(4L)); - assertThat(page.getContent().get(0), equalTo(PASSENGER_57)); + 
assertThat(page.getTotalElements()).isEqualTo(4L); + assertThat(page.getContent().get(0)).isEqualTo(PASSENGER_57); } @Test // DATAMONGO-973 - public void derivedSortForTextScorePropertyWorksCorrectly() { + void derivedSortForTextScorePropertyWorksCorrectly() { initRepoWithDefaultDocuments(); FullTextDocument snipes = new FullTextDocument("4", "Snipes", "Wesley Trent Snipes is an actor and film producer."); @@ -180,37 +167,23 @@ public void derivedSortForTextScorePropertyWorksCorrectly() { List result = repo .findByNonTextIndexPropertyIsNullOrderByScoreDesc(TextCriteria.forDefaultLanguage().matching("snipes")); - assertThat(result.get(0), equalTo(snipes)); + assertThat(result.get(0)).isEqualTo(snipes); } - @Test // DATAMONGO-973 - public void derivedFinderMethodWithoutFullTextShouldNoCauseTroubleWhenHavingEntityWithTextScoreProperty() { + @Test // DATAMONGO-973, DATAMONGO-2516 + void derivedFinderMethodWithoutFullTextShouldNoCauseTroubleWhenHavingEntityWithTextScoreProperty() { initRepoWithDefaultDocuments(); List result = repo.findByTitle(DROP_ZONE.getTitle()); - assertThat(result.get(0), equalTo(DROP_ZONE)); - assertThat(result.get(0).score, equalTo(0.0F)); + + assertThat(result.get(0)).isEqualTo(DROP_ZONE); + assertThat(result.get(0).score).isNull(); } private void initRepoWithDefaultDocuments() { repo.saveAll(Arrays.asList(PASSENGER_57, DEMOLITION_MAN, DROP_ZONE)); } - @org.springframework.context.annotation.Configuration - public static class Configuration extends AbstractMongoConfiguration { - - @Override - protected String getDatabaseName() { - return ClassUtils.getShortNameAsProperty(MongoRepositoryTextSearchIntegrationTests.class); - } - - @Override - public MongoClient mongoClient() { - return new MongoClient(); - } - - } - static class FullTextDocument { private @Id String id; @@ -260,7 +233,7 @@ public int hashCode() { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; } diff 
--git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/MyId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/MyId.java index 6b39d789c5..3dace8928b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/MyId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/MyId.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,6 +17,7 @@ import java.io.Serializable; +import org.springframework.lang.Nullable; import org.springframework.util.ObjectUtils; /** @@ -42,7 +43,7 @@ public int hashCode() { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (obj == this) { return true; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Person.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Person.java index 172e3a65f8..664b5279c8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Person.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Person.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,7 @@ import java.util.Date; import java.util.List; import java.util.Set; +import java.util.UUID; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; @@ -26,7 +27,10 @@ import org.springframework.data.mongodb.core.index.Indexed; import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.Unwrapped; +import org.springframework.lang.Nullable; /** * Sample domain class. @@ -34,6 +38,7 @@ * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch */ @Document public class Person extends Contact { @@ -44,7 +49,7 @@ public enum Sex { private String firstname; private String lastname; - @Indexed(unique = true, dropDups = true) private String email; + @Indexed(unique = true) private String email; private Integer age; @SuppressWarnings("unused") private Sex sex; Date createdAt; @@ -56,6 +61,8 @@ public enum Sex { private @Field("add") Address address; private Set
          shippingAddresses; + private UUID uniqueId; + @DBRef User creator; @DBRef(lazy = true) User coworker; @@ -66,6 +73,13 @@ public enum Sex { Credentials credentials; + @Unwrapped.Nullable(prefix = "u") // + User unwrappedUser; + + @DocumentReference User spiritAnimal; + + int visits; + public Person() { this(null, null); @@ -196,6 +210,14 @@ public void setShippingAddresses(Set
          addresses) { this.shippingAddresses = addresses; } + public UUID getUniqueId() { + return uniqueId; + } + + public void setUniqueId(UUID uniqueId) { + this.uniqueId = uniqueId; + } + /* (non-Javadoc) * @see org.springframework.data.mongodb.repository.Contact#getName() */ @@ -245,13 +267,16 @@ public void setCoworker(User coworker) { this.coworker = coworker; } - /* - * (non-Javadoc) - * - * @see java.lang.Object#equals(java.lang.Object) - */ + public int getVisits() { + return visits; + } + + public void setVisits(int visits) { + this.visits = visits; + } + @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (this == obj) { return true; @@ -284,11 +309,22 @@ public List getSkills() { return skills; } - /* - * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ + public User getUnwrappedUser() { + return unwrappedUser; + } + + public void setUnwrappedUser(User unwrappedUser) { + this.unwrappedUser = unwrappedUser; + } + + public User getSpiritAnimal() { + return spiritAnimal; + } + + public void setSpiritAnimal(User spiritAnimal) { + this.spiritAnimal = spiritAnimal; + } + @Override public int hashCode() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonAggregate.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonAggregate.java new file mode 100644 index 0000000000..16b2157bc8 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonAggregate.java @@ -0,0 +1,75 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; + +import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.PersistenceConstructor; + +/** + * @author Christoph Strobl + * @author Mark Paluch + */ +final class PersonAggregate { + + @Id private final String lastname; + private final Set names; + + public PersonAggregate(String lastname, String name) { + this(lastname, Collections.singletonList(name)); + } + + @PersistenceConstructor + public PersonAggregate(String lastname, Collection names) { + + this.lastname = lastname; + this.names = new HashSet<>(names); + } + + public String getLastname() { + return this.lastname; + } + + public Set getNames() { + return this.names; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + PersonAggregate that = (PersonAggregate) o; + return Objects.equals(lastname, that.lastname) && Objects.equals(names, that.names); + } + + @Override + public int hashCode() { + return Objects.hash(lastname, names); + } + + public String toString() { + return "PersonAggregate(lastname=" + this.getLastname() + ", names=" + this.getNames() + ")"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonExcerpt.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonExcerpt.java index 094cfd9118..e531af2212 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonExcerpt.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonExcerpt.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java index 8a6f35d70c..c66b554078 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,13 +19,18 @@ import java.util.Date; import java.util.List; import java.util.Optional; +import java.util.UUID; +import java.util.regex.Pattern; import java.util.stream.Stream; +import org.springframework.data.domain.Limit; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Range; +import org.springframework.data.domain.ScrollPosition; import org.springframework.data.domain.Slice; import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; import org.springframework.data.geo.Box; import org.springframework.data.geo.Circle; import org.springframework.data.geo.Distance; @@ -33,6 +38,8 @@ import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Point; import org.springframework.data.geo.Polygon; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.repository.Person.Sex; import org.springframework.data.querydsl.QuerydslPredicateExecutor; import org.springframework.data.repository.query.Param; @@ -111,7 +118,30 @@ public interface PersonRepository extends MongoRepository, Query List findByAgeLessThan(int age, Sort sort); /** - * Returns a page of {@link Person}s with a lastname mathing the given one (*-wildcards supported). + * Returns a scroll of {@link Person}s with a lastname matching the given one (*-wildcards supported). 
+ * + * @param lastname + * @param scrollPosition + * @return + */ + Window findTop2ByLastnameLikeOrderByLastnameAscFirstnameAsc(String lastname, + ScrollPosition scrollPosition); + + Window findByLastnameLikeOrderByLastnameAscFirstnameAsc(String lastname, + ScrollPosition scrollPosition, Limit limit); + + /** + * Returns a scroll of {@link Person}s applying projections with a lastname matching the given one (*-wildcards + * supported). + * + * @param lastname + * @param pageable + * @return + */ + Window findCursorProjectionByLastnameLike(String lastname, Pageable pageable); + + /** + * Returns a page of {@link Person}s with a lastname matching the given one (*-wildcards supported). * * @param lastname * @param pageable @@ -119,9 +149,13 @@ public interface PersonRepository extends MongoRepository, Query */ Page findByLastnameLike(String lastname, Pageable pageable); + List findByLastnameLike(String lastname, Sort sort, Limit limit); + @Query("{ 'lastname' : { '$regex' : '?0', '$options' : 'i'}}") Page findByLastnameLikeWithPageable(String lastname, Pageable pageable); + List findByLastnameIgnoreCaseIn(String... lastname); + /** * Returns all {@link Person}s with a firstname contained in the given varargs. 
* @@ -261,6 +295,9 @@ public interface PersonRepository extends MongoRepository, Query // DATAMONGO-566 Long deletePersonByLastname(String lastname); + // DATAMONGO-1997 + Optional deleteOptionalByLastname(String lastname); + // DATAMONGO-566 @Query(value = "{ 'lastname' : ?0 }", delete = true) List removeByLastnameUsingAnnotatedQuery(String lastname); @@ -299,6 +336,8 @@ Page findByCustomQueryLastnameAndAddressStreetInList(String lastname, Li // DATAMONGO-1030 PersonSummaryDto findSummaryByLastname(String lastname); + PersonSummaryWithOptional findSummaryWithOptionalByLastname(String lastname); + @Query("{ ?0 : ?1 }") List findByKeyValue(String key, String value); @@ -318,6 +357,10 @@ Page findByCustomQueryLastnameAndAddressStreetInList(String lastname, Li @Query("{ firstname : :#{#firstname}}") List findWithSpelByFirstnameForSpELExpressionWithParameterVariableOnly(@Param("firstname") String firstname); + // DATAMONGO-1911 + @Query("{ uniqueId: ?0}") + Person findByUniqueId(UUID uniqueId); + /** * Returns the count of {@link Person} with the given firstname. Uses {@link CountQuery} annotation to define the * query to be executed. 
@@ -342,4 +385,99 @@ Page findByCustomQueryLastnameAndAddressStreetInList(String lastname, Li // DATAMONGO-1752 Iterable findClosedProjectionBy(); + + @Query(sort = "{ age : -1 }") + List findByAgeGreaterThan(int age); + + @Query(sort = "{ age : -1 }") + List findByAgeGreaterThan(int age, Sort sort); + + List findByFirstnameRegex(Pattern pattern); + + @Query(value = "{ 'id' : ?0 }", fields = "{ 'fans': { '$slice': [ ?1, ?2 ] } }") + Person findWithSliceInProjection(String id, int skip, int limit); + + @Query(value = "{ 'id' : ?0 }", fields = "{ 'firstname': { '$toUpper': '$firstname' } }") + Person findWithAggregationInProjection(String id); + + @Query(value = "{ 'shippingAddresses' : { '$elemMatch' : { 'city' : { '$eq' : 'lnz' } } } }", + fields = "{ 'shippingAddresses.$': ?0 }") + Person findWithArrayPositionInProjection(int position); + + @Query(value = "{ 'fans' : { '$elemMatch' : { '$ref' : 'user' } } }", fields = "{ 'fans.$': ?0 }") + Person findWithArrayPositionInProjectionWithDbRef(int position); + + @Aggregation("{ '$project': { '_id' : '$lastname' } }") + List findAllLastnames(); + + @Aggregation("{ '$project': { '_id' : '$lastname' } }") + Stream findAllLastnamesAsStream(); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + Stream groupStreamByLastnameAnd(String property); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + List groupByLastnameAnd(String property); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + Slice groupByLastnameAndAsSlice(String property, Pageable pageable); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + List groupByLastnameAnd(String property, Sort sort); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + List groupByLastnameAnd(String property, Pageable page); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { 
$sum: '$age' } } }") + int sumAge(); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + AggregationResults sumAgeAndReturnAggregationResultWrapper(); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + AggregationResults sumAgeAndReturnAggregationResultWrapperWithConcreteType(); + + @Aggregation({ + "{ '$match' : { 'lastname' : 'Matthews'} }", + "{ '$project': { _id : 0, firstname : 1, lastname : 1 } }" + }) + Iterable findAggregatedClosedInterfaceProjectionBy(); + + @Query(value = "{_id:?0}") + Optional findDocumentById(String id); + + @Query(value = "{ 'firstname' : ?0, 'lastname' : ?1, 'email' : ?2 , 'age' : ?3, 'sex' : ?4, " + + "'createdAt' : ?5, 'skills' : ?6, 'address.street' : ?7, 'address.zipCode' : ?8, " // + + "'address.city' : ?9, 'uniqueId' : ?10, 'credentials.username' : ?11, 'credentials.password' : ?12 }") + Person findPersonByManyArguments(String firstname, String lastname, String email, Integer age, Sex sex, + Date createdAt, List skills, String street, String zipCode, // + String city, UUID uniqueId, String username, String password); + + List findByUnwrappedUserUsername(String username); + + List findByUnwrappedUser(User user); + + int findAndUpdateViaMethodArgAllByLastname(String lastname, UpdateDefinition update); + + @Update("{ '$inc' : { 'visits' : ?1 } }") + int findAndIncrementVisitsByLastname(String lastname, int increment); + + @Query("{ 'lastname' : ?0 }") + @Update("{ '$inc' : { 'visits' : ?1 } }") + int updateAllByLastname(String lastname, int increment); + + @Update(pipeline = { "{ '$set' : { 'visits' : { '$add' : [ '$visits', ?1 ] } } }" }) + void findAndIncrementVisitsViaPipelineByLastname(String lastname, int increment); + + @Update("{ '$inc' : { 'visits' : ?#{[1]} } }") + int findAndIncrementVisitsUsingSpELByLastname(String lastname, int increment); + + @Update("{ '$push' : { 'shippingAddresses' : ?1 } }") + int 
findAndPushShippingAddressByEmail(String email, Address address); + + @Query("{ 'age' : null }") + Person findByQueryWithNullEqualityCheck(); + + List findBySpiritAnimal(User user); + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests.java index 4db65c2180..c407d76276 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java index c98996e79c..f94a52e916 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,21 +15,22 @@ */ package org.springframework.data.mongodb.repository; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledForJreRange; +import org.junit.jupiter.api.condition.JRE; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; /** * Integration test for {@link PersonRepository} for lazy loading support. 
@@ -38,13 +39,13 @@ * @author Oliver Gierke */ @ContextConfiguration(locations = "PersonRepositoryIntegrationTests-context.xml") -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith(SpringExtension.class) public class PersonRepositoryLazyLoadingIntegrationTests { @Autowired PersonRepository repository; @Autowired MongoOperations operations; - @Before + @BeforeEach public void setUp() throws InterruptedException { repository.deleteAll(); @@ -61,7 +62,6 @@ public void shouldLoadAssociationWithDbRefOnInterfaceAndLazyLoadingEnabled() thr Person person = new Person(); person.setFirstname("Oliver"); person.setFans(Arrays.asList(thomas)); - person.setRealFans(new ArrayList(Arrays.asList(thomas))); repository.save(person); Person oliver = repository.findById(person.id).get(); @@ -71,11 +71,12 @@ public void shouldLoadAssociationWithDbRefOnInterfaceAndLazyLoadingEnabled() thr User user = fans.get(0); assertProxyIsResolved(fans, true); - assertThat(user.getUsername(), is(thomas.getUsername())); + assertThat(user.getUsername()).isEqualTo(thomas.getUsername()); } @Test // DATAMONGO-348 - public void shouldLoadAssociationWithDbRefOnConcreteCollectionAndLazyLoadingEnabled() throws Exception { + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg; ArrayList require to open java.util.") + public void shouldLoadAssociationWithDbRefOnConcreteCollectionAndLazyLoadingEnabled() { User thomas = new User(); thomas.username = "Thomas"; @@ -83,7 +84,6 @@ public void shouldLoadAssociationWithDbRefOnConcreteCollectionAndLazyLoadingEnab Person person = new Person(); person.setFirstname("Oliver"); - person.setFans(Arrays.asList(thomas)); person.setRealFans(new ArrayList(Arrays.asList(thomas))); repository.save(person); @@ -93,13 +93,13 @@ public void shouldLoadAssociationWithDbRefOnConcreteCollectionAndLazyLoadingEnab assertProxyIsResolved(realFans, false); User realFan = realFans.get(0); assertProxyIsResolved(realFans, true); - assertThat(realFan.getUsername(), 
is(thomas.getUsername())); + assertThat(realFan.getUsername()).isEqualTo(thomas.getUsername()); realFans = oliver.getRealFans(); assertProxyIsResolved(realFans, true); realFan = realFans.get(0); - assertThat(realFan.getUsername(), is(thomas.getUsername())); + assertThat(realFan.getUsername()).isEqualTo(thomas.getUsername()); } @Test // DATAMONGO-348 @@ -119,8 +119,8 @@ public void shouldLoadAssociationWithDbRefOnConcreteDomainClassAndLazyLoadingEna User coworker = oliver.getCoworker(); assertProxyIsResolved(coworker, false); - assertThat(coworker.getUsername(), is(thomas.getUsername())); + assertThat(coworker.getUsername()).isEqualTo(thomas.getUsername()); assertProxyIsResolved(coworker, true); - assertThat(coworker.getUsername(), is(thomas.getUsername())); + assertThat(coworker.getUsername()).isEqualTo(thomas.getUsername()); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryTransactionalTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryTransactionalTests.java new file mode 100644 index 0000000000..0af684b9c1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryTransactionalTests.java @@ -0,0 +1,229 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.test.util.MongoTestUtils.*; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; + +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan.Filter; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; +import org.springframework.data.domain.Persistable; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoTransactionManager; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; +import org.springframework.data.mongodb.test.util.AfterTransactionAssertion; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.ReplSetClient; +import org.springframework.lang.Nullable; +import org.springframework.test.annotation.Rollback; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.test.context.transaction.AfterTransaction; +import org.springframework.test.context.transaction.BeforeTransaction; +import 
org.springframework.transaction.annotation.Transactional; + +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.model.Filters; + +/** + * @author Christoph Strobl + * @currentRead Shadow's Edge - Brent Weeks + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@EnableIfReplicaSetAvailable +@EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") +@Transactional(transactionManager = "txManager") +public class PersonRepositoryTransactionalTests { + + static final String DB_NAME = "repository-tx-tests"; + static @ReplSetClient MongoClient mongoClient; + + @Configuration + @EnableMongoRepositories(includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = PersonRepository.class)) + static class Config extends AbstractMongoClientConfiguration { + + @Bean + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return DB_NAME; + } + + @Bean + MongoTransactionManager txManager(MongoDatabaseFactory dbFactory) { + return new MongoTransactionManager(dbFactory); + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return Collections.singleton(Person.class); + } + } + + @Autowired MongoClient client; + @Autowired PersonRepository repository; + @Autowired MongoTemplate template; + + Person durzo, kylar, vi; + + List all; + + List>> assertionList; + + @BeforeEach + public void setUp() { + assertionList = new CopyOnWriteArrayList<>(); + } + + @BeforeTransaction + public void beforeTransaction() { + + createOrReplaceCollection(DB_NAME, template.getCollectionName(Person.class), client); + createOrReplaceCollection(DB_NAME, template.getCollectionName(User.class), client); + + durzo = new Person("Durzo", "Blint", 700); + kylar = new Person("Kylar", "Stern", 21); + vi = new Person("Viridiana", "Sovari", 20); + + all = 
repository.saveAll(Arrays.asList(durzo, kylar, vi)); + } + + @AfterTransaction + public void verifyDbState() throws InterruptedException { + + Thread.sleep(100); + + MongoCollection collection = client.getDatabase(DB_NAME) // + .withWriteConcern(WriteConcern.MAJORITY) // + .withReadPreference(ReadPreference.primary()) // + .getCollection(template.getCollectionName(Person.class)); + + try { + assertionList.forEach(it -> { + + boolean isPresent = collection.find(Filters.eq("_id", new ObjectId(it.getId().toString()))).iterator() + .hasNext(); + + assertThat(isPresent) // + .withFailMessage(String.format("After transaction entity %s should %s.", it.getPersistable(), + it.shouldBePresent() ? "be present" : "NOT be present")) + .isEqualTo(it.shouldBePresent()); + + }); + } finally { + assertionList.clear(); + } + } + + @Rollback(false) + @Test // DATAMONGO-1920 + public void shouldHonorCommitForDerivedQuery() { + + repository.removePersonByLastnameUsingAnnotatedQuery(durzo.getLastname()); + + assertAfterTransaction(durzo).isNotPresent(); + } + + @Rollback(false) + @Test // DATAMONGO-1920 + public void shouldHonorCommit() { + + Person hu = new Person("Hu", "Gibbet", 43); + + repository.save(hu); + + assertAfterTransaction(hu).isPresent(); + } + + @Test // DATAMONGO-1920 + public void shouldHonorRollback() { + + Person hu = new Person("Hu", "Gibbet", 43); + + repository.save(hu); + + assertAfterTransaction(hu).isNotPresent(); + } + + @Test // DATAMONGO-2490 + public void shouldBeAbleToReadDbRefDuringTransaction() { + + User rat = new User(); + rat.setUsername("rat"); + + template.save(rat); + + Person elene = new Person("Elene", "Cromwyll", 18); + elene.setCoworker(rat); + + repository.save(elene); + + Optional loaded = repository.findById(elene.getId()); + assertThat(loaded).isPresent(); + assertThat(loaded.get().getCoworker()).isNotNull(); + assertThat(loaded.get().getCoworker().getUsername()).isEqualTo(rat.getUsername()); + } + + private AfterTransactionAssertion 
assertAfterTransaction(Person person) { + + AfterTransactionAssertion assertion = new AfterTransactionAssertion<>(new Persistable() { + + @Nullable + @Override + public Object getId() { + return person.id; + } + + @Override + public boolean isNew() { + return person.id != null; + } + + @Override + public String toString() { + return getId() + " - " + person.toString(); + } + }); + + assertionList.add(assertion); + return assertion; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummary.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummary.java index 09bade542d..dd6378fc90 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummary.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummary.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryDto.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryDto.java index 5c6dfa9790..621eb3e647 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryDto.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryDto.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. 
+ * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,8 @@ */ package org.springframework.data.mongodb.repository; +import java.util.Objects; + /** * @author Oliver Gierke */ @@ -22,4 +24,32 @@ public class PersonSummaryDto { String firstname; String lastname; + + public PersonSummaryDto() {} + + public PersonSummaryDto(String firstname, String lastname) { + this.firstname = firstname; + this.lastname = lastname; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + PersonSummaryDto that = (PersonSummaryDto) o; + return Objects.equals(firstname, that.firstname) && Objects.equals(lastname, that.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(firstname, lastname); + } + + public String toString() { + return "PersonSummaryDto(firstname=" + this.firstname + ", lastname=" + this.lastname + ")"; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java new file mode 100644 index 0000000000..317aea81bd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java @@ -0,0 +1,24 @@ +/* + * Copyright 2021-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.util.Optional; + +public interface PersonSummaryWithOptional { + + Optional
          getAddress(); + Optional getFirstname(); +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactiveMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactiveMongoRepositoryTests.java index 78481e682a..e89dec21bd 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactiveMongoRepositoryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactiveMongoRepositoryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,100 +15,147 @@ */ package org.springframework.data.mongodb.repository; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.Assertions.assertThat; import static org.springframework.data.domain.Sort.Direction.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; -import lombok.NoArgsConstructor; import reactor.core.Disposable; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; +import java.util.Map; import java.util.concurrent.BlockingQueue; import 
java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.TimeUnit; +import java.util.function.Function; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.extension.ExtendWith; import org.reactivestreams.Publisher; -import org.springframework.beans.BeansException; -import org.springframework.beans.factory.BeanClassLoaderAware; import org.springframework.beans.factory.BeanFactory; -import org.springframework.beans.factory.BeanFactoryAware; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.domain.Window; import org.springframework.data.geo.Circle; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.CollectionOptions; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; import org.springframework.data.mongodb.core.ReactiveMongoTemplate; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.repository.Person.Sex; import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory; import 
org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension.DirtiesState; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension.ProvidesState; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.ReactiveMongoClientClosingTestConfiguration; +import org.springframework.data.querydsl.ReactiveQuerydslPredicateExecutor; import org.springframework.data.repository.Repository; -import org.springframework.data.repository.query.DefaultEvaluationContextProvider; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.util.ClassUtils; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.test.context.junit.jupiter.SpringExtension; /** * Test for {@link ReactiveMongoRepository} query methods. 
* * @author Mark Paluch * @author Christoph Strobl + * @author Jens Schauder */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:reactive-infrastructure.xml") -public class ReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanFactoryAware { +@ExtendWith({ SpringExtension.class, DirtiesStateExtension.class }) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class ReactiveMongoRepositoryTests implements DirtiesStateExtension.StateFunctions { + private static final int PERSON_COUNT = 7; @Autowired ReactiveMongoTemplate template; - ReactiveMongoRepositoryFactory factory; - ClassLoader classLoader; - BeanFactory beanFactory; - ReactivePersonRepository repository; - ReactiveCappedCollectionRepository cappedRepository; + @Autowired ReactivePersonRepository repository; + @Autowired ReactiveContactRepository contactRepository; + @Autowired ReactiveCappedCollectionRepository cappedRepository; - Person dave, oliver, carter, boyd, stefan, leroi, alicia; + private Person dave, oliver, carter, boyd, stefan, leroi, alicia; + private QPerson person = QPerson.person; - @Override - public void setBeanClassLoader(ClassLoader classLoader) { - this.classLoader = classLoader == null ? 
ClassUtils.getDefaultClassLoader() : classLoader; + @Configuration + static class Config extends ReactiveMongoClientClosingTestConfiguration { + + @Override + protected String getDatabaseName() { + return "reactive"; + } + + @Bean + ReactiveMongoRepositoryFactory factory(ReactiveMongoOperations template, BeanFactory beanFactory) { + + ReactiveMongoRepositoryFactory factory = new ReactiveMongoRepositoryFactory(template); + factory.setRepositoryBaseClass(SimpleReactiveMongoRepository.class); + factory.setBeanClassLoader(beanFactory.getClass().getClassLoader()); + factory.setBeanFactory(beanFactory); + factory.setEvaluationContextProvider(ReactiveQueryMethodEvaluationContextProvider.DEFAULT); + + return factory; + } + + @Bean + ReactivePersonRepository reactivePersonRepository(ReactiveMongoRepositoryFactory factory) { + return factory.getRepository(ReactivePersonRepository.class); + } + + @Bean + ReactiveContactRepository reactiveContactRepository(ReactiveMongoRepositoryFactory factory) { + return factory.getRepository(ReactiveContactRepository.class); + } + + @Bean + ReactiveCappedCollectionRepository reactiveCappedCollectionRepository(ReactiveMongoRepositoryFactory factory) { + return factory.getRepository(ReactiveCappedCollectionRepository.class); + } + + @Override + protected boolean autoIndexCreation() { + return true; + } } @Override - public void setBeanFactory(BeanFactory beanFactory) throws BeansException { - this.beanFactory = beanFactory; + public void clear() { + repository.deleteAll().as(StepVerifier::create).verifyComplete(); } - @Before - public void setUp() throws Exception { - - factory = new ReactiveMongoRepositoryFactory(template); - factory.setRepositoryBaseClass(SimpleReactiveMongoRepository.class); - factory.setBeanClassLoader(classLoader); - factory.setBeanFactory(beanFactory); - factory.setEvaluationContextProvider(DefaultEvaluationContextProvider.INSTANCE); - - repository = factory.getRepository(ReactivePersonRepository.class); - 
cappedRepository = factory.getRepository(ReactiveCappedCollectionRepository.class); + @Override + public void setupState() { - StepVerifier.create(repository.deleteAll()).verifyComplete(); + repository.deleteAll().as(StepVerifier::create).verifyComplete(); dave = new Person("Dave", "Matthews", 42); oliver = new Person("Oliver August", "Matthews", 4); carter = new Person("Carter", "Beauford", 49); carter.setSkills(Arrays.asList("Drums", "percussion", "vocals")); - Thread.sleep(10); + try { + Thread.sleep(10); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } boyd = new Person("Boyd", "Tinsley", 45); boyd.setSkills(Arrays.asList("Violin", "Electric Violin", "Viola", "Mandolin", "Vocals", "Guitar")); stefan = new Person("Stefan", "Lessard", 34); @@ -116,208 +163,610 @@ public void setUp() throws Exception { alicia = new Person("Alicia", "Keys", 30, Sex.FEMALE); - StepVerifier.create(repository.saveAll(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia))) // - .expectNextCount(7) // + repository.saveAll(Arrays.asList(oliver, carter, boyd, stefan, leroi, alicia, dave)).as(StepVerifier::create) // + .expectNextCount(PERSON_COUNT) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void shouldFindByLastName() { - StepVerifier.create(repository.findByLastname(dave.getLastname())).expectNextCount(2).verifyComplete(); + void shouldFindByLastName() { + repository.findByLastname(dave.getLastname()).as(StepVerifier::create).expectNextCount(2).verifyComplete(); } @Test // DATAMONGO-1444 - public void shouldFindOneByLastName() { - StepVerifier.create(repository.findOneByLastname(carter.getLastname())).expectNext(carter); + void shouldFindOneByLastName() { + repository.findOneByLastname(carter.getLastname()).as(StepVerifier::create).expectNext(carter).verifyComplete(); } @Test // DATAMONGO-1444 - public void shouldFindOneByPublisherOfLastName() { - 
StepVerifier.create(repository.findByLastname(Mono.just(carter.getLastname()))).expectNext(carter); + void shouldFindOneByPublisherOfLastName() { + repository.findByLastname(Mono.just(carter.getLastname())).as(StepVerifier::create).expectNext(carter) + .verifyComplete(); } @Test // DATAMONGO-1444 - public void shouldFindByPublisherOfLastNameIn() { - StepVerifier.create(repository.findByLastnameIn(Flux.just(carter.getLastname(), dave.getLastname()))) // + void shouldFindByPublisherOfLastNameIn() { + repository.findByLastnameIn(Flux.just(carter.getLastname(), dave.getLastname())).as(StepVerifier::create) // .expectNextCount(3) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void shouldFindByPublisherOfLastNameInAndAgeGreater() { + void shouldFindByPublisherOfLastNameInAndAgeGreater() { - StepVerifier - .create(repository.findByLastnameInAndAgeGreaterThan(Flux.just(carter.getLastname(), dave.getLastname()), 41)) // + repository.findByLastnameInAndAgeGreaterThan(Flux.just(carter.getLastname(), dave.getLastname()), 41) + .as(StepVerifier::create) // .expectNextCount(2) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void shouldFindUsingPublishersInStringQuery() { + void shouldFindUsingPublishersInStringQuery() { - StepVerifier.create(repository.findStringQuery(Flux.just("Beauford", "Matthews"), Mono.just(41))) // + repository.findStringQuery(Flux.just("Beauford", "Matthews"), Mono.just(41)).as(StepVerifier::create) // .expectNextCount(2) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void shouldFindByLastNameAndSort() { - StepVerifier.create(repository.findByLastname("Matthews", Sort.by(ASC, "age"))) // + void shouldFindByLastNameAndSort() { + + repository.findByLastname("Matthews", Sort.by(ASC, "age")).as(StepVerifier::create) // .expectNext(oliver, dave) // .verifyComplete(); - StepVerifier.create(repository.findByLastname("Matthews", Sort.by(DESC, "age"))) // + repository.findByLastname("Matthews", Sort.by(DESC, 
"age")).as(StepVerifier::create) // .expectNext(dave, oliver) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void shouldUseTailableCursor() throws Exception { + void shouldUseTailableCursor() throws Exception { - StepVerifier - .create(template.dropCollection(Capped.class) // - .then(template.createCollection(Capped.class, // - CollectionOptions.empty().size(1000).maxDocuments(100).capped()))) // + template.dropCollection(Capped.class) // + .then(template.createCollection(Capped.class, // + CollectionOptions.empty().size(1000).maxDocuments(100).capped())) + .as(StepVerifier::create) // .expectNextCount(1) // .verifyComplete(); - StepVerifier.create(template.insert(new Capped("value", Math.random()))).expectNextCount(1).verifyComplete(); + template.insert(new Capped("value", Math.random())).as(StepVerifier::create).expectNextCount(1).verifyComplete(); BlockingQueue documents = new LinkedBlockingDeque<>(100); Disposable disposable = cappedRepository.findByKey("value").doOnNext(documents::add).subscribe(); - assertThat(documents.poll(5, TimeUnit.SECONDS), is(notNullValue())); + assertThat(documents.poll(5, TimeUnit.SECONDS)).isNotNull(); - StepVerifier.create(template.insert(new Capped("value", Math.random()))).expectNextCount(1).verifyComplete(); - assertThat(documents.poll(5, TimeUnit.SECONDS), is(notNullValue())); - assertThat(documents.isEmpty(), is(true)); + template.insert(new Capped("value", Math.random())).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + assertThat(documents.poll(5, TimeUnit.SECONDS)).isNotNull(); + assertThat(documents).isEmpty(); disposable.dispose(); } @Test // DATAMONGO-1444 - public void shouldUseTailableCursorWithProjection() throws Exception { + void shouldUseTailableCursorWithProjection() throws Exception { - StepVerifier - .create(template.dropCollection(Capped.class) // - .then(template.createCollection(Capped.class, // - CollectionOptions.empty().size(1000).maxDocuments(100).capped()))) // + 
template.dropCollection(Capped.class) // + .then(template.createCollection(Capped.class, // + CollectionOptions.empty().size(1000).maxDocuments(100).capped())) + .as(StepVerifier::create) // .expectNextCount(1) // .verifyComplete(); - StepVerifier.create(template.insert(new Capped("value", Math.random()))).expectNextCount(1).verifyComplete(); + template.insert(new Capped("value", Math.random())).as(StepVerifier::create).expectNextCount(1).verifyComplete(); BlockingQueue documents = new LinkedBlockingDeque<>(100); Disposable disposable = cappedRepository.findProjectionByKey("value").doOnNext(documents::add).subscribe(); CappedProjection projection1 = documents.poll(5, TimeUnit.SECONDS); - assertThat(projection1, is(notNullValue())); - assertThat(projection1.getRandom(), is(not(0))); + assertThat(projection1).isNotNull(); + assertThat(projection1.getRandom()).isNotEqualTo(0); - StepVerifier.create(template.insert(new Capped("value", Math.random()))).expectNextCount(1).verifyComplete(); + template.insert(new Capped("value", Math.random())).as(StepVerifier::create).expectNextCount(1).verifyComplete(); CappedProjection projection2 = documents.poll(5, TimeUnit.SECONDS); - assertThat(projection2, is(notNullValue())); - assertThat(projection2.getRandom(), is(not(0))); + assertThat(projection2).isNotNull(); + assertThat(projection2.getRandom()).isNotEqualTo(0); - assertThat(documents.isEmpty(), is(true)); + assertThat(documents).isEmpty(); disposable.dispose(); } + @Test // DATAMONGO-2080 + void shouldUseTailableCursorWithDtoProjection() { + + template.dropCollection(Capped.class) // + .then(template.createCollection(Capped.class, // + CollectionOptions.empty().size(1000).maxDocuments(100).capped())) // + .as(StepVerifier::create).expectNextCount(1) // + .verifyComplete(); + + template.insert(new Capped("value", Math.random())).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + 
cappedRepository.findDtoProjectionByKey("value").as(StepVerifier::create).expectNextCount(1).thenCancel().verify(); + } + + @Test // GH-4308 + void appliesScrollingCorrectly() { + + Window scroll = repository + .findTop2ByLastnameLikeOrderByFirstnameAscLastnameAsc("*", ScrollPosition.keyset()).block(); + + assertThat(scroll).hasSize(2); + assertThat(scroll).containsSequence(alicia, boyd); + assertThat(scroll.isLast()).isFalse(); + + Window nextScroll = repository + .findTop2ByLastnameLikeOrderByFirstnameAscLastnameAsc("*", scroll.positionAt(scroll.size() - 1)).block(); + + assertThat(nextScroll).hasSize(2); + assertThat(nextScroll).containsSequence(carter, dave); + assertThat(nextScroll.isLast()).isFalse(); + } + + @Test // GH-4308 + void appliesScrollingWithProjectionCorrectly() { + + repository + .findCursorProjectionByLastnameLike("*", PageRequest.of(0, 2, Sort.by(Direction.ASC, "firstname", "lastname"))) // + .flatMapIterable(Function.identity()) // + .as(StepVerifier::create) // + .expectNext(new PersonSummaryDto(alicia.getFirstname(), alicia.getLastname())) // + .expectNextCount(1) // + .verifyComplete(); + } + @Test // DATAMONGO-1444 - public void findsPeopleByLocationWithinCircle() { + @DirtiesState + void findsPeopleByLocationWithinCircle() { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); - StepVerifier.create(repository.save(dave)).expectNextCount(1).verifyComplete(); + repository.save(dave).as(StepVerifier::create).expectNextCount(1).verifyComplete(); - StepVerifier.create(repository.findByLocationWithin(new Circle(-78.99171, 45.738868, 170))) // + repository.findByLocationWithin(new Circle(-78.99171, 45.738868, 170)).as(StepVerifier::create) // .expectNext(dave) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void findsPeopleByPageableLocationWithinCircle() { + @DirtiesState + void findsPeopleByPageableLocationWithinCircle() { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); - 
StepVerifier.create(repository.save(dave)).expectNextCount(1).verifyComplete(); + repository.save(dave).as(StepVerifier::create).expectNextCount(1).verifyComplete(); - StepVerifier - .create(repository.findByLocationWithin(new Circle(-78.99171, 45.738868, 170), // - PageRequest.of(0, 10))) // + repository.findByLocationWithin(new Circle(-78.99171, 45.738868, 170), // + PageRequest.of(0, 10)).as(StepVerifier::create) // .expectNext(dave) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void findsPeopleGeoresultByLocationWithinBox() { + @DirtiesState + void findsPeopleGeoresultByLocationWithinBox() { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); - StepVerifier.create(repository.save(dave)).expectNextCount(1).verifyComplete(); + repository.save(dave).as(StepVerifier::create).expectNextCount(1).verifyComplete(); - StepVerifier.create(repository.findByLocationNear(new Point(-73.99, 40.73), // - new Distance(2000, Metrics.KILOMETERS)) // - ).consumeNextWith(actual -> { + repository.findByLocationNear(new Point(-73.99, 40.73), // + new Distance(2000, Metrics.KILOMETERS)).as(StepVerifier::create).consumeNextWith(actual -> { - assertThat(actual.getDistance().getValue(), is(closeTo(1, 1))); - assertThat(actual.getContent(), is(equalTo(dave))); - }).verifyComplete(); + assertThat(actual.getDistance().getValue()).isCloseTo(1, offset(1d)); + assertThat(actual.getContent()).isEqualTo(dave); + }).verifyComplete(); } @Test // DATAMONGO-1444 - public void findsPeoplePageableGeoresultByLocationWithinBox() { + @DirtiesState + void findsPeoplePageableGeoresultByLocationWithinBox() throws InterruptedException { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); - StepVerifier.create(repository.save(dave)).expectNextCount(1).verifyComplete(); + repository.save(dave).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + // Allow for index creation + Thread.sleep(500); - StepVerifier - 
.create(repository.findByLocationNear(new Point(-73.99, 40.73), // - new Distance(2000, Metrics.KILOMETERS), // - PageRequest.of(0, 10))) // + repository.findByLocationNear(new Point(-73.99, 40.73), // + new Distance(2000, Metrics.KILOMETERS), // + PageRequest.of(0, 10)).as(StepVerifier::create) // .consumeNextWith(actual -> { - assertThat(actual.getDistance().getValue(), is(closeTo(1, 1))); - assertThat(actual.getContent(), is(equalTo(dave))); + assertThat(actual.getDistance().getValue()).isCloseTo(1, offset(1d)); + assertThat(actual.getContent()).isEqualTo(dave); }).verifyComplete(); } @Test // DATAMONGO-1444 - public void findsPeopleByLocationWithinBox() { + @DirtiesState + void findsPeopleByLocationWithinBox() throws InterruptedException { Point point = new Point(-73.99171, 40.738868); dave.setLocation(point); - StepVerifier.create(repository.save(dave)).expectNextCount(1).verifyComplete(); + repository.save(dave).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + // Allow for index creation + Thread.sleep(500); - StepVerifier - .create(repository.findPersonByLocationNear(new Point(-73.99, 40.73), // - new Distance(2000, Metrics.KILOMETERS))) // + repository.findPersonByLocationNear(new Point(-73.99, 40.73), // + new Distance(2000, Metrics.KILOMETERS)).as(StepVerifier::create) // .expectNext(dave) // .verifyComplete(); } @Test // DATAMONGO-1865 - public void shouldErrorOnFindOneWithNonUniqueResult() { - StepVerifier.create(repository.findOneByLastname(dave.getLastname())) + void shouldErrorOnFindOneWithNonUniqueResult() { + repository.findOneByLastname(dave.getLastname()).as(StepVerifier::create) .expectError(IncorrectResultSizeDataAccessException.class).verify(); } @Test // DATAMONGO-1865 - public void shouldReturnFirstFindFirstWithMoreResults() { - StepVerifier.create(repository.findFirstByLastname(dave.getLastname())).expectNextCount(1).verifyComplete(); + void shouldReturnFirstFindFirstWithMoreResults() { + 
repository.findFirstByLastname(dave.getLastname()).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + } + + @Test // DATAMONGO-2030 + void shouldReturnExistsBy() { + repository.existsByLastname(dave.getLastname()).as(StepVerifier::create).expectNext(true).verifyComplete(); + } + + @Test // DATAMONGO-1979 + void findAppliesAnnotatedSort() { + + repository.findByAgeGreaterThan(40).collectList().as(StepVerifier::create).consumeNextWith(result -> { + assertThat(result).containsSequence(carter, boyd, dave, leroi); + }).verifyComplete(); + } + + @Test // DATAMONGO-1979 + void findWithSortOverwritesAnnotatedSort() { + + repository.findByAgeGreaterThan(40, Sort.by(Direction.ASC, "age")).collectList().as(StepVerifier::create) + .consumeNextWith(result -> { + assertThat(result).containsSequence(leroi, dave, boyd, carter); + }).verifyComplete(); + } + + @Test // DATAMONGO-2181 + @ProvidesState + void considersRepositoryCollectionName() { + + contactRepository.deleteAll() // + .as(StepVerifier::create) // + .verifyComplete(); + + leroi.id = null; + boyd.id = null; + contactRepository.saveAll(Arrays.asList(leroi, boyd)) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + + repository.count() // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + + contactRepository.count() // + .as(StepVerifier::create) // + .expectNext(2L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2182 + void shouldFindPersonsWhenUsingQueryDslPerdicatedOnIdProperty() { + + repository.findAll(person.id.in(Arrays.asList(dave.id, carter.id))) // + .collectList() // + .as(StepVerifier::create) // + .assertNext(actual -> { + assertThat(actual).containsExactlyInAnyOrder(dave, carter); + }).verifyComplete(); + } + + @Test // GH-4308 + void shouldScrollWithId() { + + List> capture = new ArrayList<>(); + repository.findBy(person.id.in(Arrays.asList(dave.id, carter.id, boyd.id)), // + q -> 
q.limit(2).sortBy(Sort.by("firstname")).scroll(ScrollPosition.keyset())) // + .as(StepVerifier::create) // + .recordWith(() -> capture).assertNext(actual -> { + assertThat(actual).hasSize(2).containsExactly(boyd, carter); + }).verifyComplete(); + + Window scroll = capture.get(0); + + repository.findBy(person.id.in(Arrays.asList(dave.id, carter.id, boyd.id)), // + q -> q.limit(2).sortBy(Sort.by("firstname")).scroll(scroll.positionAt(scroll.size() - 1))) // + .as(StepVerifier::create) // + .recordWith(() -> capture).assertNext(actual -> { + assertThat(actual).containsOnly(dave); + }).verifyComplete(); + } + + @Test // DATAMONGO-2153 + void findListOfSingleValue() { + + repository.findAllLastnames() // + .collectList() // + .as(StepVerifier::create) // + .assertNext(actual -> { + assertThat(actual).contains("Lessard", "Keys", "Tinsley", "Beauford", "Moore", "Matthews"); + }).verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithPlaceholderValue() { + + repository.groupByLastnameAnd("firstname") // + .collectList() // + .as(StepVerifier::create) // + .assertNext(actual -> { + assertThat(actual) // + .contains(new PersonAggregate("Lessard", "Stefan")) // + .contains(new PersonAggregate("Keys", "Alicia")) // + .contains(new PersonAggregate("Tinsley", "Boyd")) // + .contains(new PersonAggregate("Beauford", "Carter")) // + .contains(new PersonAggregate("Moore", "Leroi")) // + .contains(new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August"))); + }).verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithSort() { + + repository.groupByLastnameAnd("firstname", Sort.by("lastname")) // + .collectList() // + .as(StepVerifier::create) // + .assertNext(actual -> { + assertThat(actual) // + .containsSequence( // + new PersonAggregate("Beauford", "Carter"), // + new PersonAggregate("Keys", "Alicia"), // + new PersonAggregate("Lessard", "Stefan"), // + new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver 
August")), // + new PersonAggregate("Moore", "Leroi"), // + new PersonAggregate("Tinsley", "Boyd")); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithPageable() { + + repository.groupByLastnameAnd("firstname", PageRequest.of(1, 2, Sort.by("lastname"))) // + .collectList() // + .as(StepVerifier::create) // + .assertNext(actual -> { + assertThat(actual) // + .containsExactly( // + new PersonAggregate("Lessard", "Stefan"), // + new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August"))); + }) // + .verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithSingleSimpleResult() { + + repository.sumAge() // + .as(StepVerifier::create) // + .expectNext(245L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithAggregationResultAsReturnType() { + + repository.sumAgeAndReturnRawResult() // + .as(StepVerifier::create) // + .expectNext(new org.bson.Document("_id", null).append("total", 245)) // + .verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithAggregationResultAsReturnTypeAndProjection() { + + repository.sumAgeAndReturnSumWrapper() // + .as(StepVerifier::create) // + .expectNext(new SumAge(245L)) // + .verifyComplete(); + } + + @Test // DATAMONGO-2374 + void findsWithNativeProjection() { + + repository.findDocumentById(dave.getId()) // + .as(StepVerifier::create) // + .consumeNextWith(it -> { + assertThat(it).containsEntry("firstname", dave.getFirstname()).containsEntry("lastname", dave.getLastname()); + }).verifyComplete(); + } + + @Test // DATAMONGO-2153 + void annotatedAggregationWithAggregationResultAsMap() { + + repository.sumAgeAndReturnSumAsMap() // + .as(StepVerifier::create) // + .consumeNextWith(it -> { + assertThat(it).isInstanceOf(Map.class); + }).verifyComplete(); + } + + @Test // GH-4839 + void annotatedAggregationWithAggregationResultAsClosedInterfaceProjection() { + + 
repository.findAggregatedClosedInterfaceProjectionBy() // + .as(StepVerifier::create) // + .consumeNextWith(it -> { + assertThat(it.getFirstname()).isIn(dave.getFirstname(), oliver.getFirstname()); + assertThat(it.getLastname()).isEqualTo(dave.getLastname()); + }).expectNextCount(1).verifyComplete(); + } + + @Test // DATAMONGO-2403 + @DirtiesState + void annotatedAggregationExtractingSimpleValueIsEmptyForEmptyDocument() { + + Person p = new Person("project-on-lastanme", null); + repository.save(p).then().as(StepVerifier::create).verifyComplete(); + + repository.projectToLastnameAndRemoveId(p.getFirstname()) // + .as(StepVerifier::create) // + .verifyComplete(); + } + + @Test // DATAMONGO-2403 + @DirtiesState + void annotatedAggregationSkipsEmptyDocumentsWhenExtractingSimpleValue() { + + String firstname = "project-on-lastanme"; + + Person p1 = new Person(firstname, null); + p1.setEmail("p1@example.com"); + Person p2 = new Person(firstname, "lastname"); + p2.setEmail("p2@example.com"); + Person p3 = new Person(firstname, null); + p3.setEmail("p3@example.com"); + + repository.saveAll(Arrays.asList(p1, p2, p3)).then().as(StepVerifier::create).verifyComplete(); + + repository.projectToLastnameAndRemoveId(firstname) // + .as(StepVerifier::create) // + .expectNext("lastname").verifyComplete(); + } + + @Test // DATAMONGO-2406 + @DirtiesState + void deleteByShouldHandleVoidResultTypeCorrectly() { + + repository.deleteByLastname(dave.getLastname()) // + .as(StepVerifier::create) // + .verifyComplete(); + + template.find(query(where("lastname").is(dave.getLastname())), Person.class) // + .as(StepVerifier::create) // + .verifyComplete(); + } + + @Test // DATAMONGO-1997 + @DirtiesState + void deleteByShouldAllowDeletedCountAsResult() { + + repository.deleteCountByLastname(dave.getLastname()) // + .as(StepVerifier::create) // + .expectNext(2L) // + .verifyComplete(); + } + + @Test // DATAMONGO-1997 + @DirtiesState + void deleteByShouldAllowSingleDocumentRemovalCorrectly() { + + 
repository.deleteSinglePersonByLastname(carter.getLastname()) // + .as(StepVerifier::create) // + .expectNext(carter) // + .verifyComplete(); + + repository.deleteSinglePersonByLastname("dorfuaeB") // + .as(StepVerifier::create) // + .verifyComplete(); + } + + @Test // DATAMONGO-2652 + @DirtiesState + void deleteAllById() { + + repository.deleteAllById(Arrays.asList(carter.id, dave.id)) // + .as(StepVerifier::create) // + .verifyComplete(); + + repository.count().as(StepVerifier::create) // + .expectNext(PERSON_COUNT - 2L) // + .verifyComplete(); + } + + @Test // GH-2107 + @DirtiesState + void shouldAllowToUpdateAllElements() { + repository.findAndUpdateViaMethodArgAllByLastname("Matthews", new Update().inc("visits", 1337)) + .as(StepVerifier::create).expectNext(2L).verifyComplete(); + } + + @Test // GH-2107 + @DirtiesState + void mixAnnotatedUpdateWithAnnotatedQuery() { + + repository.updateAllByLastname("Matthews", 1337).as(StepVerifier::create).expectNext(2L).verifyComplete(); + + repository.findByLastname("Matthews").map(Person::getVisits).as(StepVerifier::create).expectNext(1337, 1337) + .verifyComplete(); + } + + @Test // GH-2107 + @DirtiesState + void annotatedUpdateWithSpELIsAppliedCorrectly() { + + repository.findAndIncrementVisitsUsingSpELByLastname("Matthews", 1337).as(StepVerifier::create).expectNext(2L) + .verifyComplete(); + + repository.findByLastname("Matthews").map(Person::getVisits).as(StepVerifier::create).expectNext(1337, 1337) + .verifyComplete(); + } + + @Test // GH-2107 + @DirtiesState + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.2") + void annotatedAggregationUpdateIsAppliedCorrectly() { + + repository.findAndIncrementVisitsViaPipelineByLastname("Matthews", 1337).as(StepVerifier::create).verifyComplete(); + + repository.findByLastname("Matthews").map(Person::getVisits).as(StepVerifier::create).expectNext(1337, 1337) + .verifyComplete(); + } + + @Test // GH-2107 + @DirtiesState + void shouldAllowToUpdateAllElementsWithVoidReturn() { 
+ + repository.findAndIncrementVisitsByLastname("Matthews", 1337).as(StepVerifier::create).expectNext(2L) + .verifyComplete(); + + repository.findByLastname("Matthews").map(Person::getVisits).as(StepVerifier::create).expectNext(1337, 1337) + .verifyComplete(); + } + + @Test // GH-2107 + @DirtiesState + void allowsToUseComplexTypesInUpdate() { + + Address address = new Address("1007 Mountain Drive", "53540", "Gotham"); + + repository.findAndPushShippingAddressByEmail(dave.getEmail(), address) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + + repository.findById(dave.getId()).map(Person::getShippingAddresses).as(StepVerifier::create) + .consumeNextWith(it -> assertThat(it).containsExactly(address)).verifyComplete(); } - interface ReactivePersonRepository extends ReactiveMongoRepository { + interface ReactivePersonRepository + extends ReactiveMongoRepository, ReactiveQuerydslPredicateExecutor { Flux findByLastname(String lastname); Mono findOneByLastname(String lastname); + Mono findOneProjectedByLastname(String lastname); + Mono findByLastname(Publisher lastname); Flux findByLastnameIn(Publisher lastname); @@ -329,6 +778,11 @@ interface ReactivePersonRepository extends ReactiveMongoRepository findStringQuery(Flux lastname, Mono age); + Mono> findTop2ByLastnameLikeOrderByFirstnameAscLastnameAsc(String lastname, + ScrollPosition scrollPosition); + + Mono> findCursorProjectionByLastnameLike(String lastname, Pageable pageable); + Flux findByLocationWithin(Circle circle); Flux findByLocationWithin(Circle circle, Pageable pageable); @@ -339,9 +793,79 @@ interface ReactivePersonRepository extends ReactiveMongoRepository findPersonByLocationNear(Point point, Distance maxDistance); + Mono existsByLastname(String lastname); + Mono findFirstByLastname(String lastname); + + @Query(sort = "{ age : -1 }") + Flux findByAgeGreaterThan(int age); + + @Query(sort = "{ age : -1 }") + Flux findByAgeGreaterThan(int age, Sort sort); + + @Aggregation("{ 
'$project': { '_id' : '$lastname' } }") + Flux findAllLastnames(); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + Flux groupByLastnameAnd(String property); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + Flux groupByLastnameAnd(String property, Sort sort); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + Flux groupByLastnameAnd(String property, Pageable page); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + Mono sumAge(); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + Mono sumAgeAndReturnRawResult(); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + Mono sumAgeAndReturnSumWrapper(); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + Mono sumAgeAndReturnSumAsMap(); + + @Aggregation({ "{ '$match' : { 'lastname' : 'Matthews'} }", + "{ '$project': { _id : 0, firstname : 1, lastname : 1 } }" }) + Flux findAggregatedClosedInterfaceProjectionBy(); + + @Aggregation( + pipeline = { "{ '$match' : { 'firstname' : '?0' } }", "{ '$project' : { '_id' : 0, 'lastname' : 1 } }" }) + Mono projectToLastnameAndRemoveId(String firstname); + + @Query(value = "{_id:?0}") + Mono findDocumentById(String id); + + Mono deleteByLastname(String lastname); + + Mono deleteCountByLastname(String lastname); + + Mono deleteSinglePersonByLastname(String lastname); + + Mono findAndUpdateViaMethodArgAllByLastname(String lastname, UpdateDefinition update); + + @org.springframework.data.mongodb.repository.Update("{ '$inc' : { 'visits' : ?1 } }") + Mono findAndIncrementVisitsByLastname(String lastname, int increment); + + @Query("{ 'lastname' : ?0 }") + @org.springframework.data.mongodb.repository.Update("{ '$inc' : { 'visits' : ?1 } }") + Mono updateAllByLastname(String lastname, int increment); + + 
@org.springframework.data.mongodb.repository.Update( + pipeline = { "{ '$set' : { 'visits' : { '$add' : [ '$visits', ?1 ] } } }" }) + Mono findAndIncrementVisitsViaPipelineByLastname(String lastname, int increment); + + @org.springframework.data.mongodb.repository.Update("{ '$inc' : { 'visits' : ?#{[1]} } }") + Mono findAndIncrementVisitsUsingSpELByLastname(String lastname, int increment); + + @org.springframework.data.mongodb.repository.Update("{ '$push' : { 'shippingAddresses' : ?1 } }") + Mono findAndPushShippingAddressByEmail(String email, Address address); } + interface ReactiveContactRepository extends ReactiveMongoRepository {} + interface ReactiveCappedCollectionRepository extends Repository { @Tailable @@ -349,17 +873,21 @@ interface ReactiveCappedCollectionRepository extends Repository @Tailable Flux findProjectionByKey(String key); + + @Tailable + Flux findDtoProjectionByKey(String key); } @Document - @NoArgsConstructor static class Capped { String id; String key; double random; - public Capped(String key, double random) { + public Capped() {} + + Capped(String key, double random) { this.key = key; this.random = random; } @@ -368,4 +896,30 @@ public Capped(String key, double random) { interface CappedProjection { double getRandom(); } + + static class DtoProjection { + + String id; + double unknown; + + public String getId() { + return this.id; + } + + public double getUnknown() { + return this.unknown; + } + + public void setId(String id) { + this.id = id; + } + + public void setUnknown(double unknown) { + this.unknown = unknown; + } + + public String toString() { + return "ReactiveMongoRepositoryTests.DtoProjection(id=" + this.getId() + ", unknown=" + this.getUnknown() + ")"; + } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactivePersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactivePersonRepository.java index 82a53249d8..878d3974c0 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactivePersonRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/ReactivePersonRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -32,4 +32,5 @@ public interface ReactivePersonRepository extends ReactiveMongoRepository findByLastname(String lastname); + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsRepository.java index 166f96cbba..47594aa985 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsTests.java index 1c6cac53d7..837b6801ea 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RedeclaringRepositoryMethodsTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,11 @@ */ package org.springframework.data.mongodb.repository; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; @@ -31,24 +30,24 @@ * @author Mark Paluch */ @ContextConfiguration("config/MongoNamespaceIntegrationTests-context.xml") -public class RedeclaringRepositoryMethodsTests extends AbstractPersonRepositoryIntegrationTests { +class RedeclaringRepositoryMethodsTests extends AbstractPersonRepositoryIntegrationTests { @Autowired RedeclaringRepositoryMethodsRepository repository; @Test // DATAMONGO-760 - public void adjustedWellKnownPagedFindAllMethodShouldReturnOnlyTheUserWithFirstnameOliverAugust() { + void adjustedWellKnownPagedFindAllMethodShouldReturnOnlyTheUserWithFirstnameOliverAugust() { Page page = repository.findAll(PageRequest.of(0, 2)); - assertThat(page.getNumberOfElements(), is(1)); - assertThat(page.getContent().get(0).getFirstname(), is(oliver.getFirstname())); + assertThat(page.getNumberOfElements()).isEqualTo(1); + assertThat(page.getContent().get(0).getFirstname()).isEqualTo(oliver.getFirstname()); } @Test // DATAMONGO-760 - public void adjustedWllKnownFindAllMethodShouldReturnAnEmptyList() { + void adjustedWllKnownFindAllMethodShouldReturnAnEmptyList() { List result = repository.findAll(); - assertThat(result.isEmpty(), is(true)); + assertThat(result.isEmpty()).isTrue(); } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests.java index 9adb7039c3..320f2206b9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,24 +15,25 @@ */ package org.springframework.data.mongodb.repository; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import org.bson.Document; import org.junit.After; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataAccessException; import org.springframework.data.mongodb.core.CollectionCallback; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.index.IndexInfo; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import org.bson.Document; import com.mongodb.MongoException; 
import com.mongodb.client.MongoCollection; @@ -41,7 +42,7 @@ * * @author Oliver Gierke */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class RepositoryIndexCreationIntegrationTests { @@ -77,6 +78,7 @@ public void testname() { assertHasIndexForField(indexInfo, "lastname"); assertHasIndexForField(indexInfo, "firstname"); + assertHasIndexForField(indexInfo, "add"); } private static void assertHasIndexForField(List indexInfo, String... fields) { @@ -87,6 +89,6 @@ private static void assertHasIndexForField(List indexInfo, String... } } - fail(String.format("Did not find index for field(s) %s in %s!", fields, indexInfo)); + fail(String.format("Did not find index for field(s) %s in %s", fields, indexInfo)); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SampleEvaluationContextExtension.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SampleEvaluationContextExtension.java index 21fae0369f..4f28d2efb9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SampleEvaluationContextExtension.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SampleEvaluationContextExtension.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,15 +18,14 @@ import java.util.Collections; import java.util.Map; -import org.springframework.data.repository.query.spi.EvaluationContextExtension; -import org.springframework.data.repository.query.spi.EvaluationContextExtensionSupport; +import org.springframework.data.spel.spi.EvaluationContextExtension; /** * A sample implementation of a custom {@link EvaluationContextExtension}. * * @author Thomas Darimont */ -public class SampleEvaluationContextExtension extends EvaluationContextExtensionSupport { +public class SampleEvaluationContextExtension implements EvaluationContextExtension { @Override public String getExtensionId() { @@ -45,6 +44,7 @@ public static class SampleSecurityContextHolder { private static ThreadLocal auth = new ThreadLocal() { + @Override protected SampleAuthentication initialValue() { return new SampleAuthentication(new SampleUser(-1, "anonymous")); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java index 40685fddd1..44235c54ef 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,21 +15,21 @@ */ package org.springframework.data.mongodb.repository; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.springframework.data.domain.ExampleMatcher.*; -import lombok.Data; -import lombok.NoArgsConstructor; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; import java.util.Arrays; +import java.util.Objects; +import java.util.stream.Stream; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.RepeatedTest; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.BeansException; import org.springframework.beans.factory.BeanClassLoaderAware; import org.springframework.beans.factory.BeanFactory; @@ -38,15 +38,22 @@ import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.data.annotation.Id; import org.springframework.data.domain.Example; +import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.domain.Sort.Order; +import org.springframework.data.mongodb.ReactiveMongoTransactionManager; import org.springframework.data.mongodb.core.ReactiveMongoTemplate; import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory; import org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository; -import 
org.springframework.data.repository.query.DefaultEvaluationContextProvider; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.lang.Nullable; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.reactive.TransactionalOperator; import org.springframework.util.ClassUtils; /** @@ -54,19 +61,23 @@ * * @author Mark Paluch * @author Christoph Strobl + * @author Ruben J Garcia + * @author Clément Petit */ -@RunWith(SpringJUnit4ClassRunner.class) +@ExtendWith(SpringExtension.class) @ContextConfiguration("classpath:reactive-infrastructure.xml") public class SimpleReactiveMongoRepositoryTests implements BeanClassLoaderAware, BeanFactoryAware { @Autowired private ReactiveMongoTemplate template; - ReactiveMongoRepositoryFactory factory; - ClassLoader classLoader; - BeanFactory beanFactory; - ReactivePersonRepostitory repository; + private ReactiveMongoRepositoryFactory factory; + private ClassLoader classLoader; + private BeanFactory beanFactory; + private ReactivePersonRepository repository; + private ReactiveImmutablePersonRepository immutableRepository; private ReactivePerson dave, oliver, carter, boyd, stefan, leroi, alicia; + private ImmutableReactivePerson keith, james, mariah; @Override public void setBeanClassLoader(ClassLoader classLoader) { @@ -78,18 +89,20 @@ public void setBeanFactory(BeanFactory beanFactory) throws BeansException { this.beanFactory = beanFactory; } - @Before - public void setUp() { + @BeforeEach + void setUp() { factory = new ReactiveMongoRepositoryFactory(template); 
factory.setRepositoryBaseClass(SimpleReactiveMongoRepository.class); factory.setBeanClassLoader(classLoader); factory.setBeanFactory(beanFactory); - factory.setEvaluationContextProvider(DefaultEvaluationContextProvider.INSTANCE); + factory.setEvaluationContextProvider(ReactiveQueryMethodEvaluationContextProvider.DEFAULT); - repository = factory.getRepository(ReactivePersonRepostitory.class); + repository = factory.getRepository(ReactivePersonRepository.class); + immutableRepository = factory.getRepository(ReactiveImmutablePersonRepository.class); - StepVerifier.create(repository.deleteAll()).verifyComplete(); + repository.deleteAll().as(StepVerifier::create).verifyComplete(); + immutableRepository.deleteAll().as(StepVerifier::create).verifyComplete(); dave = new ReactivePerson("Dave", "Matthews", 42); oliver = new ReactivePerson("Oliver August", "Matthews", 4); @@ -98,357 +111,579 @@ public void setUp() { stefan = new ReactivePerson("Stefan", "Lessard", 34); leroi = new ReactivePerson("Leroi", "Moore", 41); alicia = new ReactivePerson("Alicia", "Keys", 30); + keith = new ImmutableReactivePerson(null, "Keith", "Urban", 53); + james = new ImmutableReactivePerson(null, "James", "Arthur", 33); + mariah = new ImmutableReactivePerson(null, "Mariah", "Carey", 51); - StepVerifier.create(repository.saveAll(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia))) // + repository.saveAll(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)).as(StepVerifier::create) // .expectNextCount(7) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void existsByIdShouldReturnTrueForExistingObject() { - StepVerifier.create(repository.existsById(dave.id)).expectNext(true).verifyComplete(); + void existsByIdShouldReturnTrueForExistingObject() { + repository.existsById(dave.id).as(StepVerifier::create).expectNext(true).verifyComplete(); } @Test // DATAMONGO-1444 - public void existsByIdShouldReturnFalseForAbsentObject() { - 
StepVerifier.create(repository.existsById("unknown")).expectNext(false).verifyComplete(); + void existsByIdShouldReturnFalseForAbsentObject() { + repository.existsById("unknown").as(StepVerifier::create).expectNext(false).verifyComplete(); } @Test // DATAMONGO-1444 - public void existsByMonoOfIdShouldReturnTrueForExistingObject() { - StepVerifier.create(repository.existsById(Mono.just(dave.id))).expectNext(true).verifyComplete(); + void existsByMonoOfIdShouldReturnTrueForExistingObject() { + repository.existsById(Mono.just(dave.id)).as(StepVerifier::create).expectNext(true).verifyComplete(); } @Test // DATAMONGO-1712 - public void existsByFluxOfIdShouldReturnTrueForExistingObject() { - StepVerifier.create(repository.existsById(Flux.just(dave.id, oliver.id))).expectNext(true).verifyComplete(); + void existsByFluxOfIdShouldReturnTrueForExistingObject() { + repository.existsById(Flux.just(dave.id, oliver.id)).as(StepVerifier::create).expectNext(true).verifyComplete(); } @Test // DATAMONGO-1444 - public void existsByEmptyMonoOfIdShouldReturnEmptyMono() { - StepVerifier.create(repository.existsById(Mono.empty())).verifyComplete(); + void existsByEmptyMonoOfIdShouldReturnEmptyMono() { + repository.existsById(Mono.empty()).as(StepVerifier::create).verifyComplete(); } @Test // DATAMONGO-1444 - public void findByIdShouldReturnObject() { - StepVerifier.create(repository.findById(dave.id)).expectNext(dave).verifyComplete(); + void findByIdShouldReturnObject() { + repository.findById(dave.id).as(StepVerifier::create).expectNext(dave).verifyComplete(); } @Test // DATAMONGO-1444 - public void findByIdShouldCompleteWithoutValueForAbsentObject() { - StepVerifier.create(repository.findById("unknown")).verifyComplete(); + void findByIdShouldCompleteWithoutValueForAbsentObject() { + repository.findById("unknown").as(StepVerifier::create).verifyComplete(); } @Test // DATAMONGO-1444 - public void findByIdByMonoOfIdShouldReturnTrueForExistingObject() { - 
StepVerifier.create(repository.findById(Mono.just(dave.id))).expectNext(dave).verifyComplete(); + void findByIdByMonoOfIdShouldReturnTrueForExistingObject() { + repository.findById(Mono.just(dave.id)).as(StepVerifier::create).expectNext(dave).verifyComplete(); } @Test // DATAMONGO-1712 - public void findByIdByFluxOfIdShouldReturnTrueForExistingObject() { - StepVerifier.create(repository.findById(Flux.just(dave.id, oliver.id))).expectNext(dave).verifyComplete(); + void findByIdByFluxOfIdShouldReturnTrueForExistingObject() { + repository.findById(Flux.just(dave.id, oliver.id)).as(StepVerifier::create).expectNext(dave).verifyComplete(); } @Test // DATAMONGO-1444 - public void findByIdByEmptyMonoOfIdShouldReturnEmptyMono() { - StepVerifier.create(repository.findById(Mono.empty())).verifyComplete(); + void findByIdByEmptyMonoOfIdShouldReturnEmptyMono() { + repository.findById(Mono.empty()).as(StepVerifier::create).verifyComplete(); } @Test // DATAMONGO-1444 - public void findAllShouldReturnAllResults() { - StepVerifier.create(repository.findAll()).expectNextCount(7).verifyComplete(); + void findAllShouldReturnAllResults() { + repository.findAll().as(StepVerifier::create).expectNextCount(7).verifyComplete(); } @Test // DATAMONGO-1444 - public void findAllByIterableOfIdShouldReturnResults() { - StepVerifier.create(repository.findAllById(Arrays.asList(dave.id, boyd.id))).expectNextCount(2).verifyComplete(); + void findAllByIterableOfIdShouldReturnResults() { + repository.findAllById(Arrays.asList(dave.id, boyd.id)).as(StepVerifier::create).expectNextCount(2) + .verifyComplete(); } @Test // DATAMONGO-1444 - public void findAllByPublisherOfIdShouldReturnResults() { - StepVerifier.create(repository.findAllById(Flux.just(dave.id, boyd.id))).expectNextCount(2).verifyComplete(); + void findAllByPublisherOfIdShouldReturnResults() { + repository.findAllById(Flux.just(dave.id, boyd.id)).as(StepVerifier::create).expectNextCount(2).verifyComplete(); } @Test // DATAMONGO-1444 - public 
void findAllByEmptyPublisherOfIdShouldReturnResults() { - StepVerifier.create(repository.findAllById(Flux.empty())).verifyComplete(); + void findAllByEmptyPublisherOfIdShouldReturnResults() { + repository.findAllById(Flux.empty()).as(StepVerifier::create).verifyComplete(); } @Test // DATAMONGO-1444 - public void findAllWithSortShouldReturnResults() { + void findAllWithSortShouldReturnResults() { - StepVerifier.create(repository.findAll(Sort.by(new Order(Direction.ASC, "age")))) // + repository.findAll(Sort.by(new Order(Direction.ASC, "age"))).as(StepVerifier::create) // .expectNextCount(7) // .verifyComplete(); } @Test // DATAMONGO-1444 - public void countShouldReturnNumberOfRecords() { - StepVerifier.create(repository.count()).expectNext(7L).verifyComplete(); + void countShouldReturnNumberOfRecords() { + repository.count().as(StepVerifier::create).expectNext(7L).verifyComplete(); } @Test // DATAMONGO-1444 - public void insertEntityShouldInsertEntity() { + void insertEntityShouldInsertEntity() { - StepVerifier.create(repository.deleteAll()).verifyComplete(); + repository.deleteAll().as(StepVerifier::create).verifyComplete(); ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36); - StepVerifier.create(repository.insert(person)).expectNext(person).verifyComplete(); + repository.insert(person).as(StepVerifier::create).expectNext(person).verifyComplete(); - assertThat(person.getId(), is(notNullValue())); + assertThat(person.getId()).isNotNull(); } @Test // DATAMONGO-1444 - public void insertShouldDeferredWrite() { + void insertShouldDeferredWrite() { ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36); repository.insert(person); - assertThat(person.getId(), is(nullValue())); + assertThat(person.getId()).isNull(); } @Test // DATAMONGO-1444 - public void insertIterableOfEntitiesShouldInsertEntity() { + void insertIterableOfEntitiesShouldInsertEntity() { - StepVerifier.create(repository.deleteAll()).verifyComplete(); + 
repository.deleteAll().as(StepVerifier::create).verifyComplete(); dave.setId(null); oliver.setId(null); boyd.setId(null); - StepVerifier.create(repository.insert(Arrays.asList(dave, oliver, boyd))) // + repository.insert(Arrays.asList(dave, oliver, boyd)).as(StepVerifier::create) // .expectNext(dave, oliver, boyd) // .verifyComplete(); - assertThat(dave.getId(), is(notNullValue())); - assertThat(oliver.getId(), is(notNullValue())); - assertThat(boyd.getId(), is(notNullValue())); + assertThat(dave.getId()).isNotNull(); + assertThat(oliver.getId()).isNotNull(); + assertThat(boyd.getId()).isNotNull(); } @Test // DATAMONGO-1444 - public void insertPublisherOfEntitiesShouldInsertEntity() { + void insertPublisherOfEntitiesShouldInsertEntity() { - StepVerifier.create(repository.deleteAll()).verifyComplete(); + repository.deleteAll().as(StepVerifier::create).verifyComplete(); dave.setId(null); oliver.setId(null); boyd.setId(null); - StepVerifier.create(repository.insert(Flux.just(dave, oliver, boyd))).expectNextCount(3).verifyComplete(); + repository.insert(Flux.just(dave, oliver, boyd)).as(StepVerifier::create).expectNextCount(3).verifyComplete(); - assertThat(dave.getId(), is(notNullValue())); - assertThat(oliver.getId(), is(notNullValue())); - assertThat(boyd.getId(), is(notNullValue())); + assertThat(dave.getId()).isNotNull(); + assertThat(oliver.getId()).isNotNull(); + assertThat(boyd.getId()).isNotNull(); } @Test // DATAMONGO-1444 - public void saveEntityShouldUpdateExistingEntity() { + void saveEntityShouldUpdateExistingEntity() { dave.setFirstname("Hello, Dave"); dave.setLastname("Bowman"); - StepVerifier.create(repository.save(dave)).expectNext(dave).verifyComplete(); + repository.save(dave).as(StepVerifier::create).expectNext(dave).verifyComplete(); - StepVerifier.create(repository.findByLastname("Matthews")).expectNext(oliver).verifyComplete(); + repository.findByLastname("Matthews").as(StepVerifier::create).expectNext(oliver).verifyComplete(); - 
StepVerifier.create(repository.findById(dave.id)).consumeNextWith(actual -> { + repository.findById(dave.id).as(StepVerifier::create).consumeNextWith(actual -> { - assertThat(actual.getFirstname(), is(equalTo(dave.getFirstname()))); - assertThat(actual.getLastname(), is(equalTo(dave.getLastname()))); + assertThat(actual.getFirstname()).isEqualTo(dave.getFirstname()); + assertThat(actual.getLastname()).isEqualTo(dave.getLastname()); }).verifyComplete(); } @Test // DATAMONGO-1444 - public void saveEntityShouldInsertNewEntity() { + void saveEntityShouldInsertNewEntity() { ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36); - StepVerifier.create(repository.save(person)).expectNext(person).verifyComplete(); + repository.save(person).as(StepVerifier::create).expectNext(person).verifyComplete(); - StepVerifier.create(repository.findById(person.id)).consumeNextWith(actual -> { + repository.findById(person.id).as(StepVerifier::create).consumeNextWith(actual -> { - assertThat(actual.getFirstname(), is(equalTo(person.getFirstname()))); - assertThat(actual.getLastname(), is(equalTo(person.getLastname()))); + assertThat(actual.getFirstname()).isEqualTo(person.getFirstname()); + assertThat(actual.getLastname()).isEqualTo(person.getLastname()); }).verifyComplete(); } @Test // DATAMONGO-1444 - public void saveIterableOfNewEntitiesShouldInsertEntity() { + void saveIterableOfNewEntitiesShouldInsertEntity() { - StepVerifier.create(repository.deleteAll()).verifyComplete(); + repository.deleteAll().as(StepVerifier::create).verifyComplete(); dave.setId(null); oliver.setId(null); boyd.setId(null); - StepVerifier.create(repository.saveAll(Arrays.asList(dave, oliver, boyd))).expectNextCount(3).verifyComplete(); + repository.saveAll(Arrays.asList(dave, oliver, boyd)).as(StepVerifier::create).expectNextCount(3).verifyComplete(); - assertThat(dave.getId(), is(notNullValue())); - assertThat(oliver.getId(), is(notNullValue())); - assertThat(boyd.getId(), is(notNullValue())); + 
assertThat(dave.getId()).isNotNull(); + assertThat(oliver.getId()).isNotNull(); + assertThat(boyd.getId()).isNotNull(); } @Test // DATAMONGO-1444 - public void saveIterableOfMixedEntitiesShouldInsertEntity() { + void saveIterableOfMixedEntitiesShouldInsertEntity() { ReactivePerson person = new ReactivePerson("Homer", "Simpson", 36); dave.setFirstname("Hello, Dave"); dave.setLastname("Bowman"); - StepVerifier.create(repository.saveAll(Arrays.asList(person, dave))).expectNextCount(2).verifyComplete(); + repository.saveAll(Arrays.asList(person, dave)).as(StepVerifier::create).expectNextCount(2).verifyComplete(); - StepVerifier.create(repository.findById(dave.id)).expectNext(dave).verifyComplete(); + repository.findById(dave.id).as(StepVerifier::create).expectNext(dave).verifyComplete(); - assertThat(person.id, is(notNullValue())); - StepVerifier.create(repository.findById(person.id)).expectNext(person).verifyComplete(); + assertThat(person.id).isNotNull(); + repository.findById(person.id).as(StepVerifier::create).expectNext(person).verifyComplete(); } @Test // DATAMONGO-1444 - public void savePublisherOfEntitiesShouldInsertEntity() { + void savePublisherOfEntitiesShouldInsertEntity() { - StepVerifier.create(repository.deleteAll()).verifyComplete(); + repository.deleteAll().as(StepVerifier::create).verifyComplete(); dave.setId(null); oliver.setId(null); boyd.setId(null); - StepVerifier.create(repository.saveAll(Flux.just(dave, oliver, boyd))).expectNextCount(3).verifyComplete(); + repository.saveAll(Flux.just(dave, oliver, boyd)).as(StepVerifier::create).expectNextCount(3).verifyComplete(); + + assertThat(dave.getId()).isNotNull(); + assertThat(oliver.getId()).isNotNull(); + assertThat(boyd.getId()).isNotNull(); + } + + @RepeatedTest(10) // GH-4838 + @EnableIfReplicaSetAvailable + void transactionalSaveAllForStuffThatIsConsideredAnUpdateOfExistingData() { + + ReactiveMongoTransactionManager txmgr = new 
ReactiveMongoTransactionManager(template.getMongoDatabaseFactory()); + TransactionalOperator.create(txmgr, TransactionDefinition.withDefaults()).execute(callback -> { + return repository.saveAll(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)); + }).as(StepVerifier::create) // + .expectNext(oliver, dave, carter, boyd, stefan, leroi, alicia).verifyComplete(); + } + + @RepeatedTest(10) // GH-4838 + @EnableIfReplicaSetAvailable + void transactionalSaveAllWithPublisherForStuffThatIsConsideredAnUpdateOfExistingData() { + + ReactiveMongoTransactionManager txmgr = new ReactiveMongoTransactionManager(template.getMongoDatabaseFactory()); + Flux personFlux = Flux.fromStream(Stream.of(oliver, dave, carter, boyd, stefan, leroi, alicia)); + + TransactionalOperator.create(txmgr, TransactionDefinition.withDefaults()).execute(callback -> { + return repository.saveAll(personFlux); + }).as(StepVerifier::create) // + .expectNextCount(7).verifyComplete(); + } + + @Test // GH-3609 + void savePublisherOfImmutableEntitiesShouldInsertEntity() { + + immutableRepository.deleteAll().as(StepVerifier::create).verifyComplete(); - assertThat(dave.getId(), is(notNullValue())); - assertThat(oliver.getId(), is(notNullValue())); - assertThat(boyd.getId(), is(notNullValue())); + immutableRepository.saveAll(Flux.just(keith)).as(StepVerifier::create) // + .consumeNextWith(actual -> { + assertThat(actual.id).isNotNull(); + }) // + .verifyComplete(); } @Test // DATAMONGO-1444 - public void deleteAllShouldRemoveEntities() { + void deleteAllShouldRemoveEntities() { - StepVerifier.create(repository.deleteAll()).verifyComplete(); + repository.deleteAll().as(StepVerifier::create).verifyComplete(); - StepVerifier.create(repository.findAll()).verifyComplete(); + repository.findAll().as(StepVerifier::create).verifyComplete(); } @Test // DATAMONGO-1444 - public void deleteByIdShouldRemoveEntity() { + void deleteByIdShouldRemoveEntity() { - 
StepVerifier.create(repository.deleteById(dave.id)).verifyComplete(); + repository.deleteById(dave.id).as(StepVerifier::create).verifyComplete(); - StepVerifier.create(repository.findById(dave.id)).verifyComplete(); + repository.findById(dave.id).as(StepVerifier::create).verifyComplete(); } @Test // DATAMONGO-1712 - public void deleteByIdUsingMonoShouldRemoveEntity() { + void deleteByIdUsingMonoShouldRemoveEntity() { - StepVerifier.create(repository.deleteById(Mono.just(dave.id))).verifyComplete(); + repository.deleteById(Mono.just(dave.id)).as(StepVerifier::create).verifyComplete(); - StepVerifier.create(repository.existsById(dave.id)).expectNext(false).verifyComplete(); + repository.existsById(dave.id).as(StepVerifier::create).expectNext(false).verifyComplete(); } @Test // DATAMONGO-1712 - public void deleteByIdUsingFluxShouldRemoveEntity() { + void deleteByIdUsingFluxShouldRemoveEntity() { - StepVerifier.create(repository.deleteById(Flux.just(dave.id, oliver.id))).verifyComplete(); + repository.deleteById(Flux.just(dave.id, oliver.id)).as(StepVerifier::create).verifyComplete(); - StepVerifier.create(repository.existsById(dave.id)).expectNext(false).verifyComplete(); - StepVerifier.create(repository.existsById(oliver.id)).expectNext(true).verifyComplete(); + repository.existsById(dave.id).as(StepVerifier::create).expectNext(false).verifyComplete(); + repository.existsById(oliver.id).as(StepVerifier::create).expectNext(true).verifyComplete(); } @Test // DATAMONGO-1444 - public void deleteShouldRemoveEntity() { + void deleteShouldRemoveEntity() { - StepVerifier.create(repository.delete(dave)).verifyComplete(); + repository.delete(dave).as(StepVerifier::create).verifyComplete(); - StepVerifier.create(repository.findById(dave.id)).verifyComplete(); + repository.findById(dave.id).as(StepVerifier::create).verifyComplete(); } @Test // DATAMONGO-1444 - public void deleteIterableOfEntitiesShouldRemoveEntities() { + void deleteIterableOfEntitiesShouldRemoveEntities() { - 
StepVerifier.create(repository.deleteAll(Arrays.asList(dave, boyd))).verifyComplete(); + repository.deleteAll(Arrays.asList(dave, boyd)).as(StepVerifier::create).verifyComplete(); - StepVerifier.create(repository.findById(boyd.id)).verifyComplete(); + repository.findById(boyd.id).as(StepVerifier::create).verifyComplete(); - StepVerifier.create(repository.findByLastname("Matthews")).expectNext(oliver).verifyComplete(); + repository.findByLastname("Matthews").as(StepVerifier::create).expectNext(oliver).verifyComplete(); } @Test // DATAMONGO-1444 - public void deletePublisherOfEntitiesShouldRemoveEntities() { + void deletePublisherOfEntitiesShouldRemoveEntities() { - StepVerifier.create(repository.deleteAll(Flux.just(dave, boyd))).verifyComplete(); + repository.deleteAll(Flux.just(dave, boyd)).as(StepVerifier::create).verifyComplete(); - StepVerifier.create(repository.findById(boyd.id)).verifyComplete(); + repository.findById(boyd.id).as(StepVerifier::create).verifyComplete(); - StepVerifier.create(repository.findByLastname("Matthews")).expectNext(oliver).verifyComplete(); + repository.findByLastname("Matthews").as(StepVerifier::create).expectNext(oliver).verifyComplete(); } @Test // DATAMONGO-1619 - public void findOneByExampleShouldReturnObject() { + void findOneByExampleShouldReturnObject() { Example example = Example.of(dave); - StepVerifier.create(repository.findOne(example)).expectNext(dave).verifyComplete(); + repository.findOne(example).as(StepVerifier::create).expectNext(dave).verifyComplete(); } @Test // DATAMONGO-1619 - public void findAllByExampleShouldReturnObjects() { + void findAllByExampleShouldReturnObjects() { Example example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname")); - StepVerifier.create(repository.findAll(example)).expectNextCount(2).verifyComplete(); + repository.findAll(example).as(StepVerifier::create).expectNextCount(2).verifyComplete(); } @Test // DATAMONGO-1619 - public void 
findAllByExampleAndSortShouldReturnObjects() { + void findAllByExampleAndSortShouldReturnObjects() { Example example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname")); - StepVerifier.create(repository.findAll(example, Sort.by("firstname"))).expectNext(dave, oliver).verifyComplete(); + repository.findAll(example, Sort.by("firstname")).as(StepVerifier::create).expectNext(dave, oliver) + .verifyComplete(); } @Test // DATAMONGO-1619 - public void countByExampleShouldCountObjects() { + void countByExampleShouldCountObjects() { Example example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname")); - StepVerifier.create(repository.count(example)).expectNext(2L).verifyComplete(); + repository.count(example).as(StepVerifier::create).expectNext(2L).verifyComplete(); } @Test // DATAMONGO-1619 - public void existsByExampleShouldReturnExisting() { + void existsByExampleShouldReturnExisting() { Example example = Example.of(dave, matching().withIgnorePaths("id", "age", "firstname")); - StepVerifier.create(repository.exists(example)).expectNext(true).verifyComplete(); + repository.exists(example).as(StepVerifier::create).expectNext(true).verifyComplete(); } @Test // DATAMONGO-1619 - public void existsByExampleShouldReturnNonExisting() { + void existsByExampleShouldReturnNonExisting() { Example example = Example.of(new ReactivePerson("foo", "bar", -1)); - StepVerifier.create(repository.exists(example)).expectNext(false).verifyComplete(); + repository.exists(example).as(StepVerifier::create).expectNext(false).verifyComplete(); } @Test // DATAMONGO-1619 - public void findOneShouldEmitIncorrectResultSizeDataAccessExceptionWhenMoreThanOneElementFound() { + void findOneShouldEmitIncorrectResultSizeDataAccessExceptionWhenMoreThanOneElementFound() { Example example = Example.of(new ReactivePerson(null, "Matthews", -1), matching().withIgnorePaths("age")); - 
StepVerifier.create(repository.findOne(example)).expectError(IncorrectResultSizeDataAccessException.class); + repository.findOne(example).as(StepVerifier::create).expectError(IncorrectResultSizeDataAccessException.class); + } + + @Test // DATAMONGO-1907 + void findOneByExampleWithoutResultShouldCompleteEmpty() { + + Example example = Example.of(new ReactivePerson("foo", "bar", -1)); + + repository.findOne(example).as(StepVerifier::create).verifyComplete(); + } + + @Test // GH-3757 + void findByShouldReturnFirstResult() { + + ReactivePerson probe = new ReactivePerson(); + probe.setFirstname(oliver.getFirstname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::first) // + .as(StepVerifier::create) // + .expectNext(oliver) // + .verifyComplete(); + } + + @Test // GH-3757 + void findByShouldReturnOneResult() { + + ReactivePerson probe = new ReactivePerson(); + probe.setFirstname(oliver.getFirstname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::one) // + .as(StepVerifier::create) // + .expectNext(oliver) // + .verifyComplete(); + + probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::one) // + .as(StepVerifier::create) // + .verifyError(IncorrectResultSizeDataAccessException.class); + } + + @Test // GH-3757 + void findByShouldReturnAll() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::all) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + } + + @Test // GH-3757 + void findByShouldApplySortAll() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository.findBy(Example.of(probe, 
matching().withIgnorePaths("age")), it -> it.sortBy(Sort.by("firstname")).all()) // + .as(StepVerifier::create) // + .expectNext(dave, oliver) // + .verifyComplete(); + + repository + .findBy(Example.of(probe, matching().withIgnorePaths("age")), + it -> it.sortBy(Sort.by(Direction.DESC, "firstname")).all()) // + .as(StepVerifier::create) // + .expectNext(oliver, dave) // + .verifyComplete(); + } + + @Test // GH-3757 + void findByShouldApplyProjection() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), it -> it.project("firstname").first()) // + .as(StepVerifier::create) // + .assertNext(it -> { + + assertThat(it.getFirstname()).isNotNull(); + assertThat(it.getLastname()).isNull(); + }).verifyComplete(); + } + + @Test // GH-3757 + void findByShouldApplyPagination() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository + .findBy(Example.of(probe, matching().withIgnorePaths("age")), + it -> it.page(PageRequest.of(0, 1, Sort.by("firstname")))) // + .as(StepVerifier::create) // + .assertNext(it -> { + + assertThat(it.getTotalElements()).isEqualTo(2); + assertThat(it.getContent()).contains(dave); + }).verifyComplete(); + + repository + .findBy(Example.of(probe, matching().withIgnorePaths("age")), + it -> it.page(PageRequest.of(1, 1, Sort.by("firstname")))) // + .as(StepVerifier::create) // + .assertNext(it -> { + + assertThat(it.getTotalElements()).isEqualTo(2); + assertThat(it.getContent()).contains(oliver); + }).verifyComplete(); + } + + @Test // GH-4889 + void findByShouldApplySlice() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository + .findBy(Example.of(probe, matching().withIgnorePaths("age")), + it -> it.slice(PageRequest.of(0, 1, Sort.by("firstname")))) // + .as(StepVerifier::create) // + .assertNext(it -> { + + 
assertThat(it.hasNext()).isTrue(); + assertThat(it.getContent()).contains(dave); + }).verifyComplete(); + + repository + .findBy(Example.of(probe, matching().withIgnorePaths("age")), + it -> it.slice(PageRequest.of(1, 1, Sort.by("firstname")))) // + .as(StepVerifier::create) // + .assertNext(it -> { + + assertThat(it.hasNext()).isFalse(); + assertThat(it.getContent()).contains(oliver); + }).verifyComplete(); + } + + @Test // GH-3757 + void findByShouldCount() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::count) // + .as(StepVerifier::create) // + .expectNext(2L) // + .verifyComplete(); + + probe = new ReactivePerson(); + probe.setLastname("foo"); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::count) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // GH-3757 + void findByShouldReportExists() { + + ReactivePerson probe = new ReactivePerson(); + probe.setLastname(oliver.getLastname()); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::exists) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); + + probe = new ReactivePerson(); + probe.setLastname("foo"); + + repository.findBy(Example.of(probe, matching().withIgnorePaths("age")), FluentQuery.ReactiveFluentQuery::exists) // + .as(StepVerifier::create) // + .expectNext(false) // + .verifyComplete(); } - interface ReactivePersonRepostitory extends ReactiveMongoRepository { + interface ReactivePersonRepository extends ReactiveMongoRepository { Flux findByLastname(String lastname); } - @Data - @NoArgsConstructor + interface ReactiveImmutablePersonRepository extends ReactiveMongoRepository { + + } + static class ReactivePerson { @Id String id; @@ -457,11 +692,143 @@ static class 
ReactivePerson { String lastname; int age; - public ReactivePerson(String firstname, String lastname, int age) { + public ReactivePerson() {} + + ReactivePerson(String firstname, String lastname, int age) { + + this.firstname = firstname; + this.lastname = lastname; + this.age = age; + } + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + } + + public int getAge() { + return this.age; + } + + public void setId(String id) { + this.id = id; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public void setAge(int age) { + this.age = age; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ReactivePerson that = (ReactivePerson) o; + return age == that.age && Objects.equals(id, that.id) && Objects.equals(firstname, that.firstname) + && Objects.equals(lastname, that.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname, age); + } + + public String toString() { + return "SimpleReactiveMongoRepositoryTests.ReactivePerson(id=" + this.getId() + ", firstname=" + + this.getFirstname() + ", lastname=" + this.getLastname() + ", age=" + this.getAge() + ")"; + } + } + + static final class ImmutableReactivePerson { + @Id private final String id; + + private final String firstname; + private final String lastname; + private final int age; + + ImmutableReactivePerson(@Nullable String id, String firstname, String lastname, int age) { + + this.id = id; this.firstname = firstname; this.lastname = lastname; this.age = age; } + + public String getId() { + return this.id; + } + + public String getFirstname() { + return this.firstname; + } + + public String getLastname() { + return this.lastname; + 
} + + public int getAge() { + return this.age; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ImmutableReactivePerson that = (ImmutableReactivePerson) o; + return age == that.age && Objects.equals(id, that.id) && Objects.equals(firstname, that.firstname) + && Objects.equals(lastname, that.lastname); + } + + @Override + public int hashCode() { + return Objects.hash(id, firstname, lastname, age); + } + + public String toString() { + return "SimpleReactiveMongoRepositoryTests.ImmutableReactivePerson(id=" + this.getId() + ", firstname=" + + this.getFirstname() + ", lastname=" + this.getLastname() + ", age=" + this.getAge() + ")"; + } + + public ImmutableReactivePerson withId(String id) { + return this.id == id ? this : new ImmutableReactivePerson(id, this.firstname, this.lastname, this.age); + } + + public ImmutableReactivePerson withFirstname(String firstname) { + return this.firstname == firstname ? this + : new ImmutableReactivePerson(this.id, firstname, this.lastname, this.age); + } + + public ImmutableReactivePerson withLastname(String lastname) { + return this.lastname == lastname ? this + : new ImmutableReactivePerson(this.id, this.firstname, lastname, this.age); + } + + public ImmutableReactivePerson withAge(int age) { + return this.age == age ? this : new ImmutableReactivePerson(this.id, this.firstname, this.lastname, age); + } } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SumAge.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SumAge.java new file mode 100644 index 0000000000..abbfac5943 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SumAge.java @@ -0,0 +1,55 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.util.Objects; + +/** + * @author Christoph Strobl + */ +final class SumAge { + + private final Long total; + + public SumAge(Long total) { + this.total = total; + } + + public Long getTotal() { + return this.total; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SumAge sumAge = (SumAge) o; + return Objects.equals(total, sumAge.total); + } + + @Override + public int hashCode() { + return Objects.hash(total); + } + + public String toString() { + return "SumAge(total=" + this.getTotal() + ")"; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/User.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/User.java index 839c02c9dd..123f7a4889 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/User.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/User.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexId.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexId.java index 9161994cac..606cca8647 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexId.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexId.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,6 +17,7 @@ import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.lang.Nullable; import org.springframework.util.ObjectUtils; /** @@ -40,7 +41,7 @@ public int hashCode() { } @Override - public boolean equals(Object obj) { + public boolean equals(@Nullable Object obj) { if (obj == this) { return true; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexIdRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexIdRepository.java index 4b4b6ed56c..332eeff8ea 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexIdRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/UserWithComplexIdRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -60,7 +60,7 @@ public interface UserWithComplexIdRepository extends CrudRepository { + return collection.find(new Document("_id", new ObjectId(person.getId()))).first(); + }); + + assertThat(document).containsEntry("firstname", "Duckling").containsEntry("version", 1L); + } + + @Test // GH-4918 + void updatesVersionedTypeCorrectlyWhenUpdateIsUsingInc() { + + VersionedPerson person = template.insert(VersionedPersonWithCounter.class) + .one(new VersionedPersonWithCounter("Donald", "Duckling")); + + int updateCount = versionedPersonRepository.findAndIncCounterByLastname(person.getLastname()); + + assertThat(updateCount).isOne(); + + Document document = template.execute(VersionedPersonWithCounter.class, collection -> { + return collection.find(new Document("_id", new ObjectId(person.getId()))).first(); + }); + + assertThat(document).containsEntry("lastname", "Duckling").containsEntry("version", 1L).containsEntry("counter", + 42); + } + + @Test // GH-4918 + void updatesVersionedTypeCorrectlyWhenUpdateCoversVersionBump() { + + VersionedPerson person = template.insert(VersionedPersonWithCounter.class) + .one(new VersionedPersonWithCounter("Donald", "Duckling")); + + int updateCount = versionedPersonRepository.findAndSetFirstnameToLastnameIncVersionByLastname(person.getLastname(), + 10); + + assertThat(updateCount).isOne(); + + Document document = template.execute(VersionedPersonWithCounter.class, collection -> { + return collection.find(new Document("_id", new ObjectId(person.getId()))).first(); + }); + + assertThat(document).containsEntry("firstname", "Duckling").containsEntry("version", 10L); + } + + interface VersionedPersonRepository extends CrudRepository { + + @Update("{ '$set': { 
'firstname' : ?0 } }") + int findAndSetFirstnameToLastnameByLastname(String lastname); + + @Update("{ '$inc': { 'counter' : 42 } }") + int findAndIncCounterByLastname(String lastname); + + @Update(""" + { + '$set': { 'firstname' : ?0 }, + '$inc': { 'version' : ?1 } + }""") + int findAndSetFirstnameToLastnameIncVersionByLastname(String lastname, int incVersion); + + } + + @org.springframework.data.mongodb.core.mapping.Document("versioned-person") + static class VersionedPersonWithCounter extends VersionedPerson { + + int counter; + + public VersionedPersonWithCounter(String firstname, @Nullable String lastname) { + super(firstname, lastname); + } + + public int getCounter() { + return counter; + } + + public void setCounter(int counter) { + this.counter = counter; + } + + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/aot/RepositoryRuntimeHintsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/aot/RepositoryRuntimeHintsUnitTests.java new file mode 100644 index 0000000000..2978c07b64 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/aot/RepositoryRuntimeHintsUnitTests.java @@ -0,0 +1,80 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.aot; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.predicate.RuntimeHintsPredicates; +import org.springframework.data.mongodb.classloading.HidingClassLoader; +import org.springframework.data.mongodb.repository.support.CrudMethodMetadata; +import org.springframework.data.mongodb.repository.support.QuerydslMongoPredicateExecutor; +import org.springframework.data.mongodb.repository.support.ReactiveQuerydslMongoPredicateExecutor; + +import com.mongodb.client.MongoClient; + +/** + * Unit tests for {@link RepositoryRuntimeHints}. + * + * @author Christoph Strobl + */ +class RepositoryRuntimeHintsUnitTests { + + @Test // GH-4244 + void registersTypesForQuerydslIntegration() { + + RuntimeHints runtimeHints = new RuntimeHints(); + new RepositoryRuntimeHints().registerHints(runtimeHints, null); + + assertThat(runtimeHints).matches(RuntimeHintsPredicates.reflection().onType(QuerydslMongoPredicateExecutor.class) + .and(RuntimeHintsPredicates.reflection().onType(ReactiveQuerydslMongoPredicateExecutor.class))); + } + + @Test // GH-4244 + void onlyRegistersReactiveTypesForQuerydslIntegrationWhenNoSyncClientPresent() { + + RuntimeHints runtimeHints = new RuntimeHints(); + new RepositoryRuntimeHints().registerHints(runtimeHints, HidingClassLoader.hide(MongoClient.class)); + + assertThat(runtimeHints).matches(RuntimeHintsPredicates.reflection().onType(QuerydslMongoPredicateExecutor.class) + .negate().and(RuntimeHintsPredicates.reflection().onType(ReactiveQuerydslMongoPredicateExecutor.class))); + } + + @Test // GH-4244 + @Disabled("TODO: ReactiveWrappers does not support ClassLoader") + void doesNotRegistersReactiveTypesForQuerydslIntegrationWhenReactorNotPresent() { + + RuntimeHints runtimeHints = new RuntimeHints(); + new 
RepositoryRuntimeHints().registerHints(runtimeHints, new HidingClassLoader("reactor.core")); + + assertThat(runtimeHints).matches(RuntimeHintsPredicates.reflection().onType(QuerydslMongoPredicateExecutor.class) + .and(RuntimeHintsPredicates.reflection().onType(ReactiveQuerydslMongoPredicateExecutor.class).negate())); + } + + @Test // GH-2971, GH-4534 + void registersProxyForCrudMethodMetadata() { + + RuntimeHints runtimeHints = new RuntimeHints(); + new RepositoryRuntimeHints().registerHints(runtimeHints, null); + + assertThat(runtimeHints).matches(RuntimeHintsPredicates.proxies().forInterfaces(CrudMethodMetadata.class, // + org.springframework.aop.SpringProxy.class, // + org.springframework.aop.framework.Advised.class, // + org.springframework.core.DecoratingProxy.class)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiExtensionIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiExtensionIntegrationTests.java index 652a2b3f98..90886d7760 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiExtensionIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiExtensionIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,15 @@ */ package org.springframework.data.mongodb.repository.cdi; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import javax.enterprise.inject.se.SeContainer; -import javax.enterprise.inject.se.SeContainerInitializer; +import jakarta.enterprise.inject.se.SeContainer; +import jakarta.enterprise.inject.se.SeContainerInitializer; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; import org.springframework.data.mongodb.repository.Person; /** @@ -36,7 +36,7 @@ public class CdiExtensionIntegrationTests { static SeContainer container; - @BeforeClass + @BeforeAll public static void setUp() { container = SeContainerInitializer.newInstance() // @@ -45,7 +45,7 @@ public static void setUp() { .initialize(); } - @AfterClass + @AfterAll public static void tearDown() { container.close(); } @@ -56,21 +56,21 @@ public void bootstrapsRepositoryCorrectly() { RepositoryClient client = container.select(RepositoryClient.class).get(); CdiPersonRepository repository = client.getRepository(); - assertThat(repository, is(notNullValue())); + assertThat(repository).isNotNull(); repository.deleteAll(); Person person = new Person("Dave", "Matthews"); Person result = repository.save(person); - assertThat(result, is(notNullValue())); - assertThat(repository.findById(person.getId()).get().getId(), is(result.getId())); + assertThat(result).isNotNull(); + assertThat(repository.findById(person.getId()).get().getId()).isEqualTo(result.getId()); } @Test // DATAMONGO-1017, 
DATAMONGO-1785 public void returnOneFromCustomImpl() { RepositoryClient repositoryConsumer = container.select(RepositoryClient.class).get(); - assertThat(repositoryConsumer.getSamplePersonRepository().returnOne(), is(1)); + assertThat(repositoryConsumer.getSamplePersonRepository().returnOne()).isEqualTo(1); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiPersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiPersonRepository.java index 7537f0560a..0c79530dca 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiPersonRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/CdiPersonRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/MongoTemplateProducer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/MongoTemplateProducer.java index 2d1bfc3a83..ca094b9b8e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/MongoTemplateProducer.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/MongoTemplateProducer.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,14 @@ */ package org.springframework.data.mongodb.repository.cdi; -import javax.enterprise.context.ApplicationScoped; -import javax.enterprise.inject.Produces; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.enterprise.inject.Produces; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; - -import com.mongodb.MongoClient; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.test.util.MongoTestUtils; /** * Simple component exposing a {@link MongoOperations} instance as CDI bean. 
@@ -36,7 +35,7 @@ class MongoTemplateProducer { @ApplicationScoped public MongoOperations createMongoTemplate() { - MongoDbFactory factory = new SimpleMongoDbFactory(new MongoClient(), "database"); + MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(MongoTestUtils.client(), "database"); return new MongoTemplate(factory); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/RepositoryClient.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/RepositoryClient.java index 5742aea7ab..514ed3e01a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/RepositoryClient.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/RepositoryClient.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,7 @@ */ package org.springframework.data.mongodb.repository.cdi; -import javax.inject.Inject; +import jakarta.inject.Inject; /** * @author Oliver Gierke diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepositoryCustom.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonFragment.java similarity index 80% rename from spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepositoryCustom.java rename to spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonFragment.java index 1d7bed8b08..12b59d86a4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepositoryCustom.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonFragment.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,13 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.springframework.data.mongodb.repository.cdi; /** * @author Mark Paluch */ -interface SamplePersonRepositoryCustom { +interface SamplePersonFragment { int returnOne(); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepositoryImpl.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonFragmentImpl.java similarity index 78% rename from spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepositoryImpl.java rename to spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonFragmentImpl.java index 9963053e7c..09e20b9bf2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepositoryImpl.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonFragmentImpl.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,13 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.springframework.data.mongodb.repository.cdi; /** * @author Mark Paluch */ -class SamplePersonRepositoryImpl implements SamplePersonRepositoryCustom { +class SamplePersonFragmentImpl implements SamplePersonFragment { @Override public int returnOne() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepository.java index 25469ef1c3..140cb6ca36 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/cdi/SamplePersonRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,4 +21,4 @@ /** * @author Mark Paluch */ -public interface SamplePersonRepository extends Repository, SamplePersonRepositoryCustom {} +public interface SamplePersonRepository extends Repository, SamplePersonFragment {} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests.java index fd6a688fc8..5480fe3e1b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,10 @@ */ package org.springframework.data.mongodb.repository.config; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionReader; @@ -40,35 +39,33 @@ * @author Oliver Gierke */ @ContextConfiguration -public class MongoNamespaceIntegrationTests extends AbstractPersonRepositoryIntegrationTests { +class MongoNamespaceIntegrationTests extends AbstractPersonRepositoryIntegrationTests { DefaultListableBeanFactory factory; BeanDefinitionReader reader; @Autowired ApplicationContext context; - @Before - @Override + @BeforeEach public void setUp() throws InterruptedException { - super.setUp(); factory = new DefaultListableBeanFactory(); reader = new XmlBeanDefinitionReader(factory); } @Test - public void assertDefaultMappingContextIsWired() { + void assertDefaultMappingContextIsWired() { reader.loadBeanDefinitions(new ClassPathResource("MongoNamespaceIntegrationTests-context.xml", getClass())); BeanDefinition definition = factory.getBeanDefinition("personRepository"); - assertThat(definition, is(notNullValue())); + assertThat(definition).isNotNull(); } @Test // DATAMONGO-581 - public void exposesPersistentEntity() { + void exposesPersistentEntity() { Repositories repositories = new Repositories(context); PersistentEntity entity = repositories.getPersistentEntity(Person.class); - assertThat(entity, 
is(notNullValue())); - assertThat(entity, is(instanceOf(MongoPersistentEntity.class))); + assertThat(entity).isNotNull(); + assertThat(entity).isInstanceOf(MongoPersistentEntity.class); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarIntegrationTests.java index 5c9fb6389e..7d116e25e9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,32 +20,33 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; import org.springframework.data.mongodb.repository.PersonRepository; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -import com.mongodb.MongoClient; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration tests for {@link MongoRepositoriesRegistrar}. 
* * @author Oliver Gierke */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class MongoRepositoriesRegistrarIntegrationTests { @Configuration - @EnableMongoRepositories(basePackages = "org.springframework.data.mongodb.repository") + @EnableMongoRepositories(basePackages = "org.springframework.data.mongodb.repository", includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = PersonRepository.class)) static class Config { @Bean public MongoOperations mongoTemplate() throws Exception { - return new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "database")); + return new MongoTemplate(new SimpleMongoClientDatabaseFactory(MongoTestUtils.client(), "database")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarUnitTests.java new file mode 100644 index 0000000000..778ac61db1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoriesRegistrarUnitTests.java @@ -0,0 +1,86 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.config; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.Arrays; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.context.annotation.AnnotationBeanNameGenerator; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.core.io.DefaultResourceLoader; +import org.springframework.core.type.AnnotationMetadata; +import org.springframework.data.mongodb.repository.PersonRepository; + +/** + * @author Christoph Strobl + */ +class MongoRepositoriesRegistrarUnitTests { + + private BeanDefinitionRegistry registry; + + @BeforeEach + void setUp() { + registry = new DefaultListableBeanFactory(); + } + + @ParameterizedTest // GH-499, GH-3440 + @MethodSource(value = { "args" }) + void configuresRepositoriesCorrectly(AnnotationMetadata metadata, String[] beanNames) { + + MongoRepositoriesRegistrar registrar = new MongoRepositoriesRegistrar(); + registrar.setResourceLoader(new DefaultResourceLoader()); + registrar.setEnvironment(new StandardEnvironment()); + registrar.registerBeanDefinitions(metadata, registry); + + Iterable names = Arrays.asList(registry.getBeanDefinitionNames()); + assertThat(names).contains(beanNames); + } + + static Stream args() { + return Stream.of( + Arguments.of(AnnotationMetadata.introspect(Config.class), + new String[] { "personRepository", "samplePersonRepository", "contactRepository" }), + Arguments.of(AnnotationMetadata.introspect(ConfigWithBeanNameGenerator.class), + new String[] { "personREPO", 
"samplePersonREPO", "contactREPO" })); + } + + @EnableMongoRepositories(basePackageClasses = PersonRepository.class) + private class Config { + + } + + @EnableMongoRepositories(basePackageClasses = PersonRepository.class, nameGenerator = MyBeanNameGenerator.class) + private class ConfigWithBeanNameGenerator { + + } + + static class MyBeanNameGenerator extends AnnotationBeanNameGenerator { + + @Override + public String generateBeanName(BeanDefinition definition, BeanDefinitionRegistry registry) { + return super.generateBeanName(definition, registry).replaceAll("Repository", "REPO"); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtensionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtensionUnitTests.java index 3e992ebd2d..f613beb6d5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtensionUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/MongoRepositoryConfigurationExtensionUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,12 @@ */ package org.springframework.data.mongodb.repository.config; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Collection; -import org.junit.Test; +import org.junit.jupiter.api.Test; + import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.core.env.Environment; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarIntegrationTests.java index d264c080e8..402f13a47b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,6 +18,7 @@ import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; @@ -26,16 +27,16 @@ import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; import org.springframework.data.mongodb.repository.ReactivePersonRepository; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import com.mongodb.reactivestreams.client.MongoClients; +import com.mongodb.reactivestreams.client.MongoClient; /** * Integration tests for {@link ReactiveMongoRepositoriesRegistrar}. 
* * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class ReactiveMongoRepositoriesRegistrarIntegrationTests { @@ -45,7 +46,8 @@ static class Config { @Bean public ReactiveMongoTemplate reactiveMongoTemplate() throws Exception { - return new ReactiveMongoTemplate(new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database")); + return new ReactiveMongoTemplate( + new SimpleReactiveMongoDatabaseFactory(Mockito.mock(MongoClient.class), "database")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarUnitTests.java new file mode 100644 index 0000000000..332ff06ee4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoriesRegistrarUnitTests.java @@ -0,0 +1,86 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.config; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.Arrays; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.context.annotation.AnnotationBeanNameGenerator; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.core.io.DefaultResourceLoader; +import org.springframework.core.type.AnnotationMetadata; +import org.springframework.data.mongodb.repository.PersonRepository; + +/** + * @author Christoph Strobl + */ +class ReactiveMongoRepositoriesRegistrarUnitTests { + + private BeanDefinitionRegistry registry; + + @BeforeEach + void setUp() { + registry = new DefaultListableBeanFactory(); + } + + @ParameterizedTest // GH-499, GH-3440 + @MethodSource(value = { "args" }) + void configuresRepositoriesCorrectly(AnnotationMetadata metadata, String[] beanNames) { + + ReactiveMongoRepositoriesRegistrar registrar = new ReactiveMongoRepositoriesRegistrar(); + registrar.setResourceLoader(new DefaultResourceLoader()); + registrar.setEnvironment(new StandardEnvironment()); + registrar.registerBeanDefinitions(metadata, registry); + + Iterable names = Arrays.asList(registry.getBeanDefinitionNames()); + assertThat(names).contains(beanNames); + } + + static Stream args() { + return Stream.of( + Arguments.of(AnnotationMetadata.introspect(Config.class), new String[] { "reactivePersonRepository" }), + Arguments.of(AnnotationMetadata.introspect(ConfigWithBeanNameGenerator.class), + new String[] { "reactivePersonREPO" })); + } + + 
@EnableReactiveMongoRepositories(basePackageClasses = PersonRepository.class) + private class Config { + + } + + @EnableReactiveMongoRepositories(basePackageClasses = PersonRepository.class, + nameGenerator = MyBeanNameGenerator.class) + private class ConfigWithBeanNameGenerator { + + } + + static class MyBeanNameGenerator extends AnnotationBeanNameGenerator { + + @Override + public String generateBeanName(BeanDefinition definition, BeanDefinitionRegistry registry) { + return super.generateBeanName(definition, registry).replaceAll("Repository", "REPO"); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtensionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtensionUnitTests.java index 8c84dd54df..45ecba992f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtensionUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/ReactiveMongoRepositoryConfigurationExtensionUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,12 @@ */ package org.springframework.data.mongodb.repository.config; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Collection; -import org.junit.Test; +import org.junit.jupiter.api.Test; + import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.core.env.Environment; @@ -33,7 +34,7 @@ import org.springframework.data.repository.config.RepositoryConfiguration; import org.springframework.data.repository.config.RepositoryConfigurationSource; import org.springframework.data.repository.reactive.ReactiveCrudRepository; -import org.springframework.data.repository.reactive.RxJava2CrudRepository; +import org.springframework.data.repository.reactive.RxJava3CrudRepository; /** * Unit tests for {@link ReactiveMongoRepositoryConfigurationExtension}. 
@@ -107,7 +108,7 @@ static class Store {} interface SampleRepository extends ReactiveCrudRepository {} - interface UnannotatedRepository extends RxJava2CrudRepository {} + interface UnannotatedRepository extends RxJava3CrudRepository {} interface StoreRepository extends ReactiveMongoRepository {} } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests.java index 30a2a30cf9..cc36c6cafb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,22 +15,22 @@ */ package org.springframework.data.mongodb.repository.config.lazy; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.mongodb.repository.config.lazy.ClassWithNestedRepository.NestedUserRepository; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration test for repository namespace configuration with nested repositories. 
* * @author Thomas Darimont */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("AllowNestedMongoRepositoriesRepositoryConfigTests-context.xml") public class AllowNestedMongoRepositoriesRepositoryConfigTests { @@ -38,6 +38,6 @@ public class AllowNestedMongoRepositoriesRepositoryConfigTests { @Test // DATAMONGO-780 public void shouldFindNestedRepository() { - assertThat(fooRepository, is(notNullValue())); + assertThat(fooRepository).isNotNull(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/ClassWithNestedRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/ClassWithNestedRepository.java index 0cdde800d4..1827559ffd 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/ClassWithNestedRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/ClassWithNestedRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/NestedMongoRepositoriesJavaConfigTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/NestedMongoRepositoriesJavaConfigTests.java index ff5b3397bc..c64b831270 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/NestedMongoRepositoriesJavaConfigTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/config/lazy/NestedMongoRepositoriesJavaConfigTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,11 @@ */ package org.springframework.data.mongodb.repository.config.lazy; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.ImportResource; @@ -27,14 +27,14 @@ import org.springframework.data.mongodb.repository.config.lazy.ClassWithNestedRepository.NestedUserRepository; import org.springframework.data.repository.support.Repositories; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration test for the combination of JavaConfig and an {@link Repositories} wrapper. 
* * @author Thomas Darimont */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class NestedMongoRepositoriesJavaConfigTests { @@ -47,6 +47,6 @@ static class Config {} @Test // DATAMONGO-780 public void shouldSupportNestedRepositories() { - assertThat(nestedUserRepository, is(notNullValue())); + assertThat(nestedUserRepository).isNotNull(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepository.java index 9abf0ecc5a..3693f60780 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepositoryImplementationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepositoryImplementationTests.java index 5dde8457e8..4361be2420 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepositoryImplementationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/ComposedRepositoryImplementationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,23 +20,25 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; import org.springframework.context.annotation.ImportResource; import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration tests for composed Repository implementations. 
* * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class ComposedRepositoryImplementationTests { @Configuration - @EnableMongoRepositories + @EnableMongoRepositories(includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = ComposedRepository.class)) @ImportResource("classpath:infrastructure.xml") static class Config {} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepository.java index 647ca4a774..21e0cb3914 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepositoryImpl.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepositoryImpl.java index efc85aefde..69654bc7b7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepositoryImpl.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomMongoRepositoryImpl.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -25,10 +25,6 @@ */ public class CustomMongoRepositoryImpl implements CustomMongoRepository { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.custom.CustomMongoRepository#findByFullName() - */ @Override public List findByUsernameCustom(String username) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepository.java index 617a9c7990..d83b53c62a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepository.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,12 +16,12 @@ package org.springframework.data.mongodb.repository.custom; import org.springframework.data.mongodb.repository.User; -import org.springframework.data.repository.reactive.RxJava2CrudRepository; +import org.springframework.data.repository.reactive.RxJava3CrudRepository; /** * @author Mark Paluch */ public interface CustomReactiveMongoRepository - extends RxJava2CrudRepository, CustomReactiveMongoRepositoryCustom { + extends RxJava3CrudRepository, CustomReactiveMongoRepositoryCustom { } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryCustom.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryCustom.java index 5efcc45eae..6b650ecaac 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryCustom.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryCustom.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryImpl.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryImpl.java index f3c96aff62..4c0a7b78e8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryImpl.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveMongoRepositoryImpl.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveRepositoryImplementationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveRepositoryImplementationTests.java index 4fa67f6214..c7de6e72bf 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveRepositoryImplementationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomReactiveRepositoryImplementationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,32 +15,34 @@ */ package org.springframework.data.mongodb.repository.custom; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; import org.springframework.context.annotation.ImportResource; import org.springframework.data.mongodb.repository.User; import org.springframework.data.mongodb.repository.config.EnableReactiveMongoRepositories; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration tests for custom reactive Repository implementations. 
* * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class CustomReactiveRepositoryImplementationTests { @Configuration - @EnableReactiveMongoRepositories + @EnableReactiveMongoRepositories(includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = CustomReactiveMongoRepository.class)) @ImportResource("classpath:reactive-infrastructure.xml") static class Config {} @@ -52,8 +54,8 @@ public void shouldExecuteMethodOnCustomRepositoryImplementation() { String username = "bubu"; List users = customMongoRepository.findByUsernameCustom(username); - assertThat(users.size(), is(1)); - assertThat(users.get(0), is(notNullValue())); - assertThat(users.get(0).getUsername(), is(username)); + assertThat(users.size()).isEqualTo(1); + assertThat(users.get(0)).isNotNull(); + assertThat(users.get(0).getUsername()).isEqualTo(username); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomRepositoryImplementationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomRepositoryImplementationTests.java index 07e620a941..697799d3e8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomRepositoryImplementationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/CustomRepositoryImplementationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,32 +15,34 @@ */ package org.springframework.data.mongodb.repository.custom; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.ComponentScan.Filter; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.FilterType; import org.springframework.context.annotation.ImportResource; import org.springframework.data.mongodb.repository.User; import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; /** * Integration tests for custom Repository implementations. 
* * @author Thomas Darimont */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration public class CustomRepositoryImplementationTests { @Configuration - @EnableMongoRepositories + @EnableMongoRepositories(includeFilters=@Filter(type = FilterType.ASSIGNABLE_TYPE, classes = CustomMongoRepository.class)) @ImportResource("classpath:infrastructure.xml") static class Config {} @@ -52,8 +54,8 @@ public void shouldExecuteMethodOnCustomRepositoryImplementation() { String username = "bubu"; List users = customMongoRepository.findByUsernameCustom(username); - assertThat(users.size(), is(1)); - assertThat(users.get(0), is(notNullValue())); - assertThat(users.get(0).getUsername(), is(username)); + assertThat(users.size()).isEqualTo(1); + assertThat(users.get(0)).isNotNull(); + assertThat(users.get(0).getUsername()).isEqualTo(username); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixin.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixin.java index 087062dce0..191e6a6b68 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixin.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixin.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixinImpl.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixinImpl.java index 6e620f163b..be7ec72a91 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixinImpl.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/custom/RepositoryMixinImpl.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractMongoQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractMongoQueryUnitTests.java index 6f28dee9f9..ea3c9ad023 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractMongoQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractMongoQueryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,47 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.ArgumentMatchers.*; -import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.*; import java.lang.reflect.Method; +import java.util.Collections; import java.util.List; +import java.util.Locale; import java.util.Optional; import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; import org.bson.types.ObjectId; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.context.support.StaticApplicationContext; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.domain.Limit; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Slice; import org.springframework.data.domain.Sort; -import org.springframework.data.mongodb.MongoDbFactory; +import 
org.springframework.data.domain.Sort.Direction; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery; +import org.springframework.data.mongodb.core.ExecutableUpdateOperation.ExecutableUpdate; +import org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate; +import org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithQuery; +import org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.Person; import org.springframework.data.mongodb.core.convert.DbRefResolver; @@ -50,15 +64,25 @@ import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.Hint; import org.springframework.data.mongodb.repository.Meta; import org.springframework.data.mongodb.repository.MongoRepository; +import org.springframework.data.mongodb.repository.ReadPreference; +import org.springframework.data.mongodb.repository.Update; import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; import org.springframework.data.repository.Repository; import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import 
org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import com.mongodb.MongoClientSettings; import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.UpdateResult; /** * Unit tests for {@link AbstractMongoQuery}. @@ -67,37 +91,52 @@ * @author Oliver Gierke * @author Thomas Darimont * @author Mark Paluch + * @author Jorge Rodríguez */ -@RunWith(MockitoJUnitRunner.class) -public class AbstractMongoQueryUnitTests { +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class AbstractMongoQueryUnitTests { @Mock MongoOperations mongoOperationsMock; @Mock ExecutableFind executableFind; @Mock FindWithQuery withQueryMock; + @Mock ExecutableUpdate executableUpdate; + @Mock UpdateWithQuery updateWithQuery; + @Mock UpdateWithUpdate updateWithUpdate; + @Mock TerminatingUpdate terminatingUpdate; @Mock BasicMongoPersistentEntity persitentEntityMock; @Mock MongoMappingContext mappingContextMock; @Mock DeleteResult deleteResultMock; + @Mock UpdateResult updateResultMock; - @Before - public void setUp() { + @BeforeEach + void setUp() { doReturn("persons").when(persitentEntityMock).getCollection(); doReturn(persitentEntityMock).when(mappingContextMock).getPersistentEntity(Mockito.any(Class.class)); doReturn(persitentEntityMock).when(mappingContextMock).getRequiredPersistentEntity(Mockito.any(Class.class)); doReturn(Person.class).when(persitentEntityMock).getType(); - DbRefResolver dbRefResolver = new DefaultDbRefResolver(mock(MongoDbFactory.class)); + MongoDatabaseFactory mongoDbFactory = mock(MongoDatabaseFactory.class); + when(mongoDbFactory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory); MappingMongoConverter converter = new 
MappingMongoConverter(dbRefResolver, mappingContextMock); converter.afterPropertiesSet(); doReturn(converter).when(mongoOperationsMock).getConverter(); doReturn(executableFind).when(mongoOperationsMock).query(any()); doReturn(withQueryMock).when(executableFind).as(any()); - doReturn(withQueryMock).when(withQueryMock).matching(any()); + doReturn(withQueryMock).when(withQueryMock).matching(any(Query.class)); + doReturn(executableUpdate).when(mongoOperationsMock).update(any()); + doReturn(updateWithQuery).when(executableUpdate).matching(any(Query.class)); + doReturn(terminatingUpdate).when(updateWithQuery).apply(any(UpdateDefinition.class)); + + when(mongoOperationsMock.remove(any(), any(), anyString())).thenReturn(deleteResultMock); + when(mongoOperationsMock.updateMulti(any(), any(), any(), anyString())).thenReturn(updateResultMock); } @Test // DATAMONGO-566 - public void testDeleteExecutionCallsRemoveCorrectly() { + void testDeleteExecutionCallsRemoveCorrectly() { createQueryForMethod("deletePersonByLastname", String.class).setDeleteQuery(true).execute(new Object[] { "booh" }); @@ -106,7 +145,7 @@ public void testDeleteExecutionCallsRemoveCorrectly() { } @Test // DATAMONGO-566, DATAMONGO-1040 - public void testDeleteExecutionLoadsListOfRemovedDocumentsWhenReturnTypeIsCollectionLike() { + void testDeleteExecutionLoadsListOfRemovedDocumentsWhenReturnTypeIsCollectionLike() { createQueryForMethod("deleteByLastname", String.class).setDeleteQuery(true).execute(new Object[] { "booh" }); @@ -114,29 +153,29 @@ public void testDeleteExecutionLoadsListOfRemovedDocumentsWhenReturnTypeIsCollec } @Test // DATAMONGO-566 - public void testDeleteExecutionReturnsZeroWhenWriteResultIsNull() { + void testDeleteExecutionReturnsZeroWhenWriteResultIsNull() { MongoQueryFake query = createQueryForMethod("deletePersonByLastname", String.class); query.setDeleteQuery(true); - assertThat(query.execute(new Object[] { "fake" }), is(0L)); + assertThat(query.execute(new Object[] { "fake" 
})).isEqualTo(0L); } @Test // DATAMONGO-566, DATAMONGO-978 - public void testDeleteExecutionReturnsNrDocumentsDeletedFromWriteResult() { + void testDeleteExecutionReturnsNrDocumentsDeletedFromWriteResult() { when(deleteResultMock.getDeletedCount()).thenReturn(100L); - when(mongoOperationsMock.remove(any(), eq(Person.class), eq("persons"))).thenReturn(deleteResultMock); + when(deleteResultMock.wasAcknowledged()).thenReturn(true); MongoQueryFake query = createQueryForMethod("deletePersonByLastname", String.class); query.setDeleteQuery(true); - assertThat(query.execute(new Object[] { "fake" }), is(100L)); + assertThat(query.execute(new Object[] { "fake" })).isEqualTo(100L); verify(mongoOperationsMock, times(1)).remove(any(), eq(Person.class), eq("persons")); } @Test // DATAMONGO-957 - public void metadataShouldNotBeAddedToQueryWhenNotPresent() { + void metadataShouldNotBeAddedToQueryWhenNotPresent() { MongoQueryFake query = createQueryForMethod("findByFirstname", String.class); query.execute(new Object[] { "fake" }); @@ -146,11 +185,12 @@ public void metadataShouldNotBeAddedToQueryWhenNotPresent() { verify(executableFind).as(Person.class); verify(withQueryMock).matching(captor.capture()); - assertThat(captor.getValue().getMeta().getComment(), nullValue()); + assertThat(captor.getValue().getMeta().getComment()).isNull(); + ; } @Test // DATAMONGO-957 - public void metadataShouldBeAddedToQueryCorrectly() { + void metadataShouldBeAddedToQueryCorrectly() { MongoQueryFake query = createQueryForMethod("findByFirstname", String.class, Pageable.class); query.execute(new Object[] { "fake", PageRequest.of(0, 10) }); @@ -160,11 +200,11 @@ public void metadataShouldBeAddedToQueryCorrectly() { verify(executableFind).as(Person.class); verify(withQueryMock).matching(captor.capture()); - assertThat(captor.getValue().getMeta().getComment(), is("comment")); + assertThat(captor.getValue().getMeta().getComment()).isEqualTo("comment"); } @Test // DATAMONGO-957 - public void 
metadataShouldBeAddedToCountQueryCorrectly() { + void metadataShouldBeAddedToCountQueryCorrectly() { MongoQueryFake query = createQueryForMethod("findByFirstname", String.class, Pageable.class); query.execute(new Object[] { "fake", PageRequest.of(1, 10) }); @@ -172,13 +212,13 @@ public void metadataShouldBeAddedToCountQueryCorrectly() { ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); verify(executableFind).as(Person.class); - verify(withQueryMock).matching(captor.capture()); + verify(withQueryMock, atLeast(1)).matching(captor.capture()); - assertThat(captor.getValue().getMeta().getComment(), is("comment")); + assertThat(captor.getValue().getMeta().getComment()).isEqualTo("comment"); } - @Test // DATAMONGO-957 - public void metadataShouldBeAddedToStringBasedQueryCorrectly() { + @Test // DATAMONGO-957, DATAMONGO-1783 + void metadataShouldBeAddedToStringBasedQueryCorrectly() { MongoQueryFake query = createQueryForMethod("findByAnnotatedQuery", String.class, Pageable.class); query.execute(new Object[] { "fake", PageRequest.of(0, 10) }); @@ -188,11 +228,11 @@ public void metadataShouldBeAddedToStringBasedQueryCorrectly() { verify(executableFind).as(Person.class); verify(withQueryMock).matching(captor.capture()); - assertThat(captor.getValue().getMeta().getComment(), is("comment")); + assertThat(captor.getValue().getMeta().getComment()).isEqualTo("comment"); } @Test // DATAMONGO-1057 - public void slicedExecutionShouldRetainNrOfElementsToSkip() { + void slicedExecutionShouldRetainNrOfElementsToSkip() { MongoQueryFake query = createQueryForMethod("findByLastname", String.class, Pageable.class); Pageable page1 = PageRequest.of(0, 10); @@ -206,12 +246,12 @@ public void slicedExecutionShouldRetainNrOfElementsToSkip() { verify(executableFind, times(2)).as(Person.class); verify(withQueryMock, times(2)).matching(captor.capture()); - assertThat(captor.getAllValues().get(0).getSkip(), is(0L)); - assertThat(captor.getAllValues().get(1).getSkip(), is(10L)); + 
assertThat(captor.getAllValues().get(0).getSkip()).isZero(); + assertThat(captor.getAllValues().get(1).getSkip()).isEqualTo(10); } @Test // DATAMONGO-1057 - public void slicedExecutionShouldIncrementLimitByOne() { + void slicedExecutionShouldIncrementLimitByOne() { MongoQueryFake query = createQueryForMethod("findByLastname", String.class, Pageable.class); Pageable page1 = PageRequest.of(0, 10); @@ -225,12 +265,12 @@ public void slicedExecutionShouldIncrementLimitByOne() { verify(executableFind, times(2)).as(Person.class); verify(withQueryMock, times(2)).matching(captor.capture()); - assertThat(captor.getAllValues().get(0).getLimit(), is(11)); - assertThat(captor.getAllValues().get(1).getLimit(), is(11)); + assertThat(captor.getAllValues().get(0).getLimit()).isEqualTo(11); + assertThat(captor.getAllValues().get(1).getLimit()).isEqualTo(11); } @Test // DATAMONGO-1057 - public void slicedExecutionShouldRetainSort() { + void slicedExecutionShouldRetainSort() { MongoQueryFake query = createQueryForMethod("findByLastname", String.class, Pageable.class); Pageable page1 = PageRequest.of(0, 10, Sort.Direction.DESC, "bar"); @@ -245,12 +285,12 @@ public void slicedExecutionShouldRetainSort() { verify(withQueryMock, times(2)).matching(captor.capture()); Document expectedSortObject = new Document().append("bar", -1); - assertThat(captor.getAllValues().get(0).getSortObject(), is(expectedSortObject)); - assertThat(captor.getAllValues().get(1).getSortObject(), is(expectedSortObject)); + assertThat(captor.getAllValues().get(0).getSortObject()).isEqualTo(expectedSortObject); + assertThat(captor.getAllValues().get(1).getSortObject()).isEqualTo(expectedSortObject); } @Test // DATAMONGO-1080 - public void doesNotTryToPostProcessQueryResultIntoWrapperType() { + void doesNotTryToPostProcessQueryResultIntoWrapperType() { Person reference = new Person(); @@ -258,11 +298,11 @@ public void doesNotTryToPostProcessQueryResultIntoWrapperType() { AbstractMongoQuery query = 
createQueryForMethod("findByLastname", String.class); - assertThat(query.execute(new Object[] { "lastname" }), is(reference)); + assertThat(query.execute(new Object[] { "lastname" })).isEqualTo(reference); } @Test // DATAMONGO-1865 - public void limitingSingleEntityQueryCallsFirst() { + void limitingSingleEntityQueryCallsFirst() { Person reference = new Person(); @@ -270,11 +310,11 @@ public void limitingSingleEntityQueryCallsFirst() { AbstractMongoQuery query = createQueryForMethod("findFirstByLastname", String.class).setLimitingQuery(true); - assertThat(query.execute(new Object[] { "lastname" }), is(reference)); + assertThat(query.execute(new Object[] { "lastname" })).isEqualTo(reference); } @Test // DATAMONGO-1872 - public void doesNotFixCollectionOnPreparation() { + void doesNotFixCollectionOnPreparation() { AbstractMongoQuery query = createQueryForMethod(DynamicallyMappedRepository.class, "findBy"); @@ -284,6 +324,229 @@ public void doesNotFixCollectionOnPreparation() { verify(executableFind).as(DynamicallyMapped.class); } + @Test // DATAMONGO-1979 + void usesAnnotatedSortWhenPresent() { + + createQueryForMethod("findByAge", Integer.class) // + .execute(new Object[] { 1000 }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getSortObject()).isEqualTo(new Document("age", 1)); + } + + @Test // DATAMONGO-1979 + void usesExplicitSortOverridesAnnotatedSortWhenPresent() { + + createQueryForMethod("findByAge", Integer.class, Sort.class) // + .execute(new Object[] { 1000, Sort.by(Direction.DESC, "age") }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getSortObject()).isEqualTo(new Document("age", -1)); + } + + @Test // DATAMONGO-1854 + void shouldApplyStaticAnnotatedCollation() { + + createQueryForMethod("findWithCollationUsingSpimpleStringValueByFirstName", 
String.class) // + .execute(new Object[] { "dalinar" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyStaticAnnotatedCollationAsDocument() { + + createQueryForMethod("findWithCollationUsingDocumentByFirstName", String.class) // + .execute(new Object[] { "dalinar" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationAsString() { + + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .execute(new Object[] { "dalinar", "en_US" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationAsDocument() { + + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .execute(new Object[] { "dalinar", new Document("locale", "en_US") }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationAsLocale() { + + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .execute(new Object[] { "dalinar", Locale.US }); + + 
ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldThrowExceptionOnNonParsableCollation() { + + assertThatIllegalArgumentException().isThrownBy(() -> { + + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .execute(new Object[] { "dalinar", 100 }); + }); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationIn() { + + createQueryForMethod("findWithCollationUsingPlaceholderInDocumentByFirstName", String.class, String.class) // + .execute(new Object[] { "dalinar", "en_US" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyCollationParameter() { + + Collation collation = Collation.of("en_US"); + createQueryForMethod("findWithCollationParameterByFirstName", String.class, Collation.class) // + .execute(new Object[] { "dalinar", collation }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void collationParameterShouldOverrideAnnotation() { + + Collation collation = Collation.of("de_AT"); + createQueryForMethod("findWithWithCollationParameterAndAnnotationByFirstName", String.class, Collation.class) // + .execute(new Object[] { "dalinar", collation }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 
+ void collationParameterShouldNotBeAppliedWhenNullOverrideAnnotation() { + + createQueryForMethod("findWithWithCollationParameterAndAnnotationByFirstName", String.class, Collation.class) // + .execute(new Object[] { "dalinar", null }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // GH-2107 + void updateExecutionCallsUpdateAllCorrectly() { + + when(terminatingUpdate.all()).thenReturn(updateResultMock); + + createQueryForMethod("findAndIncreaseVisitsByLastname", String.class, int.class) // + .execute(new Object[] { "dalinar", 100 }); + + ArgumentCaptor update = ArgumentCaptor.forClass(UpdateDefinition.class); + verify(updateWithQuery).apply(update.capture()); + verify(terminatingUpdate).all(); + + assertThat(update.getValue().getUpdateObject()).isEqualTo(Document.parse("{ '$inc' : { 'visits' : 100 } }")); + } + + @Test // GH-3230 + void findShouldApplyHint() { + + createQueryForMethod("findWithHintByFirstname", String.class).execute(new Object[] { "Jasna" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getHint()).isEqualTo("idx-fn"); + } + + @Test // GH-3230 + void updateShouldApplyHint() { + + when(terminatingUpdate.all()).thenReturn(updateResultMock); + + createQueryForMethod("findAndIncreaseVisitsByLastname", String.class, int.class) // + .execute(new Object[] { "dalinar", 100 }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(executableUpdate).matching(captor.capture()); + assertThat(captor.getValue().getHint()).isEqualTo("idx-ln"); + } + + @Test // GH-4397 + void limitShouldBeAppliedToQuery() { + + createQueryForMethod("findWithLimit", String.class, Limit.class).execute(new Object[] { "dalinar", Limit.of(42) }); + + 
ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + + assertThat(captor.getValue().getLimit()).isEqualTo(42); + } + + @Test // GH-4397 + void sortAndLimitShouldBeAppliedToQuery() { + + createQueryForMethod("findWithSortAndLimit", String.class, Sort.class, Limit.class) + .execute(new Object[] { "dalinar", Sort.by("fn"), Limit.of(42) }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + + assertThat(captor.getValue().getLimit()).isEqualTo(42); + assertThat(captor.getValue().getSortObject()).isEqualTo(new Document("fn", 1)); + } + + @Test // GH-2971 + void findShouldApplyReadPreference() { + + createQueryForMethod("findWithReadPreferenceByFirstname", String.class).execute(new Object[] { "Jasna" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + @Test // GH-2971 + void findShouldApplyReadPreferenceAtRepository() { + + createQueryForMethod("findWithLimit", String.class, Limit.class).execute(new Object[] { "dalinar", Limit.of(42) }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.primaryPreferred()); + } + + private MongoQueryFake createQueryForMethod(String methodName, Class... 
paramTypes) { return createQueryForMethod(Repo.class, methodName, paramTypes); } @@ -308,8 +571,12 @@ private static class MongoQueryFake extends AbstractMongoQuery { private boolean isDeleteQuery; private boolean isLimitingQuery; - public MongoQueryFake(MongoQueryMethod method, MongoOperations operations) { - super(method, operations); + MongoQueryFake(MongoQueryMethod method, MongoOperations operations) { + super(method, operations, + new ValueExpressionDelegate( + new QueryMethodValueEvaluationContextAccessor(new StandardEnvironment(), + Collections.emptySet()), + ValueExpressionParser.create(SpelExpressionParser::new))); } @Override @@ -337,18 +604,24 @@ protected boolean isLimiting() { return isLimitingQuery; } - public MongoQueryFake setDeleteQuery(boolean isDeleteQuery) { + MongoQueryFake setDeleteQuery(boolean isDeleteQuery) { this.isDeleteQuery = isDeleteQuery; return this; } - public MongoQueryFake setLimitingQuery(boolean limitingQuery) { + MongoQueryFake setLimitingQuery(boolean limitingQuery) { isLimitingQuery = limitingQuery; return this; } + + @Override + protected CodecRegistry getCodecRegistry() { + return MongoClientSettings.getDefaultCodecRegistry(); + } } + @ReadPreference(value = "primaryPreferred") private interface Repo extends MongoRepository { List deleteByLastname(String lastname); @@ -370,6 +643,43 @@ private interface Repo extends MongoRepository { Optional findByLastname(String lastname); Person findFirstByLastname(String lastname); + + @org.springframework.data.mongodb.repository.Query(sort = "{ age : 1 }") + List findByAge(Integer age); + + @org.springframework.data.mongodb.repository.Query(sort = "{ age : 1 }") + List findByAge(Integer age, Sort page); + + @org.springframework.data.mongodb.repository.Query(collation = "en_US") + List findWithCollationUsingSpimpleStringValueByFirstName(String firstname); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : 'en_US' }") + List 
findWithCollationUsingDocumentByFirstName(String firstname); + + @org.springframework.data.mongodb.repository.Query(collation = "?1") + List findWithCollationUsingPlaceholderByFirstName(String firstname, Object collation); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : '?1' }") + List findWithCollationUsingPlaceholderInDocumentByFirstName(String firstname, String collation); + + List findWithCollationParameterByFirstName(String firstname, Collation collation); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : 'en_US' }") + List findWithWithCollationParameterAndAnnotationByFirstName(String firstname, Collation collation); + + @Hint("idx-ln") + @Update("{ '$inc' : { 'visits' : ?1 } }") + void findAndIncreaseVisitsByLastname(String lastname, int value); + + @Hint("idx-fn") + void findWithHintByFirstname(String firstname); + + List findWithLimit(String firstname, Limit limit); + + List findWithSortAndLimit(String firstname, Sort sort, Limit limit); + + @ReadPreference(value = "secondaryPreferred") + List findWithReadPreferenceByFirstname(String firstname); } // DATAMONGO-1872 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQueryUnitTests.java new file mode 100644 index 0000000000..1f3602fc71 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractReactiveMongoQueryUnitTests.java @@ -0,0 +1,395 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.lang.reflect.Method; +import java.util.Collections; +import java.util.List; +import java.util.Locale; + +import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.context.support.StaticApplicationContext; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mongodb.core.Person; +import org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithQuery; +import org.springframework.data.mongodb.core.ReactiveFindOperation.ReactiveFind; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveUpdateOperation.ReactiveUpdate; +import org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingUpdate; +import org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithQuery; +import 
org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.Hint; +import org.springframework.data.mongodb.repository.ReactiveMongoRepository; +import org.springframework.data.mongodb.repository.ReadPreference; +import org.springframework.data.mongodb.repository.Update; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.expression.spel.standard.SpelExpressionParser; + +import com.mongodb.MongoClientSettings; +import com.mongodb.client.result.UpdateResult; + +/** + * Unit tests for {@link AbstractReactiveMongoQuery}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Jorge Rodríguez + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class AbstractReactiveMongoQueryUnitTests { + + @Mock ReactiveMongoOperations mongoOperationsMock; + @Mock BasicMongoPersistentEntity persitentEntityMock; + @Mock MongoMappingContext mappingContextMock; + + @Mock ReactiveFind executableFind; + @Mock FindWithQuery withQueryMock; + @Mock ReactiveUpdate executableUpdate; + @Mock UpdateWithQuery updateWithQuery; + @Mock TerminatingUpdate terminatingUpdate; + + @BeforeEach + void setUp() { + + doReturn("persons").when(persitentEntityMock).getCollection(); + doReturn(persitentEntityMock).when(mappingContextMock).getPersistentEntity(Mockito.any(Class.class)); + doReturn(persitentEntityMock).when(mappingContextMock).getRequiredPersistentEntity(Mockito.any(Class.class)); + doReturn(Person.class).when(persitentEntityMock).getType(); + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContextMock); + converter.afterPropertiesSet(); + + doReturn(converter).when(mongoOperationsMock).getConverter(); + + doReturn(executableFind).when(mongoOperationsMock).query(any()); + doReturn(withQueryMock).when(executableFind).as(any()); + doReturn(withQueryMock).when(withQueryMock).matching(any(Query.class)); + doReturn(Flux.empty()).when(withQueryMock).all(); + doReturn(Mono.empty()).when(withQueryMock).first(); + doReturn(Mono.empty()).when(withQueryMock).one(); + + doReturn(executableUpdate).when(mongoOperationsMock).update(any()); + doReturn(executableUpdate).when(executableUpdate).inCollection(anyString()); + doReturn(updateWithQuery).when(executableUpdate).matching(any(Query.class)); + doReturn(terminatingUpdate).when(updateWithQuery).apply(any(UpdateDefinition.class)); + } + + @Test // DATAMONGO-1854 + void shouldApplyStaticAnnotatedCollation() { + + 
createQueryForMethod("findWithCollationUsingSpimpleStringValueByFirstName", String.class) // + .executeBlocking(new Object[] { "dalinar" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyStaticAnnotatedCollationAsDocument() { + + createQueryForMethod("findWithCollationUsingDocumentByFirstName", String.class) // + .executeBlocking(new Object[] { "dalinar" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationAsString() { + + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .executeBlocking(new Object[] { "dalinar", "en_US" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationAsDocument() { + + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .executeBlocking(new Object[] { "dalinar", new Document("locale", "en_US") }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationAsLocale() { + + 
createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .executeBlocking(new Object[] { "dalinar", Locale.US }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldThrowExceptionOnNonParsableCollation() { + + assertThatIllegalArgumentException().isThrownBy(() -> { + createQueryForMethod("findWithCollationUsingPlaceholderByFirstName", String.class, Object.class) // + .executeBlocking(new Object[] { "dalinar", 100 }); + }); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationIn() { + + createQueryForMethod("findWithCollationUsingPlaceholderInDocumentByFirstName", String.class, String.class) // + .executeBlocking(new Object[] { "dalinar", "en_US" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyDynamicAnnotatedCollationWithMultiplePlaceholders() { + + createQueryForMethod("findWithCollationUsingPlaceholdersInDocumentByFirstName", String.class, String.class, + int.class) // + .executeBlocking(new Object[] { "dalinar", "en_US", 2 }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").strength(2).toDocument()); + } + + @Test // DATAMONGO-1854 + void shouldApplyCollationParameter() { + + Collation collation = Collation.of("en_US"); + createQueryForMethod("findWithCollationParameterByFirstName", String.class, Collation.class) // + .executeBlocking(new Object[] { 
"dalinar", collation }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void collationParameterShouldOverrideAnnotation() { + + Collation collation = Collation.of("de_AT"); + createQueryForMethod("findWithWithCollationParameterAndAnnotationByFirstName", String.class, Collation.class) // + .executeBlocking(new Object[] { "dalinar", collation }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void collationParameterShouldNotBeAppliedWhenNullOverrideAnnotation() { + + createQueryForMethod("findWithWithCollationParameterAndAnnotationByFirstName", String.class, Collation.class) // + .executeBlocking(new Object[] { "dalinar", null }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getCollation().map(Collation::toDocument)) + .contains(Collation.of("en_US").toDocument()); + } + + @Test // GH-3230 + void findShouldApplyHint() { + + createQueryForMethod("findWithHintByFirstname", String.class).executeBlocking(new Object[] { "Jasna" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getHint()).isEqualTo("idx-fn"); + } + + @Test // GH-3230 + void updateShouldApplyHint() { + + when(terminatingUpdate.all()).thenReturn(Mono.just(mock(UpdateResult.class))); + + createQueryForMethod("findAndIncreaseVisitsByLastname", String.class, int.class) // + .executeBlocking(new Object[] { "dalinar", 100 }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(executableUpdate).matching(captor.capture()); + 
assertThat(captor.getValue().getHint()).isEqualTo("idx-ln"); + } + + @Test // GH-2971 + void findShouldApplyReadPreference() { + + createQueryForMethod("findWithReadPreferenceByFirstname", String.class).executeBlocking(new Object[] { "Jasna" }); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(withQueryMock).matching(captor.capture()); + assertThat(captor.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + private ReactiveMongoQueryFake createQueryForMethod(String methodName, Class... paramTypes) { + return createQueryForMethod(Repo.class, methodName, paramTypes); + } + + private ReactiveMongoQueryFake createQueryForMethod(Class repository, String methodName, Class... paramTypes) { + + try { + + Method method = repository.getMethod(methodName, paramTypes); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + ReactiveMongoQueryMethod queryMethod = new ReactiveMongoQueryMethod(method, + new DefaultRepositoryMetadata(repository), factory, mappingContextMock); + + return new ReactiveMongoQueryFake(queryMethod, mongoOperationsMock); + } catch (Exception e) { + throw new IllegalArgumentException(e.getMessage(), e); + } + } + + private static class ReactiveMongoQueryFake extends AbstractReactiveMongoQuery { + + private boolean isDeleteQuery; + private boolean isLimitingQuery; + + ReactiveMongoQueryFake(ReactiveMongoQueryMethod method, ReactiveMongoOperations operations) { + super(method, operations, + new ValueExpressionDelegate( + new QueryMethodValueEvaluationContextAccessor( + new StandardEnvironment(), + Collections.emptySet()), + ValueExpressionParser.create(SpelExpressionParser::new))); + } + + @Override + protected Mono createQuery(ConvertingParameterAccessor accessor) { + return Mono.just(new BasicQuery("{'foo':'bar'}")); + } + + Object executeBlocking(Object[] parameters) { + return Flux.from(super.execute(parameters)).collectList().block(); + } + + @Override + protected 
boolean isCountQuery() { + return false; + } + + @Override + protected boolean isExistsQuery() { + return false; + } + + @Override + protected boolean isDeleteQuery() { + return isDeleteQuery; + } + + @Override + protected boolean isLimiting() { + return isLimitingQuery; + } + + public ReactiveMongoQueryFake setDeleteQuery(boolean isDeleteQuery) { + this.isDeleteQuery = isDeleteQuery; + return this; + } + + public ReactiveMongoQueryFake setLimitingQuery(boolean limitingQuery) { + + isLimitingQuery = limitingQuery; + return this; + } + + @Override + protected Mono getCodecRegistry() { + return Mono.just(MongoClientSettings.getDefaultCodecRegistry()); + } + } + + private interface Repo extends ReactiveMongoRepository { + + @org.springframework.data.mongodb.repository.Query(collation = "en_US") + List findWithCollationUsingSpimpleStringValueByFirstName(String firstname); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : 'en_US' }") + List findWithCollationUsingDocumentByFirstName(String firstname); + + @org.springframework.data.mongodb.repository.Query(collation = "?1") + List findWithCollationUsingPlaceholderByFirstName(String firstname, Object collation); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : '?1' }") + List findWithCollationUsingPlaceholderInDocumentByFirstName(String firstname, String collation); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : '?1', 'strength' : ?#{[2]}}") + List findWithCollationUsingPlaceholdersInDocumentByFirstName(String firstname, String collation, + int strength); + + List findWithCollationParameterByFirstName(String firstname, Collation collation); + + @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : 'en_US' }") + List findWithWithCollationParameterAndAnnotationByFirstName(String firstname, Collation collation); + + @Hint("idx-ln") + @Update("{ '$inc' : { 'visits' : ?1 } }") + void 
findAndIncreaseVisitsByLastname(String lastname, int value); + + @Hint("idx-fn") + void findWithHintByFirstname(String firstname); + + @ReadPreference(value = "secondaryPreferred") + Flux findWithReadPreferenceByFirstname(String firstname); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessorUnitTests.java index ef348541c9..71b2ff7581 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,20 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import java.util.Arrays; import java.util.Collection; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.data.mongodb.MongoDbFactory; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; @@ -45,36 +46,37 @@ * @author Oliver Gierke * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) -public class ConvertingParameterAccessorUnitTests { +@ExtendWith(MockitoExtension.class) +class ConvertingParameterAccessorUnitTests { - @Mock MongoDbFactory factory; + @Mock MongoDatabaseFactory factory; @Mock MongoParameterAccessor accessor; - MongoMappingContext context; - MappingMongoConverter converter; - DbRefResolver resolver; + private MongoMappingContext context; + private MappingMongoConverter converter; + private DbRefResolver resolver; - @Before - public void setUp() { + @BeforeEach + void setUp() { + 
when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); this.context = new MongoMappingContext(); this.resolver = new DefaultDbRefResolver(factory); this.converter = new MappingMongoConverter(resolver, context); } - @Test(expected = IllegalArgumentException.class) - public void rejectsNullDbRefResolver() { - new MappingMongoConverter((DbRefResolver) null, context); + @Test + void rejectsNullDbRefResolver() { + assertThatIllegalArgumentException().isThrownBy(() -> new MappingMongoConverter((DbRefResolver) null, context)); } - @Test(expected = IllegalArgumentException.class) - public void rejectsNullContext() { - new MappingMongoConverter(resolver, null); + @Test + void rejectsNullContext() { + assertThatIllegalArgumentException().isThrownBy(() -> new MappingMongoConverter(resolver, null)); } @Test - public void convertsCollectionUponAccess() { + void convertsCollectionUponAccess() { when(accessor.getBindableValue(0)).thenReturn(Arrays.asList("Foo")); @@ -84,41 +86,41 @@ public void convertsCollectionUponAccess() { BasicDBList reference = new BasicDBList(); reference.add("Foo"); - assertThat(result, is((Object) reference)); + assertThat(result).isEqualTo((Object) reference); } @Test // DATAMONGO-505 - public void convertsAssociationsToDBRef() { + void convertsAssociationsToDBRef() { Property property = new Property(); property.id = 5L; Object result = setupAndConvert(property); - assertThat(result, is(instanceOf(com.mongodb.DBRef.class))); + assertThat(result).isInstanceOf(com.mongodb.DBRef.class); com.mongodb.DBRef dbRef = (com.mongodb.DBRef) result; - assertThat(dbRef.getCollectionName(), is("property")); - assertThat(dbRef.getId(), is((Object) 5L)); + assertThat(dbRef.getCollectionName()).isEqualTo("property"); + assertThat(dbRef.getId()).isEqualTo((Object) 5L); } @Test // DATAMONGO-505 - public void convertsAssociationsToDBRefForCollections() { + void convertsAssociationsToDBRefForCollections() { Property property = new Property(); 
property.id = 5L; Object result = setupAndConvert(Arrays.asList(property)); - assertThat(result, is(instanceOf(Collection.class))); + assertThat(result).isInstanceOf(Collection.class); Collection collection = (Collection) result; - assertThat(collection, hasSize(1)); + assertThat(collection).hasSize(1); Object element = collection.iterator().next(); - assertThat(element, is(instanceOf(com.mongodb.DBRef.class))); + assertThat(element).isInstanceOf(com.mongodb.DBRef.class); com.mongodb.DBRef dbRef = (com.mongodb.DBRef) element; - assertThat(dbRef.getCollectionName(), is("property")); - assertThat(dbRef.getId(), is((Object) 5L)); + assertThat(dbRef.getCollectionName()).isEqualTo("property"); + assertThat(dbRef.getId()).isEqualTo((Object) 5L); } private Object setupAndConvert(Object... parameters) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MappingMongoEntityInformationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MappingMongoEntityInformationUnitTests.java index 834ad93142..19ac837513 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MappingMongoEntityInformationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MappingMongoEntityInformationUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,47 +15,76 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.domain.Persistable; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.repository.Person; import org.springframework.data.mongodb.repository.support.MappingMongoEntityInformation; +import org.springframework.data.repository.core.EntityInformation; /** * Unit tests for {@link MappingMongoEntityInformation}. 
* * @author Oliver Gierke */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class MappingMongoEntityInformationUnitTests { @Mock MongoPersistentEntity info; - - @Before - public void setUp() { - - when(info.getType()).thenReturn(Person.class); - when(info.getCollection()).thenReturn("Person"); - } + @Mock MongoPersistentEntity persistableImplementingEntityTypeInfo; @Test // DATAMONGO-248 public void usesEntityCollectionIfNoCustomOneGiven() { + when(info.getCollection()).thenReturn("Person"); + MongoEntityInformation information = new MappingMongoEntityInformation(info); - assertThat(information.getCollectionName(), is("Person")); + assertThat(information.getCollectionName()).isEqualTo("Person"); } @Test // DATAMONGO-248 public void usesCustomCollectionIfGiven() { MongoEntityInformation information = new MappingMongoEntityInformation(info, "foobar"); - assertThat(information.getCollectionName(), is("foobar")); + assertThat(information.getCollectionName()).isEqualTo("foobar"); + } + + @Test // DATAMONGO-1590 + public void considersPersistableIsNew() { + + EntityInformation information = new MappingMongoEntityInformation<>( + persistableImplementingEntityTypeInfo); + + assertThat(information.isNew(new TypeImplementingPersistable(100L, false))).isFalse(); + } + + static final class TypeImplementingPersistable implements Persistable { + + private final Long id; + private final boolean isNew; + + public TypeImplementingPersistable(Long id, boolean isNew) { + this.id = id; + this.isNew = isNew; + } + + public Long getId() { + return this.id; + } + + public boolean isNew() { + return this.isNew; + } + + public String toString() { + return "MappingMongoEntityInformationUnitTests.TypeImplementingPersistable(id=" + this.getId() + ", isNew=" + + this.isNew() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessorUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessorUnitTests.java index 2e8c6a1624..1c856394d8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,23 +15,23 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.assertj.core.api.Assertions.assertThat; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.assertThat; +import static org.assertj.core.api.Assertions.*; import java.lang.reflect.Method; import java.util.List; import org.bson.Document; -import org.hamcrest.core.IsNull; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.springframework.data.domain.Range; import org.springframework.data.domain.Range.Bound; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.Update; +import 
org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.repository.Person; import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; @@ -82,7 +82,7 @@ public void shouldReturnAsFullTextStringWhenNoneDefinedForMethod() throws NoSuch MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, new Object[] { new Point(10, 20), DISTANCE }); - assertThat(accessor.getFullText(), IsNull.nullValue()); + assertThat(accessor.getFullText()).isNull(); } @Test // DATAMONGO-973 @@ -93,8 +93,8 @@ public void shouldProperlyConvertTextCriteria() throws NoSuchMethodException, Se MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, new Object[] { "spring", TextCriteria.forDefaultLanguage().matching("data") }); - assertThat(accessor.getFullText().getCriteriaObject().toJson(), - equalTo(Document.parse("{ \"$text\" : { \"$search\" : \"data\"}}").toJson())); + assertThat(accessor.getFullText().getCriteriaObject().toJson()) + .isEqualTo(Document.parse("{ \"$text\" : { \"$search\" : \"data\"}}").toJson()); } @Test // DATAMONGO-1110 @@ -111,8 +111,46 @@ public void shouldDetectMinAndMaxDistance() throws NoSuchMethodException, Securi Range range = accessor.getDistanceRange(); - assertThat(range.getLowerBound(), is(Bound.inclusive(min))); - assertThat(range.getUpperBound(), is(Bound.inclusive(max))); + assertThat(range.getLowerBound()).isEqualTo(Bound.inclusive(min)); + assertThat(range.getUpperBound()).isEqualTo(Bound.inclusive(max)); + } + + @Test // DATAMONGO-1854 + public void shouldDetectCollation() throws NoSuchMethodException, SecurityException { + + Method method = PersonRepository.class.getMethod("findByFirstname", String.class, Collation.class); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + + Collation collation = Collation.of("en_US"); + 
MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, + new Object[] { "dalinar", collation }); + + assertThat(accessor.getCollation()).isEqualTo(collation); + } + + @Test // GH-2107 + public void shouldReturnUpdateIfPresent() throws NoSuchMethodException, SecurityException { + + Method method = PersonRepository.class.getMethod("findAndModifyByFirstname", String.class, UpdateDefinition.class); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + + Update update = new Update(); + MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, + new Object[] { "dalinar", update }); + + assertThat(accessor.getUpdate()).isSameAs(update); + } + + @Test // GH-2107 + public void shouldReturnNullIfNoUpdatePresent() throws NoSuchMethodException, SecurityException { + + Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + + MongoParameterAccessor accessor = new MongoParametersParameterAccessor(queryMethod, + new Object[] { new Point(0,0) }); + + assertThat(accessor.getUpdate()).isNull(); } interface PersonRepository extends Repository { @@ -124,5 +162,10 @@ interface PersonRepository extends Repository { List findByLocationNear(Point point, Range distances); List findByFirstname(String firstname, TextCriteria fullText); + + List findByFirstname(String firstname, Collation collation); + + List findAndModifyByFirstname(String firstname, UpdateDefinition update); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersUnitTests.java index dd4061c31d..93674e23fc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoParametersUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,24 +15,28 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.lang.reflect.Method; import java.util.List; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + +import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Range; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.repository.Near; import org.springframework.data.mongodb.repository.Person; import org.springframework.data.repository.query.Parameter; +import org.springframework.data.repository.query.ParametersSource; /** * Unit tests for {@link MongoParameters}. 
@@ -40,101 +44,144 @@ * @author Oliver Gierke * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) -public class MongoParametersUnitTests { +@ExtendWith(MockitoExtension.class) +class MongoParametersUnitTests { @Mock MongoQueryMethod queryMethod; @Test - public void discoversDistanceParameter() throws NoSuchMethodException, SecurityException { + void discoversDistanceParameter() throws NoSuchMethodException, SecurityException { + Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Distance.class); - MongoParameters parameters = new MongoParameters(method, false); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); - assertThat(parameters.getNumberOfParameters(), is(2)); - assertThat(parameters.getMaxDistanceIndex(), is(1)); - assertThat(parameters.getBindableParameters().getNumberOfParameters(), is(1)); + assertThat(parameters.getNumberOfParameters()).isEqualTo(2); + assertThat(parameters.getMaxDistanceIndex()).isEqualTo(1); + assertThat(parameters.getBindableParameters().getNumberOfParameters()).isOne(); Parameter parameter = parameters.getParameter(1); - assertThat(parameter.isSpecialParameter(), is(true)); - assertThat(parameter.isBindable(), is(false)); + assertThat(parameter.isSpecialParameter()).isTrue(); + assertThat(parameter.isBindable()).isFalse(); } @Test - public void doesNotConsiderPointAsNearForSimpleQuery() throws Exception { + void doesNotConsiderPointAsNearForSimpleQuery() throws Exception { Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Distance.class); - MongoParameters parameters = new MongoParameters(method, false); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); - assertThat(parameters.getNearIndex(), is(-1)); + assertThat(parameters.getNearIndex()).isEqualTo(-1); } - @Test(expected = IllegalStateException.class) - public void rejectsMultiplePointsForGeoNearMethod() throws Exception { + 
@Test + void rejectsMultiplePointsForGeoNearMethod() throws Exception { + Method method = PersonRepository.class.getMethod("findByLocationNearAndOtherLocation", Point.class, Point.class); - new MongoParameters(method, true); + + assertThatIllegalStateException().isThrownBy(() -> new MongoParameters(ParametersSource.of(method), true)); } - @Test(expected = IllegalStateException.class) - public void rejectsMultipleDoubleArraysForGeoNearMethod() throws Exception { + @Test + void rejectsMultipleDoubleArraysForGeoNearMethod() throws Exception { + Method method = PersonRepository.class.getMethod("invalidDoubleArrays", double[].class, double[].class); - new MongoParameters(method, true); + + assertThatIllegalStateException().isThrownBy(() -> new MongoParameters(ParametersSource.of(method), true)); } @Test - public void doesNotRejectMultiplePointsForSimpleQueryMethod() throws Exception { + void doesNotRejectMultiplePointsForSimpleQueryMethod() throws Exception { + Method method = PersonRepository.class.getMethod("someOtherMethod", Point.class, Point.class); - new MongoParameters(method, false); + new MongoParameters(ParametersSource.of(method), false); } @Test - public void findsAnnotatedPointForGeoNearQuery() throws Exception { + void findsAnnotatedPointForGeoNearQuery() throws Exception { + Method method = PersonRepository.class.getMethod("findByOtherLocationAndLocationNear", Point.class, Point.class); - MongoParameters parameters = new MongoParameters(method, true); - assertThat(parameters.getNearIndex(), is(1)); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), true); + assertThat(parameters.getNearIndex()).isOne(); } @Test - public void findsAnnotatedDoubleArrayForGeoNearQuery() throws Exception { + void findsAnnotatedDoubleArrayForGeoNearQuery() throws Exception { + Method method = PersonRepository.class.getMethod("validDoubleArrays", double[].class, double[].class); - MongoParameters parameters = new MongoParameters(method, true); - 
assertThat(parameters.getNearIndex(), is(1)); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), true); + assertThat(parameters.getNearIndex()).isOne(); } @Test // DATAMONGO-973 - public void shouldFindTextCriteriaAtItsIndex() throws SecurityException, NoSuchMethodException { + void shouldFindTextCriteriaAtItsIndex() throws SecurityException, NoSuchMethodException { Method method = PersonRepository.class.getMethod("findByNameAndText", String.class, TextCriteria.class); - MongoParameters parameters = new MongoParameters(method, false); - assertThat(parameters.getFullTextParameterIndex(), is(1)); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); + assertThat(parameters.getFullTextParameterIndex()).isOne(); } @Test // DATAMONGO-973 - public void shouldTreatTextCriteriaParameterAsSpecialParameter() throws SecurityException, NoSuchMethodException { + void shouldTreatTextCriteriaParameterAsSpecialParameter() throws SecurityException, NoSuchMethodException { Method method = PersonRepository.class.getMethod("findByNameAndText", String.class, TextCriteria.class); - MongoParameters parameters = new MongoParameters(method, false); - assertThat(parameters.getParameter(parameters.getFullTextParameterIndex()).isSpecialParameter(), is(true)); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); + assertThat(parameters.getParameter(parameters.getFullTextParameterIndex()).isSpecialParameter()).isTrue(); } @Test // DATAMONGO-1110 - public void shouldFindMinAndMaxDistanceParameters() throws NoSuchMethodException, SecurityException { + void shouldFindMinAndMaxDistanceParameters() throws NoSuchMethodException, SecurityException { Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Range.class); - MongoParameters parameters = new MongoParameters(method, false); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); - 
assertThat(parameters.getRangeIndex(), is(1)); - assertThat(parameters.getMaxDistanceIndex(), is(-1)); + assertThat(parameters.getRangeIndex()).isOne(); + assertThat(parameters.getMaxDistanceIndex()).isEqualTo(-1); } @Test // DATAMONGO-1110 - public void shouldNotHaveMinDistanceIfOnlyOneDistanceParameterPresent() throws NoSuchMethodException, - SecurityException { + void shouldNotHaveMinDistanceIfOnlyOneDistanceParameterPresent() throws NoSuchMethodException, SecurityException { + + Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Distance.class); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); + + assertThat(parameters.getRangeIndex()).isEqualTo(-1); + assertThat(parameters.getMaxDistanceIndex()).isOne(); + } + + @Test // DATAMONGO-1854 + void shouldReturnMinusOneIfCollationParameterDoesNotExist() throws NoSuchMethodException, SecurityException { Method method = PersonRepository.class.getMethod("findByLocationNear", Point.class, Distance.class); - MongoParameters parameters = new MongoParameters(method, false); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); - assertThat(parameters.getRangeIndex(), is(-1)); - assertThat(parameters.getMaxDistanceIndex(), is(1)); + assertThat(parameters.getCollationParameterIndex()).isEqualTo(-1); + } + + @Test // DATAMONGO-1854 + void shouldReturnIndexOfCollationParameterIfExists() throws NoSuchMethodException, SecurityException { + + Method method = PersonRepository.class.getMethod("findByText", String.class, Collation.class); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); + + assertThat(parameters.getCollationParameterIndex()).isOne(); + } + + @Test // GH-2107 + void shouldReturnIndexUpdateIfExists() throws NoSuchMethodException, SecurityException { + + Method method = PersonRepository.class.getMethod("findAndModifyByFirstname", String.class, UpdateDefinition.class, 
Pageable.class); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); + + assertThat(parameters.getUpdateIndex()).isOne(); + } + + @Test // GH-2107 + void shouldReturnInvalidIndexIfUpdateDoesNotExist() throws NoSuchMethodException, SecurityException { + + Method method = PersonRepository.class.getMethod("someOtherMethod", Point.class, Point.class); + MongoParameters parameters = new MongoParameters(ParametersSource.of(method), false); + + assertThat(parameters.getUpdateIndex()).isEqualTo(-1); } interface PersonRepository { @@ -154,5 +201,9 @@ interface PersonRepository { List findByNameAndText(String name, TextCriteria text); List findByLocationNear(Point point, Range range); + + List findByText(String text, Collation collation); + + List findAndModifyByFirstname(String firstname, UpdateDefinition update, Pageable page); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryCreatorUnitTests.java index ecf4398f6b..609e0a0018 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryCreatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryCreatorUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,33 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; import static org.springframework.data.mongodb.repository.query.StubParameterAccessor.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.lang.reflect.Method; import java.util.List; +import java.util.regex.Pattern; +import org.bson.BsonRegularExpression; +import org.bson.Document; import org.bson.types.ObjectId; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; import org.springframework.data.geo.Polygon; import org.springframework.data.geo.Shape; import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.Person; import org.springframework.data.mongodb.core.Venue; -import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; 
+import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.geo.GeoJsonLineString; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; @@ -60,8 +58,6 @@ import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; import org.springframework.data.repository.query.parser.PartTree; -import org.bson.Document; - /** * Unit test for {@link MongoQueryCreator}. * @@ -69,63 +65,59 @@ * @author Thomas Darimont * @author Christoph Strobl */ -public class MongoQueryCreatorUnitTests { - - MappingContext, MongoPersistentProperty> context; - MongoConverter converter; +class MongoQueryCreatorUnitTests { - @Rule public ExpectedException expection = ExpectedException.none(); + private MappingContext, MongoPersistentProperty> context; + private MongoConverter converter; - @Before - public void setUp() throws SecurityException, NoSuchMethodException { + @BeforeEach + void beforeEach() { context = new MongoMappingContext(); - - DbRefResolver resolver = new DefaultDbRefResolver(mock(MongoDbFactory.class)); - converter = new MappingMongoConverter(resolver, context); + converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); } @Test - public void createsQueryCorrectly() throws Exception { + void createsQueryCorrectly() { PartTree tree = new PartTree("findByFirstName", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "Oliver"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").is("Oliver")))); + assertThat(query).isEqualTo(query(where("firstName").is("Oliver"))); } @Test // DATAMONGO-469 - public void createsAndQueryCorrectly() { + void createsAndQueryCorrectly() { Person person = new Person(); MongoQueryCreator creator = new MongoQueryCreator(new PartTree("findByFirstNameAndFriend", Person.class), getAccessor(converter, 
"Oliver", person), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").is("Oliver").and("friend").is(person)))); + assertThat(query).isEqualTo(query(where("firstName").is("Oliver").and("friend").is(person))); } @Test - public void createsNotNullQueryCorrectly() { + void createsNotNullQueryCorrectly() { PartTree tree = new PartTree("findByFirstNameNotNull", Person.class); Query query = new MongoQueryCreator(tree, getAccessor(converter), context).createQuery(); - assertThat(query, is(new Query(Criteria.where("firstName").ne(null)))); + assertThat(query).isEqualTo(new Query(Criteria.where("firstName").ne(null))); } @Test - public void createsIsNullQueryCorrectly() { + void createsIsNullQueryCorrectly() { PartTree tree = new PartTree("findByFirstNameIsNull", Person.class); Query query = new MongoQueryCreator(tree, getAccessor(converter), context).createQuery(); - assertThat(query, is(new Query(Criteria.where("firstName").is(null)))); + assertThat(query).isEqualTo(new Query(Criteria.where("firstName").is(null))); } @Test - public void bindsMetricDistanceParameterToNearSphereCorrectly() throws Exception { + void bindsMetricDistanceParameterToNearSphereCorrectly() throws Exception { Point point = new Point(10, 20); Distance distance = new Distance(2.5, Metrics.KILOMETERS); @@ -136,7 +128,7 @@ public void bindsMetricDistanceParameterToNearSphereCorrectly() throws Exception } @Test - public void bindsDistanceParameterToNearCorrectly() throws Exception { + void bindsDistanceParameterToNearCorrectly() throws Exception { Point point = new Point(10, 20); Distance distance = new Distance(2.5); @@ -147,73 +139,73 @@ public void bindsDistanceParameterToNearCorrectly() throws Exception { } @Test - public void createsLessThanEqualQueryCorrectly() throws Exception { + void createsLessThanEqualQueryCorrectly() { PartTree tree = new PartTree("findByAgeLessThanEqual", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, 
getAccessor(converter, 18), context); Query reference = query(where("age").lte(18)); - assertThat(creator.createQuery(), is(reference)); + assertThat(creator.createQuery()).isEqualTo(reference); } @Test - public void createsGreaterThanEqualQueryCorrectly() throws Exception { + void createsGreaterThanEqualQueryCorrectly() { PartTree tree = new PartTree("findByAgeGreaterThanEqual", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, 18), context); Query reference = query(where("age").gte(18)); - assertThat(creator.createQuery(), is(reference)); + assertThat(creator.createQuery()).isEqualTo(reference); } @Test // DATAMONGO-338 - public void createsExistsClauseCorrectly() { + void createsExistsClauseCorrectly() { PartTree tree = new PartTree("findByAgeExists", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, true), context); Query query = query(where("age").exists(true)); - assertThat(creator.createQuery(), is(query)); + assertThat(creator.createQuery()).isEqualTo(query); } @Test // DATAMONGO-338 - public void createsRegexClauseCorrectly() { + void createsRegexClauseCorrectly() { PartTree tree = new PartTree("findByFirstNameRegex", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, ".*"), context); Query query = query(where("firstName").regex(".*")); - assertThat(creator.createQuery(), is(query)); + assertThat(creator.createQuery()).isEqualTo(query); } @Test // DATAMONGO-338 - public void createsTrueClauseCorrectly() { + void createsTrueClauseCorrectly() { PartTree tree = new PartTree("findByActiveTrue", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter), context); Query query = query(where("active").is(true)); - assertThat(creator.createQuery(), is(query)); + assertThat(creator.createQuery()).isEqualTo(query); } @Test // DATAMONGO-338 - public void createsFalseClauseCorrectly() { + void 
createsFalseClauseCorrectly() { PartTree tree = new PartTree("findByActiveFalse", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter), context); Query query = query(where("active").is(false)); - assertThat(creator.createQuery(), is(query)); + assertThat(creator.createQuery()).isEqualTo(query); } @Test // DATAMONGO-413 - public void createsOrQueryCorrectly() { + void createsOrQueryCorrectly() { PartTree tree = new PartTree("findByFirstNameOrAge", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "Dave", 42), context); Query query = creator.createQuery(); - assertThat(query, is(query(new Criteria().orOperator(where("firstName").is("Dave"), where("age").is(42))))); + assertThat(query).isEqualTo(query(new Criteria().orOperator(where("firstName").is("Dave"), where("age").is(42)))); } @Test // DATAMONGO-347 - public void createsQueryReferencingADBRefCorrectly() { + void createsQueryReferencingADBRefCorrectly() { User user = new User(); user.id = new ObjectId(); @@ -222,37 +214,37 @@ public void createsQueryReferencingADBRefCorrectly() { MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, user), context); Document queryObject = creator.createQuery().getQueryObject(); - assertThat(queryObject.get("creator"), is((Object) user)); + assertThat(queryObject.get("creator")).isEqualTo(user); } @Test // DATAMONGO-418 - public void createsQueryWithStartingWithPredicateCorrectly() { + void createsQueryWithStartingWithPredicateCorrectly() { PartTree tree = new PartTree("findByUsernameStartingWith", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "Matt"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("username").regex("^Matt")))); + assertThat(query).isEqualTo(query(where("username").regex("^Matt"))); } @Test // DATAMONGO-418 - public void createsQueryWithEndingWithPredicateCorrectly() { + void 
createsQueryWithEndingWithPredicateCorrectly() { PartTree tree = new PartTree("findByUsernameEndingWith", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "ews"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("username").regex("ews$")))); + assertThat(query).isEqualTo(query(where("username").regex("ews$"))); } @Test // DATAMONGO-418 - public void createsQueryWithContainingPredicateCorrectly() { + void createsQueryWithContainingPredicateCorrectly() { PartTree tree = new PartTree("findByUsernameContaining", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "thew"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("username").regex(".*thew.*")))); + assertThat(query).isEqualTo(query(where("username").regex(".*thew.*"))); } private void assertBindsDistanceToQuery(Point point, Distance distance, Query reference) throws Exception { @@ -268,139 +260,149 @@ private void assertBindsDistanceToQuery(Point point, Distance distance, Query re Query query = new MongoQueryCreator(tree, new ConvertingParameterAccessor(converter, accessor), context) .createQuery(); - assertThat(query, is(query)); + assertThat(query).isEqualTo(query); } @Test // DATAMONGO-770 - public void createsQueryWithFindByIgnoreCaseCorrectly() { + void createsQueryWithFindByIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByfirstNameIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").regex("^dave$", "i")))); + assertThat(query).isEqualTo(query(where("firstName").regex("^dave$", "i"))); + } + + @Test // GH-4404 + void createsQueryWithFindByInClauseHavingIgnoreCaseCorrectly() { + + PartTree tree = new PartTree("findAllByFirstNameInIgnoreCase", Person.class); + MongoQueryCreator creator = new 
MongoQueryCreator(tree, getAccessor(converter, List.of("da've", "carter")), context); + + Query query = creator.createQuery(); + assertThat(query).isEqualTo(query(where("firstName") + .in(List.of(new BsonRegularExpression("^\\Qda've\\E$", "i"), new BsonRegularExpression("^carter$", "i"))))); } @Test // DATAMONGO-770 - public void createsQueryWithFindByNotIgnoreCaseCorrectly() { + void createsQueryWithFindByNotIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByFirstNameNotIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query.toString(), is(query(where("firstName").not().regex("^dave$", "i")).toString())); + assertThat(query.toString()).isEqualTo(query(where("firstName").not().regex("^dave$", "i")).toString()); } @Test // DATAMONGO-770 - public void createsQueryWithFindByStartingWithIgnoreCaseCorrectly() { + void createsQueryWithFindByStartingWithIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByFirstNameStartingWithIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").regex("^dave", "i")))); + assertThat(query).isEqualTo(query(where("firstName").regex("^dave", "i"))); } @Test // DATAMONGO-770 - public void createsQueryWithFindByEndingWithIgnoreCaseCorrectly() { + void createsQueryWithFindByEndingWithIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByFirstNameEndingWithIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").regex("dave$", "i")))); + assertThat(query).isEqualTo(query(where("firstName").regex("dave$", "i"))); } @Test // DATAMONGO-770 - public void 
createsQueryWithFindByContainingIgnoreCaseCorrectly() { + void createsQueryWithFindByContainingIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByFirstNameContainingIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").regex(".*dave.*", "i")))); + assertThat(query).isEqualTo(query(where("firstName").regex(".*dave.*", "i"))); } @Test // DATAMONGO-770 - public void shouldThrowExceptionForQueryWithFindByIgnoreCaseOnNonStringProperty() { - - expection.expect(IllegalArgumentException.class); - expection.expectMessage("must be of type String"); + void shouldThrowExceptionForQueryWithFindByIgnoreCaseOnNonStringProperty() { PartTree tree = new PartTree("findByFirstNameAndAgeIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "foo", 42), context); - creator.createQuery(); + assertThatIllegalArgumentException().isThrownBy(creator::createQuery) + .withMessageContaining("must be of type String"); } @Test // DATAMONGO-770 - public void shouldOnlyGenerateLikeExpressionsForStringPropertiesIfAllIgnoreCase() { + void shouldOnlyGenerateLikeExpressionsForStringPropertiesIfAllIgnoreCase() { PartTree tree = new PartTree("findByFirstNameAndAgeAllIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave", 42), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("firstName").regex("^dave$", "i").and("age").is(42)))); + assertThat(query).isEqualTo(query(where("firstName").regex("^dave$", "i").and("age").is(42))); } @Test // DATAMONGO-566 - public void shouldCreateDeleteByQueryCorrectly() { + void shouldCreateDeleteByQueryCorrectly() { PartTree tree = new PartTree("deleteByFirstName", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave", 42), 
context); Query query = creator.createQuery(); - assertThat(tree.isDelete(), is(true)); - assertThat(query, is(query(where("firstName").is("dave")))); + assertThat(tree.isDelete()).isTrue(); + assertThat(query).isEqualTo(query(where("firstName").is("dave"))); } @Test // DATAMONGO-566 - public void shouldCreateDeleteByQueryCorrectlyForMultipleCriteriaAndCaseExpressions() { + void shouldCreateDeleteByQueryCorrectlyForMultipleCriteriaAndCaseExpressions() { PartTree tree = new PartTree("deleteByFirstNameAndAgeAllIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave", 42), context); Query query = creator.createQuery(); - assertThat(tree.isDelete(), is(true)); - assertThat(query, is(query(where("firstName").regex("^dave$", "i").and("age").is(42)))); + assertThat(tree.isDelete()).isTrue(); + assertThat(query).isEqualTo(query(where("firstName").regex("^dave$", "i").and("age").is(42))); } @Test // DATAMONGO-1075 - public void shouldCreateInClauseWhenUsingContainsOnCollectionLikeProperty() { + void shouldCreateInClauseWhenUsingContainsOnCollectionLikeProperty() { PartTree tree = new PartTree("findByEmailAddressesContaining", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("emailAddresses").in("dave")))); + assertThat(query).isEqualTo(query(where("emailAddresses").in("dave"))); } @Test // DATAMONGO-1075 - public void shouldCreateInClauseWhenUsingNotContainsOnCollectionLikeProperty() { + void shouldCreateInClauseWhenUsingNotContainsOnCollectionLikeProperty() { PartTree tree = new PartTree("findByEmailAddressesNotContaining", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("emailAddresses").not().in("dave")))); + 
assertThat(query).isEqualTo(query(where("emailAddresses").not().in("dave"))); } @Test // DATAMONGO-1075, DATAMONGO-1425 - public void shouldCreateRegexWhenUsingNotContainsOnStringProperty() { + void shouldCreateRegexWhenUsingNotContainsOnStringProperty() { PartTree tree = new PartTree("findByUsernameNotContaining", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "thew"), context); Query query = creator.createQuery(); - assertThat(query.getQueryObject().toJson(), is(query(where("username").not().regex(".*thew.*")).getQueryObject().toJson())); + assertThat(query.getQueryObject().toJson()) + .isEqualTo(query(where("username").not().regex(".*thew.*")).getQueryObject().toJson()); } @Test // DATAMONGO-1139 - public void createsNonSphericalNearForDistanceWithDefaultMetric() { + void createsNonSphericalNearForDistanceWithDefaultMetric() { Point point = new Point(1.0, 1.0); Distance distance = new Distance(1.0); @@ -409,11 +411,11 @@ public void createsNonSphericalNearForDistanceWithDefaultMetric() { MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, point, distance), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("location").near(point).maxDistance(1.0)))); + assertThat(query).isEqualTo(query(where("location").near(point).maxDistance(1.0))); } @Test // DATAMONGO-1136 - public void shouldCreateWithinQueryCorrectly() { + void shouldCreateWithinQueryCorrectly() { Point first = new Point(1, 1); Point second = new Point(2, 2); @@ -424,11 +426,11 @@ public void shouldCreateWithinQueryCorrectly() { MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, shape), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("address.geo").within(shape)))); + assertThat(query).isEqualTo(query(where("address.geo").within(shape))); } @Test // DATAMONGO-1110 - public void shouldCreateNearSphereQueryForSphericalProperty() { + void 
shouldCreateNearSphereQueryForSphericalProperty() { Point point = new Point(10, 20); @@ -436,11 +438,11 @@ public void shouldCreateNearSphereQueryForSphericalProperty() { MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, point), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("address2dSphere.geo").nearSphere(point)))); + assertThat(query).isEqualTo(query(where("address2dSphere.geo").nearSphere(point))); } @Test // DATAMONGO-1110 - public void shouldCreateNearSphereQueryForSphericalPropertyHavingDistanceWithDefaultMetric() { + void shouldCreateNearSphereQueryForSphericalPropertyHavingDistanceWithDefaultMetric() { Point point = new Point(1.0, 1.0); Distance distance = new Distance(1.0); @@ -449,11 +451,11 @@ public void shouldCreateNearSphereQueryForSphericalPropertyHavingDistanceWithDef MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, point, distance), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("address2dSphere.geo").nearSphere(point).maxDistance(1.0)))); + assertThat(query).isEqualTo(query(where("address2dSphere.geo").nearSphere(point).maxDistance(1.0))); } @Test // DATAMONGO-1110 - public void shouldCreateNearQueryForMinMaxDistance() { + void shouldCreateNearQueryForMinMaxDistance() { Point point = new Point(10, 20); Range range = Distance.between(new Distance(10), new Distance(20)); @@ -462,169 +464,209 @@ public void shouldCreateNearQueryForMinMaxDistance() { MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, point, range), context); Query query = creator.createQuery(); - assertThat(query, is(query(where("address.geo").near(point).minDistance(10D).maxDistance(20D)))); + assertThat(query).isEqualTo(query(where("address.geo").near(point).minDistance(10D).maxDistance(20D))); } @Test // DATAMONGO-1229 - public void appliesIgnoreCaseToLeafProperty() { + void appliesIgnoreCaseToLeafProperty() { PartTree tree = 
new PartTree("findByAddressStreetIgnoreCase", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "Street"); - assertThat(new MongoQueryCreator(tree, accessor, context).createQuery(), is(notNullValue())); + assertThat(new MongoQueryCreator(tree, accessor, context).createQuery()).isNotNull(); } @Test // DATAMONGO-1232 - public void ignoreCaseShouldEscapeSource() { + void ignoreCaseShouldEscapeSource() { PartTree tree = new PartTree("findByUsernameIgnoreCase", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "con.flux+"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, is(query(where("username").regex("^\\Qcon.flux+\\E$", "i")))); + assertThat(query).isEqualTo(query(where("username").regex("^\\Qcon.flux+\\E$", "i"))); } @Test // DATAMONGO-1232 - public void ignoreCaseShouldEscapeSourceWhenUsedForStartingWith() { + void ignoreCaseShouldEscapeSourceWhenUsedForStartingWith() { PartTree tree = new PartTree("findByUsernameStartingWithIgnoreCase", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "dawns.light+"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, is(query(where("username").regex("^\\Qdawns.light+\\E", "i")))); + assertThat(query).isEqualTo(query(where("username").regex("^\\Qdawns.light+\\E", "i"))); } @Test // DATAMONGO-1232 - public void ignoreCaseShouldEscapeSourceWhenUsedForEndingWith() { + void ignoreCaseShouldEscapeSourceWhenUsedForEndingWith() { PartTree tree = new PartTree("findByUsernameEndingWithIgnoreCase", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "new.ton+"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, is(query(where("username").regex("\\Qnew.ton+\\E$", "i")))); + assertThat(query).isEqualTo(query(where("username").regex("\\Qnew.ton+\\E$", "i"))); } @Test // DATAMONGO-1232 - public void 
likeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() { + void likeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() { PartTree tree = new PartTree("findByUsernameLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*fire.fight+*"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, is(query(where("username").regex(".*\\Qfire.fight+\\E.*")))); + assertThat(query).isEqualTo(query(where("username").regex(".*\\Qfire.fight+\\E.*"))); } @Test // DATAMONGO-1232 - public void likeShouldEscapeSourceWhenUsedWithLeadingWildcard() { + void likeShouldEscapeSourceWhenUsedWithLeadingWildcard() { PartTree tree = new PartTree("findByUsernameLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*steel.heart+"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, is(query(where("username").regex(".*\\Qsteel.heart+\\E")))); + assertThat(query).isEqualTo(query(where("username").regex(".*\\Qsteel.heart+\\E"))); } @Test // DATAMONGO-1232 - public void likeShouldEscapeSourceWhenUsedWithTrailingWildcard() { + void likeShouldEscapeSourceWhenUsedWithTrailingWildcard() { PartTree tree = new PartTree("findByUsernameLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "cala.mity+*"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, is(query(where("username").regex("\\Qcala.mity+\\E.*")))); + assertThat(query).isEqualTo(query(where("username").regex("\\Qcala.mity+\\E.*"))); } @Test // DATAMONGO-1232 - public void likeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() { + void likeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() { PartTree tree = new PartTree("findByUsernameLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query, 
is(query(where("username").regex(".*")))); + assertThat(query).isEqualTo(query(where("username").regex(".*"))); } @Test // DATAMONGO-1342 - public void bindsNullValueToContainsClause() { + void bindsNullValueToContainsClause() { PartTree partTree = new PartTree("emailAddressesContains", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, new Object[] { null }); Query query = new MongoQueryCreator(partTree, accessor, context).createQuery(); - assertThat(query, is(query(where("emailAddresses").in((Object) null)))); + assertThat(query).isEqualTo(query(where("emailAddresses").in((Object) null))); } @Test // DATAMONGO-1424 - public void notLikeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() { + void notLikeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() { PartTree tree = new PartTree("findByUsernameNotLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*fire.fight+*"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query.getQueryObject().toJson(), - is(query(where("username").not().regex(".*\\Qfire.fight+\\E.*")).getQueryObject().toJson())); + assertThat(query.getQueryObject().toJson()) + .isEqualTo(query(where("username").not().regex(".*\\Qfire.fight+\\E.*")).getQueryObject().toJson()); } @Test // DATAMONGO-1424 - public void notLikeShouldEscapeSourceWhenUsedWithLeadingWildcard() { + void notLikeShouldEscapeSourceWhenUsedWithLeadingWildcard() { PartTree tree = new PartTree("findByUsernameNotLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*steel.heart+"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query.getQueryObject().toJson(), - is(query(where("username").not().regex(".*\\Qsteel.heart+\\E")).getQueryObject().toJson())); + assertThat(query.getQueryObject().toJson()) + .isEqualTo(query(where("username").not().regex(".*\\Qsteel.heart+\\E")).getQueryObject().toJson()); } 
@Test // DATAMONGO-1424 - public void notLikeShouldEscapeSourceWhenUsedWithTrailingWildcard() { + void notLikeShouldEscapeSourceWhenUsedWithTrailingWildcard() { PartTree tree = new PartTree("findByUsernameNotLike", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "cala.mity+*"), context); Query query = creator.createQuery(); - assertThat(query.getQueryObject().toJson(), is(query(where("username").not().regex("\\Qcala.mity+\\E.*")).getQueryObject().toJson())); + assertThat(query.getQueryObject().toJson()) + .isEqualTo(query(where("username").not().regex("\\Qcala.mity+\\E.*")).getQueryObject().toJson()); } @Test // DATAMONGO-1424 - public void notLikeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() { + void notLikeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() { PartTree tree = new PartTree("findByUsernameNotLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*"); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query.getQueryObject().toJson(), is(query(where("username").not().regex(".*")).getQueryObject().toJson())); + assertThat(query.getQueryObject().toJson()) + .isEqualTo(query(where("username").not().regex(".*")).getQueryObject().toJson()); } @Test // DATAMONGO-1588 - public void queryShouldAcceptSubclassOfDeclaredArgument() { + void queryShouldAcceptSubclassOfDeclaredArgument() { PartTree tree = new PartTree("findByLocationNear", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, new GeoJsonPoint(-74.044502D, 40.689247D)); Query query = new MongoQueryCreator(tree, accessor, context).createQuery(); - assertThat(query.getQueryObject().containsKey("location"), is(true)); + assertThat(query.getQueryObject()).containsKey("location"); } @Test // DATAMONGO-1588 - public void queryShouldThrowExceptionWhenArgumentDoesNotMatchDeclaration() { - - expection.expect(IllegalArgumentException.class); - expection.expectMessage("Expected 
parameter type of " + Point.class); + void queryShouldThrowExceptionWhenArgumentDoesNotMatchDeclaration() { PartTree tree = new PartTree("findByLocationNear", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, new GeoJsonLineString(new Point(-74.044502D, 40.689247D), new Point(-73.997330D, 40.730824D))); - new MongoQueryCreator(tree, accessor, context).createQuery(); + assertThatIllegalArgumentException().isThrownBy(() -> new MongoQueryCreator(tree, accessor, context).createQuery()) + .withMessageContaining("Expected parameter type of " + Point.class); + } + + @Test // DATAMONGO-2003 + void createsRegexQueryForPatternCorrectly() { + + PartTree tree = new PartTree("findByFirstNameRegex", Person.class); + MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, Pattern.compile(".*")), context); + + assertThat(creator.createQuery()).isEqualTo(query(where("firstName").regex(".*"))); + } + + @Test // DATAMONGO-2003 + void createsRegexQueryForPatternWithOptionsCorrectly() { + + Pattern pattern = Pattern.compile(".*", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE); + + PartTree tree = new PartTree("findByFirstNameRegex", Person.class); + MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, pattern), context); + assertThat(creator.createQuery()).isEqualTo(query(where("firstName").regex(".*", "iu"))); + } + + @Test // DATAMONGO-2071 + void betweenShouldAllowSingleRageParameter() { + + PartTree tree = new PartTree("findByAgeBetween", Person.class); + MongoQueryCreator creator = new MongoQueryCreator(tree, + getAccessor(converter, Range.of(Bound.exclusive(10), Bound.exclusive(11))), context); + + assertThat(creator.createQuery()).isEqualTo(query(where("age").gt(10).lt(11))); + } + + @Test // DATAMONGO-2394 + void nearShouldUseMetricDistanceForGeoJsonTypes() { + + GeoJsonPoint point = new GeoJsonPoint(27.987901, 86.9165379); + PartTree tree = new PartTree("findByLocationNear", User.class); + 
MongoQueryCreator creator = new MongoQueryCreator(tree, + getAccessor(converter, point, new Distance(1, Metrics.KILOMETERS)), context); + + assertThat(creator.createQuery()).isEqualTo(query(where("location").nearSphere(point).maxDistance(1000.0D))); } interface PersonRepository extends Repository { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryExecutionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryExecutionUnitTests.java index 1c3241f4d3..74ff20b148 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryExecutionUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryExecutionUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,18 +15,20 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.mockito.ArgumentMatchers.any; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; import java.lang.reflect.Method; import java.util.Arrays; import java.util.Collections; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.data.geo.Distance; @@ -46,6 +48,7 @@ import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.query.MongoQueryExecution.DeleteExecution; import org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagedExecution; import org.springframework.data.mongodb.repository.query.MongoQueryExecution.PagingGeoNearExecution; import org.springframework.data.projection.ProjectionFactory; @@ -53,17 +56,23 @@ import org.springframework.data.repository.Repository; import org.springframework.data.repository.core.RepositoryMetadata; import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import 
org.springframework.expression.spel.standard.SpelExpressionParser; import org.springframework.util.ReflectionUtils; +import com.mongodb.client.result.DeleteResult; + /** * Unit tests for {@link MongoQueryExecution}. * * @author Mark Paluch * @author Oliver Gierke + * @author Artyom Gabeev + * @author Christoph Strobl * @soundtrack U Can't Touch This - MC Hammer */ -@RunWith(MockitoJUnitRunner.class) -public class MongoQueryExecutionUnitTests { +@ExtendWith(MockitoExtension.class) +class MongoQueryExecutionUnitTests { @Mock MongoOperations mongoOperationsMock; @Mock ExecutableFind findOperationMock; @@ -72,27 +81,28 @@ public class MongoQueryExecutionUnitTests { @Mock TerminatingFindNear terminatingGeoMock; @Mock DbRefResolver dbRefResolver; - Point POINT = new Point(10, 20); - Distance DISTANCE = new Distance(2.5, Metrics.KILOMETERS); - RepositoryMetadata metadata = new DefaultRepositoryMetadata(PersonRepository.class); - MongoMappingContext context = new MongoMappingContext(); - ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); - Method method = ReflectionUtils.findMethod(PersonRepository.class, "findByLocationNear", Point.class, Distance.class, + private SpelExpressionParser EXPRESSION_PARSER = new SpelExpressionParser(); + private Point POINT = new Point(10, 20); + private Distance DISTANCE = new Distance(2.5, Metrics.KILOMETERS); + private RepositoryMetadata metadata = new DefaultRepositoryMetadata(PersonRepository.class); + private MongoMappingContext context = new MongoMappingContext(); + private ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + private Method method = ReflectionUtils.findMethod(PersonRepository.class, "findByLocationNear", Point.class, + Distance.class, Pageable.class); - MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); - MappingMongoConverter converter; + private MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + private 
MappingMongoConverter converter; - @Before + @BeforeEach @SuppressWarnings("unchecked") - public void setUp() { + void setUp() { converter = new MappingMongoConverter(dbRefResolver, context); - when(mongoOperationsMock.getConverter()).thenReturn(converter); - when(mongoOperationsMock.query(any(Class.class))).thenReturn(findOperationMock); + } @Test // DATAMONGO-1464 - public void pagedExecutionShouldNotGenerateCountQueryIfQueryReportedNoResults() { + void pagedExecutionShouldNotGenerateCountQueryIfQueryReportedNoResults() { doReturn(terminatingMock).when(operationMock).matching(any(Query.class)); doReturn(Collections.emptyList()).when(terminatingMock).all(); @@ -105,7 +115,7 @@ public void pagedExecutionShouldNotGenerateCountQueryIfQueryReportedNoResults() } @Test // DATAMONGO-1464 - public void pagedExecutionShouldUseCountFromResultWithOffsetAndResultsWithinPageSize() { + void pagedExecutionShouldUseCountFromResultWithOffsetAndResultsWithinPageSize() { doReturn(terminatingMock).when(operationMock).matching(any(Query.class)); doReturn(Arrays.asList(new Person(), new Person(), new Person(), new Person())).when(terminatingMock).all(); @@ -118,7 +128,7 @@ public void pagedExecutionShouldUseCountFromResultWithOffsetAndResultsWithinPage } @Test // DATAMONGO-1464 - public void pagedExecutionRetrievesObjectsForPageableOutOfRange() { + void pagedExecutionRetrievesObjectsForPageableOutOfRange() { doReturn(terminatingMock).when(operationMock).matching(any(Query.class)); doReturn(Collections.emptyList()).when(terminatingMock).all(); @@ -131,16 +141,19 @@ public void pagedExecutionRetrievesObjectsForPageableOutOfRange() { } @Test // DATAMONGO-1464 - public void pagingGeoExecutionShouldUseCountFromResultWithOffsetAndResultsWithinPageSize() { + void pagingGeoExecutionShouldUseCountFromResultWithOffsetAndResultsWithinPageSize() { GeoResult result = new GeoResult<>(new Person(), DISTANCE); + when(mongoOperationsMock.getConverter()).thenReturn(converter); + 
when(mongoOperationsMock.query(any(Class.class))).thenReturn(findOperationMock); when(findOperationMock.near(any(NearQuery.class))).thenReturn(terminatingGeoMock); doReturn(new GeoResults<>(Arrays.asList(result, result, result, result))).when(terminatingGeoMock).all(); ConvertingParameterAccessor accessor = new ConvertingParameterAccessor(converter, new MongoParametersParameterAccessor(queryMethod, new Object[] { POINT, DISTANCE, PageRequest.of(0, 10) })); - PartTreeMongoQuery query = new PartTreeMongoQuery(queryMethod, mongoOperationsMock); + PartTreeMongoQuery query = new PartTreeMongoQuery(queryMethod, mongoOperationsMock, EXPRESSION_PARSER, + QueryMethodEvaluationContextProvider.DEFAULT); PagingGeoNearExecution execution = new PagingGeoNearExecution(findOperationMock, queryMethod, accessor, query); execution.execute(new Query()); @@ -149,8 +162,10 @@ public void pagingGeoExecutionShouldUseCountFromResultWithOffsetAndResultsWithin } @Test // DATAMONGO-1464 - public void pagingGeoExecutionRetrievesObjectsForPageableOutOfRange() { + void pagingGeoExecutionRetrievesObjectsForPageableOutOfRange() { + when(mongoOperationsMock.getConverter()).thenReturn(converter); + when(mongoOperationsMock.query(any(Class.class))).thenReturn(findOperationMock); when(findOperationMock.near(any(NearQuery.class))).thenReturn(terminatingGeoMock); doReturn(new GeoResults<>(Collections.emptyList())).when(terminatingGeoMock).all(); doReturn(terminatingMock).when(findOperationMock).matching(any(Query.class)); @@ -158,7 +173,8 @@ public void pagingGeoExecutionRetrievesObjectsForPageableOutOfRange() { ConvertingParameterAccessor accessor = new ConvertingParameterAccessor(converter, new MongoParametersParameterAccessor(queryMethod, new Object[] { POINT, DISTANCE, PageRequest.of(2, 10) })); - PartTreeMongoQuery query = new PartTreeMongoQuery(queryMethod, mongoOperationsMock); + PartTreeMongoQuery query = new PartTreeMongoQuery(queryMethod, mongoOperationsMock, EXPRESSION_PARSER, + 
QueryMethodEvaluationContextProvider.DEFAULT); PagingGeoNearExecution execution = new PagingGeoNearExecution(findOperationMock, queryMethod, accessor, query); execution.execute(new Query()); @@ -167,8 +183,49 @@ public void pagingGeoExecutionRetrievesObjectsForPageableOutOfRange() { verify(terminatingMock).count(); } + @Test // DATAMONGO-2351 + void acknowledgedDeleteReturnsDeletedCount() { + + Method method = ReflectionUtils.findMethod(PersonRepository.class, "deleteAllByLastname", String.class); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + + when(mongoOperationsMock.remove(any(Query.class), any(Class.class), anyString())) + .thenReturn(DeleteResult.acknowledged(10)); + + assertThat(new DeleteExecution(mongoOperationsMock, queryMethod).execute(new Query())).isEqualTo(10L); + } + + @Test // DATAMONGO-2351 + void unacknowledgedDeleteReturnsZeroDeletedCount() { + + Method method = ReflectionUtils.findMethod(PersonRepository.class, "deleteAllByLastname", String.class); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + + when(mongoOperationsMock.remove(any(Query.class), any(Class.class), anyString())) + .thenReturn(DeleteResult.unacknowledged()); + + assertThat(new DeleteExecution(mongoOperationsMock, queryMethod).execute(new Query())).isEqualTo(0L); + } + + @Test // DATAMONGO-1997 + void deleteExecutionWithEntityReturnTypeTriggersFindAndRemove() { + + Method method = ReflectionUtils.findMethod(PersonRepository.class, "deleteByLastname", String.class); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, metadata, factory, context); + + Person person = new Person(); + + when(mongoOperationsMock.findAndRemove(any(Query.class), any(Class.class), anyString())).thenReturn(person); + + assertThat(new DeleteExecution(mongoOperationsMock, queryMethod).execute(new Query())).isEqualTo(person); + } + interface PersonRepository extends Repository { GeoPage findByLocationNear(Point 
point, Distance distance, Pageable pageable); + + Long deleteAllByLastname(String lastname); + + Person deleteByLastname(String lastname); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java index 4ea7793dfe..8f9824e14d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,14 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.lang.reflect.Method; import java.util.Collection; import java.util.List; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.data.domain.Pageable; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoPage; @@ -31,11 +30,18 @@ import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.User; +import 
org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.annotation.Collation; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.repository.Address; +import org.springframework.data.mongodb.repository.Aggregation; import org.springframework.data.mongodb.repository.Contact; import org.springframework.data.mongodb.repository.Meta; import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.Query; +import org.springframework.data.mongodb.repository.ReadPreference; import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; import org.springframework.data.repository.Repository; @@ -47,12 +53,13 @@ * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Jorge Rodríguez */ public class MongoQueryMethodUnitTests { MongoMappingContext context; - @Before + @BeforeEach public void setUp() { context = new MongoMappingContext(); } @@ -63,8 +70,8 @@ public void detectsCollectionFromRepoTypeIfReturnTypeNotAssignable() throws Exce MongoQueryMethod queryMethod = queryMethod(SampleRepository.class, "method"); MongoEntityMetadata metadata = queryMethod.getEntityInformation(); - assertThat(metadata.getJavaType(), is(typeCompatibleWith(Address.class))); - assertThat(metadata.getCollectionName(), is("contact")); + assertThat(metadata.getJavaType()).isAssignableFrom(Address.class); + assertThat(metadata.getCollectionName()).isEqualTo("contact"); } @Test @@ -73,8 +80,8 @@ public void detectsCollectionFromReturnTypeIfReturnTypeAssignable() throws Excep MongoQueryMethod queryMethod = queryMethod(SampleRepository2.class, "method"); MongoEntityMetadata entityInformation = 
queryMethod.getEntityInformation(); - assertThat(entityInformation.getJavaType(), is(typeCompatibleWith(Person.class))); - assertThat(entityInformation.getCollectionName(), is("person")); + assertThat(entityInformation.getJavaType()).isAssignableFrom(Person.class); + assertThat(entityInformation.getCollectionName()).isEqualTo("person"); } @Test @@ -82,34 +89,35 @@ public void discoversUserAsDomainTypeForGeoPageQueryMethod() throws Exception { MongoQueryMethod queryMethod = queryMethod(PersonRepository.class, "findByLocationNear", Point.class, Distance.class, Pageable.class); - assertThat(queryMethod.isGeoNearQuery(), is(true)); - assertThat(queryMethod.isPageQuery(), is(true)); + assertThat(queryMethod.isGeoNearQuery()).isTrue(); + assertThat(queryMethod.isPageQuery()).isTrue(); queryMethod = queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class); - assertThat(queryMethod.isGeoNearQuery(), is(true)); - assertThat(queryMethod.isPageQuery(), is(false)); - assertThat(queryMethod.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class))); - - assertThat(queryMethod(PersonRepository.class, "findByEmailAddress", String.class, Point.class).isGeoNearQuery(), - is(true)); - assertThat(queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class).isGeoNearQuery(), - is(true)); - assertThat(queryMethod(PersonRepository.class, "findByLastname", String.class, Point.class).isGeoNearQuery(), - is(true)); + assertThat(queryMethod.isGeoNearQuery()).isTrue(); + assertThat(queryMethod.isPageQuery()).isFalse(); + assertThat(queryMethod.getEntityInformation().getJavaType()).isAssignableFrom(User.class); + + assertThat(queryMethod(PersonRepository.class, "findByEmailAddress", String.class, Point.class).isGeoNearQuery()) + .isTrue(); + assertThat(queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class).isGeoNearQuery()) + .isTrue(); + assertThat(queryMethod(PersonRepository.class, "findByLastname", 
String.class, Point.class).isGeoNearQuery()) + .isTrue(); } - @Test(expected = IllegalArgumentException.class) - public void rejectsGeoPageQueryWithoutPageable() throws Exception { - queryMethod(PersonRepository.class, "findByLocationNear", Point.class, Distance.class); + @Test + public void rejectsGeoPageQueryWithoutPageable() { + assertThatIllegalArgumentException() + .isThrownBy(() -> queryMethod(PersonRepository.class, "findByLocationNear", Point.class, Distance.class)); } - @Test(expected = IllegalArgumentException.class) + @Test public void rejectsNullMappingContext() throws Exception { Method method = PersonRepository.class.getMethod("findByFirstname", String.class, Point.class); - new MongoQueryMethod(method, new DefaultRepositoryMetadata(PersonRepository.class), - new SpelAwareProxyProjectionFactory(), null); + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new MongoQueryMethod(method, + new DefaultRepositoryMetadata(PersonRepository.class), new SpelAwareProxyProjectionFactory(), null)); } @Test @@ -118,8 +126,8 @@ public void considersMethodReturningGeoPageAsPagingMethod() throws Exception { MongoQueryMethod method = queryMethod(PersonRepository.class, "findByLocationNear", Point.class, Distance.class, Pageable.class); - assertThat(method.isPageQuery(), is(true)); - assertThat(method.isCollectionQuery(), is(false)); + assertThat(method.isPageQuery()).isTrue(); + assertThat(method.isCollectionQuery()).isFalse(); } @Test @@ -133,8 +141,8 @@ public void createsMongoQueryMethodWithEmptyMetaCorrectly() throws Exception { MongoQueryMethod method = queryMethod(PersonRepository.class, "emptyMetaAnnotation"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().hasValues(), is(false)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().hasValues()).isFalse(); } @Test // DATAMONGO-957 @@ -142,44 +150,44 @@ public void 
createsMongoQueryMethodWithMaxExecutionTimeCorrectly() throws Except MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithMaxExecutionTime"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().getMaxTimeMsec(), is(100L)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getMaxTimeMsec()).isEqualTo(100L); } - @Test // DATAMONGO-1403 - public void createsMongoQueryMethodWithSpellFixedMaxExecutionTimeCorrectly() throws Exception { + @Test // DATAMONGO-1311 + public void createsMongoQueryMethodWithBatchSizeCorrectly() throws Exception { - MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithSpellFixedMaxExecutionTime"); + MongoQueryMethod method = queryMethod(PersonRepository.class, "batchSize"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().getMaxTimeMsec(), is(100L)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getCursorBatchSize()).isEqualTo(100); } - @Test // DATAMONGO-957 - public void createsMongoQueryMethodWithMaxScanCorrectly() throws Exception { + @Test // DATAMONGO-1311 + public void createsMongoQueryMethodWithNegativeBatchSizeCorrectly() throws Exception { - MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithMaxScan"); + MongoQueryMethod method = queryMethod(PersonRepository.class, "negativeBatchSize"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().getMaxScan(), is(10L)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getCursorBatchSize()).isEqualTo(-200); } - @Test // DATAMONGO-957 - public void createsMongoQueryMethodWithCommentCorrectly() throws Exception { + @Test // DATAMONGO-1403 + public void createsMongoQueryMethodWithSpellFixedMaxExecutionTimeCorrectly() throws Exception { - 
MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithComment"); + MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithSpellFixedMaxExecutionTime"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().getComment(), is("foo bar")); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getMaxTimeMsec()).isEqualTo(100L); } @Test // DATAMONGO-957 - public void createsMongoQueryMethodWithSnapshotCorrectly() throws Exception { + public void createsMongoQueryMethodWithCommentCorrectly() throws Exception { - MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithSnapshotUsage"); + MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithComment"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().getSnapshot(), is(true)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getComment()).isEqualTo("foo bar"); } @Test // DATAMONGO-1480 @@ -187,19 +195,20 @@ public void createsMongoQueryMethodWithNoCursorTimeoutCorrectly() throws Excepti MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithNoCursorTimeout"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().getFlags(), - containsInAnyOrder(org.springframework.data.mongodb.core.query.Meta.CursorOption.NO_TIMEOUT)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getFlags()) + .contains(org.springframework.data.mongodb.core.query.Meta.CursorOption.NO_TIMEOUT); } - @Test // DATAMONGO-1480 + @Test // DATAMONGO-1480, DATAMONGO-2572 public void createsMongoQueryMethodWithMultipleFlagsCorrectly() throws Exception { MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithMultipleFlags"); - assertThat(method.hasQueryMetaAttributes(), 
is(true)); - assertThat(method.getQueryMetaAttributes().getFlags(), - containsInAnyOrder(org.springframework.data.mongodb.core.query.Meta.CursorOption.NO_TIMEOUT, org.springframework.data.mongodb.core.query.Meta.CursorOption.SLAVE_OK)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getFlags()).contains( + org.springframework.data.mongodb.core.query.Meta.CursorOption.NO_TIMEOUT, + org.springframework.data.mongodb.core.query.Meta.CursorOption.SECONDARY_READS); } @Test // DATAMONGO-1266 @@ -207,7 +216,155 @@ public void fallsBackToRepositoryDomainTypeIfMethodDoesNotReturnADomainType() th MongoQueryMethod method = queryMethod(PersonRepository.class, "deleteByUserName", String.class); - assertThat(method.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class))); + assertThat(method.getEntityInformation().getJavaType()).isAssignableFrom(User.class); + } + + @Test // DATAMONGO-2153 + public void findsAnnotatedAggregation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findByAggregation"); + + assertThat(method.hasAnnotatedAggregation()).isTrue(); + assertThat(method.getAnnotatedAggregation()).hasSize(1); + } + + @Test // DATAMONGO-2153 + public void detectsCollationForAggregation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findByAggregationWithCollation"); + + assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("de_AT"); + } + + @Test // GH-2107 + void detectsModifyingQueryByUpdateType() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findAndUpdateBy", String.class, Update.class); + + assertThat(method.isModifyingQuery()).isTrue(); + } + + @Test // GH-2107 + void detectsModifyingQueryByUpdateDefinitionType() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findAndUpdateBy", String.class, + 
UpdateDefinition.class); + + assertThat(method.isModifyingQuery()).isTrue(); + } + + @Test // GH-2107 + void detectsModifyingQueryByAggregationUpdateDefinitionType() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findAndUpdateBy", String.class, + AggregationUpdate.class); + + assertThat(method.isModifyingQuery()).isTrue(); + } + + @Test // GH-2107 + void queryCreationFailsOnInvalidUpdate() throws Exception { + + assertThatExceptionOfType(IllegalStateException.class) // + .isThrownBy(() -> queryMethod(InvalidUpdateMethodRepo.class, "findAndUpdateByLastname", String.class).verify()) // + .withMessageContaining("Update") // + .withMessageContaining("findAndUpdateByLastname"); + } + + @Test // GH-2107 + void queryCreationForUpdateMethodFailsOnInvalidReturnType() throws Exception { + + assertThatExceptionOfType(IllegalStateException.class) // + .isThrownBy(() -> queryMethod(InvalidUpdateMethodRepo.class, "findAndIncrementVisitsByFirstname", String.class) + .verify()) // + .withMessageContaining("Update") // + .withMessageContaining("numeric") // + .withMessageContaining("findAndIncrementVisitsByFirstname"); + } + + @Test // GH-3002 + void readsCollationFromAtCollationAnnotation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findWithCollationFromAtCollationByFirstname", + String.class); + + assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("en_US"); + } + + @Test // GH-3002 + void readsCollationFromAtQueryAnnotation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findWithCollationFromAtQueryByFirstname", + String.class); + + assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("en_US"); + } + + @Test // GH-3002 + void annotatedCollationClashSelectsAtCollationAnnotationValue() throws Exception { + + MongoQueryMethod method = 
queryMethod(PersonRepository.class, + "findWithMultipleCollationsFromAtQueryAndAtCollationByFirstname", String.class); + + assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("de_AT"); + } + + @Test // GH-2971 + void readsReadPreferenceAtQueryAnnotation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findWithReadPreferenceFromAtReadPreferenceByFirstname", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-2971 + void readsReadPreferenceFromAtQueryAnnotation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findWithReadPreferenceFromAtQueryByFirstname", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-2971 + void annotatedReadPreferenceClashSelectsAtReadPreferenceAnnotationValue() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findWithMultipleReadPreferencesFromAtQueryAndAtReadPreferenceByFirstname", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-2971 + void readsReadPreferenceAtRepositoryAnnotation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "deleteByUserName", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("primaryPreferred"); + } + + @Test // GH-2971 + void detectsReadPreferenceForAggregation() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "findByAggregationWithReadPreference"); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); 
+ assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-4546 + void errorsOnInvalidAggregation() { + + assertThatIllegalStateException() // + .isThrownBy(() -> queryMethod(InvalidAggregationMethodRepo.class, "findByAggregation").verify()) // + .withMessageContaining("Invalid aggregation") // + .withMessageContaining("findByAggregation"); } private MongoQueryMethod queryMethod(Class repository, String name, Class... parameters) throws Exception { @@ -217,6 +374,7 @@ private MongoQueryMethod queryMethod(Class repository, String name, Class. return new MongoQueryMethod(method, new DefaultRepositoryMetadata(repository), factory, context); } + @ReadPreference(value = "primaryPreferred") interface PersonRepository extends Repository { // Misses Pageable @@ -233,29 +391,66 @@ interface PersonRepository extends Repository { @Meta List emptyMetaAnnotation(); + @Meta(cursorBatchSize = 100) + List batchSize(); + + @Meta(cursorBatchSize = -200) + List negativeBatchSize(); + @Meta(maxExecutionTimeMs = 100) List metaWithMaxExecutionTime(); @Meta(maxExecutionTimeMs = 100) List metaWithSpellFixedMaxExecutionTime(); - @Meta(maxScanDocuments = 10) - List metaWithMaxScan(); - @Meta(comment = "foo bar") List metaWithComment(); - @Meta(snapshot = true) - List metaWithSnapshotUsage(); - @Meta(flags = { org.springframework.data.mongodb.core.query.Meta.CursorOption.NO_TIMEOUT }) List metaWithNoCursorTimeout(); - @Meta(flags = { org.springframework.data.mongodb.core.query.Meta.CursorOption.NO_TIMEOUT, org.springframework.data.mongodb.core.query.Meta.CursorOption.SLAVE_OK }) + @Meta(flags = { org.springframework.data.mongodb.core.query.Meta.CursorOption.NO_TIMEOUT, + org.springframework.data.mongodb.core.query.Meta.CursorOption.SECONDARY_READS }) List metaWithMultipleFlags(); // DATAMONGO-1266 void deleteByUserName(String userName); + + @Aggregation("{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }") + List 
findByAggregation(); + + @Aggregation(pipeline = "{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }", + collation = "de_AT") + List findByAggregationWithCollation(); + + @Aggregation(pipeline = "{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }", readPreference = "secondaryPreferred") + List findByAggregationWithReadPreference(); + + void findAndUpdateBy(String firstname, Update update); + + void findAndUpdateBy(String firstname, UpdateDefinition update); + + void findAndUpdateBy(String firstname, AggregationUpdate update); + + @Collation("en_US") + List findWithCollationFromAtCollationByFirstname(String firstname); + + @Query(collation = "en_US") + List findWithCollationFromAtQueryByFirstname(String firstname); + + @Collation("de_AT") + @Query(collation = "en_US") + List findWithMultipleCollationsFromAtQueryAndAtCollationByFirstname(String firstname); + + @ReadPreference("secondaryPreferred") + List findWithReadPreferenceFromAtReadPreferenceByFirstname(String firstname); + + @Query(readPreference = "secondaryPreferred") + List findWithReadPreferenceFromAtQueryByFirstname(String firstname); + + @ReadPreference("secondaryPreferred") + @Query(readPreference = "primaryPreferred") + List findWithMultipleReadPreferencesFromAtQueryAndAtReadPreferenceByFirstname(String firstname); } interface SampleRepository extends Repository { @@ -270,6 +465,21 @@ interface SampleRepository2 extends Repository { Customer methodReturningAnInterface(); } + interface InvalidUpdateMethodRepo extends Repository { + + @org.springframework.data.mongodb.repository.Update + void findAndUpdateByLastname(String lastname); + + @org.springframework.data.mongodb.repository.Update("{ '$inc' : { 'visits' : 1 } }") + Person findAndIncrementVisitsByFirstname(String firstname); + } + + interface InvalidAggregationMethodRepo extends Repository { + + @Aggregation("[{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }]") + List findByAggregation(); 
+ } + interface Customer { } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java index ec95270969..e0b9b77099 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,28 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; -import static org.springframework.data.mongodb.core.query.IsTextQuery.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.lang.reflect.Method; import java.util.List; import org.bson.Document; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; +import org.bson.json.JsonParseException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import 
org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.beans.factory.annotation.Value; -import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.TextCriteria; import org.springframework.data.mongodb.repository.MongoRepository; import org.springframework.data.mongodb.repository.Person; @@ -50,8 +45,8 @@ import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; - -import com.mongodb.util.JSONParseException; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.expression.spel.standard.SpelExpressionParser; /** * Unit tests for {@link PartTreeMongoQuery}. 
@@ -61,138 +56,131 @@ * @author Thomas Darimont * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) -public class PartTreeMongoQueryUnitTests { +@ExtendWith(MockitoExtension.class) +class PartTreeMongoQueryUnitTests { @Mock MongoOperations mongoOperationsMock; @Mock ExecutableFind findOperationMock; - MongoMappingContext mappingContext; - - public @Rule ExpectedException exception = ExpectedException.none(); + private MongoMappingContext mappingContext; - @Before - public void setUp() { + @BeforeEach + void setUp() { mappingContext = new MongoMappingContext(); - DbRefResolver dbRefResolver = new DefaultDbRefResolver(mock(MongoDbFactory.class)); - MongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext); + MongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); doReturn(converter).when(mongoOperationsMock).getConverter(); doReturn(findOperationMock).when(mongoOperationsMock).query(any()); } @Test // DATAMOGO-952 - public void rejectsInvalidFieldSpecification() { + void rejectsInvalidFieldSpecification() { - exception.expect(IllegalStateException.class); - exception.expectMessage("findByLastname"); - - deriveQueryFromMethod("findByLastname", "foo"); + assertThatIllegalStateException().isThrownBy(() -> deriveQueryFromMethod("findByLastname", "foo")) + .withMessageContaining("findByLastname"); } @Test // DATAMOGO-952 - public void singleFieldJsonIncludeRestrictionShouldBeConsidered() { + void singleFieldJsonIncludeRestrictionShouldBeConsidered() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findByFirstname", "foo"); - assertThat(query.getFieldsObject(), is(new Document().append("firstname", 1))); + assertThat(query.getFieldsObject()).isEqualTo(new Document().append("firstname", 1)); } @Test // DATAMOGO-952 - public void multiFieldJsonIncludeRestrictionShouldBeConsidered() { + void multiFieldJsonIncludeRestrictionShouldBeConsidered() { 
org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findByFirstnameAndLastname", "foo", "bar"); - assertThat(query.getFieldsObject(), is(new Document().append("firstname", 1).append("lastname", 1))); + assertThat(query.getFieldsObject()).isEqualTo(new Document().append("firstname", 1).append("lastname", 1)); } @Test // DATAMOGO-952 - public void multiFieldJsonExcludeRestrictionShouldBeConsidered() { + void multiFieldJsonExcludeRestrictionShouldBeConsidered() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findPersonByFirstnameAndLastname", "foo", "bar"); - assertThat(query.getFieldsObject(), is(new Document().append("firstname", 0).append("lastname", 0))); + assertThat(query.getFieldsObject()).isEqualTo(new Document().append("firstname", 0).append("lastname", 0)); } @Test // DATAMOGO-973 - public void shouldAddFullTextParamCorrectlyToDerivedQuery() { + void shouldAddFullTextParamCorrectlyToDerivedQuery() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findPersonByFirstname", "text", TextCriteria.forDefaultLanguage().matching("search")); - assertThat(query, isTextQuery().searchingFor("search").where(new Criteria("firstname").is("text"))); + assertThat(query.getQueryObject()).containsEntry("$text.$search", "search").containsEntry("firstname", "text"); } @Test // DATAMONGO-1180 - public void propagatesRootExceptionForInvalidQuery() { - - exception.expect(IllegalStateException.class); - exception.expectCause(is(instanceOf(JSONParseException.class))); + void propagatesRootExceptionForInvalidQuery() { - deriveQueryFromMethod("findByAge", 1); + assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> deriveQueryFromMethod("findByAge", 1)) + .withCauseInstanceOf(JsonParseException.class); } @Test // DATAMONGO-1345, DATAMONGO-1735 - public void doesNotDeriveFieldSpecForNormalDomainType() { - assertThat(deriveQueryFromMethod("findPersonBy", new 
Object[0]).getFieldsObject(), is(new Document())); + void doesNotDeriveFieldSpecForNormalDomainType() { + assertThat(deriveQueryFromMethod("findPersonBy", new Object[0]).getFieldsObject()).isEmpty(); } @Test // DATAMONGO-1345 - public void restrictsQueryToFieldsRequiredForProjection() { + void restrictsQueryToFieldsRequiredForProjection() { Document fieldsObject = deriveQueryFromMethod("findPersonProjectedBy", new Object[0]).getFieldsObject(); - assertThat(fieldsObject.get("firstname"), is(1)); - assertThat(fieldsObject.get("lastname"), is(1)); + assertThat(fieldsObject.get("firstname")).isEqualTo(1); + assertThat(fieldsObject.get("lastname")).isEqualTo(1); } @Test // DATAMONGO-1345 - public void restrictsQueryToFieldsRequiredForDto() { + void restrictsQueryToFieldsRequiredForDto() { Document fieldsObject = deriveQueryFromMethod("findPersonDtoByAge", new Object[] { 42 }).getFieldsObject(); - assertThat(fieldsObject.get("firstname"), is(1)); - assertThat(fieldsObject.get("lastname"), is(1)); + assertThat(fieldsObject.get("firstname")).isEqualTo(1); + assertThat(fieldsObject.get("lastname")).isEqualTo(1); } @Test // DATAMONGO-1345 - public void usesDynamicProjection() { + void usesDynamicProjection() { Document fields = deriveQueryFromMethod("findDynamicallyProjectedBy", ExtendedProjection.class).getFieldsObject(); - assertThat(fields.get("firstname"), is(1)); - assertThat(fields.get("lastname"), is(1)); - assertThat(fields.get("age"), is(1)); + assertThat(fields.get("firstname")).isEqualTo(1); + assertThat(fields.get("lastname")).isEqualTo(1); + assertThat(fields.get("age")).isEqualTo(1); } @Test // DATAMONGO-1500 - public void shouldLeaveParameterConversionToQueryMapper() { + void shouldLeaveParameterConversionToQueryMapper() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findBySex", Sex.FEMALE); - assertThat(query.getQueryObject().get("sex"), is(Sex.FEMALE)); - assertThat(query.getFieldsObject().get("firstname"), is(1)); + 
assertThat(query.getQueryObject().get("sex")).isEqualTo(Sex.FEMALE); + assertThat(query.getFieldsObject().get("firstname")).isEqualTo(1); } @Test // DATAMONGO-1729, DATAMONGO-1735 - public void doesNotCreateFieldsObjectForOpenProjection() { + void doesNotCreateFieldsObjectForOpenProjection() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findAllBy"); - assertThat(query.getFieldsObject(), is(new Document())); + assertThat(query.getFieldsObject()).isEmpty(); } @Test // DATAMONGO-1865 - public void limitingReturnsTrueIfTreeIsLimiting() { - assertThat(createQueryForMethod("findFirstBy").isLimiting(), is(true)); + void limitingReturnsTrueIfTreeIsLimiting() { + assertThat(createQueryForMethod("findFirstBy").isLimiting()).isTrue(); } @Test // DATAMONGO-1865 - public void limitingReturnsFalseIfTreeIsNotLimiting() { - assertThat(createQueryForMethod("findPersonBy").isLimiting(), is(false)); + void limitingReturnsFalseIfTreeIsNotLimiting() { + assertThat(createQueryForMethod("findPersonBy").isLimiting()).isFalse(); } private org.springframework.data.mongodb.core.query.Query deriveQueryFromMethod(String method, Object... args) { @@ -218,7 +206,8 @@ private PartTreeMongoQuery createQueryForMethod(String methodName, Class... 
p MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(Repo.class), factory, mappingContext); - return new PartTreeMongoQuery(queryMethod, mongoOperationsMock); + return new PartTreeMongoQuery(queryMethod, mongoOperationsMock, new SpelExpressionParser(), + QueryMethodEvaluationContextProvider.DEFAULT); } catch (Exception e) { throw new IllegalArgumentException(e.getMessage(), e); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecutionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecutionUnitTests.java index 030243f04d..21d5dc71fb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecutionUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryExecutionUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,23 +15,28 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; import java.lang.reflect.Method; import java.util.Arrays; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.reactivestreams.Publisher; + import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.Point; @@ -39,20 +44,25 @@ import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.DeleteExecution; import org.springframework.data.mongodb.repository.query.ReactiveMongoQueryExecution.GeoNearExecution; import org.springframework.data.util.ClassTypeInformation; import org.springframework.util.ClassUtils; +import com.mongodb.client.result.DeleteResult; + /** * Unit tests for 
{@link ReactiveMongoQueryExecution}. * * @author Mark Paluch + * @author Artyom Gabeev */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class ReactiveMongoQueryExecutionUnitTests { @Mock private ReactiveMongoOperations operations; @Mock private MongoParameterAccessor parameterAccessor; + @Mock private MongoQueryMethod method; @Test // DATAMONGO-1444 public void geoNearExecutionShouldApplyQuerySettings() throws Exception { @@ -60,7 +70,8 @@ public void geoNearExecutionShouldApplyQuerySettings() throws Exception { Method geoNear = ClassUtils.getMethod(GeoRepo.class, "geoNear"); Query query = new Query(); when(parameterAccessor.getGeoNearLocation()).thenReturn(new Point(1, 2)); - when(parameterAccessor.getDistanceRange()).thenReturn(new Range<>(new Distance(10), new Distance(15))); + when(parameterAccessor.getDistanceRange()) + .thenReturn(Range.from(Bound.inclusive(new Distance(10))).to(Bound.inclusive(new Distance(15)))); when(parameterAccessor.getPageable()).thenReturn(PageRequest.of(1, 10)); new GeoNearExecution(operations, parameterAccessor, ClassTypeInformation.fromReturnTypeOf(geoNear)).execute(query, @@ -70,10 +81,10 @@ public void geoNearExecutionShouldApplyQuerySettings() throws Exception { verify(operations).geoNear(queryArgumentCaptor.capture(), eq(Person.class), eq("person")); NearQuery nearQuery = queryArgumentCaptor.getValue(); - assertThat(nearQuery.toDocument().get("near"), is(equalTo(Arrays.asList(1d, 2d)))); - assertThat(nearQuery.getSkip(), is(10L)); - assertThat(nearQuery.getMinDistance(), is(equalTo(new Distance(10)))); - assertThat(nearQuery.getMaxDistance(), is(equalTo(new Distance(15)))); + assertThat(nearQuery.toDocument().get("near")).isEqualTo(Arrays.asList(1d, 2d)); + assertThat(nearQuery.getSkip()).isEqualTo(10L); + assertThat(nearQuery.getMinDistance()).isEqualTo(new Distance(10)); + assertThat(nearQuery.getMaxDistance()).isEqualTo(new Distance(15)); } @Test // DATAMONGO-1444 @@ -83,7 +94,7 @@ public 
void geoNearExecutionShouldApplyMinimalSettings() throws Exception { Query query = new Query(); when(parameterAccessor.getPageable()).thenReturn(Pageable.unpaged()); when(parameterAccessor.getGeoNearLocation()).thenReturn(new Point(1, 2)); - when(parameterAccessor.getDistanceRange()).thenReturn(new Range<>(null, null)); + when(parameterAccessor.getDistanceRange()).thenReturn(Range.unbounded()); new GeoNearExecution(operations, parameterAccessor, ClassTypeInformation.fromReturnTypeOf(geoNear)).execute(query, Person.class, "person"); @@ -92,10 +103,34 @@ public void geoNearExecutionShouldApplyMinimalSettings() throws Exception { verify(operations).geoNear(queryArgumentCaptor.capture(), eq(Person.class), eq("person")); NearQuery nearQuery = queryArgumentCaptor.getValue(); - assertThat(nearQuery.toDocument().get("near"), is(equalTo(Arrays.asList(1d, 2d)))); - assertThat(nearQuery.getSkip(), is(0L)); - assertThat(nearQuery.getMinDistance(), is(nullValue())); - assertThat(nearQuery.getMaxDistance(), is(nullValue())); + assertThat(nearQuery.toDocument().get("near")).isEqualTo(Arrays.asList(1d, 2d)); + assertThat(nearQuery.getSkip()).isEqualTo(0L); + assertThat(nearQuery.getMinDistance()).isNull(); + assertThat(nearQuery.getMaxDistance()).isNull(); + } + + @Test // DATAMONGO-2351 + public void acknowledgedDeleteReturnsDeletedCount() { + + when(operations.remove(any(Query.class), any(Class.class), anyString())) + .thenReturn(Mono.just(DeleteResult.acknowledged(10))); + + Mono.from((Publisher) new DeleteExecution(operations, method).execute(new Query(), Class.class, "")) // + .as(StepVerifier::create) // + .expectNext(10L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2351 + public void unacknowledgedDeleteReturnsZeroDeletedCount() { + + when(operations.remove(any(Query.class), any(Class.class), anyString())) + .thenReturn(Mono.just(DeleteResult.unacknowledged())); + + Mono.from((Publisher) new DeleteExecution(operations, method).execute(new Query(), Class.class, "")) 
// + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); } interface GeoRepo { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodUnitTests.java index 6b4846bb30..82cd0a157c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,18 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; + +import org.springframework.data.mongodb.repository.query.MongoQueryMethodUnitTests.PersonRepository; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; import java.lang.reflect.Method; import java.util.List; -import org.junit.Before; -import org.junit.Test; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; @@ -31,29 +35,32 @@ import 
org.springframework.data.geo.GeoResult; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.User; +import org.springframework.data.mongodb.core.annotation.Collation; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.repository.Address; +import org.springframework.data.mongodb.repository.Aggregation; import org.springframework.data.mongodb.repository.Contact; import org.springframework.data.mongodb.repository.Meta; import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.Query; +import org.springframework.data.mongodb.repository.ReadPreference; import org.springframework.data.projection.ProjectionFactory; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; import org.springframework.data.repository.Repository; import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; -import reactor.core.publisher.Flux; -import reactor.core.publisher.Mono; - /** * Unit test for {@link ReactiveMongoQueryMethod}. 
* * @author Mark Paluch + * @author Christoph Strobl + * @author Jorge Rodríguez */ public class ReactiveMongoQueryMethodUnitTests { MongoMappingContext context; - @Before + @BeforeEach public void setUp() { context = new MongoMappingContext(); } @@ -64,8 +71,8 @@ public void detectsCollectionFromRepoTypeIfReturnTypeNotAssignable() throws Exce ReactiveMongoQueryMethod queryMethod = queryMethod(SampleRepository.class, "method"); MongoEntityMetadata metadata = queryMethod.getEntityInformation(); - assertThat(metadata.getJavaType(), is(typeCompatibleWith(Address.class))); - assertThat(metadata.getCollectionName(), is("contact")); + assertThat(metadata.getJavaType()).isAssignableFrom(Address.class); + assertThat(metadata.getCollectionName()).isEqualTo("contact"); } @Test // DATAMONGO-1444 @@ -74,8 +81,8 @@ public void detectsCollectionFromReturnTypeIfReturnTypeAssignable() throws Excep MongoQueryMethod queryMethod = queryMethod(SampleRepository2.class, "method"); MongoEntityMetadata entityInformation = queryMethod.getEntityInformation(); - assertThat(entityInformation.getJavaType(), is(typeCompatibleWith(Person.class))); - assertThat(entityInformation.getCollectionName(), is("person")); + assertThat(entityInformation.getJavaType()).isAssignableFrom(Person.class); + assertThat(entityInformation.getCollectionName()).isEqualTo("person"); } @Test // DATAMONGO-1444 @@ -83,34 +90,35 @@ public void discoversUserAsDomainTypeForGeoPagingQueryMethod() throws Exception MongoQueryMethod queryMethod = queryMethod(PersonRepository.class, "findByLocationNear", Point.class, Distance.class, Pageable.class); - assertThat(queryMethod.isGeoNearQuery(), is(false)); - assertThat(queryMethod.isPageQuery(), is(false)); + assertThat(queryMethod.isGeoNearQuery()).isFalse(); + assertThat(queryMethod.isPageQuery()).isFalse(); queryMethod = queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class); - assertThat(queryMethod.isGeoNearQuery(), is(false)); - 
assertThat(queryMethod.isPageQuery(), is(false)); - assertThat(queryMethod.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class))); - - assertThat(queryMethod(PersonRepository.class, "findByEmailAddress", String.class, Point.class).isGeoNearQuery(), - is(true)); - assertThat(queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class).isGeoNearQuery(), - is(false)); - assertThat(queryMethod(PersonRepository.class, "findByLastname", String.class, Point.class).isGeoNearQuery(), - is(true)); + assertThat(queryMethod.isGeoNearQuery()).isFalse(); + assertThat(queryMethod.isPageQuery()).isFalse(); + assertThat(queryMethod.getEntityInformation().getJavaType()).isAssignableFrom(User.class); + + assertThat(queryMethod(PersonRepository.class, "findByEmailAddress", String.class, Point.class).isGeoNearQuery()) + .isTrue(); + assertThat(queryMethod(PersonRepository.class, "findByFirstname", String.class, Point.class).isGeoNearQuery()) + .isFalse(); + assertThat(queryMethod(PersonRepository.class, "findByLastname", String.class, Point.class).isGeoNearQuery()) + .isTrue(); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1444 + @Test // DATAMONGO-1444 public void rejectsNullMappingContext() throws Exception { Method method = PersonRepository.class.getMethod("findByFirstname", String.class, Point.class); - new MongoQueryMethod(method, new DefaultRepositoryMetadata(PersonRepository.class), - new SpelAwareProxyProjectionFactory(), null); + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> new MongoQueryMethod(method, new DefaultRepositoryMetadata(PersonRepository.class), + new SpelAwareProxyProjectionFactory(), null)); } - @Test(expected = IllegalStateException.class) // DATAMONGO-1444 - public void rejectsMonoPageableResult() throws Exception { - queryMethod(PersonRepository.class, "findMonoByLastname", String.class, Pageable.class); + @Test // DATAMONGO-1444 + public void rejectsMonoPageableResult() { 
+ assertThatIllegalStateException() + .isThrownBy(() -> queryMethod(PersonRepository.class, "findMonoByLastname", String.class, Pageable.class).verify()); } @Test // DATAMONGO-1444 @@ -123,8 +131,8 @@ public void createsMongoQueryMethodWithEmptyMetaCorrectly() throws Exception { MongoQueryMethod method = queryMethod(PersonRepository.class, "emptyMetaAnnotation"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().hasValues(), is(false)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().hasValues()).isFalse(); } @Test // DATAMONGO-1444 @@ -132,26 +140,138 @@ public void createsMongoQueryMethodWithMaxExecutionTimeCorrectly() throws Except MongoQueryMethod method = queryMethod(PersonRepository.class, "metaWithMaxExecutionTime"); - assertThat(method.hasQueryMetaAttributes(), is(true)); - assertThat(method.getQueryMetaAttributes().getMaxTimeMsec(), is(100L)); + assertThat(method.hasQueryMetaAttributes()).isTrue(); + assertThat(method.getQueryMetaAttributes().getMaxTimeMsec()).isEqualTo(100L); } - @Test(expected = InvalidDataAccessApiUsageException.class) // DATAMONGO-1444 - public void throwsExceptionOnWrappedPage() throws Exception { - queryMethod(PersonRepository.class, "findMonoPageByLastname", String.class, Pageable.class); + @Test // DATAMONGO-1444 + public void throwsExceptionOnWrappedPage() { + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> queryMethod(PersonRepository.class, "findMonoPageByLastname", String.class, Pageable.class).verify()); } - @Test(expected = InvalidDataAccessApiUsageException.class) // DATAMONGO-1444 - public void throwsExceptionOnWrappedSlice() throws Exception { - queryMethod(PersonRepository.class, "findMonoSliceByLastname", String.class, Pageable.class); + @Test // DATAMONGO-1444 + public void throwsExceptionOnWrappedSlice() { + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + 
.isThrownBy(() -> queryMethod(PersonRepository.class, "findMonoSliceByLastname", String.class, Pageable.class).verify()); } @Test // DATAMONGO-1444 public void fallsBackToRepositoryDomainTypeIfMethodDoesNotReturnADomainType() throws Exception { - MongoQueryMethod method = queryMethod(PersonRepository.class, "deleteByUserName", String.class); + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "deleteByUserName", String.class); + + assertThat(method.getEntityInformation().getJavaType()).isAssignableFrom(User.class); + } + + @Test // DATAMONGO-2153 + public void findsAnnotatedAggregation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "findByAggregation"); + + Assertions.assertThat(method.hasAnnotatedAggregation()).isTrue(); + Assertions.assertThat(method.getAnnotatedAggregation()).hasSize(1); + } + + @Test // DATAMONGO-2153 + public void detectsCollationForAggregation() throws Exception { - assertThat(method.getEntityInformation().getJavaType(), is(typeCompatibleWith(User.class))); + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "findByAggregationWithCollation"); + + Assertions.assertThat(method.hasAnnotatedCollation()).isTrue(); + Assertions.assertThat(method.getAnnotatedCollation()).isEqualTo("de_AT"); + } + + @Test // GH-2107 + public void queryCreationFailsOnInvalidUpdate() throws Exception { + + assertThatExceptionOfType(IllegalStateException.class) // + .isThrownBy(() -> queryMethod(InvalidUpdateMethodRepo.class, "findAndUpdateByLastname", String.class).verify()) // + .withMessageContaining("Update") // + .withMessageContaining("findAndUpdateByLastname"); + } + + @Test // GH-2107 + public void queryCreationForUpdateMethodFailsOnInvalidReturnType() throws Exception { + + assertThatExceptionOfType(IllegalStateException.class) // + .isThrownBy(() -> queryMethod(InvalidUpdateMethodRepo.class, "findAndIncrementVisitsByFirstname", String.class).verify()) // + 
.withMessageContaining("Update") // + .withMessageContaining("numeric") // + .withMessageContaining("findAndIncrementVisitsByFirstname"); + } + + @Test // GH-3002 + void readsCollationFromAtCollationAnnotation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(MongoQueryMethodUnitTests.PersonRepository.class, "findWithCollationFromAtCollationByFirstname", String.class); + + assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("en_US"); + } + + @Test // GH-3002 + void readsCollationFromAtQueryAnnotation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(MongoQueryMethodUnitTests.PersonRepository.class, "findWithCollationFromAtQueryByFirstname", String.class); + + assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("en_US"); + } + + @Test // GH-3002 + void annotatedCollationClashSelectsAtCollationAnnotationValue() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "findWithMultipleCollationsFromAtQueryAndAtCollationByFirstname", String.class); + + assertThat(method.hasAnnotatedCollation()).isTrue(); + assertThat(method.getAnnotatedCollation()).isEqualTo("de_AT"); + } + + + @Test // GH-2971 + void readsReadPreferenceAtQueryAnnotation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "findWithReadPreferenceFromAtReadPreferenceByFirstname", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-2971 + void readsReadPreferenceFromAtQueryAnnotation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "findWithReadPreferenceFromAtQueryByFirstname", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + 
assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-2971 + void annotatedReadPreferenceClashSelectsAtReadPreferenceAnnotationValue() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "findWithMultipleReadPreferencesFromAtQueryAndAtReadPreferenceByFirstname", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); + } + + @Test // GH-2971 + void readsReadPreferenceAtRepositoryAnnotation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(PersonRepository.class, "deleteByUserName", String.class); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("primaryPreferred"); + } + + @Test // GH-2971 + void detectsReadPreferenceForAggregation() throws Exception { + + ReactiveMongoQueryMethod method = queryMethod(MongoQueryMethodUnitTests.PersonRepository.class, "findByAggregationWithReadPreference"); + + assertThat(method.hasAnnotatedReadPreference()).isTrue(); + assertThat(method.getAnnotatedReadPreference()).isEqualTo("secondaryPreferred"); } private ReactiveMongoQueryMethod queryMethod(Class repository, String name, Class... 
parameters) @@ -162,6 +282,7 @@ private ReactiveMongoQueryMethod queryMethod(Class repository, String name, C return new ReactiveMongoQueryMethod(method, new DefaultRepositoryMetadata(repository), factory, context); } + @ReadPreference(value = "primaryPreferred") interface PersonRepository extends Repository { Mono findMonoByLastname(String lastname, Pageable pageRequest); @@ -188,6 +309,36 @@ interface PersonRepository extends Repository { Flux metaWithMaxExecutionTime(); void deleteByUserName(String userName); + + @Aggregation("{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }") + Flux findByAggregation(); + + @Aggregation(pipeline = "{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }", + collation = "de_AT") + Flux findByAggregationWithCollation(); + + @Aggregation(pipeline = "{'$group': { _id: '$templateId', maxVersion : { $max : '$version'} } }", readPreference = "secondaryPreferred") + Flux findByAggregationWithReadPreference(); + + @Collation("en_US") + List findWithCollationFromAtCollationByFirstname(String firstname); + + @Query(collation = "en_US") + List findWithCollationFromAtQueryByFirstname(String firstname); + + @Collation("de_AT") + @Query(collation = "en_US") + List findWithMultipleCollationsFromAtQueryAndAtCollationByFirstname(String firstname); + + @ReadPreference("secondaryPreferred") + Flux findWithReadPreferenceFromAtReadPreferenceByFirstname(String firstname); + + @Query(readPreference = "secondaryPreferred") + Flux findWithReadPreferenceFromAtQueryByFirstname(String firstname); + + @ReadPreference("secondaryPreferred") + @Query(readPreference = "primaryPreferred") + Flux findWithMultipleReadPreferencesFromAtQueryAndAtReadPreferenceByFirstname(String firstname); } interface SampleRepository extends Repository { @@ -202,5 +353,14 @@ interface SampleRepository2 extends Repository { Customer methodReturningAnInterface(); } + interface InvalidUpdateMethodRepo extends Repository { + + 
@org.springframework.data.mongodb.repository.Update + Mono findAndUpdateByLastname(String lastname); + + @org.springframework.data.mongodb.repository.Update("{ '$inc' : { 'visits' : 1 } }") + Mono findAndIncrementVisitsByFirstname(String firstname); + } + interface Customer {} } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedAggregationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedAggregationUnitTests.java new file mode 100644 index 0000000000..c6047ce30d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedAggregationUnitTests.java @@ -0,0 +1,343 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.query; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.reactivestreams.Publisher; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.repository.Aggregation; +import org.springframework.data.mongodb.repository.Hint; +import org.springframework.data.mongodb.repository.Meta; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import 
org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.reactive.ReactiveCrudRepository; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +import com.mongodb.ReadPreference; + +/** + * Unit tests for {@link ReactiveStringBasedAggregation}. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +public class ReactiveStringBasedAggregationUnitTests { + + SpelExpressionParser PARSER = new SpelExpressionParser(); + + @Mock ReactiveMongoOperations operations; + @Mock DbRefResolver dbRefResolver; + MongoConverter converter; + + private static final String RAW_SORT_STRING = "{ '$sort' : { 'lastname' : -1 } }"; + private static final String RAW_GROUP_BY_LASTNAME_STRING = "{ '$group': { '_id' : '$lastname', 'names' : { '$addToSet' : '$firstname' } } }"; + private static final String RAW_OUT = "{ '$out' : 'authors' }"; + private static final String GROUP_BY_LASTNAME_STRING_WITH_PARAMETER_PLACEHOLDER = "{ '$group': { '_id' : '$lastname', names : { '$addToSet' : '$?0' } } }"; + private static final String GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER = "{ '$group': { '_id' : '$lastname', 'names' : { '$addToSet' : '$?#{[0]}' } } }"; + + private static final Document SORT = Document.parse(RAW_SORT_STRING); + private static final Document GROUP_BY_LASTNAME = Document.parse(RAW_GROUP_BY_LASTNAME_STRING); + + @BeforeEach + public void setUp() { + + converter = new MappingMongoConverter(dbRefResolver, new MongoMappingContext()); + when(operations.getConverter()).thenReturn(converter); + when(operations.aggregate(any(TypedAggregation.class), any())).thenReturn(Flux.empty()); + when(operations.execute(any())).thenReturn(Flux.empty()); + } + + @Test // 
DATAMONGO-2153 + public void plainStringAggregation() { + + AggregationInvocation invocation = executeAggregation("plainStringAggregation"); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME, SORT); + } + + @Test // DATAMONGO-2153 + public void plainStringAggregationConsidersMeta() { + + AggregationInvocation invocation = executeAggregation("plainStringAggregation"); + + AggregationOptions options = invocation.aggregation.getOptions(); + + assertThat(options.getComment()).contains("expensive-aggregation"); + assertThat(options.getCursorBatchSize()).isEqualTo(42); + } + + @Test // DATAMONGO-2153 + public void plainStringAggregationWithSortParameter() { + + AggregationInvocation invocation = executeAggregation("plainStringAggregation", + Sort.by(Direction.DESC, "lastname")); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME, SORT); + + AggregationOptions options = invocation.aggregation.getOptions(); + + assertThat(options.getComment()).isEmpty(); + assertThat(options.getCursorBatchSize()).isNull(); + } + + @Test // DATAMONGO-2153 + public void replaceParameter() { + + AggregationInvocation invocation = executeAggregation("parameterReplacementAggregation", "firstname"); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME); + } + + @Test // DATAMONGO-2153 + public void replaceSpElParameter() { + + AggregationInvocation invocation = executeAggregation("spelParameterReplacementAggregation", "firstname"); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + 
assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME); + } + + @Test // DATAMONGO-2153 + public void aggregateWithCollation() { + + AggregationInvocation invocation = executeAggregation("aggregateWithCollation"); + + assertThat(collationOf(invocation)).isEqualTo(Collation.of("de_AT")); + } + + @Test // DATAMONGO-2153 + public void aggregateWithCollationParameter() { + + AggregationInvocation invocation = executeAggregation("aggregateWithCollation", Collation.of("en_US")); + + assertThat(collationOf(invocation)).isEqualTo(Collation.of("en_US")); + } + + @Test // DATAMONGO-2557 + void aggregationRetrievesCodecFromDriverJustOnceForMultipleAggregationOperationsInPipeline() { + + executeAggregation("multiOperationPipeline", "firstname"); + verify(operations).execute(any()); + } + + @Test // GH-3230 + void aggregatePicksUpHintFromAnnotation() { + + AggregationInvocation invocation = executeAggregation("withHint"); + assertThat(hintOf(invocation)).isEqualTo("idx"); + } + + private AggregationInvocation executeAggregation(String name, Object... 
args) { + + Class[] argTypes = Arrays.stream(args).map(Object::getClass).toArray(size -> new Class[size]); + ReactiveStringBasedAggregation aggregation = createAggregationForMethod(name, argTypes); + + ArgumentCaptor aggregationCaptor = ArgumentCaptor.forClass(TypedAggregation.class); + ArgumentCaptor targetTypeCaptor = ArgumentCaptor.forClass(Class.class); + + Object result = Flux.from((Publisher) aggregation.execute(args)).blockLast(); + + verify(operations).aggregate(aggregationCaptor.capture(), targetTypeCaptor.capture()); + + return new AggregationInvocation(aggregationCaptor.getValue(), targetTypeCaptor.getValue(), result); + } + + @Test // GH-4088 + void aggregateWithVoidReturnTypeSkipsResultOnOutStage() { + + AggregationInvocation invocation = executeAggregation("outSkipResult"); + + assertThat(skipResultsOf(invocation)).isTrue(); + } + + @Test // GH-4088 + void aggregateWithOutStageDoesNotSkipResults() { + + AggregationInvocation invocation = executeAggregation("outDoNotSkipResult"); + + assertThat(skipResultsOf(invocation)).isFalse(); + } + + @Test // GH-2971 + void aggregatePicksUpReadPreferenceFromAnnotation() { + + AggregationInvocation invocation = executeAggregation("withReadPreference"); + assertThat(readPreferenceOf(invocation)).isEqualTo(ReadPreference.secondaryPreferred()); + } + + private ReactiveStringBasedAggregation createAggregationForMethod(String name, Class... 
parameters) { + + Method method = ClassUtils.getMethod(SampleRepository.class, name, parameters); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + ReactiveMongoQueryMethod queryMethod = new ReactiveMongoQueryMethod(method, + new DefaultRepositoryMetadata(SampleRepository.class), factory, converter.getMappingContext()); + return new ReactiveStringBasedAggregation(queryMethod, operations, PARSER, + ReactiveQueryMethodEvaluationContextProvider.DEFAULT); + } + + private List pipelineOf(AggregationInvocation invocation) { + + AggregationOperationContext context = new TypeBasedAggregationOperationContext( + invocation.aggregation.getInputType(), converter.getMappingContext(), new QueryMapper(converter)); + + return invocation.aggregation.toPipeline(context); + } + + private Class inputTypeOf(AggregationInvocation invocation) { + return invocation.aggregation.getInputType(); + } + + @Nullable + private Collation collationOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().getCollation().orElse(null) + : null; + } + + @Nullable + private Object hintOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().getHintObject().orElse(null) + : null; + } + + private Boolean skipResultsOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().isSkipResults() + : false; + } + + @Nullable + private ReadPreference readPreferenceOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? 
invocation.aggregation.getOptions().getReadPreference() + : null; + } + + private Class targetTypeOf(AggregationInvocation invocation) { + return invocation.getTargetType(); + } + + private interface SampleRepository extends ReactiveCrudRepository { + + @Meta(cursorBatchSize = 42, comment = "expensive-aggregation") + @Aggregation({ RAW_GROUP_BY_LASTNAME_STRING, RAW_SORT_STRING }) + Mono plainStringAggregation(); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + Mono plainStringAggregation(Sort sort); + + @Aggregation(GROUP_BY_LASTNAME_STRING_WITH_PARAMETER_PLACEHOLDER) + Mono parameterReplacementAggregation(String attribute); + + @Aggregation(GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER) + Mono spelParameterReplacementAggregation(String arg0); + + @Aggregation(pipeline = {RAW_GROUP_BY_LASTNAME_STRING, GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER}) + Mono multiOperationPipeline(String arg0); + + @Aggregation(pipeline = RAW_GROUP_BY_LASTNAME_STRING, collation = "de_AT") + Mono aggregateWithCollation(); + + @Aggregation(pipeline = RAW_GROUP_BY_LASTNAME_STRING, collation = "de_AT") + Mono aggregateWithCollation(Collation collation); + + @Hint("idx") + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + String withHint(); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT }) + Flux outDoNotSkipResult(); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT }) + Mono outSkipResult(); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT }, readPreference = "secondaryPreferred") + Mono withReadPreference(); + } + + static class PersonAggregate { + + } + + static final class AggregationInvocation { + + private final TypedAggregation aggregation; + private final Class targetType; + private final Object result; + + public AggregationInvocation(TypedAggregation aggregation, Class targetType, Object result) { + this.aggregation = aggregation; + this.targetType = targetType; + this.result = result; + } + + public 
TypedAggregation getAggregation() { + return this.aggregation; + } + + public Class getTargetType() { + return this.targetType; + } + + public Object getResult() { + return this.result; + } + + public String toString() { + return "ReactiveStringBasedAggregationUnitTests.AggregationInvocation(aggregation=" + this.getAggregation() + + ", targetType=" + this.getTargetType() + ", result=" + this.getResult() + ")"; + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQueryUnitTests.java index c640b25d3f..72f9626a57 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ReactiveStringBasedMongoQueryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,27 +15,31 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; -import static org.mockito.Mockito.any; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import java.lang.reflect.Method; +import java.util.Base64; import java.util.Collections; +import java.util.HashMap; import java.util.Map; -import javax.xml.bind.DatatypeConverter; - -import org.bson.BSON; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.core.env.MapPropertySource; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ValueExpressionParser; import org.springframework.data.mongodb.core.ReactiveFindOperation.ReactiveFind; import org.springframework.data.mongodb.core.ReactiveMongoOperations; import org.springframework.data.mongodb.core.convert.DbRefResolver; @@ -51,7 +55,12 @@ import org.springframework.data.projection.SpelAwareProxyProjectionFactory; import org.springframework.data.repository.Repository; import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; -import 
org.springframework.data.repository.query.DefaultEvaluationContextProvider; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ReactiveExtensionAwareQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.data.spel.spi.EvaluationContextExtension; +import org.springframework.data.spel.spi.ReactiveEvaluationContextExtension; import org.springframework.expression.spel.standard.SpelExpressionParser; /** @@ -60,10 +69,12 @@ * @author Mark Paluch * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class ReactiveStringBasedMongoQueryUnitTests { - SpelExpressionParser PARSER = new SpelExpressionParser(); + ValueExpressionParser PARSER = ValueExpressionParser.create(SpelExpressionParser::new); + StandardEnvironment environment = new StandardEnvironment(); @Mock ReactiveMongoOperations operations; @Mock DbRefResolver factory; @@ -71,12 +82,18 @@ public class ReactiveStringBasedMongoQueryUnitTests { MongoConverter converter; - @Before + Map properties = new HashMap<>(); + MapPropertySource propertySource = new MapPropertySource("mock", properties); + + @BeforeEach public void setUp() { - when(operations.query(any())).thenReturn(reactiveFind); + environment.getPropertySources().addFirst(propertySource); this.converter = new MappingMongoConverter(factory, new MongoMappingContext()); + + when(operations.query(any())).thenReturn(reactiveFind); + when(operations.execute(any())).thenReturn(Flux.empty()); } @Test // DATAMONGO-1444 @@ -85,10 +102,10 @@ public void bindsSimplePropertyCorrectly() throws Exception { ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastname", String.class); 
ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "Matthews"); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } @Test // DATAMONGO-1444 @@ -103,23 +120,31 @@ public void bindsComplexPropertyCorrectly() throws Exception { converter.write(address, dbObject); dbObject.remove(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); Document queryObject = new Document("address", dbObject); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery(queryObject); - assertThat(query.getQueryObject().toJson(), is(reference.getQueryObject().toJson())); + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); } @Test // DATAMONGO-1444 public void constructsDeleteQueryCorrectly() throws Exception { ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("removeByLastname", String.class); - assertThat(mongoQuery.isDeleteQuery(), is(true)); + assertThat(mongoQuery.isDeleteQuery()).isTrue(); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1444 - public void preventsDeleteAndCountFlagAtTheSameTime() throws Exception { - createQueryForMethod("invalidMethod", String.class); + @Test // DATAMONGO-1444 + public void preventsDeleteAndCountFlagAtTheSameTime() { + assertThatIllegalArgumentException().isThrownBy(() -> createQueryForMethod("invalidMethod", String.class)); + } + + @Test // DATAMONGO-2030 + 
public void shouldSupportExistsProjection() throws Exception { + + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("existsByLastname", String.class); + + assertThat(mongoQuery.isExistsQuery()).isTrue(); } @Test // DATAMONGO-1444 @@ -130,11 +155,11 @@ public void shouldSupportFindByParameterizedCriteriaAndFields() throws Exception ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByParameterizedCriteriaAndFields", Document.class, Map.class); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor).block(); - assertThat(query.getQueryObject(), - is(new BasicQuery("{ \"firstname\": \"first\", \"lastname\": \"last\"}").getQueryObject())); - assertThat(query.getFieldsObject(), is(new BasicQuery(null, "{ \"lastname\": 1}").getFieldsObject())); + assertThat(query.getQueryObject()) + .isEqualTo(new BasicQuery("{ \"firstname\": \"first\", \"lastname\": \"last\"}").getQueryObject()); + assertThat(query.getFieldsObject()).isEqualTo(new BasicQuery(null, "{ \"lastname\": 1}").getFieldsObject()); } @Test // DATAMONGO-1444 @@ -144,11 +169,11 @@ public void shouldParseQueryWithParametersInExpression() throws Exception { ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithParametersInExpression", int.class, int.class, int.class, int.class); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor).block(); - assertThat(query.getQueryObject(), - is(new BasicQuery("{$where: 'return this.date.getUTCMonth() == 3 && this.date.getUTCDay() == 4;'}") - .getQueryObject())); + assertThat(query.getQueryObject()) + .isEqualTo(new BasicQuery("{$where: 'return this.date.getUTCMonth() == 3 && this.date.getUTCDay() == 4;'}") + .getQueryObject()); } @Test // DATAMONGO-1444 @@ -158,9 
+183,9 @@ public void shouldParseJsonKeyReplacementCorrectly() throws Exception { String.class, String.class); ConvertingParameterAccessor parameterAccessor = StubParameterAccessor.getAccessor(converter, "key", "value"); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(parameterAccessor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(parameterAccessor).block(); - assertThat(query.getQueryObject(), is(new Document().append("key", "value"))); + assertThat(query.getQueryObject()).isEqualTo(new Document().append("key", "value")); } @Test // DATAMONGO-1444 @@ -169,23 +194,36 @@ public void shouldSupportExpressionsInCustomQueries() throws Exception { ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, "Matthews"); ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpression", String.class); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); + } + + @Test // GH-3050 + public void shouldSupportPropertiesInCustomQueries() throws Exception { + + properties.put("foo", "bar"); + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithProperty"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'bar'}"); + + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } @Test // DATAMONGO-1444 
public void shouldSupportExpressionsInCustomQueriesWithNestedObject() throws Exception { - ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, true, "param1", "param2"); + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, true, "param1"); ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndNestedObject", boolean.class, String.class); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{ \"id\" : { \"$exists\" : true}}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } @Test // DATAMONGO-1444 @@ -195,11 +233,11 @@ public void shouldSupportExpressionsInCustomQueriesWithMultipleNestedObjects() t ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndMultipleNestedObjects", boolean.class, String.class, String.class); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( "{ \"id\" : { \"$exists\" : true} , \"foo\" : 42 , \"bar\" : { \"$exists\" : false}}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } @Test // DATAMONGO-1444 @@ -209,21 +247,36 @@ public void shouldSupportNonQuotedBinaryDataReplacement() throws Exception { ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter, binaryData); ReactiveStringBasedMongoQuery mongoQuery = 
createQueryForMethod("findByLastnameAsBinary", byte[].class); - org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor); - org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : { '$binary' : '" - + DatatypeConverter.printBase64Binary(binaryData) + "', '$type' : '" + BSON.B_GENERAL + "'}}"); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{'lastname' : { '$binary' : '" + Base64.getEncoder().encodeToString(binaryData) + "', '$type' : '" + 0 + "'}}"); - assertThat(query.getQueryObject().toJson(), is(reference.getQueryObject().toJson())); + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); } - private ReactiveStringBasedMongoQuery createQueryForMethod(String name, Class... parameters) throws Exception { + @Test // DATAMONGO-1894 + void shouldConsiderReactiveSpelExtension() throws Exception { + + ConvertingParameterAccessor accesor = StubParameterAccessor.getAccessor(converter); + ReactiveStringBasedMongoQuery mongoQuery = createQueryForMethod("withReactiveSpelExtensions"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accesor).block(); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{lastname: true}", "{project: true}"); + + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); + } + + private ReactiveStringBasedMongoQuery createQueryForMethod( + String name, Class... 
parameters) + throws Exception { Method method = SampleRepository.class.getMethod(name, parameters); ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); ReactiveMongoQueryMethod queryMethod = new ReactiveMongoQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class), factory, converter.getMappingContext()); - return new ReactiveStringBasedMongoQuery(queryMethod, operations, PARSER, - DefaultEvaluationContextProvider.INSTANCE); + QueryMethodValueEvaluationContextAccessor accessor = new QueryMethodValueEvaluationContextAccessor( + environment, Collections.singletonList(ReactiveSpelExtension.INSTANCE)); + return new ReactiveStringBasedMongoQuery(queryMethod, operations, new ValueExpressionDelegate(accessor, PARSER)); } private interface SampleRepository extends Repository { @@ -255,10 +308,53 @@ private interface SampleRepository extends Repository { @Query("{'lastname': ?#{[0]} }") Flux findByQueryWithExpression(String param0); + @Query("{'lastname': ?${foo} }") + Flux findByQueryWithProperty(); + @Query("{'id':?#{ [0] ? { $exists :true} : [1] }}") Flux findByQueryWithExpressionAndNestedObject(boolean param0, String param1); @Query("{'id':?#{ [0] ? { $exists :true} : [1] }, 'foo':42, 'bar': ?#{ [0] ? 
{ $exists :false} : [1] }}") Flux findByQueryWithExpressionAndMultipleNestedObjects(boolean param0, String param1, String param2); + + @Query(value = "{ 'lastname' : ?0 }", exists = true) + Mono existsByLastname(String lastname); + + @Query(value = "{ 'lastname' : ?#{hasRole()} }", fields = "{project: ?#{hasRole()}}") + Mono withReactiveSpelExtensions(); + } + + public enum ReactiveSpelExtension implements ReactiveEvaluationContextExtension { + + INSTANCE; + + @Override + public Mono getExtension() { + return Mono.just(SpelExtension.INSTANCE); + } + + @Override + public String getExtensionId() { + return "sample"; + } + } + + public enum SpelExtension implements EvaluationContextExtension { + + INSTANCE; + + @Override + public Object getRootObject() { + return this; + } + + @Override + public String getExtensionId() { + return "sample"; + } + + public boolean hasRole() { + return true; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationOperationUnitTests.java new file mode 100644 index 0000000000..32f9092fbf --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationOperationUnitTests.java @@ -0,0 +1,51 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import static org.assertj.core.api.Assertions.*; + +import org.assertj.core.api.Assertions; +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +/** + * Unit tests for {@link StringBasedAggregation}. + * + * @author Christoph Strobl + */ +public class StringBasedAggregationOperationUnitTests { + + @ParameterizedTest // GH-4712 + @ValueSource(strings = { "$project", "'$project'", "\"$project\"" }) + void extractsAggregationOperatorFromAggregationStringWithoutBindingParameters(String operator) { + + StringAggregationOperation agg = new StringAggregationOperation("{ %s : { 'fn' : 1 } }".formatted(operator), + Object.class, (it) -> Assertions.fail("o_O Parameter binding")); + + assertThat(agg.getOperator()).isEqualTo("$project"); + } + + @Test // GH-4712 + void fallbackToParameterBindingIfAggregationOperatorCannotBeExtractedFromAggregationStringWithoutBindingParameters() { + + StringAggregationOperation agg = new StringAggregationOperation("{ happy-madison : { 'fn' : 1 } }", Object.class, + (it) -> new Document("$project", "")); + + assertThat(agg.getOperator()).isEqualTo("$project"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java new file mode 100644 index 0000000000..85a8650b26 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java @@ -0,0 +1,454 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.query; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.lang.reflect.Method; +import java.time.Duration; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Stream; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import 
org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.repository.Aggregation; +import org.springframework.data.mongodb.repository.Hint; +import org.springframework.data.mongodb.repository.Meta; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.repository.Repository; +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; +import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.ReadPreference; + +/** + * Unit tests for {@link StringBasedAggregation}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Divya Srivastava + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +public class StringBasedAggregationUnitTests { + + private SpelExpressionParser PARSER = new SpelExpressionParser(); + + @Mock MongoOperations operations; + @Mock DbRefResolver dbRefResolver; + @Mock AggregationResults aggregationResults; + private MongoConverter converter; + + private static final String RAW_SORT_STRING = "{ '$sort' : { 'lastname' : -1 } }"; + private static final String RAW_GROUP_BY_LASTNAME_STRING = "{ '$group': { '_id' : '$lastname', 'names' : { '$addToSet' : '$firstname' } } }"; + private static final String RAW_OUT = "{ '$out' : 'authors' }"; + private static final String GROUP_BY_LASTNAME_STRING_WITH_PARAMETER_PLACEHOLDER = "{ '$group': { '_id' : '$lastname', names : { '$addToSet' : '$?0' } } }"; + private static final String GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER = "{ '$group': { '_id' : '$lastname', 'names' : { '$addToSet' : '$?#{[0]}' } } }"; + + private static final Document SORT = Document.parse(RAW_SORT_STRING); + private static final Document GROUP_BY_LASTNAME = Document.parse(RAW_GROUP_BY_LASTNAME_STRING); + + @BeforeEach + void setUp() { + + converter = new MappingMongoConverter(dbRefResolver, new MongoMappingContext()); + when(operations.getConverter()).thenReturn(converter); + when(operations.aggregate(any(TypedAggregation.class), any())).thenReturn(aggregationResults); + when(operations.execute(any())).thenReturn(MongoClientSettings.getDefaultCodecRegistry()); + } + + @Test // DATAMONGO-2153 + void plainStringAggregation() { + + AggregationInvocation invocation = executeAggregation("plainStringAggregation"); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME, SORT); + } + + @Test 
// DATAMONGO-2153, DATAMONGO-2449 + void plainStringAggregationConsidersMeta() { + + AggregationInvocation invocation = executeAggregation("plainStringAggregation"); + AggregationOptions options = invocation.aggregation.getOptions(); + + assertThat(options.getComment()).contains("expensive-aggregation"); + assertThat(options.getCursorBatchSize()).isEqualTo(42); + assertThat(options.isAllowDiskUse()).isTrue(); + assertThat(options.getMaxTime()).isEqualTo(Duration.ofMillis(100)); + } + + @Test // DATAMONGO-2153, DATAMONGO-2449 + void returnSingleObject() { + + PersonAggregate expected = new PersonAggregate(); + when(aggregationResults.getUniqueMappedResult()).thenReturn(Collections.singletonList(expected)); + + AggregationInvocation invocation = executeAggregation("returnSingleEntity"); + assertThat(invocation.result).isEqualTo(expected); + + AggregationOptions options = invocation.aggregation.getOptions(); + + assertThat(options.getComment()).isEmpty(); + assertThat(options.getCursorBatchSize()).isNull(); + assertThat(options.isAllowDiskUse()).isFalse(); + assertThat(options.getMaxTime()).isEqualTo(Duration.ZERO); + } + + @Test // DATAMONGO-2153 + void returnSingleObjectThrowsError() { + + when(aggregationResults.getUniqueMappedResult()).thenThrow(new IllegalArgumentException("o_O")); + + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> executeAggregation("returnSingleEntity")); + } + + @Test // DATAMONGO-2153 + void returnCollection() { + + List expected = Collections.singletonList(new PersonAggregate()); + when(aggregationResults.getMappedResults()).thenReturn(expected); + + assertThat(executeAggregation("returnCollection").result).isEqualTo(expected); + } + + @Test // GH-3623 + void returnNullWhenSingleResultIsNotPresent() { + + when(aggregationResults.getMappedResults()).thenReturn(Collections.emptyList()); + + assertThat(executeAggregation("simpleReturnType").result).isNull(); + } + + @Test // DATAMONGO-2153 + void 
returnRawResultType() { + assertThat(executeAggregation("returnRawResultType").result).isEqualTo(aggregationResults); + } + + @Test // DATAMONGO-2153 + void plainStringAggregationWithSortParameter() { + + AggregationInvocation invocation = executeAggregation("plainStringAggregation", + Sort.by(Direction.DESC, "lastname")); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME, SORT); + } + + @Test // DATAMONGO-2153 + void replaceParameter() { + + AggregationInvocation invocation = executeAggregation("parameterReplacementAggregation", "firstname"); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME); + } + + @Test // DATAMONGO-2153 + void replaceSpElParameter() { + + AggregationInvocation invocation = executeAggregation("spelParameterReplacementAggregation", "firstname"); + + assertThat(inputTypeOf(invocation)).isEqualTo(Person.class); + assertThat(targetTypeOf(invocation)).isEqualTo(PersonAggregate.class); + assertThat(pipelineOf(invocation)).containsExactly(GROUP_BY_LASTNAME); + } + + @Test // DATAMONGO-2153 + void aggregateWithCollation() { + + AggregationInvocation invocation = executeAggregation("aggregateWithCollation"); + + assertThat(collationOf(invocation)).isEqualTo(Collation.of("de_AT")); + } + + @Test // DATAMONGO-2153 + void aggregateWithCollationParameter() { + + AggregationInvocation invocation = executeAggregation("aggregateWithCollation", Collation.of("en_US")); + + assertThat(collationOf(invocation)).isEqualTo(Collation.of("en_US")); + } + + @Test // GH-3543 + void aggregationWithSliceReturnType() { + + StringBasedAggregation sba = createAggregationForMethod("aggregationWithSliceReturnType", Pageable.class); + + Object result = 
sba.execute(new Object[] { PageRequest.of(0, 1) }); + + assertThat(result).isInstanceOf(Slice.class); + } + + @Test // GH-3543 + void aggregationWithStreamReturnType() { + + when(operations.aggregateStream(any(TypedAggregation.class), any())).thenReturn(Stream.empty()); + + StringBasedAggregation sba = createAggregationForMethod("aggregationWithStreamReturnType", Pageable.class); + + Object result = sba.execute(new Object[] { PageRequest.of(0, 1) }); + + assertThat(result).isInstanceOf(Stream.class); + } + + @Test // DATAMONGO-2506 + void aggregateRaisesErrorOnInvalidReturnType() { + + Method method = ClassUtils.getMethod(UnsupportedRepository.class, "pageIsUnsupported", Pageable.class); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class), + factory, converter.getMappingContext()); + + assertThatExceptionOfType(InvalidMongoDbApiUsageException.class) // + .isThrownBy(() -> new StringBasedAggregation(queryMethod, operations, PARSER, + QueryMethodEvaluationContextProvider.DEFAULT)) // + .withMessageContaining("pageIsUnsupported") // + .withMessageContaining("Page"); + } + + @Test // GH-3230 + void aggregatePicksUpHintFromAnnotation() { + + AggregationInvocation invocation = executeAggregation("withHint"); + assertThat(hintOf(invocation)).isEqualTo("idx"); + } + + @Test // GH-4088 + void aggregateWithVoidReturnTypeSkipsResultOnOutStage() { + + AggregationInvocation invocation = executeAggregation("outSkipResult"); + + assertThat(skipResultsOf(invocation)).isTrue(); + } + + @Test // GH-4088 + void aggregateWithOutStageDoesNotSkipResults() { + + AggregationInvocation invocation = executeAggregation("outDoNotSkipResult"); + + assertThat(skipResultsOf(invocation)).isFalse(); + } + + @Test // GH-2971 + void aggregatePicksUpReadPreferenceFromAnnotation() { + + AggregationInvocation invocation = executeAggregation("withReadPreference"); + 
assertThat(readPreferenceOf(invocation)).isEqualTo(ReadPreference.secondaryPreferred()); + } + + private AggregationInvocation executeAggregation(String name, Object... args) { + + Class[] argTypes = Arrays.stream(args).map(Object::getClass).toArray(Class[]::new); + StringBasedAggregation aggregation = createAggregationForMethod(name, argTypes); + + ArgumentCaptor aggregationCaptor = ArgumentCaptor.forClass(TypedAggregation.class); + ArgumentCaptor targetTypeCaptor = ArgumentCaptor.forClass(Class.class); + + Object result = aggregation.execute(args); + + verify(operations).aggregate(aggregationCaptor.capture(), targetTypeCaptor.capture()); + + return new AggregationInvocation(aggregationCaptor.getValue(), targetTypeCaptor.getValue(), result); + } + + private StringBasedAggregation createAggregationForMethod(String name, Class... parameters) { + + Method method = ClassUtils.getMethod(SampleRepository.class, name, parameters); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class), + factory, converter.getMappingContext()); + return new StringBasedAggregation(queryMethod, operations, PARSER, QueryMethodEvaluationContextProvider.DEFAULT); + } + + private List pipelineOf(AggregationInvocation invocation) { + + AggregationOperationContext context = new TypeBasedAggregationOperationContext( + invocation.aggregation.getInputType(), converter.getMappingContext(), new QueryMapper(converter)); + + return invocation.aggregation.toPipeline(context); + } + + private Class inputTypeOf(AggregationInvocation invocation) { + return invocation.aggregation.getInputType(); + } + + @Nullable + private Collation collationOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? 
invocation.aggregation.getOptions().getCollation().orElse(null) + : null; + } + + @Nullable + private Object hintOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().getHintObject().orElse(null) + : null; + } + + private Boolean skipResultsOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().isSkipResults() + : false; + } + + @Nullable + private ReadPreference readPreferenceOf(AggregationInvocation invocation) { + return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().getReadPreference() + : null; + } + + private Class targetTypeOf(AggregationInvocation invocation) { + return invocation.getTargetType(); + } + + private interface SampleRepository extends Repository { + + @Meta(cursorBatchSize = 42, comment = "expensive-aggregation", allowDiskUse = true, maxExecutionTimeMs = 100) + @Aggregation({ RAW_GROUP_BY_LASTNAME_STRING, RAW_SORT_STRING }) + PersonAggregate plainStringAggregation(); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + PersonAggregate plainStringAggregation(Sort sort); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + PersonAggregate returnSingleEntity(); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + List returnCollection(); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + AggregationResults returnRawResultType(); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + AggregationResults returnRawResults(); + + @Aggregation(GROUP_BY_LASTNAME_STRING_WITH_PARAMETER_PLACEHOLDER) + PersonAggregate parameterReplacementAggregation(String attribute); + + @Aggregation(GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER) + PersonAggregate spelParameterReplacementAggregation(String arg0); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER }) + PersonAggregate multiOperationPipeline(String arg0); + + 
@Aggregation(pipeline = RAW_GROUP_BY_LASTNAME_STRING, collation = "de_AT") + PersonAggregate aggregateWithCollation(); + + @Aggregation(pipeline = RAW_GROUP_BY_LASTNAME_STRING, collation = "de_AT") + PersonAggregate aggregateWithCollation(Collation collation); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + Slice aggregationWithSliceReturnType(Pageable page); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + Stream aggregationWithStreamReturnType(Pageable page); + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + String simpleReturnType(); + + @Hint("idx") + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + String withHint(); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT }) + List outDoNotSkipResult(); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT }) + void outSkipResult(); + + @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT }, readPreference = "secondaryPreferred") + void withReadPreference(); + } + + private interface UnsupportedRepository extends Repository { + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + Page pageIsUnsupported(Pageable page); + } + + static class PersonAggregate { + + } + + private static final class AggregationInvocation { + + private final TypedAggregation aggregation; + private final Class targetType; + private final Object result; + + public AggregationInvocation(TypedAggregation aggregation, Class targetType, Object result) { + this.aggregation = aggregation; + this.targetType = targetType; + this.result = result; + } + + public TypedAggregation getAggregation() { + return this.aggregation; + } + + public Class getTargetType() { + return this.targetType; + } + + public Object getResult() { + return this.result; + } + + public String toString() { + return "StringBasedAggregationUnitTests.AggregationInvocation(aggregation=" + this.getAggregation() + + ", targetType=" + this.getTargetType() + ", result=" + this.getResult() + ")"; + } + } +} diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQueryUnitTests.java index 0860ed7ae2..51f210f024 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedMongoQueryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,27 +15,38 @@ */ package org.springframework.data.mongodb.repository.query; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.mockito.ArgumentMatchers.any; +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; import java.lang.reflect.Method; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Base64; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.UUID; -import javax.xml.bind.DatatypeConverter; - -import org.bson.BSON; +import org.bson.BsonBinarySubType; import org.bson.Document; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.bson.UuidRepresentation; +import 
org.bson.codecs.configuration.CodecRegistry; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.core.env.MapPropertySource; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.mongodb.core.DbCallback; import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; import org.springframework.data.mongodb.core.MongoOperations; @@ -52,9 +63,14 @@ import org.springframework.data.projection.SpelAwareProxyProjectionFactory; import org.springframework.data.repository.Repository; import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; -import org.springframework.data.repository.query.DefaultEvaluationContextProvider; +import org.springframework.data.repository.query.QueryMethodValueEvaluationContextAccessor; +import org.springframework.data.repository.query.ValueExpressionDelegate; +import org.springframework.expression.EvaluationException; import org.springframework.expression.spel.standard.SpelExpressionParser; +import com.mongodb.MongoClientSettings; +import com.mongodb.reactivestreams.client.MongoClients; + /** * Unit tests for {@link StringBasedMongoQuery}. 
* @@ -63,10 +79,12 @@ * @author Thomas Darimont * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class StringBasedMongoQueryUnitTests { - SpelExpressionParser PARSER = new SpelExpressionParser(); + ValueExpressionParser PARSER = ValueExpressionParser.create(SpelExpressionParser::new); + StandardEnvironment environment = new StandardEnvironment(); @Mock MongoOperations operations; @Mock ExecutableFind findOperation; @@ -74,12 +92,17 @@ public class StringBasedMongoQueryUnitTests { MongoConverter converter; - @Before + Map properties = new HashMap<>(); + MapPropertySource propertySource = new MapPropertySource("mock", properties); + + @BeforeEach public void setUp() { this.converter = new MappingMongoConverter(factory, new MongoMappingContext()); + environment.getPropertySources().addFirst(propertySource); doReturn(findOperation).when(operations).query(any()); + doReturn(MongoClientSettings.getDefaultCodecRegistry()).when(operations).execute(any()); } @Test @@ -91,7 +114,7 @@ public void bindsSimplePropertyCorrectly() { org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } @Test @@ -110,7 +133,7 @@ public void bindsComplexPropertyCorrectly() { Document queryObject = new Document("address", document); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery(queryObject); - assertThat(query.getQueryObject().toJson(), is(reference.getQueryObject().toJson())); + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); } @Test @@ -129,7 +152,7 @@ public void bindsMultipleParametersCorrectly() { reference.append("address", 
addressDocument); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject().toJson(), is(reference.toJson())); + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.toJson()); } @Test @@ -139,8 +162,8 @@ public void bindsNullParametersCorrectly() { ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, new Object[] { null }); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject().containsKey("address"), is(true)); - assertThat(query.getQueryObject().get("address"), is(nullValue())); + assertThat(query.getQueryObject().containsKey("address")).isTrue(); + assertThat(query.getQueryObject().get("address")).isNull(); } @Test // DATAMONGO-821 @@ -150,19 +173,20 @@ public void bindsDbrefCorrectly() { ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), is(new BasicQuery("{ fans : { $not : { $size : 0 } } }").getQueryObject())); + assertThat(query.getQueryObject()) + .isEqualTo(new BasicQuery("{ fans : { $not : { $size : 0 } } }").getQueryObject()); } @Test // DATAMONGO-566 public void constructsDeleteQueryCorrectly() { StringBasedMongoQuery mongoQuery = createQueryForMethod("removeByLastname", String.class); - assertThat(mongoQuery.isDeleteQuery(), is(true)); + assertThat(mongoQuery.isDeleteQuery()).isTrue(); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-566 + @Test // DATAMONGO-566 public void preventsDeleteAndCountFlagAtTheSameTime() { - createQueryForMethod("invalidMethod", String.class); + assertThatIllegalArgumentException().isThrownBy(() -> createQueryForMethod("invalidMethod", String.class)); } @Test // DATAMONGO-420 @@ -177,9 +201,9 @@ public void shouldSupportFindByParameterizedCriteriaAndFields() { 
Map.class); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), - is(new BasicQuery("{ \"firstname\": \"first\", \"lastname\": \"last\"}").getQueryObject())); - assertThat(query.getFieldsObject(), is(new BasicQuery(null, "{ \"lastname\": 1}").getFieldsObject())); + assertThat(query.getQueryObject()) + .isEqualTo(new BasicQuery("{ \"firstname\": \"first\", \"lastname\": \"last\"}").getQueryObject()); + assertThat(query.getFieldsObject()).isEqualTo(new BasicQuery(null, "{ \"lastname\": 1}").getFieldsObject()); } @Test // DATAMONGO-420 @@ -190,8 +214,8 @@ public void shouldSupportRespectExistingQuotingInFindByTitleBeginsWithExplicitQu org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject().toJson(), - is(new BasicQuery("{title: {$regex: '^fun', $options: 'i'}}").getQueryObject().toJson())); + assertThat(query.getQueryObject().toJson()) + .isEqualTo(new BasicQuery("{title: {$regex: '^fun', $options: 'i'}}").getQueryObject().toJson()); } @Test // DATAMONGO-995, DATAMONGO-420 @@ -203,9 +227,9 @@ public void shouldParseQueryWithParametersInExpression() { org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), - is(new BasicQuery("{$where: 'return this.date.getUTCMonth() == 3 && this.date.getUTCDay() == 4;'}") - .getQueryObject())); + assertThat(query.getQueryObject()) + .isEqualTo(new BasicQuery("{$where: 'return this.date.getUTCMonth() == 3 && this.date.getUTCDay() == 4;'}") + .getQueryObject()); } @Test // DATAMONGO-995, DATAMONGO-420 @@ -217,7 +241,7 @@ public void bindsSimplePropertyAlreadyQuotedCorrectly() { org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}"); - assertThat(query.getQueryObject(), 
is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } @Test // DATAMONGO-995, DATAMONGO-420 @@ -229,7 +253,7 @@ public void bindsSimplePropertyAlreadyQuotedWithRegexCorrectly() { org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : '^Mat.*'}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } @Test // DATAMONGO-995, DATAMONGO-420 @@ -241,7 +265,7 @@ public void bindsSimplePropertyWithRegexCorrectly() { org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : '^Mat.*'}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } @Test // DATAMONGO-1070 @@ -253,7 +277,7 @@ public void parsesDbRefDeclarationsCorrectly() { org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(parameterAccessor); Document dbRef = DocumentTestUtils.getTypedValue(query.getQueryObject(), "reference", Document.class); - assertThat(dbRef, is(new Document("$ref", "reference").append("$id", "myid"))); + assertThat(dbRef).isEqualTo(new Document("$ref", "reference").append("$id", "myid")); } @Test // DATAMONGO-1072 @@ -265,7 +289,7 @@ public void shouldParseJsonKeyReplacementCorrectly() { org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(parameterAccessor); - assertThat(query.getQueryObject(), is(new Document().append("key", "value"))); + assertThat(query.getQueryObject()).isEqualTo(new Document().append("key", "value")); } @Test // DATAMONGO-990 @@ -277,20 +301,56 @@ public void shouldSupportExpressionsInCustomQueries() { 
org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'Matthews'}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); + } + + @Test // GH-3050 + public void shouldSupportExpressionsAndPropertiesInCustomQueries() { + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "Matthews"); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndProperty", String.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{'lastname' : 'Matthews', 'firstname' : 'some-default'}"); + + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); + } + + @Test // GH-3050 + public void shouldSupportPropertiesInCustomQueries() { + + properties.put("foo", "bar"); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithProperty"); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : 'bar'}"); + + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); + } + + @Test // GH-3050 + public void shouldFailWhenPropertiesWithNoDefaultValueInCustomQueries() { + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithProperty"); + + assertThatThrownBy(() -> mongoQuery.createQuery(accessor)) + .isInstanceOf(EvaluationException.class) + .hasMessageContaining("Could not resolve placeholder 'foo' in value 
\"${foo}\""); } @Test // DATAMONGO-1244 public void shouldSupportExpressionsInCustomQueriesWithNestedObject() { - ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, true, "param1", "param2"); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, true, "param1"); StringBasedMongoQuery mongoQuery = createQueryForMethod("findByQueryWithExpressionAndNestedObject", boolean.class, String.class); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{ \"id\" : { \"$exists\" : true}}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } @Test // DATAMONGO-1244 @@ -304,7 +364,7 @@ public void shouldSupportExpressionsInCustomQueriesWithMultipleNestedObjects() { org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( "{ \"id\" : { \"$exists\" : true} , \"foo\" : 42 , \"bar\" : { \"$exists\" : false}}"); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } @Test // DATAMONGO-1290 @@ -316,9 +376,109 @@ public void shouldSupportNonQuotedBinaryDataReplacement() { org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : { '$binary' : '" - + DatatypeConverter.printBase64Binary(binaryData) + "', '$type' : '" + BSON.B_GENERAL + "'}}"); + + Base64.getEncoder().encodeToString(binaryData) + "', '$type' : '" + BsonBinarySubType.BINARY.getValue() + "'}}"); + + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); + } + + @Test // DATAMONGO-2029 + public void shouldSupportNonQuotedBinaryCollectionDataReplacement() { + + byte[] binaryData = 
"Matthews".getBytes(StandardCharsets.UTF_8); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, + (Object) Collections.singletonList(binaryData)); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsBinaryIn", List.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery("{'lastname' : { $in: [{'$binary' : '" + + Base64.getEncoder().encodeToString(binaryData) + "', '$type' : '" + BsonBinarySubType.BINARY.getValue() + "'}] }}"); + + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); + } + + @Test // DATAMONGO-1911 + public void shouldSupportNonQuotedUUIDReplacement() { + + UUID uuid = UUID.fromString("864de43b-e3ea-f1e4-3663-fb8240b659b9"); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, (Object) uuid); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsUUID", UUID.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{'lastname' : { $binary:\"5PHq4zvkTYa5WbZAgvtjNg==\", $type: \"03\"}}"); + + // CodecRegistry registry = + // MongoClientSettings.builder().uuidRepresentation(UuidRepresentation.JAVA_LEGACY).build().getCodecRegistry(); + + // TODO: use OverridableUuidRepresentationCodecRegistry instead to save resources + CodecRegistry registry = MongoClients + .create(MongoClientSettings.builder().uuidRepresentation(UuidRepresentation.JAVA_LEGACY).build()) + .getDatabase("database").getCodecRegistry(); + + // OverridableUuidRepresentationCodecRegistry + + assertThat(query.getQueryObject().toJson(registry.get(Document.class))) + .isEqualTo(reference.getQueryObject().toJson()); + } + + @Test // DATAMONGO-2029 + public void 
shouldSupportNonQuotedUUIDCollectionReplacement() { + + UUID uuid1 = UUID.fromString("864de43b-e3ea-f1e4-3663-fb8240b659b9"); + UUID uuid2 = UUID.fromString("864de43b-cafe-f1e4-3663-fb8240b659b9"); + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, + (Object) Arrays.asList(uuid1, uuid2)); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsUUIDIn", List.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{'lastname' : { $in: [{ $binary : \"5PHq4zvkTYa5WbZAgvtjNg==\", $type : \"03\" }, { $binary : \"5PH+yjvkTYa5WbZAgvtjNg==\", $type : \"03\" }]}}"); + + // TODO: use OverridableUuidRepresentationCodecRegistry instead to save resources + CodecRegistry registry = MongoClients + .create(MongoClientSettings.builder().uuidRepresentation(UuidRepresentation.JAVA_LEGACY).build()) + .getDatabase("database").getCodecRegistry(); + assertThat(query.getQueryObject().toJson(registry.get(Document.class))) + .isEqualTo(reference.getQueryObject().toJson()); + } + + @Test // DATAMONGO-2427 + public void shouldSupportNonQuotedUUIDCollectionReplacementWhenUsingNonLegacyUUIDCodec() { - assertThat(query.getQueryObject().toJson(), is(reference.getQueryObject().toJson())); + // TODO: use OverridableUuidRepresentationCodecRegistry instead to save resources + CodecRegistry registry = MongoClients + .create(MongoClientSettings.builder().uuidRepresentation(UuidRepresentation.STANDARD).build()) + .getDatabase("database").getCodecRegistry(); + when(operations.execute(any(DbCallback.class))).thenReturn(registry); + + UUID uuid1 = UUID.fromString("864de43b-e3ea-f1e4-3663-fb8240b659b9"); + UUID uuid2 = UUID.fromString("864de43b-cafe-f1e4-3663-fb8240b659b9"); + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, + (Object) Arrays.asList(uuid1, uuid2)); + StringBasedMongoQuery 
mongoQuery = createQueryForMethod("findByLastnameAsUUIDIn", List.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{'lastname' : { $in: [{ $binary : \"hk3kO+Pq8eQ2Y/uCQLZZuQ==\", $type : \"04\" }, { $binary : \"hk3kO8r+8eQ2Y/uCQLZZuQ==\", $type : \"04\" }]}}"); + + assertThat(query.getQueryObject().toJson(registry.get(Document.class))) + .isEqualTo(reference.getQueryObject().toJson()); + } + + @Test // DATAMONGO-1911 + public void shouldSupportQuotedUUIDReplacement() { + + UUID uuid = UUID.randomUUID(); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, (Object) uuid); + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByLastnameAsStringUUID", UUID.class); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + org.springframework.data.mongodb.core.query.Query reference = new BasicQuery( + "{'lastname' : '" + uuid.toString() + "'}"); + + assertThat(query.getQueryObject().toJson()).isEqualTo(reference.getQueryObject().toJson()); } @Test // DATAMONGO-1454 @@ -326,7 +486,7 @@ public void shouldSupportExistsProjection() { StringBasedMongoQuery mongoQuery = createQueryForMethod("existsByLastname", String.class); - assertThat(mongoQuery.isExistsQuery(), is(true)); + assertThat(mongoQuery.isExistsQuery()).isTrue(); } @Test // DATAMONGO-1565 @@ -343,7 +503,7 @@ public void bindsPropertyReferenceMultipleTimesCorrectly() { Document queryObject = new Document("$or", or); org.springframework.data.mongodb.core.query.Query reference = new BasicQuery(queryObject); - assertThat(query.getQueryObject(), is(reference.getQueryObject())); + assertThat(query.getQueryObject()).isEqualTo(reference.getQueryObject()); } @Test // DATAMONGO-1565 @@ -355,8 +515,8 @@ public void shouldIgnorePlaceholderPatternInReplacementValue() { StringBasedMongoQuery mongoQuery = 
createQueryForMethod("findByStringWithWildcardChar", String.class, String.class); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), - is(Document.parse("{ \"arg0\" : \"argWith?1andText\" , \"arg1\" : \"nothing-special\"}"))); + assertThat(query.getQueryObject()) + .isEqualTo(Document.parse("{ \"arg0\" : \"argWith?1andText\" , \"arg1\" : \"nothing-special\"}")); } @Test // DATAMONGO-1565 @@ -366,9 +526,9 @@ public void shouldQuoteStringReplacementCorrectly() { ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "Matthews', password: 'foo"); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), - is(not(new Document().append("lastname", "Matthews").append("password", "foo")))); - assertThat(query.getQueryObject(), is(new Document("lastname", "Matthews', password: 'foo"))); + assertThat(query.getQueryObject()) + .isNotEqualTo(new Document().append("lastname", "Matthews").append("password", "foo")); + assertThat(query.getQueryObject()).isEqualTo(new Document("lastname", "Matthews', password: 'foo")); } @Test // DATAMONGO-1565 @@ -378,9 +538,9 @@ public void shouldQuoteStringReplacementContainingQuotesCorrectly() { ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "Matthews\", password: \"foo"); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), - is(not(new Document().append("lastname", "Matthews").append("password", "foo")))); - assertThat(query.getQueryObject(), is(new Document("lastname", "Matthews\", password: \"foo"))); + assertThat(query.getQueryObject()) + .isNotEqualTo(new Document().append("lastname", "Matthews").append("password", "foo")); + assertThat(query.getQueryObject()).isEqualTo(new Document("lastname", "Matthews\", password: \"foo")); } @Test // 
DATAMONGO-1565 @@ -391,7 +551,7 @@ public void shouldQuoteStringReplacementWithQuotationsCorrectly() { "\"Dave Matthews\", password: 'foo"); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), is(new Document("lastname", "\"Dave Matthews\", password: 'foo"))); + assertThat(query.getQueryObject()).isEqualTo(new Document("lastname", "\"Dave Matthews\", password: 'foo")); } @Test // DATAMONGO-1565, DATAMONGO-1575 @@ -401,7 +561,7 @@ public void shouldQuoteComplexQueryStringCorrectly() { ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "{ $ne : \"calamity\" }"); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), is(new Document("lastname", "{ $ne : \"calamity\" }"))); + assertThat(query.getQueryObject()).isEqualTo(new Document("lastname", "{ $ne : \"calamity\" }")); } @Test // DATAMONGO-1565, DATAMONGO-1575 @@ -413,7 +573,7 @@ public void shouldQuotationInQuotedComplexQueryString() { org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), is(new Document("lastname", "{ $ne : \"\\\"calamity\\\"\" }"))); + assertThat(query.getQueryObject()).isEqualTo(new Document("lastname", "{ $ne : \"\\\"calamity\\\"\" }")); } @Test // DATAMONGO-1575, DATAMONGO-1770 @@ -424,7 +584,7 @@ public void shouldTakeBsonParameterAsIs() { new Document("$regex", "^calamity$")); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), is(new Document("arg0", new Document("$regex", "^calamity$")))); + assertThat(query.getQueryObject()).isEqualTo(new Document("arg0", new Document("$regex", "^calamity$"))); } @Test // DATAMONGO-1575, DATAMONGO-1770 @@ -434,7 +594,7 @@ public void shouldReplaceParametersInInQuotedExpressionOfNestedQueryOperator() { 
ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "calamity"); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), is(new Document("lastname", new Document("$regex", "^(calamity)")))); + assertThat(query.getQueryObject()).isEqualTo(new Document("lastname", new Document("$regex", "^(calamity)"))); } @Test // DATAMONGO-1603 @@ -445,8 +605,8 @@ public void shouldAllowReuseOfPlaceholderWithinQuery() { ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "calamity", "regalia"); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), - is(new Document().append("arg0", "calamity").append("arg1", "regalia").append("arg2", "calamity"))); + assertThat(query.getQueryObject()) + .isEqualTo(new Document().append("arg0", "calamity").append("arg1", "regalia").append("arg2", "calamity")); } @Test // DATAMONGO-1603 @@ -457,8 +617,8 @@ public void shouldAllowReuseOfQuotedPlaceholderWithinQuery() { ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "calamity", "regalia"); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), - is(new Document().append("arg0", "calamity").append("arg1", "regalia").append("arg2", "calamity"))); + assertThat(query.getQueryObject()) + .isEqualTo(new Document().append("arg0", "calamity").append("arg1", "regalia").append("arg2", "calamity")); } @Test // DATAMONGO-1603 @@ -469,8 +629,8 @@ public void shouldAllowReuseOfQuotedPlaceholderWithinQueryAndIncludeSuffixCorrec ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "calamity", "regalia"); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), - is(new Document().append("arg0", 
"calamity").append("arg1", "regalia").append("arg2", "calamitys"))); + assertThat(query.getQueryObject()) + .isEqualTo(new Document().append("arg0", "calamity").append("arg1", "regalia").append("arg2", "calamitys")); } @Test // DATAMONGO-1603 @@ -481,7 +641,7 @@ public void shouldAllowQuotedParameterWithSuffixAppended() { ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "calamity", "regalia"); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), is(new Document().append("arg0", "calamity").append("arg1", "regalias"))); + assertThat(query.getQueryObject()).isEqualTo(new Document().append("arg0", "calamity").append("arg1", "regalias")); } @Test // DATAMONGO-1603 @@ -492,8 +652,8 @@ public void shouldCaptureReplacementWithComplexSuffixCorrectly() { org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), is(Document.parse( - "{ \"$or\" : [ { \"firstname\" : { \"$regex\" : \".*calamity.*\" , \"$options\" : \"i\"}} , { \"lastname\" : { \"$regex\" : \".*calamityxyz.*\" , \"$options\" : \"i\"}}]}"))); + assertThat(query.getQueryObject()).isEqualTo(Document.parse( + "{ \"$or\" : [ { \"firstname\" : { \"$regex\" : \".*calamity.*\" , \"$options\" : \"i\"}} , { \"lastname\" : { \"$regex\" : \".*calamityxyz.*\" , \"$options\" : \"i\"}}]}")); } @Test // DATAMONGO-1603 @@ -504,8 +664,8 @@ public void shouldAllowPlaceholderReuseInQuotedValue() { org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), - is(Document.parse("{ 'lastname' : { '$regex' : '^(calamity|John regalia|regalia)'} }"))); + assertThat(query.getQueryObject()) + .isEqualTo(Document.parse("{ 'lastname' : { '$regex' : '^(calamity|John regalia|regalia)'} }")); } @Test // DATAMONGO-1605 @@ -515,7 +675,7 @@ public void findUsingSpelShouldRetainParameterType() { 
ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, 100.01D); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), is(new Document("arg0", 100.01D))); + assertThat(query.getQueryObject()).isEqualTo(new Document("arg0", 100.01D)); } @Test // DATAMONGO-1605 @@ -525,7 +685,65 @@ public void findUsingSpelShouldRetainNullValues() { ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, new Object[] { null }); org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); - assertThat(query.getQueryObject(), is(new Document("arg0", null))); + assertThat(query.getQueryObject()).isEqualTo(new Document("arg0", null)); + } + + @Test // DATAMONGO-2119 + public void spelShouldIgnoreJsonParseErrorsForRegex() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findByPersonLastnameRegex", Person.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, + new Person("Molly", "Chandler")); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + + assertThat(query.getQueryObject().toJson()) + .isEqualTo(new BasicQuery("{lastname: {$regex: 'Chandler'}}").getQueryObject().toJson()); + } + + @Test // DATAMONGO-2149 + public void shouldParseFieldsProjectionWithSliceCorrectly() { + + StringBasedMongoQuery mongoQuery = createQueryForMethod("findWithSliceInProjection", String.class, int.class, + int.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, "Bruce Banner", 0, 5); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + + assertThat(query.getFieldsObject()).isEqualTo(Document.parse("{ \"fans\" : { \"$slice\" : [0, 5] } }")); + } + + @Test // DATAMONGO-1593 + public void shouldRenderObjectIdParameterCorrectly() { + + ObjectId id = new ObjectId(); + + 
StringBasedMongoQuery mongoQuery = createQueryForMethod("singeObjectIdArgInQueryString", String.class); + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, id.toString()); + + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + assertThat(query.getQueryObject()).isEqualTo(new Document("arg0", id)); + } + + @Test // DATAMONGO-1593 + public void shouldRenderMultipleObjectIdParametersCorrectly() { + + ObjectId id = new ObjectId(); + ObjectId readUsersId = new ObjectId(); + + StringBasedMongoQuery mongoQuery = createQueryForMethod("multipleObjectIdArgsInQueryString", String.class, + String.class); + + ConvertingParameterAccessor accessor = StubParameterAccessor.getAccessor(converter, id.toString(), + readUsersId.toString()); + org.springframework.data.mongodb.core.query.Query query = mongoQuery.createQuery(accessor); + + assertThat(query.getQueryObject().get("arg0")).isEqualTo(id); + assertThat(query.getQueryObject().get("$or")).isInstanceOf(List.class); + assertThat(DocumentTestUtils.getAsDBList(query.getQueryObject(), "$or").get(0)) + .isEqualTo(new Document("arg1.value0", readUsersId)); + assertThat(DocumentTestUtils.getAsDBList(query.getQueryObject(), "$or").get(1)) + .isEqualTo(new Document("arg1.value1", readUsersId)); } private StringBasedMongoQuery createQueryForMethod(String name, Class... parameters) { @@ -536,7 +754,9 @@ private StringBasedMongoQuery createQueryForMethod(String name, Class... 
para ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class), factory, converter.getMappingContext()); - return new StringBasedMongoQuery(queryMethod, operations, PARSER, DefaultEvaluationContextProvider.INSTANCE); + QueryMethodValueEvaluationContextAccessor accessor = new QueryMethodValueEvaluationContextAccessor( + environment, Collections.emptySet()); + return new StringBasedMongoQuery(queryMethod, operations, new ValueExpressionDelegate(accessor, PARSER)); } catch (Exception e) { throw new IllegalArgumentException(e.getMessage(), e); @@ -551,6 +771,18 @@ private interface SampleRepository extends Repository { @Query("{ 'lastname' : ?0 }") Person findByLastnameAsBinary(byte[] lastname); + @Query("{ 'lastname' : { $in: ?0} }") + Person findByLastnameAsBinaryIn(List lastname); + + @Query("{ 'lastname' : ?0 }") + Person findByLastnameAsUUID(UUID lastname); + + @Query("{ 'lastname' : { $in : ?0} }") + Person findByLastnameAsUUIDIn(List lastname); + + @Query("{ 'lastname' : '?0' }") + Person findByLastnameAsStringUUID(UUID lastname); + @Query("{ 'lastname' : '?0' }") Person findByLastnameQuoted(String lastname); @@ -593,6 +825,12 @@ private interface SampleRepository extends Repository { @Query("{'lastname': ?#{[0]} }") List findByQueryWithExpression(String param0); + @Query("{'lastname': ?#{[0]}, 'firstname': ?${absent-property:some-default} }") + List findByQueryWithExpressionAndProperty(String param0); + + @Query("{'lastname': ?${foo} }") + List findByQueryWithProperty(); + @Query("{'id':?#{ [0] ? 
{ $exists :true} : [1] }}") List findByQueryWithExpressionAndNestedObject(boolean param0, String param1); @@ -623,10 +861,24 @@ private interface SampleRepository extends Repository { @Query("{ 'arg0' : '?0', 'arg1' : '?1s' }") List findByWhenQuotedAndSomeStuffAppended(String arg0, String arg1); - @Query("{ 'lastname' : { '$regex' : '^(?0|John ?1|?1)'} }") // use spel or some regex string this is fucking bad + @Query("{ 'lastname' : { '$regex' : '^(?0|John ?1|?1)'} }") + // use spel or some regex string this is bad Person findByLastnameRegex(String lastname, String alternative); @Query("{ arg0 : ?#{[0]} }") List findByUsingSpel(Object arg0); + + @Query("{ 'lastname' : { '$regex' : ?#{[0].lastname} } }") + Person findByPersonLastnameRegex(Person key); + + @Query(value = "{ 'id' : ?0 }", fields = "{ 'fans': { '$slice': [ ?1, ?2 ] } }") + Person findWithSliceInProjection(String id, int skip, int limit); + + @Query("{ 'arg0' : { \"$oid\" : ?0} }") + List singeObjectIdArgInQueryString(String arg0); + + @Query("{ 'arg0' : { \"$oid\" : ?0} , '$or' : [ { 'arg1.value0' : { \"$oid\" : ?1 } }, { 'arg1.value1' : { \"$oid\" : ?1 } } ] }") + List multipleObjectIdArgsInQueryString(String arg0, String arg1); } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StubParameterAccessor.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StubParameterAccessor.java index 4bb09ed52b..1927378e80 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StubParameterAccessor.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StubParameterAccessor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,16 +17,20 @@ import java.util.Arrays; import java.util.Iterator; -import java.util.Optional; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; +import org.springframework.data.domain.ScrollPosition; import org.springframework.data.domain.Sort; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.TextCriteria; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.repository.query.ParameterAccessor; +import org.springframework.lang.Nullable; /** * Simple {@link ParameterAccessor} that returns the given parameters unfiltered. @@ -38,7 +42,8 @@ class StubParameterAccessor implements MongoParameterAccessor { private final Object[] values; - private Range range = new Range(null, null); + private Range range = Range.unbounded(); + private @Nullable Collation colllation; /** * Creates a new {@link ConvertingParameterAccessor} backed by a {@link StubParameterAccessor} simply returning the @@ -61,77 +66,57 @@ public StubParameterAccessor(Object... values) { if (value instanceof Range) { this.range = (Range) value; } else if (value instanceof Distance) { - this.range = new Range(null, (Distance) value); + this.range = Range.from(Bound. 
unbounded()).to(Bound.inclusive((Distance) value)); + } else if (value instanceof Collation) { + this.colllation = Collation.class.cast(value); } } } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#getPageable() - */ + @Override + public ScrollPosition getScrollPosition() { + return null; + } + public Pageable getPageable() { return null; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#getBindableValue(int) - */ public Object getBindableValue(int index) { return values[index]; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#hasBindableNullValue() - */ public boolean hasBindableNullValue() { return false; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#getSort() - */ public Sort getSort() { return Sort.unsorted(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getDistanceRange() - */ @Override public Range getDistanceRange() { return range; } - /* - * (non-Javadoc) - * @see org.springframework.data.repository.query.ParameterAccessor#iterator() - */ public Iterator iterator() { return Arrays.asList(values).iterator(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.MongoParameterAccessor#getGeoNearLocation() - */ public Point getGeoNearLocation() { return null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getFullText() - */ @Override public TextCriteria getFullText() { return null; } + @Override + public Collation getCollation() { + return this.colllation; + } + /* (non-Javadoc) * @see org.springframework.data.mongodb.repository.query.MongoParameterAccessor#getValues() */ @@ -140,12 +125,13 @@ public Object[] getValues() { return this.values; } - /* - * (non-Javadoc) - * @see 
org.springframework.data.repository.query.ParameterAccessor#getDynamicProjection() - */ @Override - public Optional> getDynamicProjection() { - return Optional.empty(); + public Class findDynamicProjection() { + return null; + } + + @Override + public UpdateDefinition getUpdate() { + return null; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/DefaultCrudMethodMetadataUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/DefaultCrudMethodMetadataUnitTests.java new file mode 100644 index 0000000000..85b487af65 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/DefaultCrudMethodMetadataUnitTests.java @@ -0,0 +1,61 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Optional; + +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.ReadPreference; +import org.springframework.data.mongodb.repository.support.CrudMethodMetadataPostProcessor.DefaultCrudMethodMetadata; +import org.springframework.data.repository.CrudRepository; +import org.springframework.util.ReflectionUtils; + +/** + * Unit tests for {@link DefaultCrudMethodMetadata}. + * + * @author Christoph Strobl + */ +class DefaultCrudMethodMetadataUnitTests { + + @Test // GH-4542 + void detectsReadPreferenceOnRepositoryInterface() { + + DefaultCrudMethodMetadata metadata = new DefaultCrudMethodMetadata(ReadPreferenceAnnotated.class, + ReflectionUtils.findMethod(ReadPreferenceAnnotated.class, "findAll")); + + assertThat(metadata.getReadPreference()).hasValue(com.mongodb.ReadPreference.primary()); + } + + @Test // GH-4542 + void favorsReadPreferenceOfAnnotatedMethod() { + + DefaultCrudMethodMetadata metadata = new DefaultCrudMethodMetadata(ReadPreferenceAnnotated.class, + ReflectionUtils.findMethod(ReadPreferenceAnnotated.class, "findById", Object.class)); + + assertThat(metadata.getReadPreference()).hasValue(com.mongodb.ReadPreference.secondary()); + } + + @ReadPreference("primary") + interface ReadPreferenceAnnotated extends CrudRepository { + + @Override + @ReadPreference("secondary") + Optional findById(String s); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListenerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListenerUnitTests.java index 84b0533235..10c513dda4 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListenerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/IndexEnsuringQueryCreationListenerUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,50 +15,146 @@ */ package org.springframework.data.mongodb.repository.support; +import static org.assertj.core.api.Assertions.*; import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Answers; +import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.index.IndexDefinition; +import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.index.IndexOperationsProvider; +import org.springframework.data.mongodb.repository.query.MongoEntityMetadata; +import org.springframework.data.mongodb.repository.query.MongoQueryMethod; import 
org.springframework.data.mongodb.repository.query.PartTreeMongoQuery; import org.springframework.data.repository.query.parser.PartTree; +import org.springframework.data.util.Streamable; /** * Unit tests for {@link IndexEnsuringQueryCreationListener}. * * @author Oliver Gierke */ -@RunWith(MockitoJUnitRunner.class) -public class IndexEnsuringQueryCreationListenerUnitTests { +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class IndexEnsuringQueryCreationListenerUnitTests { - IndexEnsuringQueryCreationListener listener; + private IndexEnsuringQueryCreationListener listener; @Mock IndexOperationsProvider provider; + @Mock PartTree partTree; + @Mock PartTreeMongoQuery partTreeQuery; + @Mock MongoQueryMethod queryMethod; + @Mock IndexOperations indexOperations; + @Mock MongoEntityMetadata entityInformation; + + @BeforeEach + void setUp() { - @Before - public void setUp() { this.listener = new IndexEnsuringQueryCreationListener(provider); + + partTreeQuery = mock(PartTreeMongoQuery.class, Answers.RETURNS_MOCKS); + when(partTreeQuery.getTree()).thenReturn(partTree); + when(provider.indexOps(anyString(), any())).thenReturn(indexOperations); + when(queryMethod.getEntityInformation()).thenReturn(entityInformation); + when(entityInformation.getCollectionName()).thenReturn("persons"); } @Test // DATAMONGO-1753 - public void skipsQueryCreationForMethodWithoutPredicate() { - - PartTree tree = mock(PartTree.class); - when(tree.hasPredicate()).thenReturn(false); + void skipsQueryCreationForMethodWithoutPredicate() { - PartTreeMongoQuery query = mock(PartTreeMongoQuery.class, Answers.RETURNS_MOCKS); - when(query.getTree()).thenReturn(tree); + when(partTree.hasPredicate()).thenReturn(false); - listener.onCreation(query); + listener.onCreation(partTreeQuery); verify(provider, times(0)).indexOps(any()); } + @Test // DATAMONGO-1854 + void usesCollationWhenPresentAndFixedValue() { + + when(partTree.hasPredicate()).thenReturn(true); + 
when(partTree.getParts()).thenReturn(Streamable.empty()); + when(partTree.getSort()).thenReturn(Sort.unsorted()); + when(partTreeQuery.getQueryMethod()).thenReturn(queryMethod); + when(queryMethod.hasAnnotatedCollation()).thenReturn(true); + when(queryMethod.getAnnotatedCollation()).thenReturn("en_US"); + + listener.onCreation(partTreeQuery); + + ArgumentCaptor indexArgumentCaptor = ArgumentCaptor.forClass(IndexDefinition.class); + verify(indexOperations).ensureIndex(indexArgumentCaptor.capture()); + + IndexDefinition indexDefinition = indexArgumentCaptor.getValue(); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new Document("collation", new Document("locale", "en_US"))); + } + + @Test // DATAMONGO-1854 + void usesCollationWhenPresentAndFixedDocumentValue() { + + when(partTree.hasPredicate()).thenReturn(true); + when(partTree.getParts()).thenReturn(Streamable.empty()); + when(partTree.getSort()).thenReturn(Sort.unsorted()); + when(partTreeQuery.getQueryMethod()).thenReturn(queryMethod); + when(queryMethod.hasAnnotatedCollation()).thenReturn(true); + when(queryMethod.getAnnotatedCollation()).thenReturn("{ 'locale' : 'en_US' }"); + + listener.onCreation(partTreeQuery); + + ArgumentCaptor indexArgumentCaptor = ArgumentCaptor.forClass(IndexDefinition.class); + verify(indexOperations).ensureIndex(indexArgumentCaptor.capture()); + + IndexDefinition indexDefinition = indexArgumentCaptor.getValue(); + assertThat(indexDefinition.getIndexOptions()).isEqualTo(new Document("collation", new Document("locale", "en_US"))); + } + + @Test // DATAMONGO-1854 + void skipsCollationWhenPresentButDynamic() { + + when(partTree.hasPredicate()).thenReturn(true); + when(partTree.getParts()).thenReturn(Streamable.empty()); + when(partTree.getSort()).thenReturn(Sort.unsorted()); + when(partTreeQuery.getQueryMethod()).thenReturn(queryMethod); + when(queryMethod.hasAnnotatedCollation()).thenReturn(true); + when(queryMethod.getAnnotatedCollation()).thenReturn("{ 'locale' : '?0' }"); + 
+ listener.onCreation(partTreeQuery); + + ArgumentCaptor indexArgumentCaptor = ArgumentCaptor.forClass(IndexDefinition.class); + verify(indexOperations).ensureIndex(indexArgumentCaptor.capture()); + + IndexDefinition indexDefinition = indexArgumentCaptor.getValue(); + assertThat(indexDefinition.getIndexOptions()).isEmpty(); + } + + @Test // DATAMONGO-1854 + void skipsCollationWhenNotPresent() { + + when(partTree.hasPredicate()).thenReturn(true); + when(partTree.getParts()).thenReturn(Streamable.empty()); + when(partTree.getSort()).thenReturn(Sort.unsorted()); + when(partTreeQuery.getQueryMethod()).thenReturn(queryMethod); + when(queryMethod.hasAnnotatedCollation()).thenReturn(false); + + listener.onCreation(partTreeQuery); + + ArgumentCaptor indexArgumentCaptor = ArgumentCaptor.forClass(IndexDefinition.class); + verify(indexOperations).ensureIndex(indexArgumentCaptor.capture()); + + IndexDefinition indexDefinition = indexArgumentCaptor.getValue(); + assertThat(indexDefinition.getIndexOptions()).isEmpty(); + } + interface SampleRepository { Object findAllBy(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBeanUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBeanUnitTests.java index 48e36bd0a4..dcfd57b018 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBeanUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBeanUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,16 @@ */ package org.springframework.data.mongodb.repository.support; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import java.util.List; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.convert.MongoConverter; @@ -37,7 +37,7 @@ * * @author Oliver Gierke */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class MongoRepositoryFactoryBeanUnitTests { @Mock MongoOperations operations; @@ -52,8 +52,8 @@ public void addsIndexEnsuringQueryCreationListenerIfConfigured() { factory.setCreateIndexesForQueryMethods(true); List listeners = getListenersFromFactory(factory); - assertThat(listeners.isEmpty(), is(false)); - assertThat(listeners, hasItem(instanceOf(IndexEnsuringQueryCreationListener.class))); + assertThat(listeners.isEmpty()).isFalse(); + assertThat(listeners.stream().filter(IndexEnsuringQueryCreationListener.class::isInstance)).isNotEmpty(); } @Test @@ -61,7 +61,7 @@ public void addsIndexEnsuringQueryCreationListenerIfConfigured() { public void doesNotAddIndexEnsuringQueryCreationListenerByDefault() { List listeners = getListenersFromFactory(new MongoRepositoryFactoryBean(ContactRepository.class)); - assertThat(listeners.size(), is(1)); + 
assertThat(listeners.size()).isEqualTo(1); } @SuppressWarnings({ "unchecked", "rawtypes" }) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryUnitTests.java index 68ac33546a..c40f24dacb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,80 +15,103 @@ */ package org.springframework.data.mongodb.repository.support; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import java.io.Serializable; import java.util.Optional; +import java.util.Set; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + import 
org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; -import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.ReadPreference; import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.repository.ListCrudRepository; import org.springframework.data.repository.Repository; /** * Unit test for {@link MongoRepositoryFactory}. * * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class MongoRepositoryFactoryUnitTests { - @Mock - MongoTemplate template; - - @Mock - MongoConverter converter; - - @Mock - @SuppressWarnings("rawtypes") - MappingContext mappingContext; + @Mock MongoOperations template; - @Mock - @SuppressWarnings("rawtypes") - MongoPersistentEntity entity; + MongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, new MongoMappingContext()); - @Before - @SuppressWarnings("unchecked") + @BeforeEach public void setUp() { when(template.getConverter()).thenReturn(converter); - when(converter.getMappingContext()).thenReturn(mappingContext); } @Test - @SuppressWarnings("unchecked") public void usesMappingMongoEntityInformationIfMappingContextSet() { - when(mappingContext.getRequiredPersistentEntity(Person.class)).thenReturn(entity); 
- when(entity.getType()).thenReturn(Person.class); - MongoRepositoryFactory factory = new MongoRepositoryFactory(template); MongoEntityInformation entityInformation = factory.getEntityInformation(Person.class); - assertTrue(entityInformation instanceof MappingMongoEntityInformation); + assertThat(entityInformation instanceof MappingMongoEntityInformation).isTrue(); } @Test // DATAMONGO-385 - @SuppressWarnings("unchecked") public void createsRepositoryWithIdTypeLong() { - when(mappingContext.getRequiredPersistentEntity(Person.class)).thenReturn(entity); - when(entity.getType()).thenReturn(Person.class); + MongoRepositoryFactory factory = new MongoRepositoryFactory(template); + MyPersonRepository repository = factory.getRepository(MyPersonRepository.class); + assertThat(repository).isNotNull(); + } + + @Test // GH-2971 + void considersCrudMethodMetadata() { MongoRepositoryFactory factory = new MongoRepositoryFactory(template); MyPersonRepository repository = factory.getRepository(MyPersonRepository.class); - assertThat(repository, is(notNullValue())); + repository.findById(42L); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(template).findOne(captor.capture(), eq(Person.class), eq("person")); + + Query value = captor.getValue(); + assertThat(value.getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondary()); + } + + @Test // GH-2971 + void ignoresCrudMethodMetadataOnNonAnnotatedMethods() { + + MongoRepositoryFactory factory = new MongoRepositoryFactory(template); + MyPersonRepository repository = factory.getRepository(MyPersonRepository.class); + repository.findAllById(Set.of(42L)); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(template).find(captor.capture(), eq(Person.class), eq("person")); + + Query value = captor.getValue(); + assertThat(value.getReadPreference()).isNull(); } - interface MyPersonRepository extends Repository { + interface MyPersonRepository extends ListCrudRepository { + 
@ReadPreference("secondary") + Optional findById(Long id); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/PersistableMappingMongoEntityInformationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/PersistableMappingMongoEntityInformationUnitTests.java deleted file mode 100644 index 90f93dd97b..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/PersistableMappingMongoEntityInformationUnitTests.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2017-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.repository.support; - -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; - -import lombok.Value; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.data.domain.Persistable; -import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; - -/** - * Tests for {@link PersistableMongoEntityInformation}. 
- * - * @author Christoph Strobl - * @author Oliver Gierke - */ -@RunWith(MockitoJUnitRunner.class) -public class PersistableMappingMongoEntityInformationUnitTests { - - @Mock MongoPersistentEntity persistableImplementingEntityTypeInfo; - - @Before - public void setUp() { - when(persistableImplementingEntityTypeInfo.getType()).thenReturn(TypeImplementingPersistable.class); - } - - @Test // DATAMONGO-1590 - public void considersPersistableIsNew() { - - PersistableMongoEntityInformation information = new PersistableMongoEntityInformation( - new MappingMongoEntityInformation(persistableImplementingEntityTypeInfo)); - - assertThat(information.isNew(new TypeImplementingPersistable(100L, false)), is(false)); - } - - @Value - static class TypeImplementingPersistable implements Persistable { - - private static final long serialVersionUID = -1619090149320971099L; - - Long id; - boolean isNew; - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java index 6dbede532e..7d9024e2fb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,21 +18,38 @@ import static org.assertj.core.api.Assertions.*; import java.util.Arrays; +import java.util.LinkedHashSet; import java.util.List; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.dao.PermissionDeniedDataAccessException; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.repository.Address; import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.QAddress; import org.springframework.data.mongodb.repository.QPerson; +import org.springframework.data.mongodb.repository.QUser; +import org.springframework.data.mongodb.repository.User; import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.repository.query.FluentQuery; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; + +import com.mongodb.MongoException; +import com.mongodb.client.MongoDatabase; /** * Integration test 
for {@link QuerydslMongoPredicateExecutor}. @@ -43,10 +60,12 @@ */ @ContextConfiguration( locations = "/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests-context.xml") -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) public class QuerydslMongoPredicateExecutorIntegrationTests { @Autowired MongoOperations operations; + @Autowired MongoDatabaseFactory dbFactory; + QuerydslMongoPredicateExecutor repository; Person dave, oliver, carter; @@ -57,7 +76,7 @@ public void setup() { MongoRepositoryFactory factory = new MongoRepositoryFactory(operations); MongoEntityInformation entityInformation = factory.getEntityInformation(Person.class); - repository = new QuerydslMongoPredicateExecutor(entityInformation, operations); + repository = new QuerydslMongoPredicateExecutor<>(entityInformation, operations); operations.dropCollection(Person.class); @@ -95,8 +114,258 @@ public void findOneWithPredicateReturnsOptionalEmptyWhenNoDataFound() { assertThat(repository.findOne(person.firstname.eq("batman"))).isNotPresent(); } - @Test(expected = IncorrectResultSizeDataAccessException.class) // DATAMONGO-1690 + @Test // DATAMONGO-1690 public void findOneWithPredicateThrowsExceptionForNonUniqueResults() { - repository.findOne(person.firstname.contains("e")); + assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class) + .isThrownBy(() -> repository.findOne(person.firstname.contains("e"))); + } + + @Test // DATAMONGO-1848 + public void findUsingAndShouldWork() { + + assertThat(repository.findAll( + person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname())))) + .containsExactly(dave); + } + + @Test // GH-3751 + public void findPage() { + + assertThat(repository + .findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname())), + PageRequest.of(0, 10)) + .getContent()).containsExactly(dave); + + assertThat(repository + 
.findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname())), + Pageable.unpaged()) + .getContent()).containsExactly(dave); + } + + @Test // GH-4771 + public void findUnpagedPage() { + + assertThat(repository.findAll(person.lastname.isNotNull(), Pageable.unpaged(Sort.by("firstname")))) + .containsExactly(carter, dave, oliver); + } + + @Test // DATAMONGO-362, DATAMONGO-1848 + public void springDataMongodbQueryShouldAllowJoinOnDBref() { + + User user1 = new User(); + user1.setUsername("user-1"); + + User user2 = new User(); + user2.setUsername("user-2"); + + User user3 = new User(); + user3.setUsername("user-3"); + + operations.save(user1); + operations.save(user2); + operations.save(user3); + + Person person1 = new Person("Max", "The Mighty"); + person1.setCoworker(user1); + + Person person2 = new Person("Jack", "The Ripper"); + person2.setCoworker(user2); + + Person person3 = new Person("Bob", "The Builder"); + person3.setCoworker(user3); + + operations.save(person1); + operations.save(person2); + operations.save(person3); + + List result = new SpringDataMongodbQuery<>(operations, Person.class).where() + .join(person.coworker, QUser.user).on(QUser.user.username.eq("user-2")).fetch(); + + assertThat(result).containsExactly(person2); + } + + @Test // DATAMONGO-362, DATAMONGO-1848 + public void springDataMongodbQueryShouldReturnEmptyOnJoinWithNoResults() { + + User user1 = new User(); + user1.setUsername("user-1"); + + User user2 = new User(); + user2.setUsername("user-2"); + + operations.save(user1); + operations.save(user2); + + Person person1 = new Person("Max", "The Mighty"); + person1.setCoworker(user1); + + Person person2 = new Person("Jack", "The Ripper"); + person2.setCoworker(user2); + + operations.save(person1); + operations.save(person2); + + List result = new SpringDataMongodbQuery<>(operations, Person.class).where() + .join(person.coworker, QUser.user).on(QUser.user.username.eq("does-not-exist")).fetch(); 
+ + assertThat(result).isEmpty(); + } + + @Test // DATAMONGO-595, DATAMONGO-1848 + public void springDataMongodbQueryShouldAllowElemMatchOnArrays() { + + Address adr1 = new Address("Hauptplatz", "4020", "Linz"); + Address adr2 = new Address("Stephansplatz", "1010", "Wien"); + Address adr3 = new Address("Tower of London", "EC3N 4AB", "London"); + + Person person1 = new Person("Max", "The Mighty"); + person1.setShippingAddresses(new LinkedHashSet<>(Arrays.asList(adr1, adr2))); + + Person person2 = new Person("Jack", "The Ripper"); + person2.setShippingAddresses(new LinkedHashSet<>(Arrays.asList(adr2, adr3))); + + operations.save(person1); + operations.save(person2); + + List result = new SpringDataMongodbQuery<>(operations, Person.class).where() + .anyEmbedded(person.shippingAddresses, QAddress.address).on(QAddress.address.city.eq("London")).fetch(); + + assertThat(result).containsExactly(person2); + } + + @Test(expected = PermissionDeniedDataAccessException.class) + // DATAMONGO-1434, DATAMONGO-1848 + public void translatesExceptionsCorrectly() { + + MongoOperations ops = new MongoTemplate(dbFactory) { + + @Override + protected MongoDatabase doGetDatabase() { + throw new MongoException(18, "Authentication Failed"); + } + }; + + MongoRepositoryFactory factory = new MongoRepositoryFactory(ops); + MongoEntityInformation entityInformation = factory.getEntityInformation(Person.class); + repository = new QuerydslMongoPredicateExecutor<>(entityInformation, ops); + + repository.findOne(person.firstname.contains("batman")); + } + + @Test // GH-3757 + public void findByShouldReturnFirstResult() { + + Person result = repository.findBy(person.firstname.eq(oliver.getFirstname()), + FluentQuery.FetchableFluentQuery::oneValue); + + assertThat(result).isEqualTo(oliver); + } + + @Test // GH-3757 + public void findByShouldReturnOneResult() { + + Person result = repository.findBy(person.firstname.eq(oliver.getFirstname()), + FluentQuery.FetchableFluentQuery::oneValue); + + 
assertThat(result).isEqualTo(oliver); + + assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class).isThrownBy( + () -> repository.findBy(person.lastname.eq(oliver.getLastname()), FluentQuery.FetchableFluentQuery::one)); + } + + @Test // GH-3757 + public void findByShouldReturnAll() { + + List result = repository.findBy(person.lastname.eq(oliver.getLastname()), + FluentQuery.FetchableFluentQuery::all); + + assertThat(result).hasSize(2); + } + + @Test // GH-3757 + public void findByShouldApplySortAll() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + List result = repository.findBy(person.lastname.eq(oliver.getLastname()), + it -> it.sortBy(Sort.by("firstname")).all()); + assertThat(result).containsSequence(dave, oliver); + + result = repository.findBy(person.lastname.eq(oliver.getLastname()), + it -> it.sortBy(Sort.by(Sort.Direction.DESC, "firstname")).all()); + assertThat(result).containsSequence(oliver, dave); + } + + @Test // GH-3757 + public void findByShouldApplyProjection() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + Person result = repository.findBy(person.lastname.eq(oliver.getLastname()), + it -> it.project("firstname").firstValue()); + + assertThat(result.getFirstname()).isNotNull(); + assertThat(result.getLastname()).isNull(); + } + + @Test // GH-3757 + public void findByShouldApplyPagination() { + + Page first = repository.findBy(person.lastname.eq(oliver.getLastname()), + it -> it.page(PageRequest.of(0, 1, Sort.by("firstname")))); + + assertThat(first.getTotalElements()).isEqualTo(2); + assertThat(first.getContent()).contains(dave); + + Page next = repository.findBy(person.lastname.eq(oliver.getLastname()), + it -> it.page(PageRequest.of(1, 1, Sort.by("firstname")))); + + assertThat(next.getTotalElements()).isEqualTo(2); + assertThat(next.getContent()).contains(oliver); + } + + @Test // GH-4889 + public void findByShouldApplySlice() { + + Slice first = 
repository.findBy(person.lastname.eq(oliver.getLastname()), + it -> it.slice(PageRequest.of(0, 1, Sort.by("firstname")))); + + assertThat(first.hasNext()).isTrue(); + assertThat(first.getContent()).contains(dave); + + Slice next = repository.findBy(person.lastname.eq(oliver.getLastname()), + it -> it.slice(PageRequest.of(1, 1, Sort.by("firstname")))); + + assertThat(next.hasNext()).isFalse(); + assertThat(next.getContent()).contains(oliver); + } + + @Test // GH-3757 + public void findByShouldCount() { + + long count = repository.findBy(person.lastname.eq(oliver.getLastname()), FluentQuery.FetchableFluentQuery::count); + assertThat(count).isEqualTo(2L); + + count = repository.findBy(person.lastname.eq("foo"), FluentQuery.FetchableFluentQuery::count); + assertThat(count).isEqualTo(0L); + } + + @Test // GH-3757 + public void findByShouldReportExists() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + boolean exists = repository.findBy(person.lastname.eq(oliver.getLastname()), + FluentQuery.FetchableFluentQuery::exists); + assertThat(exists).isTrue(); + + probe = new Person(); + probe.setLastname("foo"); + + exists = repository.findBy(person.lastname.eq("foo"), FluentQuery.FetchableFluentQuery::exists); + assertThat(exists).isFalse(); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupportTests.java index f4990bf9b4..89b82f4171 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupportTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslRepositorySupportTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,30 +15,38 @@ */ package org.springframework.data.mongodb.repository.support; -import static org.hamcrest.CoreMatchers.*; -import static org.junit.Assert.*; +import static org.assertj.core.api.Assertions.*; import java.util.Arrays; +import java.util.Objects; +import org.bson.types.ObjectId; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; +import org.springframework.beans.DirectFieldAccessor; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.FieldType; +import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.Person; import org.springframework.data.mongodb.repository.QPerson; import org.springframework.data.mongodb.repository.User; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.util.StringUtils; /** * Unit tests for {@link QuerydslRepositorySupport}. 
* * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch */ -@RunWith(SpringJUnit4ClassRunner.class) +@RunWith(SpringRunner.class) @ContextConfiguration("classpath:infrastructure.xml") public class QuerydslRepositorySupportTests { @@ -49,7 +57,9 @@ public class QuerydslRepositorySupportTests { @Before public void setUp() { + operations.remove(new Query(), Outer.class); operations.remove(new Query(), Person.class); + person = new Person("Dave", "Matthews"); operations.save(person); @@ -62,7 +72,7 @@ public void providesMongoQuery() { QPerson p = QPerson.person; QuerydslRepositorySupport support = new QuerydslRepositorySupport(operations) {}; SpringDataMongodbQuery query = support.from(p).where(p.lastname.eq("Matthews")); - assertThat(query.fetchOne(), is(person)); + assertThat(query.fetchOne()).isEqualTo(person); } @Test // DATAMONGO-1063 @@ -76,7 +86,7 @@ public void shouldAllowAny() { SpringDataMongodbQuery query = repoSupport.from(p).where(p.skills.any().in("guitarist")); - assertThat(query.fetchOne(), is(person)); + assertThat(query.fetchOne()).isEqualTo(person); } @Test // DATAMONGO-1394 @@ -94,7 +104,299 @@ public void shouldAllowDbRefAgainstIdProperty() { SpringDataMongodbQuery queryUsingIdField = repoSupport.from(p).where(p.coworker.id.eq(bart.getId())); SpringDataMongodbQuery queryUsingRefObject = repoSupport.from(p).where(p.coworker.eq(bart)); - assertThat(queryUsingIdField.fetchOne(), equalTo(person)); - assertThat(queryUsingIdField.fetchOne(), equalTo(queryUsingRefObject.fetchOne())); + assertThat(queryUsingIdField.fetchOne()).isEqualTo(person); + assertThat(queryUsingIdField.fetchOne()).isEqualTo(queryUsingRefObject.fetchOne()); + } + + @Test // DATAMONGO-1998 + public void shouldLeaveStringIdThatIsNoValidObjectIdAsItIs() { + + Outer outer = new Outer(); + outer.id = "outer-1"; + outer.inner = new Inner(); + outer.inner.id = "inner-1"; + outer.inner.value = "go climb a rock"; + + operations.save(outer); + + 
QQuerydslRepositorySupportTests_Outer o = QQuerydslRepositorySupportTests_Outer.outer; + SpringDataMongodbQuery query = repoSupport.from(o).where(o.inner.id.eq(outer.inner.id)); + + assertThat(query.fetchOne()).isEqualTo(outer); + } + + @Test // DATAMONGO-1998 + public void shouldConvertStringIdThatIsAValidObjectIdIntoTheSuch() { + + Outer outer = new Outer(); + outer.id = new ObjectId().toHexString(); + outer.inner = new Inner(); + outer.inner.id = new ObjectId().toHexString(); + outer.inner.value = "eat sleep workout repeat"; + + operations.save(outer); + + QQuerydslRepositorySupportTests_Outer o = QQuerydslRepositorySupportTests_Outer.outer; + SpringDataMongodbQuery query = repoSupport.from(o).where(o.inner.id.eq(outer.inner.id)); + + assertThat(query.fetchOne()).isEqualTo(outer); + } + + @Test // DATAMONGO-1810, DATAMONGO-1848 + public void shouldFetchObjectsViaStringWhenUsingInOnDbRef() { + + User bart = new User(); + DirectFieldAccessor dfa = new DirectFieldAccessor(bart); + dfa.setPropertyValue("id", "bart"); + + bart.setUsername("bart@simpson.com"); + operations.save(bart); + + User lisa = new User(); + dfa = new DirectFieldAccessor(lisa); + dfa.setPropertyValue("id", "lisa"); + + lisa.setUsername("lisa@simposon.com"); + operations.save(lisa); + + person.setCoworker(bart); + operations.save(person); + + QPerson p = QPerson.person; + + SpringDataMongodbQuery queryUsingIdFieldWithinInClause = repoSupport.from(p) + .where(p.coworker.id.in(Arrays.asList(bart.getId(), lisa.getId()))); + + SpringDataMongodbQuery queryUsingRefObject = repoSupport.from(p).where(p.coworker.eq(bart)); + + assertThat(queryUsingIdFieldWithinInClause.fetchOne()).isEqualTo(person); + assertThat(queryUsingIdFieldWithinInClause.fetchOne()).isEqualTo(queryUsingRefObject.fetchOne()); + } + + @Test // DATAMONGO-1810, DATAMONGO-1848 + public void shouldFetchObjectsViaStringStoredAsObjectIdWhenUsingInOnDbRef() { + + User bart = new User(); + bart.setUsername("bart@simpson.com"); + 
operations.save(bart); + + User lisa = new User(); + lisa.setUsername("lisa@simposon.com"); + operations.save(lisa); + + person.setCoworker(bart); + operations.save(person); + + QPerson p = QPerson.person; + + SpringDataMongodbQuery queryUsingIdFieldWithinInClause = repoSupport.from(p) + .where(p.coworker.id.in(Arrays.asList(bart.getId(), lisa.getId()))); + + SpringDataMongodbQuery queryUsingRefObject = repoSupport.from(p).where(p.coworker.eq(bart)); + + assertThat(queryUsingIdFieldWithinInClause.fetchOne()).isEqualTo(person); + assertThat(queryUsingIdFieldWithinInClause.fetchOne()).isEqualTo(queryUsingRefObject.fetchOne()); + } + + @Test // DATAMONGO-1848, DATAMONGO-2010 + public void shouldConvertStringIdThatIsAValidObjectIdWhenUsedInInPredicateIntoTheSuch() { + + Outer outer = new Outer(); + outer.id = new ObjectId().toHexString(); + outer.inner = new Inner(); + outer.inner.id = new ObjectId().toHexString(); + outer.inner.value = "eat sleep workout repeat"; + + operations.save(outer); + + QQuerydslRepositorySupportTests_Outer o = QQuerydslRepositorySupportTests_Outer.outer; + SpringDataMongodbQuery query = repoSupport.from(o).where(o.inner.id.in(outer.inner.id, outer.inner.id)); + + assertThat(query.fetchOne()).isEqualTo(outer); + } + + @Test // DATAMONGO-1798 + public void shouldRetainIdPropertyTypeIfInvalidObjectId() { + + Outer outer = new Outer(); + outer.id = "foobar"; + + operations.save(outer); + + QQuerydslRepositorySupportTests_Outer o = QQuerydslRepositorySupportTests_Outer.outer; + SpringDataMongodbQuery query = repoSupport.from(o).where(o.id.eq(outer.id)); + + assertThat(query.fetchOne()).isEqualTo(outer); + } + + @Test // DATAMONGO-1798 + public void shouldUseStringForValidObjectIdHexStrings() { + + WithMongoId document = new WithMongoId(); + document.id = new ObjectId().toHexString(); + + operations.save(document); + + QQuerydslRepositorySupportTests_WithMongoId o = QQuerydslRepositorySupportTests_WithMongoId.withMongoId; + SpringDataMongodbQuery 
eqQuery = repoSupport.from(o).where(o.id.eq(document.id)); + + assertThat(eqQuery.fetchOne()).isEqualTo(document); + + SpringDataMongodbQuery inQuery = repoSupport.from(o).where(o.id.in(document.id)); + + assertThat(inQuery.fetchOne()).isEqualTo(document); + } + + @Test // DATAMONGO-2327 + public void toJsonShouldRenderQuery() { + + QPerson p = QPerson.person; + SpringDataMongodbQuery query = repoSupport.from(p).where(p.lastname.eq("Matthews")) + .orderBy(p.firstname.asc()).offset(1).limit(5); + + assertThat(StringUtils.trimAllWhitespace(query.toJson())).isEqualTo("{\"lastname\":\"Matthews\"}"); + } + + @Test // DATAMONGO-2327 + public void toStringShouldRenderQuery() { + + QPerson p = QPerson.person; + User user = new User(); + user.setId("id"); + SpringDataMongodbQuery query = repoSupport.from(p) + .where(p.lastname.eq("Matthews").and(p.coworker.eq(user))); + + assertThat(StringUtils.trimAllWhitespace(query.toString())) + .isEqualTo("find({\"lastname\":\"Matthews\",\"coworker\":{\"$ref\":\"user\",\"$id\":\"id\"}})"); + + query = query.orderBy(p.firstname.asc()); + assertThat(StringUtils.trimAllWhitespace(query.toString())).isEqualTo( + "find({\"lastname\":\"Matthews\",\"coworker\":{\"$ref\":\"user\",\"$id\":\"id\"}}).sort({\"firstname\":1})"); + + query = query.offset(1).limit(5); + assertThat(StringUtils.trimAllWhitespace(query.toString())).isEqualTo( + "find({\"lastname\":\"Matthews\",\"coworker\":{\"$ref\":\"user\",\"$id\":\"id\"}}).sort({\"firstname\":1}).skip(1).limit(5)"); + } + + @Document + public static class Outer { + + @Id String id; + Inner inner; + + public String getId() { + return this.id; + } + + public Inner getInner() { + return this.inner; + } + + public void setId(String id) { + this.id = id; + } + + public void setInner(Inner inner) { + this.inner = inner; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Outer outer = (Outer) o; + 
return Objects.equals(id, outer.id) && Objects.equals(inner, outer.inner); + } + + @Override + public int hashCode() { + return Objects.hash(id, inner); + } + + public String toString() { + return "QuerydslRepositorySupportTests.Outer(id=" + this.getId() + ", inner=" + this.getInner() + ")"; + } + } + + public static class Inner { + + @Id String id; + String value; + + public String getId() { + return this.id; + } + + public String getValue() { + return this.value; + } + + public void setId(String id) { + this.id = id; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Inner inner = (Inner) o; + return Objects.equals(id, inner.id) && Objects.equals(value, inner.value); + } + + @Override + public int hashCode() { + return Objects.hash(id, value); + } + + public String toString() { + return "QuerydslRepositorySupportTests.Inner(id=" + this.getId() + ", value=" + this.getValue() + ")"; + } + } + + @Document + public static class WithMongoId { + + @MongoId(FieldType.STRING) String id; + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + WithMongoId that = (WithMongoId) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + public String toString() { + return "QuerydslRepositorySupportTests.WithMongoId(id=" + this.getId() + ")"; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryUnitTests.java new file mode 
100644 index 0000000000..57ee473e6e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryUnitTests.java @@ -0,0 +1,100 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.Mockito.*; + +import java.util.Set; + +import org.springframework.data.repository.reactive.ReactiveCrudRepository; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.Person; +import 
org.springframework.data.mongodb.repository.ReadPreference; +import org.springframework.data.repository.Repository; + +/** + * Unit test for {@link ReactiveMongoRepositoryFactory}. + * + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +public class ReactiveMongoRepositoryFactoryUnitTests { + + @Mock ReactiveMongoTemplate template; + + MongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, new MongoMappingContext()); + + @BeforeEach + public void setUp() { + when(template.getConverter()).thenReturn(converter); + } + + @Test // GH-2971 + void considersCrudMethodMetadata() { + + when(template.findOne(any(), any(), anyString())).thenReturn(Mono.empty()); + + ReactiveMongoRepositoryFactory factory = new ReactiveMongoRepositoryFactory(template); + MyPersonRepository repository = factory.getRepository(MyPersonRepository.class); + repository.findById(42L); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(template).findOne(captor.capture(), eq(Person.class), eq("person")); + + Query value = captor.getValue(); + assertThat(value.getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondary()); + } + + @Test // GH-2971 + void ignoresCrudMethodMetadataOnNonAnnotatedMethods() { + + when(template.find(any(), any(), anyString())).thenReturn(Flux.empty()); + + ReactiveMongoRepositoryFactory factory = new ReactiveMongoRepositoryFactory(template); + MyPersonRepository repository = factory.getRepository(MyPersonRepository.class); + repository.findAllById(Set.of(42L)); + + ArgumentCaptor captor = ArgumentCaptor.forClass(Query.class); + verify(template).find(captor.capture(), eq(Person.class), eq("person")); + + Query value = captor.getValue(); + assertThat(value.getReadPreference()).isNull(); + } + + interface MyPersonRepository extends ReactiveCrudRepository { + + @ReadPreference("secondary") + Mono findById(Long id); + } +} diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveQuerydslMongoPredicateExecutorTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveQuerydslMongoPredicateExecutorTests.java new file mode 100644 index 0000000000..807b7aec22 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/ReactiveQuerydslMongoPredicateExecutorTests.java @@ -0,0 +1,467 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import static org.assertj.core.api.Assertions.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashSet; +import java.util.Set; + +import org.junit.After; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.dao.PermissionDeniedDataAccessException; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.repository.Address; +import org.springframework.data.mongodb.repository.Person; +import org.springframework.data.mongodb.repository.QAddress; +import org.springframework.data.mongodb.repository.QPerson; +import org.springframework.data.mongodb.repository.QUser; +import org.springframework.data.mongodb.repository.User; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.data.mongodb.test.util.ReactiveMongoClientClosingTestConfiguration; +import org.springframework.data.repository.query.FluentQuery; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +import 
com.mongodb.MongoException; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Tests for {@link ReactiveQuerydslMongoPredicateExecutor}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Rocco Lagrotteria + */ +@RunWith(SpringRunner.class) +@ContextConfiguration +public class ReactiveQuerydslMongoPredicateExecutorTests { + + @Autowired ReactiveMongoOperations operations; + @Autowired ReactiveMongoDatabaseFactory dbFactory; + + ReactiveQuerydslMongoPredicateExecutor repository; + + Person dave, oliver, carter; + QPerson person; + + @Configuration + static class Config extends ReactiveMongoClientClosingTestConfiguration { + + @Override + public MongoClient reactiveMongoClient() { + return MongoTestUtils.reactiveClient(); + } + + @Override + protected String getDatabaseName() { + return "reactive"; + } + + @Override + protected Set> getInitialEntitySet() { + return Collections.singleton(Person.class); + } + } + + @BeforeClass + public static void cleanDb() { + + try (MongoClient client = MongoTestUtils.reactiveClient()) { + MongoTestUtils.createOrReplaceCollectionNow("reactive", "person", client); + MongoTestUtils.createOrReplaceCollectionNow("reactive", "user", client); + } + } + + @Before + public void setup() { + + ReactiveMongoRepositoryFactory factory = new ReactiveMongoRepositoryFactory(operations); + MongoEntityInformation entityInformation = factory.getEntityInformation(Person.class); + repository = new ReactiveQuerydslMongoPredicateExecutor<>(entityInformation, operations); + + dave = new Person("Dave", "Matthews", 42); + oliver = new Person("Oliver August", "Matthews", 4); + carter = new Person("Carter", "Beauford", 49); + + person = new QPerson("person"); + + Flux.merge(operations.insert(oliver), operations.insert(dave), operations.insert(carter)).then() // + .as(StepVerifier::create).verifyComplete(); + } + + @After + public void tearDown() { + 
operations.remove(new BasicQuery("{}"), "person").then().as(StepVerifier::create).verifyComplete(); + operations.remove(new BasicQuery("{}"), "uer").then().as(StepVerifier::create).verifyComplete(); + } + + @Test // DATAMONGO-2182 + public void shouldSupportExistsWithPredicate() { + + repository.exists(person.firstname.eq("Dave")) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); + + repository.exists(person.firstname.eq("Unknown")) // + .as(StepVerifier::create) // + .expectNext(false) // + .verifyComplete(); + } + + @Test // DATAMONGO-2182 + public void shouldSupportCountWithPredicate() { + + repository.count(person.firstname.eq("Dave")) // + .as(StepVerifier::create) // + .expectNext(1L) // + .verifyComplete(); + + repository.count(person.firstname.eq("Unknown")) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // DATAMONGO-2182 + public void shouldSupportFindAllWithPredicateAndSort() { + + repository.findAll(person.lastname.isNotNull(), Sort.by(Direction.ASC, "firstname")) // + .as(StepVerifier::create) // + .expectNext(carter, dave, oliver) // + .verifyComplete(); + } + + @Test // DATAMONGO-2182 + public void findOneWithPredicateReturnsResultCorrectly() { + + repository.findOne(person.firstname.eq(dave.getFirstname())) // + .as(StepVerifier::create) // + .expectNext(dave) // + .verifyComplete(); + } + + @Test // DATAMONGO-2182 + public void findOneWithPredicateReturnsEmptyWhenNoDataFound() { + + repository.findOne(person.firstname.eq("batman")) // + .as(StepVerifier::create) // + .verifyComplete(); + } + + @Test // DATAMONGO-2182 + public void findOneWithPredicateThrowsExceptionForNonUniqueResults() { + + repository.findOne(person.firstname.contains("e")) // + .as(StepVerifier::create) // + .expectError(IncorrectResultSizeDataAccessException.class) // + .verify(); + } + + @Test // DATAMONGO-2182 + public void findUsingAndShouldWork() { + + repository + 
.findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname()))) // + .as(StepVerifier::create) // + .expectNext(dave) // + .verifyComplete(); + } + + @Test // DATAMONGO-2182 + public void queryShouldTerminateWithUnsupportedOperationWithJoinOnDBref() { + + User user1 = new User(); + user1.setUsername("user-1"); + + User user2 = new User(); + user2.setUsername("user-2"); + + User user3 = new User(); + user3.setUsername("user-3"); + + Flux.merge(operations.save(user1), operations.save(user2), operations.save(user3)) // + .then() // + .as(StepVerifier::create) // + .verifyComplete(); // + + Person person1 = new Person("Max", "The Mighty"); + person1.setCoworker(user1); + + Person person2 = new Person("Jack", "The Ripper"); + person2.setCoworker(user2); + + Person person3 = new Person("Bob", "The Builder"); + person3.setCoworker(user3); + + operations.save(person1) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + operations.save(person2)// + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + operations.save(person3) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Flux result = new ReactiveSpringDataMongodbQuery<>(operations, Person.class).where() + .join(person.coworker, QUser.user).on(QUser.user.username.eq("user-2")).fetch(); + + result.as(StepVerifier::create) // + .expectError(UnsupportedOperationException.class) // + .verify(); + } + + @Test // DATAMONGO-2182 + public void queryShouldTerminateWithUnsupportedOperationOnJoinWithNoResults() { + + User user1 = new User(); + user1.setUsername("user-1"); + + User user2 = new User(); + user2.setUsername("user-2"); + + operations.insertAll(Arrays.asList(user1, user2)) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + + Person person1 = new Person("Max", "The Mighty"); + person1.setCoworker(user1); + + Person person2 = new Person("Jack", "The 
Ripper"); + person2.setCoworker(user2); + + operations.save(person1) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + operations.save(person2) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + Flux result = new ReactiveSpringDataMongodbQuery<>(operations, Person.class).where() + .join(person.coworker, QUser.user).on(QUser.user.username.eq("does-not-exist")).fetch(); + + result.as(StepVerifier::create) // + .expectError(UnsupportedOperationException.class) // + .verify(); + } + + @Test // DATAMONGO-2182 + public void springDataMongodbQueryShouldAllowElemMatchOnArrays() { + + Address adr1 = new Address("Hauptplatz", "4020", "Linz"); + Address adr2 = new Address("Stephansplatz", "1010", "Wien"); + Address adr3 = new Address("Tower of London", "EC3N 4AB", "London"); + + Person person1 = new Person("Max", "The Mighty"); + person1.setShippingAddresses(new LinkedHashSet<>(Arrays.asList(adr1, adr2))); + + Person person2 = new Person("Jack", "The Ripper"); + person2.setShippingAddresses(new LinkedHashSet<>(Arrays.asList(adr2, adr3))); + + operations.insertAll(Arrays.asList(person1, person2)) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + + Flux result = new ReactiveSpringDataMongodbQuery<>(operations, Person.class).where() + .anyEmbedded(person.shippingAddresses, QAddress.address).on(QAddress.address.city.eq("London")).fetch(); + + result.as(StepVerifier::create) // + .expectNext(person2) // + .verifyComplete(); + } + + @Test // DATAMONGO-2182, DATAMONGO-2265 + public void translatesExceptionsCorrectly() { + + ReactiveMongoOperations ops = new ReactiveMongoTemplate(dbFactory) { + + @Override + protected Mono doGetDatabase() { + return Mono.error(new MongoException(18, "Authentication Failed")); + } + }; + + ReactiveMongoRepositoryFactory factory = new ReactiveMongoRepositoryFactory(ops); + MongoEntityInformation entityInformation = 
factory.getEntityInformation(Person.class); + repository = new ReactiveQuerydslMongoPredicateExecutor<>(entityInformation, ops); + + repository.findOne(person.firstname.contains("batman")) // + .as(StepVerifier::create) // + .expectError(PermissionDeniedDataAccessException.class) // + .verify(); + } + + @Test // GH-3757 + public void findByShouldReturnFirstResult() { + + repository.findBy(person.firstname.eq(oliver.getFirstname()), FluentQuery.ReactiveFluentQuery::first) // + .as(StepVerifier::create) // + .expectNext(oliver) // + .verifyComplete(); + } + + @Test // GH-3757 + public void findByShouldReturnOneResult() { + + repository.findBy(person.firstname.eq(oliver.getFirstname()), FluentQuery.ReactiveFluentQuery::one) // + .as(StepVerifier::create) // + .expectNext(oliver) // + .verifyComplete(); + + repository.findBy(person.lastname.eq(oliver.getLastname()), FluentQuery.ReactiveFluentQuery::one) // + .as(StepVerifier::create) // + .verifyError(IncorrectResultSizeDataAccessException.class); + } + + @Test // GH-3757 + public void findByShouldReturnAll() { + + repository.findBy(person.lastname.eq(oliver.getLastname()), FluentQuery.ReactiveFluentQuery::all) // + .as(StepVerifier::create) // + .expectNextCount(2) // + .verifyComplete(); + } + + @Test // GH-3757 + public void findByShouldApplySortAll() { + + repository.findBy(person.lastname.eq(oliver.getLastname()), it -> it.sortBy(Sort.by("firstname")).all()) // + .as(StepVerifier::create) // + .expectNext(dave, oliver) // + .verifyComplete(); + + repository + .findBy(person.lastname.eq(oliver.getLastname()), it -> it.sortBy(Sort.by(Direction.DESC, "firstname")).all()) // + .as(StepVerifier::create) // + .expectNext(oliver, dave) // + .verifyComplete(); + } + + @Test // GH-3757 + public void findByShouldApplyProjection() { + + repository.findBy(person.lastname.eq(oliver.getLastname()), it -> it.project("firstname").first()) // + .as(StepVerifier::create) // + .assertNext(it -> { + + 
assertThat(it.getFirstname()).isNotNull(); + assertThat(it.getLastname()).isNull(); + }).verifyComplete(); + } + + @Test // GH-3757 + public void findByShouldApplyPagination() { + + repository + .findBy(person.lastname.eq(oliver.getLastname()), it -> it.page(PageRequest.of(0, 1, Sort.by("firstname")))) // + .as(StepVerifier::create) // + .assertNext(it -> { + + assertThat(it.getTotalElements()).isEqualTo(2); + assertThat(it.getContent()).containsOnly(dave); + }).verifyComplete(); + + repository + .findBy(person.lastname.eq(oliver.getLastname()), it -> it.page(PageRequest.of(1, 1, Sort.by("firstname")))) // + .as(StepVerifier::create) // + .assertNext(it -> { + + assertThat(it.getTotalElements()).isEqualTo(2); + assertThat(it.getContent()).containsOnly(oliver); + }).verifyComplete(); + } + + @Test // GH-4889 + public void findByShouldApplySlice() { + + repository + .findBy(person.lastname.eq(oliver.getLastname()), it -> it.slice(PageRequest.of(0, 1, Sort.by("firstname")))) // + .as(StepVerifier::create) // + .assertNext(it -> { + + assertThat(it.hasNext()).isTrue(); + assertThat(it.getContent()).containsOnly(dave); + }).verifyComplete(); + + repository + .findBy(person.lastname.eq(oliver.getLastname()), it -> it.slice(PageRequest.of(1, 1, Sort.by("firstname")))) // + .as(StepVerifier::create) // + .assertNext(it -> { + + assertThat(it.hasNext()).isFalse(); + assertThat(it.getContent()).containsOnly(oliver); + }).verifyComplete(); + } + + @Test // GH-3757 + public void findByShouldCount() { + + repository.findBy(person.lastname.eq(oliver.getLastname()), FluentQuery.ReactiveFluentQuery::count) // + .as(StepVerifier::create) // + .expectNext(2L) // + .verifyComplete(); + + repository.findBy(person.lastname.eq("foo"), FluentQuery.ReactiveFluentQuery::count) // + .as(StepVerifier::create) // + .expectNext(0L) // + .verifyComplete(); + } + + @Test // GH-3757 + public void findByShouldReportExists() { + + repository.findBy(person.lastname.eq(oliver.getLastname()), 
FluentQuery.ReactiveFluentQuery::exists) // + .as(StepVerifier::create) // + .expectNext(true) // + .verifyComplete(); + + repository.findBy(person.lastname.eq("foo"), FluentQuery.ReactiveFluentQuery::exists) // + .as(StepVerifier::create) // + .expectNext(false) // + .verifyComplete(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java index 8f2b94c3d2..60c02ee775 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,12 @@ */ package org.springframework.data.mongodb.repository.support; +import static java.util.Arrays.*; import static org.assertj.core.api.Assertions.*; import static org.springframework.data.domain.ExampleMatcher.*; +import static org.springframework.data.mongodb.test.util.DirtiesStateExtension.*; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -27,38 +28,52 @@ import java.util.Set; import java.util.UUID; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.data.domain.Example; -import org.springframework.data.domain.ExampleMatcher.StringMatcher; +import org.springframework.data.domain.ExampleMatcher; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Sort; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.MongoTransactionManager; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.query.Collation; import 
org.springframework.data.mongodb.repository.Address; import org.springframework.data.mongodb.repository.Person; import org.springframework.data.mongodb.repository.Person.Sex; import org.springframework.data.mongodb.repository.User; import org.springframework.data.mongodb.repository.query.MongoEntityInformation; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.data.mongodb.test.util.DirtiesStateExtension; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoServerCondition; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; +import org.springframework.data.repository.query.FluentQuery; import org.springframework.test.util.ReflectionTestUtils; +import org.springframework.transaction.support.TransactionTemplate; /** * @author A. B. M. 
Kowser * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Jens Schauder */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:infrastructure.xml") -public class SimpleMongoRepositoryTests { +@ExtendWith({ MongoTemplateExtension.class, MongoServerCondition.class, DirtiesStateExtension.class }) +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +class SimpleMongoRepositoryTests implements StateFunctions { - @Autowired private MongoTemplate template; + @Template(initialEntitySet = Person.class) // + private static MongoTestTemplate template; private Person oliver, dave, carter, boyd, stefan, leroi, alicia; private List all; @@ -66,10 +81,23 @@ public class SimpleMongoRepositoryTests { private MongoEntityInformation personEntityInformation = new CustomizedPersonInformation(); private SimpleMongoRepository repository; - @Before - public void setUp() { - repository = new SimpleMongoRepository(personEntityInformation, template); + @BeforeEach + void setUp() { + repository = new SimpleMongoRepository<>(personEntityInformation, template); + } + + @Override + public void clear() { + + if (repository == null) { + setUp(); + } + repository.deleteAll(); + } + + @Override + public void setupState() { oliver = new Person("Oliver August", "Matthews", 4); dave = new Person("Dave", "Matthews", 42); @@ -79,21 +107,22 @@ public void setUp() { leroi = new Person("Leroi", "Moore", 41); alicia = new Person("Alicia", "Keys", 30, Sex.FEMALE); - all = repository.saveAll(Arrays.asList(oliver, dave, carter, boyd, stefan, leroi, alicia)); + all = repository.saveAll(asList(oliver, dave, carter, boyd, stefan, leroi, alicia)); } @Test - public void findALlFromCustomCollectionName() { - assertThat(repository.findAll()).hasSize(all.size()); + void findAllFromCustomCollectionName() { + assertThat(repository.findAll()).hasSameSizeAs(all); } @Test - public void findOneFromCustomCollectionName() { - 
assertThat(repository.findById(dave.getId()).get()).isEqualTo(dave); + void findOneFromCustomCollectionName() { + assertThat(repository.findById(dave.getId())).contains(dave); } @Test - public void deleteFromCustomCollectionName() { + @DirtiesState + void deleteFromCustomCollectionName() { repository.delete(dave); @@ -101,7 +130,8 @@ public void deleteFromCustomCollectionName() { } @Test - public void deleteByIdFromCustomCollectionName() { + @DirtiesState + void deleteByIdFromCustomCollectionName() { repository.deleteById(dave.getId()); @@ -109,7 +139,8 @@ public void deleteByIdFromCustomCollectionName() { } @Test // DATAMONGO-1054 - public void shouldInsertSingle() { + @DirtiesState + void shouldInsertSingle() { String randomId = UUID.randomUUID().toString(); @@ -120,7 +151,8 @@ public void shouldInsertSingle() { } @Test // DATAMONGO-1054 - public void shouldInsertMultipleFromList() { + @DirtiesState + void shouldInsertMultipleFromList() { String randomId = UUID.randomUUID().toString(); Map idToPerson = new HashMap(); @@ -134,12 +166,13 @@ public void shouldInsertMultipleFromList() { List saved = repository.insert(persons); - assertThat(saved).hasSize(persons.size()); + assertThat(saved).hasSameSizeAs(persons); assertThatAllReferencePersonsWereStoredCorrectly(idToPerson, saved); } @Test // DATAMONGO-1054 - public void shouldInsertMutlipleFromSet() { + @DirtiesState + void shouldInsertMutlipleFromSet() { String randomId = UUID.randomUUID().toString(); Map idToPerson = new HashMap(); @@ -153,12 +186,12 @@ public void shouldInsertMutlipleFromSet() { List saved = repository.insert(persons); - assertThat(saved).hasSize(persons.size()); + assertThat(saved).hasSameSizeAs(persons); assertThatAllReferencePersonsWereStoredCorrectly(idToPerson, saved); } @Test // DATAMONGO-1245, DATAMONGO-1464 - public void findByExampleShouldLookUpEntriesCorrectly() { + void findByExampleShouldLookUpEntriesCorrectly() { Person sample = new Person(); sample.setLastname("Matthews"); @@ -170,8 
+203,21 @@ public void findByExampleShouldLookUpEntriesCorrectly() { assertThat(result.getTotalPages()).isEqualTo(1); } + @Test // GH-3751 + void findByExampleShouldReturnUnpagedResults() { + + Person sample = new Person(); + sample.setLastname("Matthews"); + trimDomainType(sample, "id", "createdAt", "email"); + + Page result = repository.findAll(Example.of(sample), Pageable.unpaged()); + + assertThat(result.getContent()).hasSize(2).contains(dave, oliver); + assertThat(result.getTotalPages()).isEqualTo(1); + } + @Test // DATAMONGO-1464 - public void findByExampleMultiplePagesShouldLookUpEntriesCorrectly() { + void findByExampleMultiplePagesShouldLookUpEntriesCorrectly() { Person sample = new Person(); sample.setLastname("Matthews"); @@ -184,7 +230,7 @@ public void findByExampleMultiplePagesShouldLookUpEntriesCorrectly() { } @Test // DATAMONGO-1245 - public void findAllByExampleShouldLookUpEntriesCorrectly() { + void findAllByExampleShouldLookUpEntriesCorrectly() { Person sample = new Person(); sample.setLastname("Matthews"); @@ -194,7 +240,8 @@ public void findAllByExampleShouldLookUpEntriesCorrectly() { } @Test // DATAMONGO-1245 - public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObject() { + @DirtiesState + void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObject() { dave.setAddress(new Address("1600 Pennsylvania Ave NW", "20500", "Washington")); repository.save(dave); @@ -210,7 +257,8 @@ public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObject() } @Test // DATAMONGO-1245 - public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingPartialNestedObject() { + @DirtiesState + void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingPartialNestedObject() { dave.setAddress(new Address("1600 Pennsylvania Ave NW", "20500", "Washington")); repository.save(dave); @@ -226,7 +274,8 @@ public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingPartialNestedOb } @Test // DATAMONGO-1245 - public void 
findAllByExampleShouldNotFindEntriesWhenUsingPartialNestedObjectInStrictMode() { + @DirtiesState + void findAllByExampleShouldNotFindEntriesWhenUsingPartialNestedObjectInStrictMode() { dave.setAddress(new Address("1600 Pennsylvania Ave NW", "20500", "Washington")); repository.save(dave); @@ -241,7 +290,8 @@ public void findAllByExampleShouldNotFindEntriesWhenUsingPartialNestedObjectInSt } @Test // DATAMONGO-1245 - public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObjectInStrictMode() { + @DirtiesState + void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObjectInStrictMode() { dave.setAddress(new Address("1600 Pennsylvania Ave NW", "20500", "Washington")); repository.save(dave); @@ -256,7 +306,7 @@ public void findAllByExampleShouldLookUpEntriesCorrectlyWhenUsingNestedObjectInS } @Test // DATAMONGO-1245 - public void findAllByExampleShouldRespectStringMatchMode() { + void findAllByExampleShouldRespectStringMatchMode() { Person sample = new Person(); sample.setLastname("Mat"); @@ -268,7 +318,8 @@ public void findAllByExampleShouldRespectStringMatchMode() { } @Test // DATAMONGO-1245 - public void findAllByExampleShouldResolveDbRefCorrectly() { + @DirtiesState + void findAllByExampleShouldResolveDbRefCorrectly() { User user = new User(); user.setId("c0nf1ux"); @@ -288,7 +339,8 @@ public void findAllByExampleShouldResolveDbRefCorrectly() { } @Test // DATAMONGO-1245 - public void findAllByExampleShouldResolveLegacyCoordinatesCorrectly() { + @DirtiesState + void findAllByExampleShouldResolveLegacyCoordinatesCorrectly() { Person megan = new Person("megan", "tarash"); megan.setLocation(new Point(41.85003D, -87.65005D)); @@ -303,7 +355,8 @@ public void findAllByExampleShouldResolveLegacyCoordinatesCorrectly() { } @Test // DATAMONGO-1245 - public void findAllByExampleShouldResolveGeoJsonCoordinatesCorrectly() { + @DirtiesState + void findAllByExampleShouldResolveGeoJsonCoordinatesCorrectly() { Person megan = new Person("megan", "tarash"); 
megan.setLocation(new GeoJsonPoint(41.85003D, -87.65005D)); @@ -318,7 +371,8 @@ public void findAllByExampleShouldResolveGeoJsonCoordinatesCorrectly() { } @Test // DATAMONGO-1245 - public void findAllByExampleShouldProcessInheritanceCorrectly() { + @DirtiesState + void findAllByExampleShouldProcessInheritanceCorrectly() { PersonExtended reference = new PersonExtended(); reference.setLastname("Matthews"); @@ -334,7 +388,7 @@ public void findAllByExampleShouldProcessInheritanceCorrectly() { } @Test // DATAMONGO-1245 - public void findOneByExampleShouldLookUpEntriesCorrectly() { + void findOneByExampleShouldLookUpEntriesCorrectly() { Person sample = new Person(); sample.setFirstname("Dave"); @@ -345,7 +399,7 @@ public void findOneByExampleShouldLookUpEntriesCorrectly() { } @Test // DATAMONGO-1245 - public void existsByExampleShouldLookUpEntriesCorrectly() { + void existsByExampleShouldLookUpEntriesCorrectly() { Person sample = new Person(); sample.setFirstname("Dave"); @@ -356,7 +410,7 @@ public void existsByExampleShouldLookUpEntriesCorrectly() { } @Test // DATAMONGO-1245 - public void countByExampleShouldLookUpEntriesCorrectly() { + void countByExampleShouldLookUpEntriesCorrectly() { Person sample = new Person(); sample.setLastname("Matthews"); @@ -366,7 +420,8 @@ public void countByExampleShouldLookUpEntriesCorrectly() { } @Test // DATAMONGO-1896 - public void saveAllUsesEntityCollection() { + @DirtiesState + void saveAllUsesEntityCollection() { Person first = new PersonExtended(); first.setEmail("foo@bar.com"); @@ -378,11 +433,207 @@ public void saveAllUsesEntityCollection() { repository.deleteAll(); - repository.saveAll(Arrays.asList(first, second)); + repository.saveAll(asList(first, second)); assertThat(repository.findAll()).containsExactlyInAnyOrder(first, second); } + @Test // DATAMONGO-2130 + @EnableIfReplicaSetAvailable + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + @DirtiesState + void countShouldBePossibleInTransaction() { + + 
MongoTransactionManager txmgr = new MongoTransactionManager(template.getMongoDatabaseFactory()); + TransactionTemplate tt = new TransactionTemplate(txmgr); + tt.afterPropertiesSet(); + + long countPreTx = repository.count(); + + long count = tt.execute(status -> { + + Person sample = new Person(); + sample.setLastname("Matthews"); + + repository.save(sample); + + return repository.count(); + }); + + assertThat(count).isEqualTo(countPreTx + 1); + } + + @Test // DATAMONGO-2130 + @EnableIfReplicaSetAvailable + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + @DirtiesState + void existsShouldBePossibleInTransaction() { + + MongoTransactionManager txmgr = new MongoTransactionManager(template.getMongoDatabaseFactory()); + TransactionTemplate tt = new TransactionTemplate(txmgr); + tt.afterPropertiesSet(); + + boolean exists = tt.execute(status -> { + + Person sample = new Person(); + sample.setLastname("Matthews"); + + repository.save(sample); + + return repository.existsById(sample.getId()); + }); + + assertThat(exists).isTrue(); + } + + @Test // DATAMONGO-2652 + @DirtiesState + void deleteAllByIds() { + + repository.deleteAllById(asList(dave.getId(), carter.getId())); + + assertThat(repository.findAll()) // + .hasSize(all.size() - 2).doesNotContain(dave, carter); + } + + @Test // GH-3757 + void findByShouldReturnFirstResult() { + + Person probe = new Person(); + probe.setFirstname(oliver.getFirstname()); + + Person result = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::firstValue); + + assertThat(result).isEqualTo(oliver); + } + + @Test // GH-3757 + void findByShouldReturnOneResult() { + + Person probe = new Person(); + probe.setFirstname(oliver.getFirstname()); + + Person result = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::oneValue); + + assertThat(result).isEqualTo(oliver); + + Person probeByLastname = new Person(); + probeByLastname.setLastname(oliver.getLastname()); + + 
assertThatExceptionOfType(IncorrectResultSizeDataAccessException.class).isThrownBy( + () -> repository.findBy(Example.of(probeByLastname, getMatcher()), FluentQuery.FetchableFluentQuery::one)); + } + + @Test // GH-3757 + void findByShouldReturnAll() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + List result = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::all); + + assertThat(result).hasSize(2); + } + + @Test // GH-3757 + void findByShouldApplySortAll() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + List result = repository.findBy(Example.of(probe, getMatcher()), + it -> it.sortBy(Sort.by("firstname")).all()); + assertThat(result).containsSequence(dave, oliver); + + result = repository.findBy(Example.of(probe, getMatcher()), + it -> it.sortBy(Sort.by(Sort.Direction.DESC, "firstname")).all()); + assertThat(result).containsSequence(oliver, dave); + } + + @Test // GH-3757 + void findByShouldApplyProjection() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + Person result = repository.findBy(Example.of(probe, getMatcher()), it -> it.project("firstname").firstValue()); + + assertThat(result.getFirstname()).isNotNull(); + assertThat(result.getLastname()).isNull(); + } + + @Test // GH-3757 + void findByShouldApplyPagination() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + Page first = repository.findBy(Example.of(probe, getMatcher()), + it -> it.page(PageRequest.of(0, 1, Sort.by("firstname")))); + assertThat(first.getTotalElements()).isEqualTo(2); + assertThat(first.getContent()).contains(dave); + + Page next = repository.findBy(Example.of(probe, getMatcher()), + it -> it.page(PageRequest.of(1, 1, Sort.by("firstname")))); + + assertThat(next.getTotalElements()).isEqualTo(2); + assertThat(next.getContent()).contains(oliver); + } + + @Test // GH-4889 + void findByShouldApplySlice() { + + Person probe = 
new Person(); + probe.setLastname(oliver.getLastname()); + + Slice first = repository.findBy(Example.of(probe, getMatcher()), + it -> it.slice(PageRequest.of(0, 1, Sort.by("firstname")))); + assertThat(first.hasNext()).isTrue(); + assertThat(first.getContent()).contains(dave); + + Slice next = repository.findBy(Example.of(probe, getMatcher()), + it -> it.slice(PageRequest.of(1, 1, Sort.by("firstname")))); + + assertThat(next.hasNext()).isFalse(); + assertThat(next.getContent()).contains(oliver); + } + + @Test // GH-3757 + void findByShouldCount() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + long count = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::count); + assertThat(count).isEqualTo(2L); + + probe = new Person(); + probe.setLastname("foo"); + + count = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::count); + assertThat(count).isEqualTo(0L); + } + + @Test // GH-3757 + void findByShouldReportExists() { + + Person probe = new Person(); + probe.setLastname(oliver.getLastname()); + + boolean exists = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::exists); + assertThat(exists).isTrue(); + + probe = new Person(); + probe.setLastname("foo"); + + exists = repository.findBy(Example.of(probe, getMatcher()), FluentQuery.FetchableFluentQuery::exists); + assertThat(exists).isFalse(); + } + + private ExampleMatcher getMatcher() { + return matching().withIgnorePaths("age", "createdAt", "sex", "email", "id"); + } + private void assertThatAllReferencePersonsWereStoredCorrectly(Map references, List saved) { for (Person person : saved) { @@ -429,6 +680,11 @@ public String getCollectionName() { public String getIdAttribute() { return "id"; } + + @Override + public Collation getCollation() { + return null; + } } @Document diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryUnitTests.java new file mode 100644 index 0000000000..f784aea6e8 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryUnitTests.java @@ -0,0 +1,242 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import java.util.function.Consumer; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.domain.Example; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.ReadPreference; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.mongodb.repository.support.CrudMethodMetadataPostProcessor.DefaultCrudMethodMetadata; +import org.springframework.data.repository.query.FluentQuery.FetchableFluentQuery; + +/** + * Unit tests for {@link SimpleMongoRepository}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +public class SimpleMongoRepositoryUnitTests { + + SimpleMongoRepository repository; + @Mock MongoOperations mongoOperations; + @Mock MongoEntityInformation entityInformation; + + @BeforeEach + public void setUp() { + repository = new SimpleMongoRepository<>(entityInformation, mongoOperations); + } + + @Test // DATAMONGO-1854 + public void shouldAddDefaultCollationToCountForExampleIfPresent() { + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.count(Example.of(new TestDummy())); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).count(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + public void shouldAddDefaultCollationToExistsForExampleIfPresent() { + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.exists(Example.of(new TestDummy())); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).exists(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + public void shouldAddDefaultCollationToFindForExampleIfPresent() { + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.findAll(Example.of(new TestDummy())); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + public void shouldAddDefaultCollationToFindWithSortForExampleIfPresent() { + + Collation collation = Collation.of("en_US"); + + 
when(entityInformation.getCollation()).thenReturn(collation); + repository.findAll(Example.of(new TestDummy()), Sort.by("nothing")); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + public void shouldAddDefaultCollationToFindWithPageableForExampleIfPresent() { + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.findAll(Example.of(new TestDummy()), PageRequest.of(1, 1, Sort.by("nothing"))); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + public void shouldAddDefaultCollationToFindOneForExampleIfPresent() { + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.findOne(Example.of(new TestDummy())); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).findOne(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @ParameterizedTest // GH-2971 + @MethodSource("findAllCalls") + void shouldAddReadPreferenceToFindAllMethods(Consumer> findCall) + throws NoSuchMethodException { + + repository = new SimpleMongoRepository<>(entityInformation, mongoOperations); + repository.setRepositoryMethodMetadata( + new DefaultCrudMethodMetadata(TestRepositoryWithReadPreference.class, TestRepositoryWithReadPreference.class.getMethod("dummy"))); + + findCall.accept(repository); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + 
assertThat(query.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + @Test // GH-2971 + void shouldAddReadPreferenceToFindOne() throws NoSuchMethodException { + + repository = new SimpleMongoRepository<>(entityInformation, mongoOperations); + repository.setRepositoryMethodMetadata( + new DefaultCrudMethodMetadata(TestRepositoryWithReadPreference.class, TestRepositoryWithReadPreference.class.getMethod("dummy"))); + + repository.findOne(Example.of(new TestDummy())); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).findOne(query.capture(), any(), any()); + + assertThat(query.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + @Test // GH-2971 + void shouldAddReadPreferenceToFluentFetchable() throws NoSuchMethodException { + + ExecutableFind finder = mock(ExecutableFind.class); + when(mongoOperations.query(any())).thenReturn(finder); + when(finder.inCollection(any())).thenReturn(finder); + when(finder.matching(any(Query.class))).thenReturn(finder); + when(finder.as(any())).thenReturn(finder); + + repository = new SimpleMongoRepository<>(entityInformation, mongoOperations); + repository.setRepositoryMethodMetadata( + new DefaultCrudMethodMetadata(TestRepositoryWithReadPreferenceMethod.class, TestRepositoryWithReadPreferenceMethod.class.getMethod("dummy"))); + + repository.findBy(Example.of(new TestDummy()), FetchableFluentQuery::all); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(finder).matching(query.capture()); + + assertThat(query.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + private static Stream findAllCalls() { + + Consumer> findAll = SimpleMongoRepository::findAll; + Consumer> findAllWithSort = repo -> repo.findAll(Sort.by("age")); + Consumer> findAllWithPage = repo -> repo + .findAll(PageRequest.of(1, 20, Sort.by("age"))); + Consumer> 
findAllWithExample = repo -> repo + .findAll(Example.of(new TestDummy())); + Consumer> findAllWithExampleAndSort = repo -> repo + .findAll(Example.of(new TestDummy()), Sort.by("age")); + Consumer> findAllWithExampleAndPage = repo -> repo + .findAll(Example.of(new TestDummy()), PageRequest.of(1, 20, Sort.by("age"))); + + return Stream.of(Arguments.of(findAll), // + Arguments.of(findAllWithSort), // + Arguments.of(findAllWithPage), // + Arguments.of(findAllWithExample), // + Arguments.of(findAllWithExampleAndSort), // + Arguments.of(findAllWithExampleAndPage)); + } + + static class TestDummy { + + } + + interface TestRepository { + + } + + @ReadPreference("secondaryPreferred") + interface TestRepositoryWithReadPreference { + + void dummy(); + } + + interface TestRepositoryWithReadPreferenceMethod { + + @ReadPreference("secondaryPreferred") + void dummy(); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryVersionedEntityTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryVersionedEntityTests.java new file mode 100644 index 0000000000..ad53592b76 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryVersionedEntityTests.java @@ -0,0 +1,182 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.Assumptions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.data.mongodb.MongoTransactionManager; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.repository.VersionedPerson; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientClosingTestConfiguration; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.data.mongodb.test.util.ReplicaSet; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.transaction.support.TransactionTemplate; + +import com.mongodb.client.MongoClient; + +/** + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(SpringExtension.class) +@ContextConfiguration +public class SimpleMongoRepositoryVersionedEntityTests { + + @Configuration + static class Config extends MongoClientClosingTestConfiguration { + + @Override 
+ public MongoClient mongoClient() { + return MongoTestUtils.client(); + } + + @Override + protected String getDatabaseName() { + return "database"; + } + + @Override + protected Set> getInitialEntitySet() throws ClassNotFoundException { + return new HashSet<>(Arrays.asList(VersionedPerson.class)); + } + } + + @Autowired private MongoTemplate template; + + private MongoEntityInformation personEntityInformation; + private SimpleMongoRepository repository; + + private VersionedPerson sarah; + + @BeforeEach + public void setUp() { + + MongoPersistentEntity entity = template.getConverter().getMappingContext() + .getRequiredPersistentEntity(VersionedPerson.class); + + personEntityInformation = new MappingMongoEntityInformation(entity); + repository = new SimpleMongoRepository<>(personEntityInformation, template); + repository.deleteAll(); + + sarah = repository.save(new VersionedPerson("Sarah", "Connor")); + } + + @Test // DATAMONGO-2195 + public void deleteWithMatchingVersion() { + + repository.delete(sarah); + + assertThat(template.count(query(where("id").is(sarah.getId())), VersionedPerson.class)).isZero(); + } + + @Test // DATAMONGO-2195 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + public void deleteWithMatchingVersionInTx() { + + assumeThat(ReplicaSet.required().runsAsReplicaSet()).isTrue(); + + long countBefore = repository.count(); + + initTxTemplate().execute(status -> { + + VersionedPerson t800 = repository.save(new VersionedPerson("T-800")); + repository.delete(t800); + + return Void.TYPE; + }); + + assertThat(repository.count()).isEqualTo(countBefore); + } + + @Test // DATAMONGO-2195 + public void deleteWithVersionMismatch() { + + sarah.setVersion(5L); + + assertThatExceptionOfType(OptimisticLockingFailureException.class).isThrownBy(() -> repository.delete(sarah)); + + assertThat(template.count(query(where("id").is(sarah.getId())), VersionedPerson.class)).isOne(); + } + + @Test // DATAMONGO-2195 + @EnableIfMongoServerVersion(isGreaterThanEqual 
= "4.0") + public void deleteWithVersionMismatchInTx() { + + assumeThat(ReplicaSet.required().runsAsReplicaSet()).isTrue(); + + long countBefore = repository.count(); + + assertThatExceptionOfType(OptimisticLockingFailureException.class) + .isThrownBy(() -> initTxTemplate().execute(status -> { + + VersionedPerson t800 = repository.save(new VersionedPerson("T-800")); + t800.setVersion(5L); + repository.delete(t800); + + return Void.TYPE; + })); + + assertThat(repository.count()).isEqualTo(countBefore); + } + + @Test // DATAMONGO-2195 + public void deleteNonExisting() { + assertThatThrownBy(() -> repository.delete(new VersionedPerson("T-800"))) + .isInstanceOf(OptimisticLockingFailureException.class); + } + + @Test // DATAMONGO-2195 + @EnableIfMongoServerVersion(isGreaterThanEqual = "4.0") + public void deleteNonExistingInTx() { + + assumeThat(ReplicaSet.required().runsAsReplicaSet()).isTrue(); + + initTxTemplate().execute(status -> { + + assertThatThrownBy(() -> repository.delete(new VersionedPerson("T-800"))) + .isInstanceOf(OptimisticLockingFailureException.class); + + return Void.TYPE; + }); + } + + TransactionTemplate initTxTemplate() { + + MongoTransactionManager txmgr = new MongoTransactionManager(template.getMongoDatabaseFactory()); + TransactionTemplate tt = new TransactionTemplate(txmgr); + tt.afterPropertiesSet(); + + return tt; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepositoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepositoryUnitTests.java new file mode 100644 index 0000000000..0b172de2cd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepositoryUnitTests.java @@ -0,0 +1,246 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import static org.assertj.core.api.Assertions.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.lang.reflect.Method; +import java.util.Optional; +import java.util.function.Function; +import java.util.stream.Stream; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.domain.Example; +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.ReactiveFindOperation.ReactiveFind; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.repository.ReadPreference; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.repository.query.FluentQuery; + +/** + * Unit tests for 
{@link SimpleReactiveMongoRepository}. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +@ExtendWith(MockitoExtension.class) +class SimpleReactiveMongoRepositoryUnitTests { + + private SimpleReactiveMongoRepository repository; + @Mock Mono mono; + @Mock Flux flux; + @Mock ReactiveMongoOperations mongoOperations; + @Mock MongoEntityInformation entityInformation; + + @BeforeEach + void setUp() { + repository = new SimpleReactiveMongoRepository<>(entityInformation, mongoOperations); + } + + @Test // DATAMONGO-1854 + void shouldAddDefaultCollationToCountForExampleIfPresent() { + + when(mongoOperations.count(any(), any(), any())).thenReturn(mono); + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.count(Example.of(new TestDummy())).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).count(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void shouldAddDefaultCollationToExistsForExampleIfPresent() { + + when(mongoOperations.exists(any(), any(), any())).thenReturn(mono); + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.exists(Example.of(new TestDummy())).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).exists(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void shouldAddDefaultCollationToFindForExampleIfPresent() { + + when(mongoOperations.find(any(), any(), any())).thenReturn(flux); + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.findAll(Example.of(new TestDummy())).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + 
verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void shouldAddDefaultCollationToFindWithSortForExampleIfPresent() { + + when(mongoOperations.find(any(), any(), any())).thenReturn(flux); + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.findAll(Example.of(new TestDummy()), Sort.by("nothing")).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @Test // DATAMONGO-1854 + void shouldAddDefaultCollationToFindOneForExampleIfPresent() { + + when(entityInformation.getCollectionName()).thenReturn("testdummy"); + doReturn(flux).when(mongoOperations).find(any(Query.class), eq(TestDummy.class), eq("testdummy")); + when(flux.buffer(anyInt())).thenReturn(flux); + when(flux.map(any())).thenReturn(flux); + when(flux.next()).thenReturn(mono); + + Collation collation = Collation.of("en_US"); + + when(entityInformation.getCollation()).thenReturn(collation); + repository.findOne(Example.of(new TestDummy())).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getCollation()).contains(collation); + } + + @ParameterizedTest // GH-2971 + @MethodSource("findAllCalls") + void shouldAddReadPreferenceToFindAllMethods( + Function, Flux> findCall) { + + repository = new SimpleReactiveMongoRepository<>(entityInformation, mongoOperations); + repository.setRepositoryMethodMetadata(new CrudMethodMetadata() { + @Override + public Optional getReadPreference() { + return Optional.of(com.mongodb.ReadPreference.secondaryPreferred()); + } + }); + when(mongoOperations.find(any(), any(), any())).thenReturn(Flux.just("ok")); + + 
findCall.apply(repository).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + @Test // GH-2971 + void shouldAddReadPreferenceToFindOne() { + + repository = new SimpleReactiveMongoRepository<>(entityInformation, mongoOperations); + repository.setRepositoryMethodMetadata(new CrudMethodMetadata() { + @Override + public Optional getReadPreference() { + return Optional.of(com.mongodb.ReadPreference.secondaryPreferred()); + } + }); + when(mongoOperations.find(any(), any(), any())).thenReturn(Flux.just("ok")); + + repository.findOne(Example.of(new SimpleMongoRepositoryUnitTests.TestDummy())).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(mongoOperations).find(query.capture(), any(), any()); + + assertThat(query.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + @Test // GH-2971 + void shouldAddReadPreferenceToFluentFetchable() { + + ReactiveFind finder = mock(ReactiveFind.class); + when(mongoOperations.query(any())).thenReturn(finder); + when(finder.inCollection(any())).thenReturn(finder); + when(finder.matching(any(Query.class))).thenReturn(finder); + when(finder.as(any())).thenReturn(finder); + when(finder.all()).thenReturn(Flux.just("ok")); + + repository = new SimpleReactiveMongoRepository<>(entityInformation, mongoOperations); + repository.setRepositoryMethodMetadata(new CrudMethodMetadata() { + @Override + public Optional getReadPreference() { + return Optional.of(com.mongodb.ReadPreference.secondaryPreferred()); + } + }); + + repository.findBy(Example.of(new TestDummy()), FluentQuery.ReactiveFluentQuery::all).subscribe(); + + ArgumentCaptor query = ArgumentCaptor.forClass(Query.class); + verify(finder).matching(query.capture()); + + 
assertThat(query.getValue().getReadPreference()).isEqualTo(com.mongodb.ReadPreference.secondaryPreferred()); + } + + private static Stream findAllCalls() { + + Function, Flux> findAll = SimpleReactiveMongoRepository::findAll; + Function, Flux> findAllWithSort = repo -> repo + .findAll(Sort.by("age")); + Function, Flux> findAllWithExample = repo -> repo + .findAll(Example.of(new TestDummy())); + Function, Flux> findAllWithExampleAndSort = repo -> repo + .findAll(Example.of(new TestDummy()), Sort.by("age")); + + return Stream.of(Arguments.of(findAll), // + Arguments.of(findAllWithSort), // + Arguments.of(findAllWithExample), // + Arguments.of(findAllWithExampleAndSort)); + } + + private static class TestDummy { + + } + + @ReadPreference("secondaryPreferred") + interface TestRepositoryWithReadPreference { + + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepositoryVersionedEntityTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepositoryVersionedEntityTests.java new file mode 100644 index 0000000000..10f5f334a8 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleReactiveMongoRepositoryVersionedEntityTests.java @@ -0,0 +1,120 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import reactor.test.StepVerifier; + +import java.util.Collections; +import java.util.Set; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.dao.OptimisticLockingFailureException; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.repository.VersionedPerson; +import org.springframework.data.mongodb.repository.query.MongoEntityInformation; +import org.springframework.data.mongodb.test.util.MongoTestUtils; +import org.springframework.data.mongodb.test.util.ReactiveMongoClientClosingTestConfiguration; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * @author Mark Paluch + */ +@RunWith(SpringRunner.class) +@ContextConfiguration +public class SimpleReactiveMongoRepositoryVersionedEntityTests { + + @Configuration + static class Config extends ReactiveMongoClientClosingTestConfiguration { + + @Override + public MongoClient reactiveMongoClient() { + return MongoTestUtils.reactiveClient(); + } + + @Override + protected String getDatabaseName() { + return "database"; + } + + @Override + protected Set> getInitialEntitySet() { + return Collections.singleton(VersionedPerson.class); + } + } + + @Autowired // + private ReactiveMongoTemplate template; + + private MongoEntityInformation personEntityInformation; + private SimpleReactiveMongoRepository repository; + + private VersionedPerson sarah; + + @Before + 
public void setUp() { + + MongoPersistentEntity entity = template.getConverter().getMappingContext() + .getRequiredPersistentEntity(VersionedPerson.class); + + personEntityInformation = new MappingMongoEntityInformation(entity); + repository = new SimpleReactiveMongoRepository<>(personEntityInformation, template); + repository.deleteAll().as(StepVerifier::create).verifyComplete(); + + sarah = repository.save(new VersionedPerson("Sarah", "Connor")).block(); + } + + @Test // DATAMONGO-2195 + public void deleteWithMatchingVersion() { + + repository.delete(sarah).as(StepVerifier::create).verifyComplete(); + + template.count(query(where("id").is(sarah.getId())), VersionedPerson.class) // + .as(StepVerifier::create) // + .expectNext(0L).verifyComplete(); + } + + @Test // DATAMONGO-2195 + public void deleteWithVersionMismatch() { + + sarah.setVersion(5L); + + repository.delete(sarah).as(StepVerifier::create).verifyError(OptimisticLockingFailureException.class); + + template.count(query(where("id").is(sarah.getId())), VersionedPerson.class) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + } + + @Test // DATAMONGO-2195 + public void deleteNonExisting() { + + repository.delete(new VersionedPerson("T-800")).as(StepVerifier::create) + .verifyError(OptimisticLockingFailureException.class); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializerUnitTests.java index 06ade013a1..56e17b7590 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializerUnitTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. 
+ * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,36 +15,41 @@ */ package org.springframework.data.mongodb.repository.support; -import static org.hamcrest.Matchers.*; -import static org.junit.Assert.*; -import static org.springframework.data.mongodb.core.DocumentTestUtils.*; +import static com.querydsl.core.types.ExpressionUtils.path; +import static com.querydsl.core.types.ExpressionUtils.predicate; +import static com.querydsl.core.types.dsl.Expressions.*; +import static org.assertj.core.api.Assertions.*; +import java.util.Arrays; import java.util.Collections; -import java.util.List; import org.bson.Document; import org.bson.types.ObjectId; -import org.hamcrest.collection.IsIterableContainingInOrder; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.core.convert.converter.Converter; +import org.springframework.data.annotation.Id; import org.springframework.data.convert.WritingConverter; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.mapping.Field; 
+import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.repository.Person.Sex; import org.springframework.data.mongodb.repository.QAddress; import org.springframework.data.mongodb.repository.QPerson; +import org.springframework.data.mongodb.repository.User; -import com.mongodb.BasicDBList; -import com.mongodb.BasicDBObject; -import com.mongodb.DBObject; +import com.querydsl.core.types.Ops; +import com.querydsl.core.types.Predicate; +import com.querydsl.core.types.PredicateOperation; +import com.querydsl.core.types.dsl.BooleanExpression; import com.querydsl.core.types.dsl.BooleanOperation; import com.querydsl.core.types.dsl.PathBuilder; import com.querydsl.core.types.dsl.SimplePath; @@ -56,16 +61,18 @@ * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Mikhail Kaduchka + * @author Enrique Leon Molina */ -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class SpringDataMongodbSerializerUnitTests { @Mock DbRefResolver dbFactory; - MongoConverter converter; - SpringDataMongodbSerializer serializer; + private MongoConverter converter; + private SpringDataMongodbSerializer serializer; - @Before - public void setUp() { + @BeforeEach + void setUp() { MongoMappingContext context = new MongoMappingContext(); @@ -74,114 +81,207 @@ public void setUp() { } @Test - public void uses_idAsKeyForIdProperty() { + void uses_idAsKeyForIdProperty() { StringPath path = QPerson.person.id; - assertThat(serializer.getKeyForPath(path, path.getMetadata()), is("_id")); + assertThat(serializer.getKeyForPath(path, path.getMetadata())).isEqualTo("_id"); } @Test - public void buildsNestedKeyCorrectly() { + void buildsNestedKeyCorrectly() { StringPath path = QPerson.person.address.street; - assertThat(serializer.getKeyForPath(path, path.getMetadata()), is("street")); + assertThat(serializer.getKeyForPath(path, 
path.getMetadata())).isEqualTo("street"); } @Test - public void convertsComplexObjectOnSerializing() { + void convertsComplexObjectOnSerializing() { Address address = new Address(); address.street = "Foo"; address.zipCode = "01234"; - DBObject result = serializer.asDBObject("foo", address); - assertThat(result, is(instanceOf(BasicDBObject.class))); - BasicDBObject document = (BasicDBObject) result; + Document document = serializer.asDocument("foo", address); Object value = document.get("foo"); - assertThat(value, is(notNullValue())); - assertThat(value, is(instanceOf(Document.class))); + assertThat(value).isNotNull().isInstanceOf(Document.class); Object reference = converter.convertToMongoType(address); - assertThat(value, is(reference)); + assertThat(value).isEqualTo(reference); } @Test // DATAMONGO-376 - public void returnsEmptyStringIfNoPathExpressionIsGiven() { + void returnsEmptyStringIfNoPathExpressionIsGiven() { QAddress address = QPerson.person.shippingAddresses.any(); - assertThat(serializer.getKeyForPath(address, address.getMetadata()), is("")); + assertThat(serializer.getKeyForPath(address, address.getMetadata())).isEmpty(); } - @Test // DATAMONGO-467 - public void convertsIdPropertyCorrectly() { + @Test // DATAMONGO-467, DATAMONGO-1798 + void appliesImplicitIdConversion() { ObjectId id = new ObjectId(); PathBuilder
          builder = new PathBuilder
          (Address.class, "address"); StringPath idPath = builder.getString("id"); - DBObject result = (DBObject) serializer.visit((BooleanOperation) idPath.eq(id.toString()), (Void) null); - assertThat(result.get("_id"), is(notNullValue())); - assertThat(result.get("_id"), is(instanceOf(ObjectId.class))); - assertThat(result.get("_id"), is((Object) id)); + Document result = (Document) serializer.visit((BooleanOperation) idPath.eq(id.toString()), null); + assertThat(result.get("_id")).isNotNull().isInstanceOf(ObjectId.class); } @Test // DATAMONGO-761 - public void looksUpKeyForNonPropertyPath() { + void looksUpKeyForNonPropertyPath() { PathBuilder
          builder = new PathBuilder
          (Address.class, "address"); SimplePath firstElementPath = builder.getArray("foo", String[].class).get(0); String path = serializer.getKeyForPath(firstElementPath, firstElementPath.getMetadata()); - assertThat(path, is("0")); + assertThat(path).isEqualTo("0"); } - @Test // DATAMONGO-969 - public void shouldConvertObjectIdEvenWhenNestedInOperatorDbObject() { + @Test // DATAMONGO-1485 + void takesCustomConversionForEnumsIntoAccount() { + + MongoMappingContext context = new MongoMappingContext(); + + MappingMongoConverter converter = new MappingMongoConverter(dbFactory, context); + converter.setCustomConversions(new MongoCustomConversions(Collections.singletonList(new SexTypeWriteConverter()))); + converter.afterPropertiesSet(); + + this.converter = converter; + this.serializer = new SpringDataMongodbSerializer(this.converter); - ObjectId value = new ObjectId("53bb9fd14438765b29c2d56e"); - DBObject serialized = serializer.asDBObject("_id", new Document("$ne", value.toString())); + Object mappedPredicate = serializer.handle(QPerson.person.sex.eq(Sex.FEMALE)); - DBObject _id = getTypedValue(new Document(serialized.toMap()), "_id", DBObject.class); - ObjectId $ne = getTypedValue(new Document(_id.toMap()), "$ne", ObjectId.class); - assertThat($ne, is(value)); + assertThat(mappedPredicate).isInstanceOf(Document.class); + assertThat(((Document) mappedPredicate).get("sex")).isEqualTo("f"); } - @Test // DATAMONGO-969 - public void shouldConvertCollectionOfObjectIdEvenWhenNestedInOperatorDocument() { + @Test // DATAMONGO-1848, DATAMONGO-1943 + void shouldRemarshallListsAndDocuments() { - ObjectId firstId = new ObjectId("53bb9fd14438765b29c2d56e"); - ObjectId secondId = new ObjectId("53bb9fda4438765b29c2d56f"); + BooleanExpression criteria = QPerson.person.lastname.isNotEmpty() + .and(QPerson.person.firstname.containsIgnoreCase("foo")).not(); - BasicDBList objectIds = new BasicDBList(); - objectIds.add(firstId.toString()); - objectIds.add(secondId.toString()); + 
assertThat(serializer.handle(criteria)).isEqualTo(Document.parse("{ \"$or\" : [ { \"lastname\" : { \"$not\" : { " + + "\"$ne\" : \"\"}}} , { \"firstname\" : { \"$not\" : { \"$regex\" : \".*\\\\Qfoo\\\\E.*\" , \"$options\" : \"i\"}}}]}")); + } - DBObject serialized = serializer.asDBObject("_id", new Document("$in", objectIds)); + @Test // DATAMONGO-2228 + void retainsOpsInAndExpression() { - DBObject _id = getTypedValue(new Document(serialized.toMap()), "_id", DBObject.class); - List $in = getTypedValue(new Document(_id.toMap()), "$in", List.class); + PredicateOperation testExpression = predicate(Ops.AND, + predicate(Ops.OR, predicate(Ops.EQ, path(Object.class, "firstname"), constant("John")), + predicate(Ops.EQ, path(Object.class, "firstname"), constant("Sarah"))), + predicate(Ops.OR, predicate(Ops.EQ, path(Object.class, "lastname"), constant("Smith")), + predicate(Ops.EQ, path(Object.class, "lastname"), constant("Connor")))); - assertThat($in, IsIterableContainingInOrder. contains(firstId, secondId)); + assertThat(serializer.handle(testExpression)).isEqualTo(Document.parse( + "{\"$and\": [{\"$or\": [{\"firstname\": \"John\"}, {\"firstname\": \"Sarah\"}]}, {\"$or\": [{\"lastname\": \"Smith\"}, {\"lastname\": \"Connor\"}]}]}")); } - @Test // DATAMONGO-1485 - public void takesCustomConversionForEnumsIntoAccount() { + @Test // DATAMONGO-2475 + void chainedOrsInSameDocument() { - MongoMappingContext context = new MongoMappingContext(); + Predicate predicate = QPerson.person.firstname.eq("firstname_value") + .or(QPerson.person.lastname.eq("lastname_value")).or(QPerson.person.age.goe(30)).or(QPerson.person.age.loe(20)) + .or(QPerson.person.uniqueId.isNull()); - MappingMongoConverter converter = new MappingMongoConverter(dbFactory, context); - converter.setCustomConversions(new MongoCustomConversions(Collections.singletonList(new SexTypeWriteConverter()))); - converter.afterPropertiesSet(); + assertThat(serializer.handle(predicate)).isEqualTo(Document.parse( + "{\"$or\": 
[{\"firstname\": \"firstname_value\"}, {\"lastname\": \"lastname_value\"}, {\"age\": {\"$gte\": 30}}, {\"age\": {\"$lte\": 20}}, {\"uniqueId\": {\"$exists\": false}}]}")); + } - this.converter = converter; - this.serializer = new SpringDataMongodbSerializer(this.converter); + @Test // DATAMONGO-2475 + void chainedNestedOrsInSameDocument() { + + Predicate predicate = QPerson.person.firstname.eq("firstname_value") + .or(QPerson.person.lastname.eq("lastname_value")).or(QPerson.person.address.street.eq("spring")); + + assertThat(serializer.handle(predicate)).isEqualTo(Document.parse( + "{\"$or\": [{\"firstname\": \"firstname_value\"}, {\"lastname\": \"lastname_value\"}, {\"add.street\": \"spring\"}]}")); + } + + @Test // DATAMONGO-2475 + void chainedAndsInSameDocument() { + + Predicate predicate = QPerson.person.firstname.eq("firstname_value") + .and(QPerson.person.lastname.eq("lastname_value")).and(QPerson.person.age.goe(30)) + .and(QPerson.person.age.loe(20)).and(QPerson.person.uniqueId.isNull()); + + assertThat(serializer.handle(predicate)).isEqualTo(Document.parse( + "{\"$and\": [{\"firstname\": \"firstname_value\", \"lastname\": \"lastname_value\", \"age\": {\"$gte\": 30}, \"uniqueId\": {\"$exists\": false}}, {\"age\": {\"$lte\": 20}}]}")); + } - Object mappedPredicate = this.serializer.handle(QPerson.person.sex.eq(Sex.FEMALE)); + @Test // DATAMONGO-2475 + void chainMultipleAndFlattensCorrectly() { + + Document p1doc = Document.parse("{ \"$or\" : [ { \"firstname\" : \"fn\"}, { \"lastname\" : \"ln\" } ] }"); + Document p2doc = Document + .parse("{ \"$or\" : [ { \"age\" : { \"$gte\" : 20 } }, { \"age\" : { \"$lte\" : 30} } ] }"); + Document p3doc = Document.parse("{ \"$or\" : [ { \"add.city\" : \"c\"}, { \"add.zipCode\" : \"0\" } ] }"); + Document expected = new Document("$and", Arrays.asList(p1doc, p2doc, p3doc)); + + Predicate predicate1 = QPerson.person.firstname.eq("fn").or(QPerson.person.lastname.eq("ln")); + Predicate predicate2 = 
QPerson.person.age.goe(20).or(QPerson.person.age.loe(30)); + Predicate predicate3 = QPerson.person.address.city.eq("c").or(QPerson.person.address.zipCode.eq("0")); + PredicateOperation testExpression = predicate(Ops.AND, predicate1, predicate2, predicate3); + + assertThat(serializer.handle(testExpression)).isEqualTo(expected); + } + + @Test // GH-4037 + void parsesDocumentReference() { + + User user = new User(); + user.setId("007"); + Predicate predicate = QPerson.person.spiritAnimal.eq(user); + + assertThat(serializer.handle(predicate)).isEqualTo(Document.parse("{ 'spiritAnimal' : '007' }")); + } + + @Test // GH-4037 + void parsesDocumentReferenceOnId() { + + User user = new User(); + user.setId("007"); + Predicate predicate = QPerson.person.spiritAnimal.id.eq("007"); + + assertThat(serializer.handle(predicate)).isEqualTo(Document.parse("{ 'spiritAnimal' : '007' }")); + } + + @Test // GH-4709 + void appliesConversionToIdType() { + + Predicate predicate = QSpringDataMongodbSerializerUnitTests_Outer.outer.embeddedObject.id + .eq("64268a7b17ac6a00018bf312"); + + assertThat(serializer.handle(predicate)) + .isEqualTo(new Document("embedded_object._id", new ObjectId("64268a7b17ac6a00018bf312"))); + } + + @Test // GH-4709 + void appliesConversionToIdTypeForExplicitTypeRef() { + + Predicate predicate = QQuerydslRepositorySupportTests_WithMongoId.withMongoId.id.eq("64268a7b17ac6a00018bf312"); + + assertThat(serializer.handle(predicate)).isEqualTo(new Document("_id", "64268a7b17ac6a00018bf312")); + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = "record") + class Outer { + + @Id private String id; + + @Field("embedded_object") private Inner embeddedObject; + } + + @org.springframework.data.mongodb.core.mapping.Document(collection = "embedded_object") + class Inner { + @Id private String id; + } - assertThat(mappedPredicate, is(instanceOf(DBObject.class))); - assertThat(((DBObject) mappedPredicate).get("sex"), is((Object) "f")); + public class 
WithMongoId { + @MongoId private String id; } class Address { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/util/SliceUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/util/SliceUtilsUnitTests.java new file mode 100644 index 0000000000..8dc952e8a7 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/util/SliceUtilsUnitTests.java @@ -0,0 +1,80 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.util; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verifyNoInteractions; + +import java.util.stream.Stream; + +import org.bson.Document; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; + +/** + * Unit test for {@link SliceUtils}. 
+ * + * @author Christoph Strobl + */ +class SliceUtilsUnitTests { + + @ParameterizedTest // GH-4889 + @MethodSource("paged") + void pagedPageableModifiesQuery(Pageable page) { + + Query source = new BasicQuery(Document.parse("{ 'spring' : 'data' }")); + + Query target = SliceUtils.limitResult(source, page); + + assertThat(target.getQueryObject()).isEqualTo(source.getQueryObject()); + assertThat(target).isNotSameAs(source); + assertThat(target.isLimited()).isTrue(); + assertThat(target.getSkip()).isEqualTo(page.getOffset()); + assertThat(target.getLimit()).isEqualTo(page.toLimit().max() + 1); + assertThat(target.getSortObject()).isEqualTo(source.getSortObject()); + } + + @ParameterizedTest // GH-4889 + @MethodSource("unpaged") + void unpagedPageableDoesNotModifyQuery(Pageable page) { + + Query source = spy(new BasicQuery(Document.parse("{ 'spring' : 'data' }"))); + + Query target = SliceUtils.limitResult(source, page); + + verifyNoInteractions(source); + + assertThat(target).isSameAs(source); + assertThat(target.isLimited()).isFalse(); + } + + public static Stream paged() { + return Stream.of(Arguments.of(Pageable.ofSize(1)), Arguments.of(PageRequest.of(0, 10)), + Arguments.of(PageRequest.of(0, 10, Direction.ASC, "name"))); + } + + public static Stream unpaged() { + return Stream.of(Arguments.of(Pageable.unpaged()), Arguments.of(Pageable.unpaged(Sort.by("name")))); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AfterTransactionAssertion.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AfterTransactionAssertion.java new file mode 100644 index 0000000000..b0a9b5608d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AfterTransactionAssertion.java @@ -0,0 +1,60 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import org.springframework.data.domain.Persistable; + +/** + * @author Christoph Strobl + * @currentRead Shadow's Edge - Brent Weeks + */ +public class AfterTransactionAssertion { + + private final T persistable; + private boolean expectToBePresent; + + public AfterTransactionAssertion(T persistable) { + this.persistable = persistable; + } + + public void isPresent() { + expectToBePresent = true; + } + + public void isNotPresent() { + expectToBePresent = false; + } + + public Object getId() { + return persistable.getId(); + } + + public boolean shouldBePresent() { + return expectToBePresent; + } + + public T getPersistable() { + return this.persistable; + } + + public boolean isExpectToBePresent() { + return this.expectToBePresent; + } + + public void setExpectToBePresent(boolean expectToBePresent) { + this.expectToBePresent = expectToBePresent; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AssertErrors.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AssertErrors.java index 6fd3c4cc1e..9e6eef1e6e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AssertErrors.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AssertErrors.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original 
author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Assertions.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Assertions.java index cbfd71ef83..7449a66020 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Assertions.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Assertions.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -32,7 +32,7 @@ private Assertions() { /** * Create assertion for {@link Document}. * - * @param actual the actual value. + * @param document the actual value. * @return the created assertion object. 
*/ public static DocumentAssert assertThat(Document document) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AtlasContainer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AtlasContainer.java new file mode 100644 index 0000000000..c3a97a03bc --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/AtlasContainer.java @@ -0,0 +1,58 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import org.springframework.core.env.StandardEnvironment; + +import org.testcontainers.mongodb.MongoDBAtlasLocalContainer; +import org.testcontainers.utility.DockerImageName; + +/** + * Extension to MongoDBAtlasLocalContainer. 
+ * + * @author Christoph Strobl + */ +public class AtlasContainer extends MongoDBAtlasLocalContainer { + + private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("mongodb/mongodb-atlas-local"); + private static final String DEFAULT_TAG = "8.0.0"; + private static final String LATEST = "latest"; + + private AtlasContainer(String dockerImageName) { + super(DockerImageName.parse(dockerImageName)); + } + + private AtlasContainer(DockerImageName dockerImageName) { + super(dockerImageName); + } + + public static AtlasContainer bestMatch() { + return tagged(new StandardEnvironment().getProperty("mongodb.atlas.version", DEFAULT_TAG)); + } + + public static AtlasContainer latest() { + return tagged(LATEST); + } + + public static AtlasContainer version8() { + return tagged(DEFAULT_TAG); + } + + public static AtlasContainer tagged(String tag) { + return new AtlasContainer(DEFAULT_IMAGE_NAME.withTag(tag)); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/BasicDbListBuilder.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/BasicDbListBuilder.java index 2a6b345c38..b3c2361eea 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/BasicDbListBuilder.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/BasicDbListBuilder.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ClassPathExclusions.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ClassPathExclusions.java new file mode 100644 index 0000000000..94971e8f59 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ClassPathExclusions.java @@ -0,0 +1,45 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.extension.ExtendWith; + +/** + * Annotation used to exclude entries from the classpath. + * Simplified version of ClassPathExclusions. 
+ * + * @author Christoph Strobl + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Documented +@ExtendWith(ClassPathExclusionsExtension.class) +public @interface ClassPathExclusions { + + /** + * One or more packages that should be excluded from the classpath. + * + * @return the excluded packages + */ + String[] packages(); + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ClassPathExclusionsExtension.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ClassPathExclusionsExtension.java new file mode 100644 index 0000000000..db2b4730b2 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ClassPathExclusionsExtension.java @@ -0,0 +1,129 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import java.lang.reflect.Method; + +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.InvocationInterceptor; +import org.junit.jupiter.api.extension.ReflectiveInvocationContext; +import org.junit.platform.engine.discovery.DiscoverySelectors; +import org.junit.platform.launcher.Launcher; +import org.junit.platform.launcher.LauncherDiscoveryRequest; +import org.junit.platform.launcher.TestPlan; +import org.junit.platform.launcher.core.LauncherDiscoveryRequestBuilder; +import org.junit.platform.launcher.core.LauncherFactory; +import org.junit.platform.launcher.listeners.SummaryGeneratingListener; +import org.junit.platform.launcher.listeners.TestExecutionSummary; +import org.springframework.util.CollectionUtils; + +/** + * Simplified version of ModifiedClassPathExtension. + * + * @author Christoph Strobl + */ +class ClassPathExclusionsExtension implements InvocationInterceptor { + + @Override + public void interceptBeforeAllMethod(Invocation invocation, + ReflectiveInvocationContext invocationContext, ExtensionContext extensionContext) throws Throwable { + intercept(invocation, extensionContext); + } + + @Override + public void interceptBeforeEachMethod(Invocation invocation, + ReflectiveInvocationContext invocationContext, ExtensionContext extensionContext) throws Throwable { + intercept(invocation, extensionContext); + } + + @Override + public void interceptAfterEachMethod(Invocation invocation, + ReflectiveInvocationContext invocationContext, ExtensionContext extensionContext) throws Throwable { + intercept(invocation, extensionContext); + } + + @Override + public void interceptAfterAllMethod(Invocation invocation, + ReflectiveInvocationContext invocationContext, ExtensionContext extensionContext) throws Throwable { + intercept(invocation, extensionContext); + } + + @Override + public void interceptTestMethod(Invocation invocation, ReflectiveInvocationContext 
invocationContext, + ExtensionContext extensionContext) throws Throwable { + interceptMethod(invocation, invocationContext, extensionContext); + } + + @Override + public void interceptTestTemplateMethod(Invocation invocation, + ReflectiveInvocationContext invocationContext, ExtensionContext extensionContext) throws Throwable { + interceptMethod(invocation, invocationContext, extensionContext); + } + + private void interceptMethod(Invocation invocation, ReflectiveInvocationContext invocationContext, + ExtensionContext extensionContext) throws Throwable { + + if (isModifiedClassPathClassLoader(extensionContext)) { + invocation.proceed(); + return; + } + + Class testClass = extensionContext.getRequiredTestClass(); + Method testMethod = invocationContext.getExecutable(); + PackageExcludingClassLoader modifiedClassLoader = PackageExcludingClassLoader.get(testClass, testMethod); + if (modifiedClassLoader == null) { + invocation.proceed(); + return; + } + invocation.skip(); + ClassLoader originalClassLoader = Thread.currentThread().getContextClassLoader(); + Thread.currentThread().setContextClassLoader(modifiedClassLoader); + try { + runTest(extensionContext.getUniqueId()); + } finally { + Thread.currentThread().setContextClassLoader(originalClassLoader); + } + } + + private void runTest(String testId) throws Throwable { + + LauncherDiscoveryRequest request = LauncherDiscoveryRequestBuilder.request() + .selectors(DiscoverySelectors.selectUniqueId(testId)).build(); + Launcher launcher = LauncherFactory.create(); + TestPlan testPlan = launcher.discover(request); + SummaryGeneratingListener listener = new SummaryGeneratingListener(); + launcher.registerTestExecutionListeners(listener); + launcher.execute(testPlan); + TestExecutionSummary summary = listener.getSummary(); + if (!CollectionUtils.isEmpty(summary.getFailures())) { + throw summary.getFailures().get(0).getException(); + } + } + + private void intercept(Invocation invocation, ExtensionContext extensionContext) 
throws Throwable { + if (isModifiedClassPathClassLoader(extensionContext)) { + invocation.proceed(); + return; + } + invocation.skip(); + } + + private boolean isModifiedClassPathClassLoader(ExtensionContext extensionContext) { + Class testClass = extensionContext.getRequiredTestClass(); + ClassLoader classLoader = testClass.getClassLoader(); + return classLoader.getClass().getName().equals(PackageExcludingClassLoader.class.getName()); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDB.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDB.java index 9e5ee66586..8f28f0fdf0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDB.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDB.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,19 +20,22 @@ import java.util.Collection; import java.util.Collections; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.Set; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.bson.Document; import org.junit.rules.TestRule; import org.junit.runner.Description; import org.junit.runners.model.Statement; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; + import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; /** * {@link CleanMongoDB} is a junit {@link TestRule} implementation to be used as for wiping data from MongoDB instance. @@ -46,7 +49,7 @@ */ public class CleanMongoDB implements TestRule { - private static final Logger LOGGER = LoggerFactory.getLogger(CleanMongoDB.class); + private static final Log LOGGER = LogFactory.getLog(CleanMongoDB.class); /** * Defines contents of MongoDB. @@ -55,7 +58,7 @@ public enum Struct { DATABASE, COLLECTION, INDEX; } - @SuppressWarnings("serial")// + @SuppressWarnings("serial") // private Set preserveDatabases = new HashSet() { { add("admin"); @@ -83,7 +86,7 @@ public CleanMongoDB() { * @throws UnknownHostException */ public CleanMongoDB(String host, int port) throws UnknownHostException { - this(new MongoClient(host, port)); + this(MongoTestUtils.client(host, port)); } /** @@ -96,7 +99,7 @@ public CleanMongoDB(MongoClient client) { } /** - * Removes everything by dropping every single {@link DB}. 
+ * Removes everything by dropping every single {@link MongoDatabase}. * * @return */ @@ -108,7 +111,7 @@ public static CleanMongoDB everything() { } /** - * Removes everything from the databases with given name by dropping the according {@link DB}. + * Removes everything from the databases with given name by dropping the according {@link MongoDatabase}. * * @param dbNames * @return @@ -122,7 +125,7 @@ public static CleanMongoDB databases(String... dbNames) { } /** - * Drops the {@link DBCollection} with given names from every single {@link DB} containing them. + * Drops the {@link MongoCollection} with given names from every single {@link MongoDatabase} containing them. * * @param collectionNames * @return @@ -132,7 +135,7 @@ public static CleanMongoDB collections(String... collectionNames) { } /** - * Drops the {@link DBCollection} with given names from the named {@link DB}. + * Drops the {@link MongoCollection} with given names from the named {@link MongoDatabase}. * * @param dbName * @param collectionNames @@ -147,7 +150,7 @@ public static CleanMongoDB collections(String dbName, Collection collect } /** - * Drops all index structures from every single {@link DBCollection}. + * Drops all index structures from every single {@link MongoCollection}. * * @return */ @@ -156,7 +159,7 @@ public static CleanMongoDB indexes() { } /** - * Drops all index structures from every single {@link DBCollection}. + * Drops all index structures from every single {@link MongoCollection}. * * @param collectionNames * @return @@ -182,7 +185,7 @@ public CleanMongoDB clean(Struct... types) { } /** - * Defines the {@link DB}s to be used.
          + * Defines the {@link MongoDatabase}s to be used.
          * Impact along with {@link CleanMongoDB#clean(Struct...)}: *
            *
          • {@link Struct#DATABASE}: Forces drop of named databases.
          • @@ -200,7 +203,7 @@ public CleanMongoDB useDatabases(String... dbNames) { } /** - * Excludes the given {@link DB}s from being processed. + * Excludes the given {@link MongoDatabase}s from being processed. * * @param dbNames * @return @@ -211,7 +214,7 @@ public CleanMongoDB preserveDatabases(String... dbNames) { } /** - * Defines the {@link DBCollection}s to be used.
            + * Defines the {@link MongoCollection}s to be used.
            * Impact along with {@link CleanMongoDB#clean(Struct...)}: *
              *
            • {@link Struct#COLLECTION}: Forces drop of named collections.
            • @@ -230,7 +233,7 @@ private CleanMongoDB useCollections(Collection collectionNames) { } /** - * Defines the {@link DBCollection}s and {@link DB} to be used.
              + * Defines the {@link MongoCollection}s and {@link MongoDatabase} to be used.
              * Impact along with {@link CleanMongoDB#clean(Struct...)}: *
                *
              • {@link Struct#COLLECTION}: Forces drop of named collections in given db.
              • @@ -256,10 +259,6 @@ Statement apply() { return apply(null, null); } - /* - * (non-Javadoc) - * @see org.junit.rules.TestRule#apply(org.junit.runners.model.Statement, org.junit.runner.Description) - */ public Statement apply(Statement base, Description description) { return new MongoCleanStatement(base); } @@ -274,7 +273,7 @@ private void doClean() { continue; } - DB db = client.getDB(dbName); + MongoDatabase db = client.getDatabase(dbName); dropCollectionsOrIndexIfRequried(db, initCollectionNames(db)); } } @@ -285,26 +284,35 @@ private boolean dropDbIfRequired(String dbName) { return false; } - client.dropDatabase(dbName); - LOGGER.debug("Dropping DB '{}'. ", dbName); + client.getDatabase(dbName).drop(); + if(LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Dropping DB '%s'; ", dbName)); + } return true; } - private void dropCollectionsOrIndexIfRequried(DB db, Collection collectionsToUse) { + private void dropCollectionsOrIndexIfRequried(MongoDatabase db, Collection collectionsToUse) { + + Collection availableCollections = db.listCollectionNames().into(new LinkedHashSet<>()); for (String collectionName : collectionsToUse) { - if (db.collectionExists(collectionName)) { + if (availableCollections.contains(collectionName)) { - DBCollection collection = db.getCollectionFromString(collectionName); + MongoCollection collection = db.getCollection(collectionName); if (collection != null) { if (types.contains(Struct.COLLECTION)) { collection.drop(); - LOGGER.debug("Dropping collection '{}' for DB '{}'. ", collectionName, db.getName()); + if(LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Dropping collection '%s' for DB '%s'; ", collectionName, db.getName())); + } } else if (types.contains(Struct.INDEX)) { collection.dropIndexes(); - LOGGER.debug("Dropping indexes in collection '{}' for DB '{}'. 
", collectionName, db.getName()); + if(LOGGER.isDebugEnabled()) { + LOGGER.debug( + String.format("Dropping indexes in collection '%s' for DB '%s'; ", collectionName, db.getName())); + } } } } @@ -319,16 +327,16 @@ private Collection initDbNames() { Collection dbNamesToUse = dbNames; if (dbNamesToUse.isEmpty()) { - dbNamesToUse = client.getDatabaseNames(); + dbNamesToUse = client.listDatabaseNames().into(new LinkedHashSet<>()); } return dbNamesToUse; } - private Collection initCollectionNames(DB db) { + private Collection initCollectionNames(MongoDatabase db) { Collection collectionsToUse = collectionNames; if (CollectionUtils.isEmpty(collectionsToUse)) { - collectionsToUse = db.getCollectionNames(); + collectionsToUse = db.listCollectionNames().into(new LinkedHashSet<>()); } return collectionsToUse; } @@ -354,14 +362,13 @@ public void evaluate() throws Throwable { boolean isInternal = false; if (client == null) { - client = new MongoClient(); + client = MongoTestUtils.client(); isInternal = true; } doClean(); if (isInternal) { - client.close(); client = null; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBJunitRunListener.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBJunitRunListener.java index 152e582321..ecb18d4e04 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBJunitRunListener.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBJunitRunListener.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBTests.java index 85356054d3..f2fd993ef8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CleanMongoDBTests.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,27 +18,35 @@ import static org.mockito.Mockito.*; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; -import java.util.HashSet; -import org.junit.Before; -import org.junit.Test; +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.junit.runner.Description; -import org.junit.runner.RunWith; import org.junit.runners.model.Statement; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; import org.springframework.data.mongodb.test.util.CleanMongoDB.Struct; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; -import com.mongodb.DB; -import com.mongodb.DBCollection; -import com.mongodb.MongoClient; +import com.mongodb.client.ListDatabasesIterable; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.MongoIterable; /** * @author Christoph Strobl + * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner.class) -public class CleanMongoDBTests { +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class CleanMongoDBTests { private CleanMongoDB cleaner; @@ -50,97 +58,96 @@ public class CleanMongoDBTests { private @Mock MongoClient mongoClientMock; // Some Mock DBs - private @Mock DB db1mock, db2mock; - private @Mock DBCollection db1collection1mock, db1collection2mock, db2collection1mock; + private @Mock MongoDatabase db1mock, db2mock, 
admin; + private @Mock MongoCollection db1collection1mock, db1collection2mock, db2collection1mock; - @SuppressWarnings("serial") - @Before - public void setUp() { + @SuppressWarnings({ "serial", "unchecked" }) + @BeforeEach + void setUp() throws ClassNotFoundException { // DB setup - when(mongoClientMock.getDatabaseNames()).thenReturn(Arrays.asList("admin", "db1", "db2")); - when(mongoClientMock.getDB(eq("db1"))).thenReturn(db1mock); - when(mongoClientMock.getDB(eq("db2"))).thenReturn(db2mock); + + ListDatabasesIterable dbIterable = mock(ListDatabasesIterable.class); + when(dbIterable.into(any(Collection.class))).thenReturn(Arrays.asList("admin", "db1", "db2")); + when(mongoClientMock.listDatabaseNames()).thenReturn(dbIterable); + when(mongoClientMock.getDatabase(eq("db1"))).thenReturn(db1mock); + when(mongoClientMock.getDatabase(eq("db2"))).thenReturn(db2mock); // collections have to exist - when(db1mock.collectionExists(anyString())).thenReturn(true); - when(db2mock.collectionExists(anyString())).thenReturn(true); - - // init collection names per database - when(db1mock.getCollectionNames()).thenReturn(new HashSet() { - { - add("db1collection1"); - add("db1collection2"); - } - }); - when(db2mock.getCollectionNames()).thenReturn(Collections.singleton("db2collection1")); + MongoIterable collectionIterable = mock(MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(db1mock).collectionNameIterableType()); + when(collectionIterable.into(any(Collection.class))).thenReturn(Arrays.asList("db1collection1", "db1collection2")); + doReturn(collectionIterable).when(db1mock).listCollectionNames(); + + MongoIterable collectionIterable2 = mock(MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(db2mock).collectionNameIterableType()); + when(collectionIterable2.into(any(Collection.class))).thenReturn(Collections.singletonList("db2collection1")); + doReturn(collectionIterable2).when(db2mock).listCollectionNames(); // return collections according to names - 
when(db1mock.getCollectionFromString(eq("db1collection1"))).thenReturn(db1collection1mock); - when(db1mock.getCollectionFromString(eq("db1collection2"))).thenReturn(db1collection2mock); - when(db2mock.getCollectionFromString(eq("db2collection1"))).thenReturn(db2collection1mock); + when(db1mock.getCollection(eq("db1collection1"))).thenReturn(db1collection1mock); + when(db1mock.getCollection(eq("db1collection2"))).thenReturn(db1collection2mock); + when(db2mock.getCollection(eq("db2collection1"))).thenReturn(db2collection1mock); cleaner = new CleanMongoDB(mongoClientMock); } @Test - public void preservesSystemDBsCorrectlyWhenCleaningDatabase() throws Throwable { + void preservesSystemDBsCorrectlyWhenCleaningDatabase() throws Throwable { cleaner.clean(Struct.DATABASE); cleaner.apply(baseStatementMock, descriptionMock).evaluate(); - verify(mongoClientMock, never()).dropDatabase(eq("admin")); + verify(admin, never()).drop(); } @Test - public void preservesNamedDBsCorrectlyWhenCleaningDatabase() throws Throwable { + void preservesNamedDBsCorrectlyWhenCleaningDatabase() throws Throwable { cleaner.clean(Struct.DATABASE); cleaner.preserveDatabases("db1"); cleaner.apply(baseStatementMock, descriptionMock).evaluate(); - verify(mongoClientMock, never()).dropDatabase(eq("db1")); + verify(db1mock, never()).drop(); } @Test - public void dropsAllDBsCorrectlyWhenCleaingDatabaseAndNotExplictDBNamePresent() throws Throwable { + void dropsAllDBsCorrectlyWhenCleaingDatabaseAndNotExplictDBNamePresent() throws Throwable { cleaner.clean(Struct.DATABASE); cleaner.apply(baseStatementMock, descriptionMock).evaluate(); - verify(mongoClientMock, times(1)).dropDatabase(eq("db1")); - verify(mongoClientMock, times(1)).dropDatabase(eq("db2")); + verify(db1mock).drop(); + verify(db2mock).drop(); } @Test - public void dropsSpecifiedDBsCorrectlyWhenExplicitNameSet() throws Throwable { + void dropsSpecifiedDBsCorrectlyWhenExplicitNameSet() throws Throwable { cleaner.clean(Struct.DATABASE); 
cleaner.useDatabases("db2"); cleaner.apply(baseStatementMock, descriptionMock).evaluate(); - verify(mongoClientMock, times(1)).dropDatabase(eq("db2")); - verify(mongoClientMock, never()).dropDatabase(eq("db1")); + verify(db2mock).drop(); + verify(db1mock, never()).drop(); } @Test - public void doesNotRemoveAnyDBwhenCleaningCollections() throws Throwable { + void doesNotRemoveAnyDBwhenCleaningCollections() throws Throwable { cleaner.clean(Struct.COLLECTION); cleaner.apply(baseStatementMock, descriptionMock).evaluate(); - verify(mongoClientMock, never()).dropDatabase(eq("db1")); - verify(mongoClientMock, never()).dropDatabase(eq("db2")); - verify(mongoClientMock, never()).dropDatabase(eq("admin")); + verify(db1mock, never()).drop(); + verify(db2mock, never()).drop(); + verify(admin, never()).drop(); } @Test - public void doesNotDropCollectionsFromPreservedDBs() throws Throwable { + void doesNotDropCollectionsFromPreservedDBs() throws Throwable { cleaner.clean(Struct.COLLECTION); cleaner.preserveDatabases("db1"); @@ -153,7 +160,7 @@ public void doesNotDropCollectionsFromPreservedDBs() throws Throwable { } @Test - public void removesAllCollectionsFromAllDatabasesWhenNotLimitedToSpecificOnes() throws Throwable { + void removesAllCollectionsFromAllDatabasesWhenNotLimitedToSpecificOnes() throws Throwable { cleaner.clean(Struct.COLLECTION); @@ -165,7 +172,7 @@ public void removesAllCollectionsFromAllDatabasesWhenNotLimitedToSpecificOnes() } @Test - public void removesOnlyNamedCollectionsWhenSpecified() throws Throwable { + void removesOnlyNamedCollectionsWhenSpecified() throws Throwable { cleaner.clean(Struct.COLLECTION); cleaner.useCollections("db1collection2"); @@ -178,15 +185,15 @@ public void removesOnlyNamedCollectionsWhenSpecified() throws Throwable { } @Test - public void removesIndexesCorrectly() throws Throwable { + void removesIndexesCorrectly() throws Throwable { cleaner.clean(Struct.INDEX); cleaner.apply(baseStatementMock, descriptionMock).evaluate(); - 
verify(mongoClientMock, never()).dropDatabase(eq("db1")); - verify(mongoClientMock, never()).dropDatabase(eq("db2")); - verify(mongoClientMock, never()).dropDatabase(eq("admin")); + verify(db1mock, never()).drop(); + verify(db2mock, never()).drop(); + verify(admin, never()).drop(); verify(db1collection1mock, times(1)).dropIndexes(); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Client.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Client.java new file mode 100644 index 0000000000..064c6edf7b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Client.java @@ -0,0 +1,42 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.extension.ExtendWith; + +/** + * Marks a field or method as to be autowired by JUnit's dependency injection facilities for injection of a MongoDB + * client instance. Depends on {@link MongoClientExtension}. 
+ * + * @author Christoph Strobl + * @see com.mongodb.client.MongoClient + * @see com.mongodb.reactivestreams.client.MongoClient + * @see ReplSetClient + * @see MongoClientExtension + */ +@Target({ ElementType.FIELD, ElementType.PARAMETER }) +@Retention(RetentionPolicy.RUNTIME) +@Documented +@ExtendWith(MongoClientExtension.class) +public @interface Client { + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CollectionInfo.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CollectionInfo.java new file mode 100644 index 0000000000..2ae41f734d --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/CollectionInfo.java @@ -0,0 +1,98 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.util.List; + +import org.bson.Document; +import org.springframework.util.ObjectUtils; + +import com.mongodb.client.model.Collation; + +/** + * Value Object providing methods for accessing collection/view information within a raw {@link Document}. 
+ * + * @author Christoph Strobl + */ +public class CollectionInfo { + + private final Document source; + + public static CollectionInfo from(Document source) { + return new CollectionInfo(source); + } + + CollectionInfo(Document source) { + this.source = source; + } + + /** + * @return the collection/view name. + */ + public String getName() { + return source.getString("name"); + } + + /** + * @return {@literal true} if the {@literal type} equals {@literal view}. + */ + public boolean isView() { + return ObjectUtils.nullSafeEquals("view", source.get("type")); + } + + /** + * @return the {@literal options.viewOn} value. + * @throws IllegalStateException if not {@link #isView() a view}. + */ + public String getViewTarget() { + + if (isView()) { + return getOptionValue("viewOn", String.class); + } + throw new IllegalStateException(getName() + " is not a view"); + } + + /** + * @return the {@literal options.pipeline} value. + * @throws IllegalStateException if not {@link #isView() a view}. + */ + public List getViewPipeline() { + + if (isView()) { + return getOptions().getList("pipeline", Document.class); + } + + throw new IllegalStateException(getName() + " is not a view"); + } + + /** + * @return the {@literal options.collation} value. + * @throws IllegalStateException if not {@link #isView() a view}. 
+ */ + public Collation getCollation() { + + return org.springframework.data.mongodb.core.query.Collation.from(getOptionValue("collation", Document.class)) + .toMongoCollation(); + } + + private Document getOptions() { + return source.get("options", Document.class); + } + + private T getOptionValue(String key, Class type) { + return getOptions().get(key, type); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DirtiesStateExtension.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DirtiesStateExtension.java new file mode 100644 index 0000000000..2407208fe0 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DirtiesStateExtension.java @@ -0,0 +1,112 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.reflect.Method; + +import org.junit.jupiter.api.extension.AfterEachCallback; +import org.junit.jupiter.api.extension.BeforeEachCallback; +import org.junit.jupiter.api.extension.ExtensionContext; + +/** + * Extension to consider tests that {@code @DirtiesState} and {@code @ProvidesState} through annotations. 
+ * + * @author Mark Paluch + */ +public class DirtiesStateExtension implements BeforeEachCallback, AfterEachCallback { + + /** + * Test method that changes the data state by saving or deleting objects. + */ + @Retention(RetentionPolicy.RUNTIME) + public @interface DirtiesState { + + } + + /** + * Test method that sets up its state within the test method itself. + */ + @Retention(RetentionPolicy.RUNTIME) + public @interface ProvidesState { + + } + + /** + * Interface to be implemented by tests that make use of {@link DirtiesStateExtension}. + */ + public interface StateFunctions { + + /** + * Clear the state. + */ + void clear(); + + /** + * Setup the test fixture. + */ + void setupState(); + } + + static final String STATE_KEY = "state"; + + @Override + public void beforeEach(ExtensionContext context) throws Exception { + + Method method = context.getTestMethod().orElse(null); + Object instance = context.getTestInstance().orElse(null); + + if (method == null || instance == null) { + return; + } + + if (method.isAnnotationPresent(ProvidesState.class)) { + ((StateFunctions) instance).clear(); + return; + } + + ExtensionContext.Store mongo = getStore(context); + Boolean state = mongo.get(STATE_KEY, Boolean.class); + + if (state == null) { + + ((StateFunctions) instance).clear(); + ((StateFunctions) instance).setupState(); + mongo.put(STATE_KEY, true); + } + } + + private ExtensionContext.Store getStore(ExtensionContext context) { + return context.getParent().get() + .getStore(ExtensionContext.Namespace.create("mongo-" + context.getRequiredTestClass().getName())); + } + + @Override + public void afterEach(ExtensionContext context) throws Exception { + + Method method = context.getTestMethod().orElse(null); + + if (method == null) { + return; + } + + if (method.isAnnotationPresent(DirtiesState.class) || method.isAnnotationPresent(ProvidesState.class)) { + ExtensionContext.Store mongo = getStore(context); + mongo.remove(STATE_KEY); + } + } +} diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java index c7a05f0350..d4360b4d95 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,10 +21,6 @@ import static org.assertj.core.error.ShouldNotContain.*; import static org.assertj.core.error.ShouldNotContainKeys.*; -import lombok.AccessLevel; -import lombok.Getter; -import lombok.RequiredArgsConstructor; - import java.util.Arrays; import java.util.Iterator; import java.util.LinkedHashSet; @@ -67,14 +63,10 @@ public class DocumentAssert extends AbstractMapAssert lookup = lookup(key); @@ -85,14 +77,32 @@ public DocumentAssert containsEntry(String key, Object value) { return myself; } - /* - * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#doesNotContainEntry(java.lang.Object, java.lang.Object) + /** + * Verifies that the actual value is equal to the given one by accepting the expected {@link Document} in its + * JSON/BSON representation. + *

                + * Example: + * + *

                +	 *  // assertions will pass
                +	 * assertThat(Document.parse("{foo: 1}").isEqualTo("{foo: 1}");
                +	 * 
                + * + * @param expectedBson the given value to compare the actual value to in BSON/JSON format. + * @return {@code this} assertion object. + * @throws AssertionError if the actual value is not equal to the given one. + * @see Document#parse(String) */ + public DocumentAssert isEqualTo(String expectedBson) { + + isEqualTo(Document.parse(expectedBson)); + return myself; + } + @Override public DocumentAssert doesNotContainEntry(String key, Object value) { - Assert.hasText(key, "The key to look for must not be empty!"); + Assert.hasText(key, "The key to look for must not be empty"); Lookup lookup = lookup(key); @@ -103,21 +113,13 @@ public DocumentAssert doesNotContainEntry(String key, Object value) { return myself; } - /* - * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#containsKey(java.lang.Object) - */ @Override public DocumentAssert containsKey(String key) { return containsKeys(key); } - /* - * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#containsKeys(java.lang.Object[]) - */ @Override - public final DocumentAssert containsKeys(String... keys) { + protected DocumentAssert containsKeysForProxy(String[] keys) { Set notFound = new LinkedHashSet<>(); @@ -135,21 +137,13 @@ public final DocumentAssert containsKeys(String... keys) { return myself; } - /* - * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#doesNotContainKey(java.lang.Object) - */ @Override public DocumentAssert doesNotContainKey(String key) { return doesNotContainKeys(key); } - /* - * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#doesNotContainKeys(java.lang.Object[]) - */ @Override - public final DocumentAssert doesNotContainKeys(String... keys) { + protected DocumentAssert doesNotContainKeysForProxy(String[] keys) { Set found = new LinkedHashSet<>(); for (String key : keys) { @@ -169,13 +163,8 @@ public final DocumentAssert doesNotContainKeys(String... 
keys) { // used in soft assertions which need to be able to proxy method - @SafeVarargs requiring method to be final prevents // using proxies. - /* - * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#contains(java.util.Map.Entry[]) - */ - @SafeVarargs @Override - public final DocumentAssert contains(Map.Entry... entries) { + protected DocumentAssert containsForProxy(Entry[] entries) { // if both actual and values are empty, then assertion passes. if (actual.isEmpty() && entries.length == 0) { @@ -194,14 +183,8 @@ public final DocumentAssert contains(Map.Entry... entries) { - + protected DocumentAssert containsAnyOfForProxy(Entry[] entries) { for (Map.Entry entry : entries) { if (containsEntry(entry)) { return myself; @@ -211,24 +194,13 @@ public final DocumentAssert containsAnyOf(Map.Entry... entries) { + protected DocumentAssert containsOnlyForProxy(Entry[] entries) { throw new UnsupportedOperationException(); } - /* - * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#doesNotContain(java.util.Map.Entry[]) - */ - @SafeVarargs @Override - public final DocumentAssert doesNotContain(Map.Entry... entries) { - + protected DocumentAssert doesNotContainForProxy(Entry[] entries) { Set> found = new LinkedHashSet<>(); for (Map.Entry entry : entries) { @@ -243,13 +215,8 @@ public final DocumentAssert doesNotContain(Map.Entry... entries) { + protected DocumentAssert containsExactlyForProxy(Entry[] entries) { throw new UnsupportedOperationException(); } @@ -267,20 +234,14 @@ private Lookup lookup(String path) { @SuppressWarnings("unchecked") private static Lookup lookup(Bson source, String path) { - String[] fragments = path.split("(? 
it = Arrays.asList(fragments).iterator(); Object current = source; @@ -288,8 +249,8 @@ private static Lookup lookup(Bson source, String path) { String key = it.next().replace("\\.", "."); - if (!(current instanceof Bson) && !key.startsWith("[")) { - return Lookup.found(null); + if ((!(current instanceof Bson) && !(current instanceof Map)) && !key.startsWith("[")) { + return Lookup.notFound(); } if (key.startsWith("[")) { @@ -316,6 +277,17 @@ private static Lookup lookup(Bson source, String path) { current = document.get(key); } + else if (current instanceof Map) { + + Map document = (Map) current; + + if (!it.hasNext() && !document.containsKey(key)) { + return Lookup.notFound(); + } + + current = document.get(key); + } + if (!it.hasNext()) { return Lookup.found((T) current); } @@ -325,10 +297,6 @@ private static Lookup lookup(Bson source, String path) { return Lookup.notFound(); } - /* - * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#hasEntrySatisfying(java.lang.Object, org.assertj.core.api.Condition) - */ @Override public DocumentAssert hasEntrySatisfying(String key, Condition valueCondition) { @@ -341,10 +309,6 @@ public DocumentAssert hasEntrySatisfying(String key, Condition v return myself; } - /* - * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#hasEntrySatisfying(java.lang.Object, java.util.function.Consumer) - */ @Override public DocumentAssert hasEntrySatisfying(String key, Consumer valueRequirements) { @@ -355,13 +319,16 @@ public DocumentAssert hasEntrySatisfying(String key, Consumer va return myself; } - @RequiredArgsConstructor(access = AccessLevel.PRIVATE) - @Getter static class Lookup { private final T value; private final boolean pathFound; + private Lookup(T value, boolean pathFound) { + this.value = value; + this.pathFound = pathFound; + } + /** * Factory method to construct a lookup with a hit. 
* @@ -380,5 +347,13 @@ static Lookup found(T value) { static Lookup notFound() { return new Lookup<>(null, false); } + + public T getValue() { + return this.value; + } + + public boolean isPathFound() { + return this.pathFound; + } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfMongoServerVersion.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfMongoServerVersion.java new file mode 100644 index 0000000000..fa56d2c2a1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfMongoServerVersion.java @@ -0,0 +1,55 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtendWith; + +/** + * {@code @EnableIfMongoServerVersion} is used to signal that the annotated test class or test method is only + * enabled if the value of the specified version boundaries {@link #isGreaterThanEqual()} and + * {@link #isLessThan()} match the connected MongoDB server version. 
+ * + * @author Christoph Strobl + * @since 3.0 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Documented +@Tag("version-specific") +@ExtendWith(MongoServerCondition.class) +public @interface EnableIfMongoServerVersion { + + /** + * Inclusive lower bound of MongoDB server range. + * + * @return {@code 0.0.0} by default. + */ + String isGreaterThanEqual() default "0.0.0"; + + /** + * Exclusive upper bound of MongoDB server range. + * + * @return {@code 9999.9999.9999} by default. + */ + String isLessThan() default "9999.9999.9999"; +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfReplicaSetAvailable.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfReplicaSetAvailable.java new file mode 100644 index 0000000000..bc22e53569 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfReplicaSetAvailable.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtendWith; + +/** + * {@link EnableIfReplicaSetAvailable} marks a specific test class or method to be only executed against a server + * running in replicaSet mode. Intended to be used along with {@link MongoServerCondition}. + * + * @author Christoph Strobl + * @since 3.0 + */ +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Retention(RetentionPolicy.RUNTIME) +@Documented +@Tag("replSet") +@ExtendWith(MongoServerCondition.class) +public @interface EnableIfReplicaSetAvailable { + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfVectorSearchAvailable.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfVectorSearchAvailable.java new file mode 100644 index 0000000000..da008d9ee4 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/EnableIfVectorSearchAvailable.java @@ -0,0 +1,37 @@ +/* + * Copyright 2024 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtendWith; + +/** + * @author Christoph Strobl + */ +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Retention(RetentionPolicy.RUNTIME) +@Documented +@Tag("vector-search") +@ExtendWith(MongoServerCondition.class) +public @interface EnableIfVectorSearchAvailable { + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ExcludeReactiveClientFromClassPath.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ExcludeReactiveClientFromClassPath.java new file mode 100644 index 0000000000..894f2ec882 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ExcludeReactiveClientFromClassPath.java @@ -0,0 +1,34 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @author Christoph Strobl + * @see ClassPathExclusions + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Documented +@ClassPathExclusions(packages = { "com.mongodb.reactivestreams.client" }) +public @interface ExcludeReactiveClientFromClassPath { + +} diff --git a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/RelatedDocument.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ExcludeSyncClientFromClassPath.java similarity index 62% rename from spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/RelatedDocument.java rename to spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ExcludeSyncClientFromClassPath.java index 027738306a..eba7d228ef 100644 --- a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/RelatedDocument.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ExcludeSyncClientFromClassPath.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2024-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,19 +13,22 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.springframework.data.mongodb.crossstore; +package org.springframework.data.mongodb.test.util; +import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** - * @author Thomas Risberg - * @deprecated will be removed without replacement. + * @author Christoph Strobl + * @see ClassPathExclusions */ -@Deprecated @Retention(RetentionPolicy.RUNTIME) -@Target({ ElementType.FIELD }) -public @interface RelatedDocument { +@Target({ ElementType.TYPE, ElementType.METHOD }) +@Documented +@ClassPathExclusions(packages = { "com.mongodb.client" }) +public @interface ExcludeSyncClientFromClassPath { + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/IsBsonObject.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/IsBsonObject.java deleted file mode 100644 index defd428dd5..0000000000 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/IsBsonObject.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright 2015-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.test.util; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.NoSuchElementException; - -import org.bson.Document; -import org.bson.conversions.Bson; -import org.hamcrest.Description; -import org.hamcrest.TypeSafeMatcher; -import org.hamcrest.core.IsEqual; -import org.springframework.data.mongodb.core.query.SerializationUtils; -import org.springframework.util.ClassUtils; - -/** - * @author Christoph Strobl - * @param - */ -public class IsBsonObject extends TypeSafeMatcher { - - private List expectations = new ArrayList<>(); - private Integer expectedSize; - - public static IsBsonObject isBsonObject() { - return new IsBsonObject(); - } - - @Override - protected void describeMismatchSafely(T item, Description mismatchDescription) { - mismatchDescription.appendText("was ").appendValue(SerializationUtils.serializeToJsonSafely(item)); - } - - @Override - public void describeTo(Description description) { - - if (expectedSize != null) { - description.appendText(String.format("Expected to contain %s fields. ", expectedSize)); - } - - for (ExpectedBsonContent expectation : expectations) { - - if (expectation.not) { - description.appendText(String.format("Path %s should not be present. ", expectation.path)); - } else if (expectation.value == null) { - description.appendText(String.format("Expected to find path %s. ", expectation.path)); - } else { - description.appendText(String.format("Expected to find %s for path %s. 
", expectation.value, expectation.path)); - } - } - } - - @Override - protected boolean matchesSafely(T item) { - - if (expectedSize != null && item instanceof Document) { - - Document document = (Document) item; - if (expectedSize != document.keySet().size()) { - return false; - } - } - - if (expectations.isEmpty()) { - return true; - } - - for (ExpectedBsonContent expectation : expectations) { - - Object o = getValue(item, expectation.path); - - if (o == null && expectation.not) { - return true; - } - - if (o == null) { - return false; - } - - if (expectation.type != null) { - - if (ClassUtils.isAssignable(List.class, expectation.type) - && ClassUtils.isAssignable(List.class, o.getClass())) { - return true; - } - - return ClassUtils.isAssignable(expectation.type, o.getClass()); - } - - if (expectation.value != null && !new IsEqual(expectation.value).matches(o)) { - return false; - } - - if (o != null && expectation.not) { - return false; - } - - } - return true; - } - - public IsBsonObject containing(String key) { - - ExpectedBsonContent expected = new ExpectedBsonContent(); - expected.path = key; - - this.expectations.add(expected); - return this; - } - - public IsBsonObject containing(String key, Class type) { - - ExpectedBsonContent expected = new ExpectedBsonContent(); - expected.path = key; - expected.type = type; - - this.expectations.add(expected); - return this; - } - - public IsBsonObject containing(String key, Object value) { - - if (value == null) { - return notContaining(key); - } - - ExpectedBsonContent expected = new ExpectedBsonContent(); - expected.path = key; - expected.type = ClassUtils.getUserClass(value); - expected.value = value; - - this.expectations.add(expected); - return this; - } - - public IsBsonObject notContaining(String key) { - - ExpectedBsonContent expected = new ExpectedBsonContent(); - expected.path = key; - expected.not = true; - - this.expectations.add(expected); - return this; - } - - public IsBsonObject withSize(int size) { - 
- this.expectedSize = Integer.valueOf(size); - return this; - } - - static class ExpectedBsonContent { - String path; - Class type; - Object value; - boolean not = false; - } - - Object getValue(Bson source, String path) { - - String[] fragments = path.split("(? it = Arrays.asList(fragments).iterator(); - - Object current = source; - while (it.hasNext()) { - - String key = it.next().replace("\\.", "."); - - if (!(current instanceof Bson) && !key.startsWith("[")) { - return null; - } - - if (key.startsWith("[")) { - String indexNumber = key.substring(1, key.indexOf("]")); - if (current instanceof List) { - current = ((List) current).get(Integer.valueOf(indexNumber)); - } - if (!it.hasNext()) { - return current; - } - } else { - - if (current instanceof Document) { - current = ((Document) current).get(key); - } - - if (!it.hasNext()) { - return current; - } - - } - } - - throw new NoSuchElementException(String.format("Unable to find '%s' in %s.", path, source)); - } -} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java new file mode 100644 index 0000000000..15a0538600 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java @@ -0,0 +1,50 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import org.springframework.lang.Nullable; + +/** + * Utility to configure {@link org.springframework.data.mongodb.core.mapping.MongoMappingContext} properties. + * + * @author Christoph Strobl + */ +public class MappingContextConfigurer { + + private @Nullable Set> intitalEntitySet; + boolean autocreateIndex = false; + + public void autocreateIndex(boolean autocreateIndex) { + this.autocreateIndex = autocreateIndex; + } + + public void initialEntitySet(Set> initialEntitySet) { + this.intitalEntitySet = initialEntitySet; + } + + public void initialEntitySet(Class... initialEntitySet) { + this.intitalEntitySet = Set.of(initialEntitySet); + } + + Set> initialEntitySet() { + return intitalEntitySet != null ? intitalEntitySet : Collections.emptySet(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoClientClosingTestConfiguration.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoClientClosingTestConfiguration.java new file mode 100644 index 0000000000..c5c23162d1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoClientClosingTestConfiguration.java @@ -0,0 +1,42 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import javax.annotation.PreDestroy; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.test.util.ReflectionTestUtils; + +/** + * @author Christoph Strobl + */ +public abstract class MongoClientClosingTestConfiguration extends AbstractMongoClientConfiguration { + + @Autowired(required = false) MongoDatabaseFactory dbFactory; + + @PreDestroy + public void destroy() { + + if (dbFactory != null) { + Object mongo = ReflectionTestUtils.getField(dbFactory, "mongoClient"); + if (mongo != null) { + ReflectionTestUtils.invokeMethod(mongo, "close"); + } + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoClientExtension.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoClientExtension.java new file mode 100644 index 0000000000..357a87168e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoClientExtension.java @@ -0,0 +1,196 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import static org.junit.platform.commons.util.AnnotationUtils.*; +import static org.junit.platform.commons.util.ReflectionUtils.*; + +import java.lang.reflect.Field; +import java.util.function.Predicate; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.Extension; +import org.junit.jupiter.api.extension.ExtensionConfigurationException; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ExtensionContext.Namespace; +import org.junit.jupiter.api.extension.ExtensionContext.Store; +import org.junit.jupiter.api.extension.ParameterContext; +import org.junit.jupiter.api.extension.ParameterResolutionException; +import org.junit.jupiter.api.extension.ParameterResolver; +import org.junit.platform.commons.util.ExceptionUtils; +import org.junit.platform.commons.util.ReflectionUtils; + +import org.springframework.util.ClassUtils; + +import com.mongodb.client.MongoClient; + +/** + * JUnit {@link Extension} providing parameter resolution for synchronous and reactive MongoDB client instances. 
+ * + * @author Christoph Strobl + * @see Client + * @see ReplSetClient + */ +public class MongoClientExtension implements Extension, BeforeAllCallback, AfterAllCallback, ParameterResolver { + + private static final Log LOGGER = LogFactory.getLog(MongoClientExtension.class); + + private static final Namespace NAMESPACE = MongoExtensions.Client.NAMESPACE; + + private static final String SYNC_KEY = MongoExtensions.Client.SYNC_KEY; + private static final String REACTIVE_KEY = MongoExtensions.Client.REACTIVE_KEY; + private static final String SYNC_REPLSET_KEY = MongoExtensions.Client.SYNC_REPLSET_KEY; + private static final String REACTIVE_REPLSET_KEY = MongoExtensions.Client.REACTIVE_REPLSET_KEY; + + @Override + public void afterAll(ExtensionContext extensionContext) throws Exception { + + } + + @Override + public void beforeAll(ExtensionContext context) throws Exception { + injectFields(context, null, ReflectionUtils::isStatic); + } + + private void injectFields(ExtensionContext context, Object testInstance, Predicate predicate) { + + findAnnotatedFields(context.getRequiredTestClass(), Client.class, predicate).forEach(field -> { + assertValidFieldCandidate(field); + try { + makeAccessible(field).set(testInstance, getMongoClient(field.getType(), context, false)); + } catch (Throwable t) { + ExceptionUtils.throwAsUncheckedException(t); + } + }); + + findAnnotatedFields(context.getRequiredTestClass(), ReplSetClient.class, predicate).forEach(field -> { + assertValidFieldCandidate(field); + try { + makeAccessible(field).set(testInstance, getMongoClient(field.getType(), context, true)); + } catch (Throwable t) { + ExceptionUtils.throwAsUncheckedException(t); + } + }); + } + + protected Object getMongoClient(Class type, ExtensionContext extensionContext, boolean replSet) { + + Store store = extensionContext.getStore(NAMESPACE); + + if (ClassUtils.isAssignable(com.mongodb.client.MongoClient.class, type)) { + + LOGGER.debug("Obtaining sync client from store."); + return 
store.getOrComputeIfAbsent(replSet ? SYNC_REPLSET_KEY : SYNC_KEY, it -> syncClient(replSet), + SyncClientHolder.class).client; + } + + if (ClassUtils.isAssignable(com.mongodb.reactivestreams.client.MongoClient.class, type)) { + + LOGGER.debug("Obtaining reactive client from store."); + return store.getOrComputeIfAbsent(replSet ? REACTIVE_REPLSET_KEY : REACTIVE_KEY, key -> reactiveClient(replSet), + ReactiveClientHolder.class).client; + } + + throw new IllegalStateException("Damn - something went wrong."); + } + + private ReactiveClientHolder reactiveClient(boolean replSet) { + + LOGGER.debug(String.format("Creating new reactive %sclient.", replSet ? "replica set " : "")); + return new ReactiveClientHolder(replSet ? MongoTestUtils.reactiveReplSetClient() : MongoTestUtils.reactiveClient()); + } + + private SyncClientHolder syncClient(boolean replSet) { + + LOGGER.debug(String.format("Creating new sync %sclient.", replSet ? "replica set " : "")); + return new SyncClientHolder(replSet ? MongoTestUtils.replSetClient() : MongoTestUtils.client()); + } + + boolean holdsReplSetClient(ExtensionContext context) { + + Store store = context.getStore(NAMESPACE); + return store.get(SYNC_REPLSET_KEY) != null || store.get(REACTIVE_REPLSET_KEY) != null; + } + + private void assertValidFieldCandidate(Field field) { + + assertSupportedType("field", field.getType()); + } + + private void assertSupportedType(String target, Class type) { + + if (type != com.mongodb.client.MongoClient.class && type != com.mongodb.reactivestreams.client.MongoClient.class) { + throw new ExtensionConfigurationException(String.format( + "Can only resolve @MongoClient %s of type %s or %s but was: %s", target, MongoClient.class.getName(), + com.mongodb.reactivestreams.client.MongoClient.class.getName(), type.getName())); + } + } + + @Override + public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return 
parameterContext.isAnnotated(Client.class) || parameterContext.isAnnotated(ReplSetClient.class); + } + + @Override + public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + + Class parameterType = parameterContext.getParameter().getType(); + boolean replSet = parameterContext.getParameter().getAnnotation(ReplSetClient.class) != null; + return getMongoClient(parameterType, extensionContext, replSet); + } + + static class SyncClientHolder implements Store.CloseableResource { + + final MongoClient client; + + SyncClientHolder(MongoClient client) { + this.client = client; + } + + @Override + public void close() { + try { + client.close(); + } catch (RuntimeException e) { + // so what? + } + } + } + + static class ReactiveClientHolder implements Store.CloseableResource { + + final com.mongodb.reactivestreams.client.MongoClient client; + + ReactiveClientHolder(com.mongodb.reactivestreams.client.MongoClient client) { + this.client = client; + } + + @Override + public void close() { + + try { + client.close(); + } catch (RuntimeException e) { + // so what? + } + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java new file mode 100644 index 0000000000..44b7ae3e45 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java @@ -0,0 +1,40 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.util.Arrays; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; + +/** + * Utility to configure {@link MongoCustomConversions}. + * + * @author Christoph Strobl + */ +public class MongoConverterConfigurer { + + CustomConversions customConversions; + + public void customConversions(CustomConversions customConversions) { + this.customConversions = customConversions; + } + + public void customConverters(Converter... converters) { + customConversions(new MongoCustomConversions(Arrays.asList(converters))); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoExtensions.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoExtensions.java new file mode 100644 index 0000000000..c90f7e999b --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoExtensions.java @@ -0,0 +1,41 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import org.junit.jupiter.api.extension.ExtensionContext.Namespace; + +/** + * @author Christoph Strobl + */ +class MongoExtensions { + + static class Client { + + static final Namespace NAMESPACE = Namespace.create(MongoClientExtension.class); + static final String SYNC_KEY = "mongo.client.sync"; + static final String REACTIVE_KEY = "mongo.client.reactive"; + static final String SYNC_REPLSET_KEY = "mongo.client.replset.sync"; + static final String REACTIVE_REPLSET_KEY = "mongo.client.replset.reactive"; + } + + static class Termplate { + + static final Namespace NAMESPACE = Namespace.create(MongoTemplateExtension.class); + static final String SYNC = "mongo.template.sync"; + static final String REACTIVE = "mongo.template.reactive"; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoServerCondition.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoServerCondition.java new file mode 100644 index 0000000000..d811e0a1ef --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoServerCondition.java @@ -0,0 +1,97 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import org.junit.jupiter.api.extension.ConditionEvaluationResult; +import org.junit.jupiter.api.extension.ExecutionCondition; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ExtensionContext.Namespace; +import org.springframework.core.annotation.AnnotatedElementUtils; +import org.springframework.data.util.Version; + +/** + * @author Christoph Strobl + */ +public class MongoServerCondition implements ExecutionCondition { + + private static final Namespace NAMESPACE = Namespace.create("mongodb", "server"); + + private static final Version ANY = new Version(9999, 9999, 9999); + private static final Version DEFAULT_HIGH = ANY; + private static final Version DEFAULT_LOW = new Version(0, 0, 0); + + @Override + public ConditionEvaluationResult evaluateExecutionCondition(ExtensionContext context) { + + if (context.getTags().contains("replSet")) { + if (!serverIsPartOfReplicaSet(context)) { + return ConditionEvaluationResult.disabled("Disabled for servers not running in replicaSet mode."); + } + } + + if(context.getTags().contains("vector-search")) { + if(!atlasEnvironment(context)) { + return ConditionEvaluationResult.disabled("Disabled for servers not supporting Vector Search."); + } + } + + if (context.getTags().contains("version-specific") && context.getElement().isPresent()) { + + EnableIfMongoServerVersion version = AnnotatedElementUtils.findMergedAnnotation(context.getElement().get(), + EnableIfMongoServerVersion.class); + + Version 
serverVersion = serverVersion(context); + + if (version != null && !serverVersion.equals(ANY)) { + + Version expectedMinVersion = Version.parse(version.isGreaterThanEqual()); + if (!expectedMinVersion.equals(ANY) && !expectedMinVersion.equals(DEFAULT_LOW)) { + if (serverVersion.isLessThan(expectedMinVersion)) { + return ConditionEvaluationResult.disabled(String + .format("Disabled for server version %s; Requires at least %s.", serverVersion, expectedMinVersion)); + } + } + + Version expectedMaxVersion = Version.parse(version.isLessThan()); + if (!expectedMaxVersion.equals(ANY) && !expectedMaxVersion.equals(DEFAULT_HIGH)) { + if (serverVersion.isGreaterThanOrEqualTo(expectedMaxVersion)) { + return ConditionEvaluationResult.disabled(String + .format("Disabled for server version %s; Only supported until %s.", serverVersion, expectedMaxVersion)); + } + } + } + } + + return ConditionEvaluationResult.enabled("Enabled by default"); + } + + private boolean serverIsPartOfReplicaSet(ExtensionContext context) { + + return context.getStore(NAMESPACE).getOrComputeIfAbsent("--replSet", (key) -> MongoTestUtils.serverIsReplSet(), + Boolean.class); + } + + private Version serverVersion(ExtensionContext context) { + + return context.getStore(NAMESPACE).getOrComputeIfAbsent(Version.class, (key) -> MongoTestUtils.serverVersion(), + Version.class); + } + + private boolean atlasEnvironment(ExtensionContext context) { + return context.getStore(NAMESPACE).getOrComputeIfAbsent(Version.class, (key) -> MongoTestUtils.isVectorSearchEnabled(), + Boolean.class); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTemplateExtension.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTemplateExtension.java new file mode 100644 index 0000000000..301d1ef499 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTemplateExtension.java @@ -0,0 +1,152 @@ +/* + * 
Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.reflect.Field; +import java.util.List; +import java.util.function.Predicate; + +import org.junit.jupiter.api.extension.Extension; +import org.junit.jupiter.api.extension.ExtensionConfigurationException; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ExtensionContext.Store; +import org.junit.jupiter.api.extension.ParameterContext; +import org.junit.jupiter.api.extension.ParameterResolutionException; +import org.junit.jupiter.api.extension.TestInstancePostProcessor; +import org.junit.platform.commons.util.AnnotationUtils; +import org.junit.platform.commons.util.ExceptionUtils; +import org.junit.platform.commons.util.ReflectionUtils; +import org.junit.platform.commons.util.StringUtils; + +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.ReactiveMongoOperations; +import org.springframework.data.mongodb.test.util.MongoExtensions.Termplate; +import org.springframework.data.util.ParsingUtils; +import org.springframework.util.ClassUtils; + +/** + * JUnit {@link Extension} providing parameter resolution for synchronous and reactive MongoDB Template API objects. 
+ * + * @author Christoph Strobl + * @see Template + * @see MongoTestTemplate + * @see ReactiveMongoTestTemplate + */ +public class MongoTemplateExtension extends MongoClientExtension implements TestInstancePostProcessor { + + private static final String DEFAULT_DATABASE = "database"; + + @Override + public void beforeAll(ExtensionContext context) throws Exception { + + super.beforeAll(context); + + injectFields(context, null, ReflectionUtils::isStatic); + } + + @Override + public void postProcessTestInstance(Object testInstance, ExtensionContext context) throws Exception { + injectFields(context, testInstance, it -> true); + } + + @Override + public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return super.supportsParameter(parameterContext, extensionContext) || parameterContext.isAnnotated(Template.class); + } + + @Override + public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + + if (parameterContext.getParameter().getAnnotation(Template.class) == null) { + return super.resolveParameter(parameterContext, extensionContext); + } + + Class parameterType = parameterContext.getParameter().getType(); + return getMongoTemplate(parameterType, parameterContext.getParameter().getAnnotation(Template.class), + extensionContext); + } + + private void injectFields(ExtensionContext context, Object testInstance, Predicate predicate) { + + AnnotationUtils.findAnnotatedFields(context.getRequiredTestClass(), Template.class, predicate).forEach(field -> { + + assertValidFieldCandidate(field); + + try { + + ReflectionUtils.makeAccessible(field).set(testInstance, + getMongoTemplate(field.getType(), field.getAnnotation(Template.class), context)); + } catch (Throwable t) { + ExceptionUtils.throwAsUncheckedException(t); + } + }); + } + + private void assertValidFieldCandidate(Field field) { + + 
assertSupportedType("field", field.getType()); + } + + private void assertSupportedType(String target, Class type) { + + if (!ClassUtils.isAssignable(MongoOperations.class, type) + && !ClassUtils.isAssignable(ReactiveMongoOperations.class, type)) { + throw new ExtensionConfigurationException( + String.format("Can only resolve @%s %s of type %s or %s but was: %s", Template.class.getSimpleName(), target, + MongoOperations.class.getName(), ReactiveMongoOperations.class.getName(), type.getName())); + } + } + + private Object getMongoTemplate(Class type, Template options, ExtensionContext extensionContext) { + + Store templateStore = extensionContext.getStore(MongoExtensions.Termplate.NAMESPACE); + + boolean replSetClient = holdsReplSetClient(extensionContext) || options.replicaSet(); + + String dbName = StringUtils.isNotBlank(options.database()) ? options.database() + : extensionContext.getTestClass().map(it -> { + List target = ParsingUtils.splitCamelCaseToLower(it.getSimpleName()); + return org.springframework.util.StringUtils.collectionToDelimitedString(target, "-"); + }).orElse(DEFAULT_DATABASE); + + if (ClassUtils.isAssignable(MongoOperations.class, type)) { + + String key = Termplate.SYNC + "-" + dbName; + return templateStore.getOrComputeIfAbsent(key, it -> { + + com.mongodb.client.MongoClient client = (com.mongodb.client.MongoClient) getMongoClient( + com.mongodb.client.MongoClient.class, extensionContext, replSetClient); + return new MongoTestTemplate(client, dbName, options.initialEntitySet()); + }); + } + + if (ClassUtils.isAssignable(ReactiveMongoOperations.class, type)) { + + String key = Termplate.REACTIVE + "-" + dbName; + return templateStore.getOrComputeIfAbsent(key, it -> { + + com.mongodb.reactivestreams.client.MongoClient client = (com.mongodb.reactivestreams.client.MongoClient) getMongoClient( + com.mongodb.reactivestreams.client.MongoClient.class, extensionContext, replSetClient); + return new ReactiveMongoTestTemplate(client, dbName, 
options.initialEntitySet()); + }); + } + + throw new IllegalStateException("Damn - something went wrong."); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java new file mode 100644 index 0000000000..b65d6278fe --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java @@ -0,0 +1,91 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import java.util.Collections; +import java.util.Optional; +import java.util.function.Consumer; +import java.util.function.Function; + +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.MethodInvocationRecorder; + +/** + * @author Christoph Strobl + */ +public class MongoTestMappingContext extends MongoMappingContext { + + private MappingContextConfigurer contextConfigurer; + private MongoConverterConfigurer converterConfigurer; + + public static MongoTestMappingContext newTestContext() { + return new MongoTestMappingContext(conig -> {}).init(); + } + + public MongoTestMappingContext(MappingContextConfigurer contextConfig) { + + this.contextConfigurer = contextConfig; + this.converterConfigurer = new MongoConverterConfigurer(); + } + + public MongoTestMappingContext(Consumer contextConfig) { + + this(new MappingContextConfigurer()); + contextConfig.accept(contextConfigurer); + } + + public MongoPersistentProperty getPersistentPropertyFor(Class type, Function property) { + + MongoPersistentEntity persistentEntity = getRequiredPersistentEntity(type); + return persistentEntity.getPersistentProperty(MethodInvocationRecorder.forProxyOf(type).record(property).getPropertyPath().get()); + } + + public MongoTestMappingContext customConversions(MongoConverterConfigurer converterConfig) { + + this.converterConfigurer = converterConfig; + return this; + } + + public MongoTestMappingContext customConversions(Consumer converterConfig) { + + converterConfig.accept(converterConfigurer); + return this; + } + + public MongoTestMappingContext init() { + + setInitialEntitySet(contextConfigurer.initialEntitySet()); + 
setAutoIndexCreation(contextConfigurer.autocreateIndex); + if (converterConfigurer.customConversions != null) { + setSimpleTypeHolder(converterConfigurer.customConversions.getSimpleTypeHolder()); + } else { + setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); + } + + super.afterPropertiesSet(); + return this; + } + + @Override + public void afterPropertiesSet() { + init(); + } + + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java new file mode 100644 index 0000000000..40948a0e22 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java @@ -0,0 +1,179 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.function.Consumer; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.springframework.context.ApplicationContext; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.mapping.context.PersistentEntities; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; +import org.testcontainers.shaded.org.awaitility.Awaitility; + +import com.mongodb.MongoWriteException; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; + +/** + * A {@link MongoTemplate} with configuration hooks and extension suitable for tests. + * + * @author Christoph Strobl + * @since 3.0 + */ +public class MongoTestTemplate extends MongoTemplate { + + private final MongoTestTemplateConfiguration cfg; + + public MongoTestTemplate(MongoClient client, String database, Class... 
initialEntities) { + this(cfg -> { + cfg.configureDatabaseFactory(it -> { + + it.client(client); + it.defaultDb(database); + }); + + cfg.configureMappingContext(it -> { + + it.autocreateIndex(false); + it.initialEntitySet(initialEntities); + }); + }); + } + + public MongoTestTemplate(Consumer cfg) { + + this(() -> { + + MongoTestTemplateConfiguration config = new MongoTestTemplateConfiguration(); + cfg.accept(config); + return config; + }); + } + + public MongoTestTemplate(Supplier config) { + this(config.get()); + } + + public MongoTestTemplate(MongoTestTemplateConfiguration config) { + super(config.databaseFactory(), config.mongoConverter()); + + ApplicationContext applicationContext = config.getApplicationContext(); + EntityCallbacks callbacks = config.getEntityCallbacks(); + if (callbacks != null) { + setEntityCallbacks(callbacks); + } + if (applicationContext != null) { + setApplicationContext(applicationContext); + } + + this.cfg = config; + } + + public void flush() { + flush(PersistentEntities.of(getConverter().getMappingContext()).stream().map(it -> getCollectionName(it.getType())) + .collect(Collectors.toList())); + } + + public void flushDatabase() { + flush(MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(getDb()).listCollectionNames()); + } + + public void flush(Iterable collections) { + + for (String collection : collections) { + MongoCollection mongoCollection = getCollection(collection); + try { + mongoCollection.deleteMany(new Document()); + } catch (MongoWriteException e) { + mongoCollection.drop(); + } + } + } + + public void flush(Class... entities) { + flush(Arrays.stream(entities).map(this::getCollectionName).collect(Collectors.toList())); + } + + public void flush(String... collections) { + flush(Arrays.asList(collections)); + } + + public void flush(Object... 
objects) { + + flush(Arrays.stream(objects).map(it -> { + + if (it instanceof String) { + return (String) it; + } + if (it instanceof Class) { + return getCollectionName((Class) it); + } + return it.toString(); + }).collect(Collectors.toList())); + } + + public void dropDatabase() { + getDb().drop(); + } + + public void dropIndexes(String... collections) { + for (String collection : collections) { + getCollection(collection).dropIndexes(); + } + } + + public void dropIndexes(Class... entities) { + for (Class entity : entities) { + getCollection(getCollectionName(entity)).dropIndexes(); + } + } + + public void doInCollection(Class entityClass, Consumer> callback) { + execute(entityClass, (collection -> { + callback.accept(collection); + return null; + })); + } + + public void awaitIndexCreation(Class type, String indexName) { + awaitIndexCreation(getCollectionName(type), indexName, Duration.ofSeconds(10)); + } + + public void awaitIndexCreation(String collectionName, String indexName, Duration timeout) { + + Awaitility.await().atMost(timeout).pollInterval(Duration.ofMillis(200)).until(() -> { + + List execute = this.execute(collectionName, + coll -> coll + .aggregate(List.of(Document.parse("{'$listSearchIndexes': { 'name' : '%s'}}".formatted(indexName)))) + .into(new ArrayList<>())); + for (Document doc : execute) { + if (doc.getString("name").equals(indexName)) { + return doc.getString("status").equals("READY"); + } + } + return false; + }); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java new file mode 100644 index 0000000000..09149c02ef --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java @@ -0,0 +1,269 @@ +/* + * Copyright 2020-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Consumer; +import java.util.function.Function; + +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.ObjectFactory; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationListener; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.data.auditing.IsNewAwareAuditingHandler; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; +import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback; 
+import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; +import org.springframework.lang.Nullable; + +/** + * @author Christoph Strobl + * @since 3.0 + */ +public class MongoTestTemplateConfiguration { + + private final DatabaseFactoryConfigurer dbFactoryConfig = new DatabaseFactoryConfigurer(); + private final MappingContextConfigurer mappingContextConfigurer = new MappingContextConfigurer(); + private final MongoConverterConfigurer mongoConverterConfigurer = new MongoConverterConfigurer(); + private final AuditingConfigurer auditingConfigurer = new AuditingConfigurer(); + private final ApplicationContextConfigurer applicationContextConfigurer = new ApplicationContextConfigurer(); + + private MongoMappingContext mappingContext; + private MappingMongoConverter converter; + private ApplicationContext context; + + private com.mongodb.client.MongoClient syncClient; + private com.mongodb.reactivestreams.client.MongoClient reactiveClient; + private MongoDatabaseFactory syncFactory; + private SimpleReactiveMongoDatabaseFactory reactiveFactory; + + MongoConverter mongoConverter() { + + if (converter == null) { + + if (dbFactoryConfig.syncClient != null || syncClient != null) { + converter = new MappingMongoConverter(new DefaultDbRefResolver(databaseFactory()), mappingContext()); + } else { + converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext()); + } + + if (mongoConverterConfigurer.customConversions != null) { + converter.setCustomConversions(mongoConverterConfigurer.customConversions); + } + if (auditingConfigurer.hasAuditingHandler()) { + converter.setEntityCallbacks(getEntityCallbacks()); + } + converter.afterPropertiesSet(); + } + + return converter; + } + + EntityCallbacks getEntityCallbacks() { + + EntityCallbacks callbacks = null; + if (getApplicationContext() != null) { + callbacks = EntityCallbacks.create(getApplicationContext()); + } + if (!auditingConfigurer.hasAuditingHandler()) { + return callbacks; + } + 
if (callbacks == null) { + callbacks = EntityCallbacks.create(); + } + + callbacks.addEntityCallback(new AuditingEntityCallback(new ObjectFactory() { + @Override + public IsNewAwareAuditingHandler getObject() throws BeansException { + return auditingConfigurer.auditingHandlerFunction.apply(converter.getMappingContext()); + } + })); + return callbacks; + + } + + List> getApplicationEventListener() { + return new ArrayList<>(applicationContextConfigurer.listeners); + } + + @Nullable + ApplicationContext getApplicationContext() { + + if (applicationContextConfigurer.applicationContext == null) { + return null; + } + + if (context != null) { + return context; + } + + context = applicationContextConfigurer.applicationContext; + + if (context instanceof ConfigurableApplicationContext) { + + ConfigurableApplicationContext configurableApplicationContext = (ConfigurableApplicationContext) this.context; + getApplicationEventListener().forEach(configurableApplicationContext::addApplicationListener); + + configurableApplicationContext.refresh(); + } + return context; + } + + MongoMappingContext mappingContext() { + + if (mappingContext == null) { + mappingContext = new MongoTestMappingContext(mappingContextConfigurer).customConversions(mongoConverterConfigurer) + .init(); + } + + return mappingContext; + } + + MongoDatabaseFactory databaseFactory() { + + if (syncFactory == null) { + syncFactory = new SimpleMongoClientDatabaseFactory(syncClient(), defaultDatabase()); + } + + return syncFactory; + } + + ReactiveMongoDatabaseFactory reactiveDatabaseFactory() { + + if (reactiveFactory == null) { + reactiveFactory = new SimpleReactiveMongoDatabaseFactory(reactiveClient(), defaultDatabase()); + } + + return reactiveFactory; + } + + public MongoTestTemplateConfiguration configureDatabaseFactory(Consumer dbFactory) { + + dbFactory.accept(dbFactoryConfig); + return this; + } + + public MongoTestTemplateConfiguration configureMappingContext( + Consumer mappingContextConfigurerConsumer) 
{ + mappingContextConfigurerConsumer.accept(mappingContextConfigurer); + return this; + } + + public MongoTestTemplateConfiguration configureApplicationContext( + Consumer applicationContextConfigurerConsumer) { + + applicationContextConfigurerConsumer.accept(applicationContextConfigurer); + return this; + } + + public MongoTestTemplateConfiguration configureAuditing(Consumer auditingConfigurerConsumer) { + + auditingConfigurerConsumer.accept(auditingConfigurer); + return this; + } + + public MongoTestTemplateConfiguration configureConversion( + Consumer mongoConverterConfigurerConsumer) { + + mongoConverterConfigurerConsumer.accept(mongoConverterConfigurer); + return this; + } + + com.mongodb.client.MongoClient syncClient() { + + if (syncClient == null) { + syncClient = dbFactoryConfig.syncClient != null ? dbFactoryConfig.syncClient : MongoTestUtils.client(); + } + + return syncClient; + } + + com.mongodb.reactivestreams.client.MongoClient reactiveClient() { + + if (reactiveClient == null) { + reactiveClient = dbFactoryConfig.reactiveClient != null ? dbFactoryConfig.reactiveClient + : MongoTestUtils.reactiveClient(); + } + + return reactiveClient; + } + + String defaultDatabase() { + return dbFactoryConfig.defaultDatabase != null ? 
dbFactoryConfig.defaultDatabase : "test"; + } + + public static class DatabaseFactoryConfigurer { + + com.mongodb.client.MongoClient syncClient; + com.mongodb.reactivestreams.client.MongoClient reactiveClient; + String defaultDatabase; + + public void client(com.mongodb.client.MongoClient client) { + this.syncClient = client; + } + + public void client(com.mongodb.reactivestreams.client.MongoClient client) { + this.reactiveClient = client; + } + + public void defaultDb(String defaultDatabase) { + this.defaultDatabase = defaultDatabase; + } + } + + public static class AuditingConfigurer { + + Function auditingHandlerFunction; + + public void auditingHandler(Function auditingHandlerFunction) { + this.auditingHandlerFunction = auditingHandlerFunction; + } + + IsNewAwareAuditingHandler auditingHandlers(MongoMappingContext mongoMappingContext) { + return auditingHandlerFunction.apply(mongoMappingContext); + } + + boolean hasAuditingHandler() { + return auditingHandlerFunction != null; + } + } + + public static class ApplicationContextConfigurer { + + List>> listeners = new ArrayList<>(); + ApplicationContext applicationContext; + + public void applicationContext(ApplicationContext context) { + this.applicationContext = context; + } + + public void addEventListener(ApplicationListener> listener) { + this.listeners.add(listener); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java new file mode 100644 index 0000000000..f88caf80dd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java @@ -0,0 +1,308 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; +import reactor.util.retry.Retry; + +import java.time.Duration; +import java.util.List; + +import org.bson.Document; +import org.springframework.core.env.Environment; +import org.springframework.core.env.StandardEnvironment; +import org.springframework.data.mongodb.SpringDataMongoDB; +import org.springframework.data.util.Version; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ConnectionString; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.reactivestreams.client.MongoClients; + +/** + * Utility to create (and reuse) imperative and reactive {@code MongoClient} instances. + * + * @author Christoph Strobl + * @author Mark Paluch + */ +public class MongoTestUtils { + + private static final Environment ENV = new StandardEnvironment(); + private static final Duration DEFAULT_TIMEOUT = Duration.ofMillis(10); + + public static final String CONNECTION_STRING = "mongodb://127.0.0.1:27017/?replicaSet=rs0&w=majority&uuidrepresentation=javaLegacy"; + + private static final String CONNECTION_STRING_PATTERN = "mongodb://%s:%s/?w=majority&uuidrepresentation=javaLegacy"; + + private static final Version ANY = new Version(9999, 9999, 9999); + + /** + * Create a new {@link com.mongodb.client.MongoClient} with defaults. 
+ * + * @return new instance of {@link com.mongodb.client.MongoClient}. + */ + public static MongoClient client() { + return client("127.0.0.1", 27017); + } + + public static MongoClient client(String host, int port) { + return client(new ConnectionString(String.format(CONNECTION_STRING_PATTERN, host, port))); + } + + public static MongoClient client(ConnectionString connectionString) { + return com.mongodb.client.MongoClients.create(connectionString, SpringDataMongoDB.driverInformation()); + } + + /** + * Create a new {@link com.mongodb.reactivestreams.client.MongoClient} with defaults. + * + * @return new instance of {@link com.mongodb.reactivestreams.client.MongoClient}. + */ + public static com.mongodb.reactivestreams.client.MongoClient reactiveClient() { + return reactiveClient("127.0.0.1", 27017); + } + + public static com.mongodb.reactivestreams.client.MongoClient reactiveClient(String host, int port) { + + ConnectionString connectionString = new ConnectionString(String.format(CONNECTION_STRING_PATTERN, host, port)); + return MongoClients.create(connectionString, SpringDataMongoDB.driverInformation()); + } + + /** + * Create a {@link com.mongodb.client.MongoCollection} if it does not exist, or drop and recreate it if it does. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param client must not be {@literal null}. 
+ */ + public static MongoCollection createOrReplaceCollection(String dbName, String collectionName, + com.mongodb.client.MongoClient client) { + + MongoDatabase database = client.getDatabase(dbName).withWriteConcern(WriteConcern.MAJORITY) + .withReadPreference(ReadPreference.primary()); + + boolean collectionExists = database.listCollections().filter(new Document("name", collectionName)).first() != null; + + if (collectionExists) { + + database.getCollection(collectionName).drop(); + giveTheServerALittleTimeToThink(); + } + + database.createCollection(collectionName); + giveTheServerALittleTimeToThink(); + + return database.getCollection(collectionName); + } + + /** + * Create a {@link com.mongodb.client.MongoCollection} if it does not exist, or drop and recreate it if it does. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param client must not be {@literal null}. + */ + public static Mono createOrReplaceCollection(String dbName, String collectionName, + com.mongodb.reactivestreams.client.MongoClient client) { + + com.mongodb.reactivestreams.client.MongoDatabase database = client.getDatabase(dbName) + .withWriteConcern(WriteConcern.MAJORITY).withReadPreference(ReadPreference.primary()); + + return Mono.from(database.getCollection(collectionName).drop()) // + .delayElement(getTimeout()) // server replication time + .then(Mono.from(database.createCollection(collectionName))) // + .delayElement(getTimeout()); // server replication time + } + + /** + * Create a {@link com.mongodb.client.MongoCollection} if it does not exist, or drop and recreate it if it does and + * verify operation result. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param client must not be {@literal null}. 
+ */ + public static void createOrReplaceCollectionNow(String dbName, String collectionName, + com.mongodb.reactivestreams.client.MongoClient client) { + + createOrReplaceCollection(dbName, collectionName, client) // + .as(StepVerifier::create) // + .verifyComplete(); + } + + /** + * Drop the {@link com.mongodb.reactivestreams.client.MongoCollection} with the given name and + * verify the operation result. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param client must not be {@literal null}. + */ + public static void dropCollectionNow(String dbName, String collectionName, + com.mongodb.reactivestreams.client.MongoClient client) { + + com.mongodb.reactivestreams.client.MongoDatabase database = client.getDatabase(dbName) + .withWriteConcern(WriteConcern.MAJORITY).withReadPreference(ReadPreference.primary()); + + Mono.from(database.getCollection(collectionName).drop()) // + .delayElement(getTimeout()).retryWhen(Retry.backoff(3, Duration.ofMillis(250))) // + .as(StepVerifier::create) // + .verifyComplete(); + } + + /** + * Drop the {@link com.mongodb.client.MongoCollection} with the given name in the according + * {@link com.mongodb.client.MongoDatabase database}. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param client must not be {@literal null}. + */ + public static void dropCollectionNow(String dbName, String collectionName, + com.mongodb.client.MongoClient client) { + + com.mongodb.client.MongoDatabase database = client.getDatabase(dbName) + .withWriteConcern(WriteConcern.MAJORITY).withReadPreference(ReadPreference.primary()); + + database.getCollection(collectionName).drop(); + } + + /** + * Remove all documents from the {@link MongoCollection} with given name in the according {@link MongoDatabase + * database}. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. 
+ * @param client must not be {@literal null}. + */ + public static void flushCollection(String dbName, String collectionName, + com.mongodb.reactivestreams.client.MongoClient client) { + + com.mongodb.reactivestreams.client.MongoDatabase database = client.getDatabase(dbName) + .withWriteConcern(WriteConcern.MAJORITY).withReadPreference(ReadPreference.primary()); + + Mono.from(database.getCollection(collectionName).deleteMany(new Document())) // + .delayElement(getTimeout()).then() // + .as(StepVerifier::create) // + .verifyComplete(); + } + + public static void flushCollection(String dbName, String collectionName, + com.mongodb.client.MongoClient client) { + + com.mongodb.client.MongoDatabase database = client.getDatabase(dbName) + .withWriteConcern(WriteConcern.MAJORITY).withReadPreference(ReadPreference.primary()); + + database.getCollection(collectionName).deleteMany(new Document()); + } + + /** + * Create a new {@link com.mongodb.client.MongoClient} with defaults suitable for replica set usage. + * + * @return new instance of {@link com.mongodb.client.MongoClient}. + */ + public static com.mongodb.client.MongoClient replSetClient() { + return com.mongodb.client.MongoClients.create(CONNECTION_STRING); + } + + /** + * Create a new {@link com.mongodb.reactivestreams.client.MongoClient} with defaults suitable for replica set usage. + * + * @return new instance of {@link com.mongodb.reactivestreams.client.MongoClient}. + */ + public static com.mongodb.reactivestreams.client.MongoClient reactiveReplSetClient() { + return MongoClients.create(CONNECTION_STRING); + } + + /** + * @return the server version extracted from buildInfo. 
+ * @since 3.0 + */ + public static Version serverVersion() { + + try (MongoClient client = client()) { + + MongoDatabase database = client.getDatabase("test"); + Document result = database.runCommand(new Document("buildInfo", 1)); + + return Version.parse(result.get("version", String.class)); + } catch (Exception e) { + return ANY; + } + } + + /** + * @return check if the server is running as part of a replica set. + * @since 3.0 + */ + public static boolean serverIsReplSet() { + + try (MongoClient client = MongoTestUtils.client()) { + + return client.getDatabase("admin").runCommand(new Document("getCmdLineOpts", "1")).get("argv", List.class) + .contains("--replSet"); + } catch (Exception e) { + return false; + } + } + + @SuppressWarnings("unchecked") + public static boolean isVectorSearchEnabled() { + try (MongoClient client = MongoTestUtils.client()) { + + return client.getDatabase("admin").runCommand(new Document("getCmdLineOpts", "1")).get("argv", List.class) + .stream().anyMatch(it -> { + if(it instanceof String cfgString) { + return cfgString.startsWith("searchIndexManagementHostAndPort"); + } + return false; + }); + } catch (Exception e) { + return false; + } + } + + public static Duration getTimeout() { + + return ObjectUtils.nullSafeEquals("jenkins", ENV.getProperty("user.name")) ? 
Duration.ofMillis(100) + : DEFAULT_TIMEOUT; + } + + private static void giveTheServerALittleTimeToThink() { + + try { + Thread.sleep(getTimeout().toMillis()); // server replication time + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + + public static CollectionInfo readCollectionInfo(MongoDatabase db, String collectionName) { + + List list = db.runCommand(new Document().append("listCollections", 1).append("filter", new Document("name", collectionName))) + .get("cursor", Document.class).get("firstBatch", List.class); + + if(list.isEmpty()) { + throw new IllegalStateException(String.format("Collection %s not found.", collectionName)); + } + return CollectionInfo.from(list.get(0)); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersion.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersion.java index 4b4ee87d7e..c632d0326e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersion.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersion.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,16 +21,21 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.springframework.core.annotation.AliasFor; + /** * {@link MongoVersion} allows specifying an version range of mongodb that is applicable for a specific test method. 
To - * be used along with {@link MongoVersionRule}. + * be used along with {@link MongoVersionRule} or {@link MongoServerCondition}. * * @author Christoph Strobl * @since 2.1 + * @deprecated Use {@link EnableIfMongoServerVersion} instead. */ @Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.METHOD) +@Target({ ElementType.TYPE, ElementType.METHOD }) @Documented +@EnableIfMongoServerVersion +@Deprecated public @interface MongoVersion { /** @@ -38,6 +43,7 @@ * * @return {@code 0.0.0} by default. */ + @AliasFor(annotation = EnableIfMongoServerVersion.class, attribute = "isGreaterThanEqual") String asOf() default "0.0.0"; /** @@ -45,5 +51,6 @@ * * @return {@code 9999.9999.9999} by default. */ + @AliasFor(annotation = EnableIfMongoServerVersion.class, attribute = "isLessThan") String until() default "9999.9999.9999"; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersionRule.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersionRule.java index cd124bc626..a680d33a32 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersionRule.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoVersionRule.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -25,31 +25,36 @@ import org.junit.runner.Description; import org.junit.runners.model.Statement; import org.springframework.data.util.Version; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.springframework.test.context.junit4.SpringRunner; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoDatabase; /** * {@link TestRule} verifying server tests are executed against match a given version. This one can be used as - * {@link ClassRule} eg. in context depending tests run with {@link SpringJUnit4ClassRunner} when the context would fail - * to start in case of invalid version, or as simple {@link Rule} on specific tests. + * {@link ClassRule} eg. in context depending tests run with {@link SpringRunner} when the context would fail to start + * in case of invalid version, or as simple {@link Rule} on specific tests. * * @author Christoph Strobl * @author Mark Paluch * @since 1.6 + * @deprecated Use {@link MongoServerCondition} instead. 
*/ +@Deprecated public class MongoVersionRule implements TestRule { private static final Version ANY = new Version(9999, 9999, 9999); private static final Version DEFAULT_HIGH = ANY; private static final Version DEFAULT_LOW = new Version(0, 0, 0); + public static MongoVersionRule REQUIRES_4_2 = MongoVersionRule + .atLeast(org.springframework.data.util.Version.parse("4.2")); + private final AtomicReference currentVersion = new AtomicReference<>(null); private final Version minVersion; private final Version maxVersion; - private String host = "localhost"; + private String host = "127.0.0.1"; private int port = 27017; public MongoVersionRule(Version min, Version max) { @@ -128,12 +133,19 @@ public void evaluate() throws Throwable { Version maxVersion = MongoVersionRule.this.maxVersion.equals(ANY) ? DEFAULT_HIGH : MongoVersionRule.this.maxVersion; - if (MongoVersionRule.this.minVersion.equals(ANY) && MongoVersionRule.this.maxVersion.equals(ANY)) { - + if (description.getAnnotation(MongoVersion.class) != null) { MongoVersion version = description.getAnnotation(MongoVersion.class); if (version != null) { - minVersion = Version.parse(version.asOf()); - maxVersion = Version.parse(version.until()); + + Version expectedMinVersion = Version.parse(version.asOf()); + if (!expectedMinVersion.equals(ANY) && !expectedMinVersion.equals(DEFAULT_LOW)) { + minVersion = expectedMinVersion; + } + + Version expectedMaxVersion = Version.parse(version.until()); + if (!expectedMaxVersion.equals(ANY) && !expectedMaxVersion.equals(DEFAULT_HIGH)) { + maxVersion = expectedMaxVersion; + } } } @@ -168,11 +180,9 @@ private Version fetchCurrentVersion() { try { - MongoClient client; - client = new MongoClient(host, port); + MongoClient client = MongoTestUtils.client(host, port); MongoDatabase database = client.getDatabase("test"); Document result = database.runCommand(new Document("buildInfo", 1)); - client.close(); return Version.parse(result.get("version", String.class)); } catch (Exception e) { 
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/PackageExcludingClassLoader.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/PackageExcludingClassLoader.java new file mode 100644 index 0000000000..caec182aad --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/PackageExcludingClassLoader.java @@ -0,0 +1,142 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.io.File; +import java.lang.management.ManagementFactory; +import java.lang.reflect.Method; +import java.net.URL; +import java.net.URLClassLoader; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumSet; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.function.BinaryOperator; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collector; +import java.util.stream.Stream; + +import org.springframework.core.annotation.AnnotatedElementUtils; +import org.springframework.util.ClassUtils; + +/** + * Simplified version of ModifiedClassPathClassLoader. 
+ * + * @author Christoph Strobl + */ +class PackageExcludingClassLoader extends URLClassLoader { + + private final Set excludedPackages; + private final ClassLoader junitLoader; + + PackageExcludingClassLoader(URL[] urls, ClassLoader parent, Collection excludedPackages, + ClassLoader junitClassLoader) { + + super(urls, parent); + this.excludedPackages = Set.copyOf(excludedPackages); + this.junitLoader = junitClassLoader; + } + + @Override + public Class loadClass(String name) throws ClassNotFoundException { + + if (name.startsWith("org.junit") || name.startsWith("org.hamcrest")) { + return Class.forName(name, false, this.junitLoader); + } + + String packageName = ClassUtils.getPackageName(name); + if (this.excludedPackages.contains(packageName)) { + throw new ClassNotFoundException(name); + } + return super.loadClass(name); + } + + static PackageExcludingClassLoader get(Class testClass, Method testMethod) { + + List excludedPackages = readExcludedPackages(testClass, testMethod); + + if (excludedPackages.isEmpty()) { + return null; + } + + ClassLoader testClassClassLoader = testClass.getClassLoader(); + Stream urls = null; + if (testClassClassLoader instanceof URLClassLoader urlClassLoader) { + urls = Stream.of(urlClassLoader.getURLs()); + } else { + urls = Stream.of(ManagementFactory.getRuntimeMXBean().getClassPath().split(File.pathSeparator)) + .map(PackageExcludingClassLoader::toURL); + } + + return new PackageExcludingClassLoader(urls.toArray(URL[]::new), testClassClassLoader.getParent(), excludedPackages, + testClassClassLoader); + } + + private static List readExcludedPackages(Class testClass, Method testMethod) { + + return Stream.of( // + AnnotatedElementUtils.findMergedAnnotation(testClass, ClassPathExclusions.class), + AnnotatedElementUtils.findMergedAnnotation(testMethod, ClassPathExclusions.class) // + ).filter(Objects::nonNull) // + .map(ClassPathExclusions::packages) // + .collect(new CombingArrayCollector()); + } + + private static URL toURL(String 
entry) { + try { + return new File(entry).toURI().toURL(); + } catch (Exception ex) { + throw new IllegalArgumentException(ex); + } + } + + private static class CombingArrayCollector implements Collector, List> { + + @Override + public Supplier> supplier() { + return ArrayList::new; + } + + @Override + public BiConsumer, T[]> accumulator() { + return (target, values) -> target.addAll(Arrays.asList(values)); + } + + @Override + public BinaryOperator> combiner() { + return (r1, r2) -> { + r1.addAll(r2); + return r1; + }; + } + + @Override + public Function, List> finisher() { + return i -> (List) i; + } + + @Override + public Set characteristics() { + return Collections.unmodifiableSet(EnumSet.of(Characteristics.IDENTITY_FINISH)); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoClientClosingTestConfiguration.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoClientClosingTestConfiguration.java new file mode 100644 index 0000000000..bc61f81625 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoClientClosingTestConfiguration.java @@ -0,0 +1,44 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import javax.annotation.PreDestroy; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.reactivestreams.client.MongoClient; + +/** + * @author Christoph Strobl + */ +public abstract class ReactiveMongoClientClosingTestConfiguration extends AbstractReactiveMongoConfiguration { + + @Autowired(required = false) ReactiveMongoDatabaseFactory dbFactory; + + @PreDestroy + public void destroy() { + + if (dbFactory != null) { + Object mongo = ReflectionTestUtils.getField(dbFactory, "mongo"); + if (mongo != null) { + ((MongoClient) mongo).close(); + } + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java new file mode 100644 index 0000000000..9955daa98e --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java @@ -0,0 +1,147 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.Arrays; +import java.util.function.Consumer; +import java.util.function.Supplier; + +import org.bson.Document; +import org.reactivestreams.Publisher; + +import org.springframework.context.ApplicationContext; +import org.springframework.data.mapping.context.PersistentEntities; +import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; + +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * A {@link ReactiveMongoTemplate} with configuration hooks and extension suitable for tests. + * + * @author Christoph Strobl + * @author Mathieu Ouellet + * @since 3.0 + */ +public class ReactiveMongoTestTemplate extends ReactiveMongoTemplate { + + private final MongoTestTemplateConfiguration cfg; + + public ReactiveMongoTestTemplate(MongoClient client, String database, Class... 
initialEntities) { + this(cfg -> { + cfg.configureDatabaseFactory(it -> { + + it.client(client); + it.defaultDb(database); + }); + + cfg.configureMappingContext(it -> { + + it.autocreateIndex(false); + it.initialEntitySet(initialEntities); + }); + }); + } + + public ReactiveMongoTestTemplate(Consumer cfg) { + + this(new Supplier() { + @Override + public MongoTestTemplateConfiguration get() { + + MongoTestTemplateConfiguration config = new MongoTestTemplateConfiguration(); + cfg.accept(config); + return config; + } + }); + } + + public ReactiveMongoTestTemplate(Supplier config) { + this(config.get()); + } + + public ReactiveMongoTestTemplate(MongoTestTemplateConfiguration config) { + super(config.reactiveDatabaseFactory(), config.mongoConverter()); + + ApplicationContext applicationContext = config.getApplicationContext(); + if (applicationContext != null) { + setApplicationContext(applicationContext); + } + + this.cfg = config; + } + + public ReactiveMongoDatabaseFactory getDatabaseFactory() { + return cfg.reactiveDatabaseFactory(); + } + + public Mono flush() { + return flush(Flux.fromStream( + PersistentEntities.of(getConverter().getMappingContext()).stream().map(it -> getCollectionName(it.getType())))); + } + + public Mono flushDatabase() { + return flush(getMongoDatabase().flatMapMany(MongoDatabase::listCollectionNames)); + } + + public Mono flush(Class... entities) { + return flush(Flux.fromStream(Arrays.asList(entities).stream().map(this::getCollectionName))); + } + + public Mono flush(String... collections) { + return flush(Flux.fromArray(collections)); + } + + public Mono flush(Publisher collectionNames) { + + return Flux.from(collectionNames) + .flatMap(collection -> getCollection(collection).flatMapMany(it -> it.deleteMany(new Document())).then() + .onErrorResume(it -> getCollection(collection).flatMapMany(MongoCollection::drop).then())) + .then(); + } + + public Mono flush(Object... 
objects) { + + return flush(Flux.fromStream(Arrays.asList(objects).stream().map(it -> { + + if (it instanceof String) { + return (String) it; + } + if (it instanceof Class) { + return getCollectionName((Class) it); + } + return it.toString(); + }))); + } + + public Mono dropDatabase() { + return getMongoDatabase().map(MongoDatabase::drop).then(); + } + + public Mono dropIndexes(String... collections) { + return Flux.fromArray(collections).flatMap(it -> getCollection(it).map(MongoCollection::dropIndexes).then()).then(); + } + + public Mono dropIndexes(Class... entities) { + return Flux.fromArray(entities) + .flatMap(it -> getCollection(getCollectionName(it)).map(MongoCollection::dropIndexes).then()).then(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReplSetClient.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReplSetClient.java new file mode 100644 index 0000000000..8342c5b5ee --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReplSetClient.java @@ -0,0 +1,39 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Marks a field or method as to be autowired by JUnit's dependency injection facilities for injection of a MongoDB + * client instance connected to a replica set. Depends on {@link MongoClientExtension}. + * + * @author Christoph Strobl + * @see com.mongodb.client.MongoClient + * @see com.mongodb.reactivestreams.client.MongoClient + * @see Client + * @see MongoClientExtension + */ +@Target({ ElementType.FIELD, ElementType.PARAMETER }) +@Retention(RetentionPolicy.RUNTIME) +@Documented +public @interface ReplSetClient { + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReplicaSet.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReplicaSet.java index 88f69cdfde..d2b770b818 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReplicaSet.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReplicaSet.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -25,13 +25,15 @@ import org.junit.runners.model.Statement; import org.springframework.test.annotation.IfProfileValue; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; /** * {@link TestRule} evaluating if MongoDB Server is running with {@code --replSet} flag. * * @author Christoph Strobl + * @deprecated Use {@link MongoServerCondition} with {@link EnableIfReplicaSetAvailable} instead. */ +@Deprecated public class ReplicaSet implements TestRule { boolean required = false; @@ -83,7 +85,7 @@ public void evaluate() throws Throwable { } if (!runsAsReplicaSet()) { - throw new AssumptionViolatedException("Not runnig in repl set mode"); + throw new AssumptionViolatedException("Not running in repl set mode"); } base.evaluate(); } @@ -94,12 +96,11 @@ public boolean runsAsReplicaSet() { if (runsAsReplicaSet.get() == null) { - try (MongoClient client = new MongoClient()) { + MongoClient client = MongoTestUtils.client(); - boolean tmp = client.getDatabase("admin").runCommand(new Document("getCmdLineOpts", "1")) - .get("argv", List.class).contains("--replSet"); - runsAsReplicaSet.compareAndSet(null, tmp); - } + boolean tmp = client.getDatabase("admin").runCommand(new Document("getCmdLineOpts", "1")).get("argv", List.class) + .contains("--replSet"); + runsAsReplicaSet.compareAndSet(null, tmp); } return runsAsReplicaSet.get(); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Template.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Template.java new file mode 100644 index 0000000000..a50497f335 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/Template.java 
@@ -0,0 +1,61 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.ExtensionContext; + +/** + * Annotation to inject {@link org.springframework.data.mongodb.core.MongoOperations} and + * {@link org.springframework.data.mongodb.core.ReactiveMongoOperations} parameters as method arguments and into + * {@code static} fields. + * + * @author Christoph Strobl + * @since 3.0 + * @see MongoTemplateExtension + */ +@Target({ ElementType.FIELD, ElementType.PARAMETER }) +@Retention(RetentionPolicy.RUNTIME) +@Documented +@ExtendWith(MongoTemplateExtension.class) +public @interface Template { + + /** + * @return name of the database to use. Use empty String to generate the database name for the + * {@link ExtensionContext#getTestClass() test class}. + */ + String database() default ""; + + /** + * Pre-initialize the {@link org.springframework.data.mapping.context.MappingContext} with the given entities. + * + * @return empty by default. + */ + Class[] initialEntitySet() default {}; + + /** + * Use a {@link ReplSetClient} if {@literal true}. 
+ * + * @return false by default. + */ + boolean replicaSet() default false; +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/MongoClientVersionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/MongoClientVersionUnitTests.java new file mode 100644 index 0000000000..053498ebbd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/MongoClientVersionUnitTests.java @@ -0,0 +1,44 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.test.util.ClassPathExclusions; +import org.springframework.util.ClassUtils; + +import com.mongodb.internal.build.MongoDriverVersion; + +/** + * Tests for {@link MongoClientVersion}. 
+ * + * @author Christoph Strobl + */ +class MongoClientVersionUnitTests { + + @Test // GH-4578 + void parsesClientVersionCorrectly() { + assertThat(MongoClientVersion.isVersion5orNewer()).isEqualTo(MongoDriverVersion.VERSION.startsWith("5")); + } + + @Test // GH-4578 + @ClassPathExclusions(packages = { "com.mongodb.internal.build" }) + void fallsBackToClassLookupIfDriverVersionNotPresent() { + assertThat(MongoClientVersion.isVersion5orNewer()).isEqualTo( + ClassUtils.isPresent("com.mongodb.internal.connection.StreamFactoryFactory", this.getClass().getClassLoader())); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapterUnitTests.java new file mode 100644 index 0000000000..ab8e17a469 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapterUnitTests.java @@ -0,0 +1,49 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.util; + +import static org.assertj.core.api.Assertions.*; + +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.test.util.ExcludeReactiveClientFromClassPath; +import org.springframework.data.mongodb.test.util.ExcludeSyncClientFromClassPath; +import org.springframework.util.ClassUtils; + +/** + * @author Christoph Strobl + */ +class MongoCompatibilityAdapterUnitTests { + + @Test // GH-4578 + @ExcludeReactiveClientFromClassPath + void returnsListCollectionNameIterableTypeCorrectly() { + + String expectedType = MongoClientVersion.isVersion5orNewer() ? "ListCollectionNamesIterable" : "MongoIterable"; + assertThat(MongoCompatibilityAdapter.mongoDatabaseAdapter().forDb(null).collectionNameIterableType()) + .satisfies(type -> assertThat(ClassUtils.getShortName(type)).isEqualTo(expectedType)); + + } + + @Test // GH-4578 + @ExcludeSyncClientFromClassPath + void returnsListCollectionNamePublisherTypeCorrectly() { + + String expectedType = MongoClientVersion.isVersion5orNewer() ? "ListCollectionNamesPublisher" : "Publisher"; + assertThat(MongoCompatibilityAdapter.reactiveMongoDatabaseAdapter().forDb(null).collectionNamePublisherType()) + .satisfies(type -> assertThat(ClassUtils.getShortName(type)).isEqualTo(expectedType)); + + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java new file mode 100644 index 0000000000..1dc7030e70 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java @@ -0,0 +1,254 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.json; + +import static org.assertj.core.api.Assertions.*; + +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.temporal.Temporal; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Stream; + +import org.bson.BsonArray; +import org.bson.BsonDouble; +import org.bson.BsonInt32; +import org.bson.BsonInt64; +import org.bson.BsonObjectId; +import org.bson.BsonString; +import org.bson.Document; +import org.bson.types.ObjectId; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.util.BsonUtils; + +import com.mongodb.BasicDBList; + +/** + * Unit tests for {@link BsonUtils}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + */ +class BsonUtilsTest { + + @Test // DATAMONGO-625 + void simpleToBsonValue() { + + assertThat(BsonUtils.simpleToBsonValue(Long.valueOf(10))).isEqualTo(new BsonInt64(10)); + assertThat(BsonUtils.simpleToBsonValue(new Integer(10))).isEqualTo(new BsonInt32(10)); + assertThat(BsonUtils.simpleToBsonValue(Double.valueOf(0.1D))).isEqualTo(new BsonDouble(0.1D)); + assertThat(BsonUtils.simpleToBsonValue("value")).isEqualTo(new BsonString("value")); + } + + @Test // DATAMONGO-625 + void primitiveToBsonValue() { + assertThat(BsonUtils.simpleToBsonValue(10L)).isEqualTo(new BsonInt64(10)); + } + + @Test // DATAMONGO-625 + void objectIdToBsonValue() { + + ObjectId source = new ObjectId(); + assertThat(BsonUtils.simpleToBsonValue(source)).isEqualTo(new BsonObjectId(source)); + } + + @Test // DATAMONGO-625 + void bsonValueToBsonValue() { + + BsonObjectId source = new BsonObjectId(new ObjectId()); + assertThat(BsonUtils.simpleToBsonValue(source)).isSameAs(source); + } + + @Test // DATAMONGO-625 + void unsupportedToBsonValue() { + assertThatExceptionOfType(IllegalArgumentException.class) + .isThrownBy(() -> BsonUtils.simpleToBsonValue(new Object())); + } + + @Test // GH-3571 + void removeNullIdIfNull() { + + Document source = new Document("_id", null).append("value", "v-1"); + + assertThat(BsonUtils.removeNullId(source)).isTrue(); + assertThat(source).doesNotContainKey("_id").containsKey("value"); + } + + @Test // GH-3571 + void removeNullIdDoesNotTouchNonNullOn() { + + Document source = new Document("_id", "id-value").append("value", "v-1"); + + assertThat(BsonUtils.removeNullId(source)).isFalse(); + assertThat(source).containsKeys("_id", "value"); + } + + @Test // GH-3571 + void asCollectionDoesNotModifyCollection() { + + Object source = new ArrayList<>(0); + + assertThat(BsonUtils.asCollection(source)).isSameAs(source); + } + + @Test // GH-3571 + void asCollectionConvertsArrayToCollection() { + + Object source = new 
String[] { "one", "two" }; + + assertThat((Collection) BsonUtils.asCollection(source)).containsExactly("one", "two"); + } + + @Test // GH-3571 + void asCollectionConvertsWrapsNonIterable() { + + Object source = 100L; + + assertThat((Collection) BsonUtils.asCollection(source)).containsExactly(source); + } + + @Test // GH-3702 + void supportsBsonShouldReportIfConversionSupported() { + + assertThat(BsonUtils.supportsBson("foo")).isFalse(); + assertThat(BsonUtils.supportsBson(new Document())).isTrue(); + assertThat(BsonUtils.supportsBson(new BasicDBList())).isTrue(); + assertThat(BsonUtils.supportsBson(Collections.emptyMap())).isTrue(); + } + + @ParameterizedTest // GH-4432 + @MethodSource("javaTimeInstances") + void convertsJavaTimeTypesToBsonDateTime(Temporal source) { + + assertThat(BsonUtils.simpleToBsonValue(source)) + .isEqualTo(new Document("value", source).toBsonDocument().get("value")); + } + + @ParameterizedTest // GH-4432 + @MethodSource("collectionLikeInstances") + void convertsCollectionLikeToBsonArray(Object source) { + + assertThat(BsonUtils.simpleToBsonValue(source)) + .isEqualTo(new Document("value", source).toBsonDocument().get("value")); + } + + @Test // GH-4432 + void convertsPrimitiveArrayToBsonArray() { + + assertThat(BsonUtils.simpleToBsonValue(new int[] { 1, 2, 3 })) + .isEqualTo(new BsonArray(List.of(new BsonInt32(1), new BsonInt32(2), new BsonInt32(3)))); + } + + @ParameterizedTest + @MethodSource("fieldNames") + void resolveValueForField(FieldName fieldName, boolean exists) { + + Map source = new LinkedHashMap<>(); + source.put("a", "a-value"); // top level + source.put("b", new Document("a", "b.a-value")); // path + source.put("c.a", "c.a-value"); // key + + if(exists) { + assertThat(BsonUtils.resolveValue(source, fieldName)).isEqualTo(fieldName.name() + "-value"); + } else { + assertThat(BsonUtils.resolveValue(source, fieldName)).isNull(); + } + } + + @Test + void retainsOrderWhenMappingValues() { + + Document source = new Document(); + 
source.append("z", "first-entry"); + source.append("a", "second-entry"); + source.append("0", "third-entry"); + source.append("9", "fourth-entry"); + + Document target = BsonUtils.mapValues(source, (key, value) -> value); + assertThat(source).isNotSameAs(target).containsExactlyEntriesOf(source); + } + + @Test + void retainsOrderWhenMappingKeys() { + + Document source = new Document(); + source.append("z", "first-entry"); + source.append("a", "second-entry"); + + Document target = BsonUtils.mapEntries(source, entry -> entry.getKey().toUpperCase(), Entry::getValue); + assertThat(target).containsExactly(Map.entry("Z", "first-entry"), Map.entry("A", "second-entry")); + } + + @Test + void appliesValueMapping() { + Document source = new Document(); + source.append("z", "first-entry"); + source.append("a", "second-entry"); + + Document target = BsonUtils.mapValues(source, + (key, value) -> new StringBuilder(value.toString()).reverse().toString()); + assertThat(target).containsValues("yrtne-tsrif", "yrtne-dnoces"); + } + + @Test + void appliesKeyMapping() { + + Document source = new Document(); + source.append("z", "first-entry"); + source.append("a", "second-entry"); + + Document target = BsonUtils.mapEntries(source, entry -> entry.getKey().toUpperCase(), Entry::getValue); + assertThat(target).containsKeys("Z", "A"); + } + + static Stream fieldNames() { + return Stream.of(// + Arguments.of(FieldName.path("a"), true), // + Arguments.of(FieldName.path("b.a"), true), // + Arguments.of(FieldName.path("c.a"), false), // + Arguments.of(FieldName.name("d"), false), // + Arguments.of(FieldName.name("b.a"), false), // + Arguments.of(FieldName.name("c.a"), true) // + ); + } + + static Stream javaTimeInstances() { + + return Stream.of(Arguments.of(Instant.now()), Arguments.of(LocalDate.now()), Arguments.of(LocalDateTime.now()), + Arguments.of(LocalTime.now())); + } + + static Stream collectionLikeInstances() { + + return Stream.of(Arguments.of(new String[] { "1", "2", "3" }), 
Arguments.of(List.of("1", "2", "3")), + Arguments.of(new Integer[] { 1, 2, 3 }), Arguments.of(List.of(1, 2, 3)), + Arguments.of(new Date[] { new Date() }), Arguments.of(List.of(new Date())), + Arguments.of(new LocalDate[] { LocalDate.now() }), Arguments.of(List.of(LocalDate.now()))); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReaderUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReaderUnitTests.java new file mode 100644 index 0000000000..20b5060f77 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReaderUnitTests.java @@ -0,0 +1,699 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.util.json; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; + +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.UUID; + +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.bson.BsonRegularExpression; +import org.bson.Document; +import org.bson.codecs.DecoderContext; +import org.junit.jupiter.api.Test; +import org.springframework.data.expression.ValueExpressionParser; +import org.springframework.data.spel.EvaluationContextProvider; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.ParseException; +import org.springframework.expression.TypedValue; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.expression.spel.support.StandardEvaluationContext; + +/** + * Unit tests for {@link ParameterBindingJsonReader}. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Rocco Lagrotteria + */ +class ParameterBindingJsonReaderUnitTests { + + @Test + void bindUnquotedStringValue() { + + Document target = parse("{ 'lastname' : ?0 }", "kohlin"); + assertThat(target).isEqualTo(new Document("lastname", "kohlin")); + } + + @Test + void bindQuotedStringValue() { + + Document target = parse("{ 'lastname' : '?0' }", "kohlin"); + assertThat(target).isEqualTo(new Document("lastname", "kohlin")); + } + + @Test + void bindUnquotedIntegerValue() { + + Document target = parse("{ 'lastname' : ?0 } ", 100); + assertThat(target).isEqualTo(new Document("lastname", 100)); + } + + @Test + void bindMultiplePlaceholders() { + + Document target = parse("{ 'lastname' : ?0, 'firstname' : '?1' }", "Kohlin", "Dalinar"); + assertThat(target).isEqualTo(Document.parse("{ 'lastname' : 'Kohlin', 'firstname' : 'Dalinar' }")); + } + + @Test + void bindQuotedIntegerValue() { + + Document target = parse("{ 'lastname' : '?0' }", 100); + assertThat(target).isEqualTo(new Document("lastname", "100")); + } + + @Test // GH-4806 + void regexConsidersOptions() { + + Document target = parse("{ 'c': /^true$/i }"); + + BsonRegularExpression pattern = target.get("c", BsonRegularExpression.class); + assertThat(pattern.getPattern()).isEqualTo("^true$"); + assertThat(pattern.getOptions()).isEqualTo("i"); + } + + @Test // GH-4806 + void regexConsidersBindValueWithOptions() { + + Document target = parse("{ 'c': /^?0$/i }", "foo"); + + BsonRegularExpression pattern = target.get("c", BsonRegularExpression.class); + assertThat(pattern.getPattern()).isEqualTo("^foo$"); + assertThat(pattern.getOptions()).isEqualTo("i"); + } + + @Test // GH-4806 + void treatsQuotedValueThatLooksLikeRegexAsPlainString() { + + Document target = parse("{ 'c': '/^?0$/i' }", "foo"); + + assertThat(target.get("c")).isInstanceOf(String.class); + } + + @Test // GH-4806 + void treatsStringParameterValueThatLooksLikeRegexAsPlainString() { + + 
Document target = parse("{ 'c': ?0 }", "/^foo$/i"); + + assertThat(target.get("c")).isInstanceOf(String.class); + } + + @Test + void bindValueToRegex() { + + Document target = parse("{ 'lastname' : { '$regex' : '^(?0)'} }", "kohlin"); + assertThat(target).isEqualTo(Document.parse("{ 'lastname' : { '$regex' : '^(kohlin)'} }")); + } + + @Test + void bindValueToMultiRegex() { + + Document target = parse( + "{'$or' : [{'firstname': {'$regex': '.*?0.*', '$options': 'i'}}, {'lastname' : {'$regex': '.*?0xyz.*', '$options': 'i'}} ]}", + "calamity"); + assertThat(target).isEqualTo(Document.parse( + "{ \"$or\" : [ { \"firstname\" : { \"$regex\" : \".*calamity.*\" , \"$options\" : \"i\"}} , { \"lastname\" : { \"$regex\" : \".*calamityxyz.*\" , \"$options\" : \"i\"}}]}")); + } + + @Test + void bindMultipleValuesToSingleToken() { + + Document target = parse("{$where: 'return this.date.getUTCMonth() == ?2 && this.date.getUTCDay() == ?3;'}", 0, 1, 2, + 3, 4); + assertThat(target) + .isEqualTo(Document.parse("{$where: 'return this.date.getUTCMonth() == 2 && this.date.getUTCDay() == 3;'}")); + } + + @Test + void bindValueToDbRef() { + + Document target = parse("{ 'reference' : { $ref : 'reference', $id : ?0 }}", "kohlin"); + assertThat(target).isEqualTo(Document.parse("{ 'reference' : { $ref : 'reference', $id : 'kohlin' }}")); + } + + @Test + void bindToKey() { + + Document target = parse("{ ?0 : ?1 }", "firstname", "kaladin"); + assertThat(target).isEqualTo(Document.parse("{ 'firstname' : 'kaladin' }")); + } + + @Test + void bindListValue() { + + Document target = parse("{ 'lastname' : { $in : ?0 } }", Arrays.asList("Kohlin", "Davar")); + assertThat(target).isEqualTo(Document.parse("{ 'lastname' : { $in : ['Kohlin', 'Davar' ]} }")); + } + + @Test + void bindListOfBinaryValue() { + + byte[] value = "Kohlin".getBytes(StandardCharsets.UTF_8); + List args = Collections.singletonList(value); + + Document target = parse("{ 'lastname' : { $in : ?0 } }", args); + 
assertThat(target).isEqualTo(new Document("lastname", new Document("$in", args))); + } + + @Test + void bindExtendedExpression() { + + Document target = parse("{'id':?#{ [0] ? { $exists :true} : [1] }}", true, "firstname", "kaladin"); + assertThat(target).isEqualTo(Document.parse("{ \"id\" : { \"$exists\" : true}}")); + } + + @Test + void bindDocumentValue() { + + Document target = parse("{ 'lastname' : ?0 }", new Document("$eq", "Kohlin")); + assertThat(target).isEqualTo(Document.parse("{ 'lastname' : { '$eq' : 'Kohlin' } }")); + } + + @Test + void arrayWithoutBinding() { + + Document target = parse("{ 'lastname' : { $in : [\"Kohlin\", \"Davar\"] } }"); + assertThat(target).isEqualTo(Document.parse("{ 'lastname' : { $in : ['Kohlin', 'Davar' ]} }")); + } + + @Test + void bindSpEL() { + + Document target = parse("{ arg0 : ?#{[0]} }", 100.01D); + assertThat(target).isEqualTo(new Document("arg0", 100.01D)); + } + + @Test // DATAMONGO-2315 + void bindDateAsDate() { + + Date date = new Date(); + Document target = parse("{ 'end_date' : { $gte : { $date : ?0 } } }", date); + + assertThat(target).isEqualTo(Document.parse("{ 'end_date' : { $gte : { $date : " + date.getTime() + " } } } ")); + } + + @Test // DATAMONGO-2315 + void bindQuotedDateAsDate() { + + Date date = new Date(); + Document target = parse("{ 'end_date' : { $gte : { $date : '?0' } } }", date); + + assertThat(target).isEqualTo(Document.parse("{ 'end_date' : { $gte : { $date : " + date.getTime() + " } } } ")); + } + + @Test // DATAMONGO-2315 + void bindStringAsDate() { + + Document target = parse("{ 'end_date' : { $gte : { $date : ?0 } } }", "2019-07-04T12:19:23.000Z"); + + assertThat(target).isEqualTo(Document.parse("{ 'end_date' : { $gte : { $date : '2019-07-04T12:19:23.000Z' } } } ")); + } + + @Test // DATAMONGO-2315 + void bindNumberAsDate() { + + Long time = new Date().getTime(); + Document target = parse("{ 'end_date' : { $gte : { $date : ?0 } } }", time); + + 
assertThat(target).isEqualTo(Document.parse("{ 'end_date' : { $gte : { $date : " + time + " } } } ")); + } + + @Test // GH-3750 + public void shouldParseISODate() { + + String json = "{ 'value' : ISODate(\"1970-01-01T00:00:00Z\") }"; + Date value = parse(json).get("value", Date.class); + assertThat(value.getTime()).isZero(); + } + + @Test // GH-3750 + public void shouldParseISODateWith24HourTimeSpecification() { + + String json = "{ 'value' : ISODate(\"2013-10-04T12:07:30.443Z\") }"; + Date value = parse(json).get("value", Date.class); + assertThat(value.getTime()).isEqualTo(1380888450443L); + } + + @Test // GH-3750 + public void shouldParse$date() { + + String json = "{ 'value' : { \"$date\" : \"2015-04-16T14:55:57.626Z\" } }"; + Date value = parse(json).get("value", Date.class); + assertThat(value.getTime()).isEqualTo(1429196157626L); + } + + @Test // GH-3750 + public void shouldParse$dateWithTimeOffset() { + + String json = "{ 'value' :{ \"$date\" : \"2015-04-16T16:55:57.626+02:00\" } }"; + Date value = parse(json).get("value", Date.class); + assertThat(value.getTime()).isEqualTo(1429196157626L); + } + + @Test // GH-4282 + public void shouldReturnNullAsSuch() { + + String json = "{ 'value' : ObjectId(?0) }"; + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> parse(json, new Object[] { null })) + .withMessageContaining("hexString"); + } + + @Test // DATAMONGO-2418 + void shouldNotAccessSpElEvaluationContextWhenNoSpElPresentInBindableTarget() { + + Object[] args = new Object[] { "value" }; + EvaluationContext evaluationContext = new StandardEvaluationContext() { + + @Override + public TypedValue getRootObject() { + throw new RuntimeException("o_O"); + } + }; + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader("{ 'name':'?0' }", + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + Document target = new ParameterBindingDocumentCodec().decode(reader, 
DecoderContext.builder().build()); + + assertThat(target).isEqualTo(new Document("name", "value")); + } + + @Test // DATAMONGO-2476 + void bindUnquotedParameterInArray() { + + Document target = parse("{ 'name' : { $in : [?0] } }", "kohlin"); + assertThat(target).isEqualTo(new Document("name", new Document("$in", Collections.singletonList("kohlin")))); + } + + @Test // DATAMONGO-2476 + void bindMultipleUnquotedParameterInArray() { + + Document target = parse("{ 'name' : { $in : [?0,?1] } }", "dalinar", "kohlin"); + assertThat(target).isEqualTo(new Document("name", new Document("$in", Arrays.asList("dalinar", "kohlin")))); + } + + @Test // DATAMONGO-2476 + void bindUnquotedParameterInArrayWithSpaces() { + + Document target = parse("{ 'name' : { $in : [ ?0 ] } }", "kohlin"); + assertThat(target).isEqualTo(new Document("name", new Document("$in", Collections.singletonList("kohlin")))); + } + + @Test // DATAMONGO-2476 + void bindQuotedParameterInArray() { + + Document target = parse("{ 'name' : { $in : ['?0'] } }", "kohlin"); + assertThat(target).isEqualTo(new Document("name", new Document("$in", Collections.singletonList("kohlin")))); + } + + @Test // DATAMONGO-2476 + void bindQuotedMultiParameterInArray() { + + Document target = parse("{ 'name' : { $in : ['?0,?1'] } }", "dalinar", "kohlin"); + assertThat(target) + .isEqualTo(new Document("name", new Document("$in", Collections.singletonList("dalinar,kohlin")))); + } + + @Test // DATAMONGO-1894 + void discoversNoDependenciesInExpression() { + + String json = "{ $and : [?#{ [0] == null ? 
{ '$where' : 'true' } : { 'v1' : { '$in' : {[0]} } } }]}"; + + ExpressionDependencies expressionDependencies = new ParameterBindingDocumentCodec().captureExpressionDependencies( + json, it -> new Object(), ValueExpressionParser.create(SpelExpressionParser::new)); + + assertThat(expressionDependencies).isEqualTo(ExpressionDependencies.none()); + } + + @Test // DATAMONGO-1894 + void discoversCorrectlyDependenciesInExpression() { + + String json = "{ hello: ?#{hasRole('foo')} }"; + + ExpressionDependencies expressionDependencies = new ParameterBindingDocumentCodec().captureExpressionDependencies( + json, it -> new Object(), ValueExpressionParser.create(SpelExpressionParser::new)); + + assertThat(expressionDependencies).isNotEmpty(); + assertThat(expressionDependencies.get()).hasSize(1); + } + + @Test // DATAMONGO-2523 + void bindSpelExpressionInArrayCorrectly/* closing bracket must not have leading whitespace! */() { + + Document target = parse("{ $and : [?#{ [0] == null ? { '$where' : 'true' } : { 'v1' : { '$in' : {[0]} } } }]}", 1); + + assertThat(target).isEqualTo(Document.parse("{\"$and\": [{\"v1\": {\"$in\": [1]}}]}")); + } + + @Test // DATAMONGO-2545 + void shouldABindArgumentsViaIndexInSpelExpressions() { + + Object[] args = new Object[] { "yess", "nooo" }; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader( + "{ 'isBatman' : ?#{ T(" + this.getClass().getName() + ").isBatman() ? 
[0] : [1] }}", + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + + assertThat(target).isEqualTo(new Document("isBatman", "nooo")); + } + + @Test // DATAMONGO-2545 + void shouldAllowMethodArgumentPlaceholdersInSpelExpressions/*because this worked before*/() { + + Object[] args = new Object[] { "yess", "nooo" }; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader( + "{ 'isBatman' : ?#{ T(" + this.getClass().getName() + ").isBatman() ? '?0' : '?1' }}", + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + + assertThat(target).isEqualTo(new Document("isBatman", "nooo")); + } + + @Test // DATAMONGO-2545 + void shouldAllowMethodArgumentPlaceholdersInQuotedSpelExpressions/*because this worked before*/() { + + Object[] args = new Object[] { "yess", "nooo" }; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader( + "{ 'isBatman' : \"?#{ T(" + this.getClass().getName() + ").isBatman() ? 
'?0' : '?1' }\" }", + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + + assertThat(target).isEqualTo(new Document("isBatman", "nooo")); + } + + @Test // DATAMONGO-2545 + void evaluatesSpelExpressionDefiningEntireQuery() { + + Object[] args = new Object[] {}; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + evaluationContext.setRootObject(new DummySecurityObject(new DummyWithId("wonderwoman"))); + + String json = "?#{ T(" + this.getClass().getName() + + ").isBatman() ? {'_class': { '$eq' : 'region' }} : { '$and' : { {'_class': { '$eq' : 'region' } }, {'user.supervisor': principal.id } } } }"; + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + + assertThat(target) + .isEqualTo(new Document("$and", Arrays.asList(new Document("_class", new Document("$eq", "region")), + new Document("user.supervisor", "wonderwoman")))); + } + + @Test // GH-3871 + public void capturingExpressionDependenciesShouldNotThrowParseErrorForSpelOnlyJson() { + + Object[] args = new Object[] { "1", "2" }; + String json = "?#{ true ? { 'name': #name } : { 'name' : #name + 'trouble' } }"; + + new ParameterBindingDocumentCodec().captureExpressionDependencies(json, (index) -> args[index], + ValueExpressionParser.create(SpelExpressionParser::new)); + } + + @Test // GH-3871, GH-4089 + public void bindEntireQueryUsingSpelExpressionWhenEvaluationResultIsDocument() { + + Object[] args = new Object[] { "expected", "unexpected" }; + String json = "?#{ true ? 
{ 'name': ?0 } : { 'name' : ?1 } }"; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + + Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + assertThat(target).isEqualTo(new Document("name", "expected")); + } + + @Test // GH-3871, GH-4089 + public void throwsExceptionWhenBindEntireQueryUsingSpelExpressionIsMalFormatted() { + + Object[] args = new Object[] { "expected", "unexpected" }; + String json = "?#{ true ? { 'name': ?0 { } } : { 'name' : ?1 } }"; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + + assertThatExceptionOfType(ParseException.class).isThrownBy(() -> { + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + + new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + }); + } + + @Test // GH-3871, GH-4089 + public void bindEntireQueryUsingSpelExpressionWhenEvaluationResultIsJsonStringContainingUUID() { + + Object[] args = new Object[] { UUID.fromString("cfbca728-4e39-4613-96bc-f920b5c37e16"), "unexpected" }; + String json = "?#{ true ? 
{ 'name': ?0 } : { 'name' : ?1 } }"; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + + Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + + assertThat(target.get("name")).isInstanceOf(UUID.class); + } + + @Test // GH-3871 + void bindEntireQueryUsingSpelExpression() { + + Object[] args = new Object[] { "region" }; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + evaluationContext.setRootObject(new DummySecurityObject(new DummyWithId("wonderwoman"))); + + String json = "?#{ T(" + this.getClass().getName() + ").applyFilterByUser('?0' ,principal.id) }"; + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + Document target = new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + + assertThat(target) + .isEqualTo(new Document("$and", Arrays.asList(new Document("_class", new Document("$eq", "region")), + new Document("user.supervisor", "wonderwoman")))); + } + + @Test // GH-3871 + void bindEntireQueryUsingParameter() { + + Object[] args = new Object[] { "{ 'itWorks' : true }" }; + StandardEvaluationContext evaluationContext = (StandardEvaluationContext) EvaluationContextProvider.DEFAULT + .getEvaluationContext(args); + + String json = "?0"; + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, + new ParameterBindingContext((index) -> args[index], new SpelExpressionParser(), () -> evaluationContext)); + Document target = new 
ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + + assertThat(target).isEqualTo(new Document("itWorks", true)); + } + + @Test // DATAMONGO-2571 + void shouldParseRegexCorrectly() { + + Document target = parse("{ $and: [{'fieldA': {$in: [/ABC.*/, /CDE.*F/]}}, {'fieldB': {$ne: null}}]}"); + assertThat(target) + .isEqualTo(Document.parse("{ $and: [{'fieldA': {$in: [/ABC.*/, /CDE.*F/]}}, {'fieldB': {$ne: null}}]}")); + } + + @Test // DATAMONGO-2571 + void shouldParseRegexWithPlaceholderCorrectly() { + + Document target = parse("{ $and: [{'fieldA': {$in: [/?0.*/, /CDE.*F/]}}, {'fieldB': {$ne: null}}]}", "ABC"); + assertThat(target) + .isEqualTo(Document.parse("{ $and: [{'fieldA': {$in: [/ABC.*/, /CDE.*F/]}}, {'fieldB': {$ne: null}}]}")); + } + + @Test // DATAMONGO-2633 + void shouldParseNestedArrays() { + + Document target = parse("{ 'stores.location' : { $geoWithin: { $centerSphere: [ [ ?0, 48.799029 ] , ?1 ] } } }", + 1.948516D, 0.004D); + assertThat(target).isEqualTo(Document + .parse("{ 'stores.location' : { $geoWithin: { $centerSphere: [ [ 1.948516, 48.799029 ] , 0.004 ] } } }")); + } + + @Test // GH-3633 + void parsesNullValue() { + + Document target = parse("{ 'parent' : null }"); + assertThat(target).isEqualTo(new Document("parent", null)); + } + + @Test // GH-4089 + void retainsSpelArgumentTypeViaArgumentIndex() { + + String source = "new java.lang.Object()"; + Document target = parse("{ arg0 : ?#{[0]} }", source); + assertThat(target.get("arg0")).isEqualTo(source); + } + + @Test // GH-4089 + void retainsSpelArgumentTypeViaParameterPlaceholder() { + + String source = "new java.lang.Object()"; + Document target = parse("{ arg0 : :#{?0} }", source); + assertThat(target.get("arg0")).isEqualTo(source); + } + + @Test // GH-4089 + void errorsOnNonDocument() { + + String source = "new java.lang.Object()"; + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> parse(":#{?0}", source)); + } + + @Test // GH-4089 + 
void bindsFullDocument() { + + Document source = new Document(); + assertThat(parse(":#{?0}", source)).isSameAs(source); + } + + @Test // GH-4089 + void enforcesStringSpelArgumentTypeViaParameterPlaceholderWhenQuoted() { + + Integer source = 10; + Document target = parse("{ arg0 : :#{'?0'} }", source); + assertThat(target.get("arg0")).isEqualTo("10"); + } + + @Test // GH-4089 + void enforcesSpelArgumentTypeViaParameterPlaceholderWhenQuoted() { + + String source = "new java.lang.Object()"; + Document target = parse("{ arg0 : :#{'?0'} }", source); + assertThat(target.get("arg0")).isEqualTo(source); + } + + @Test // GH-4089 + void retainsSpelArgumentTypeViaParameterPlaceholderWhenValueContainsSingleQuotes() { + + String source = "' + new java.lang.Object() + '"; + Document target = parse("{ arg0 : :#{?0} }", source); + assertThat(target.get("arg0")).isEqualTo(source); + } + + @Test // GH-4089 + void retainsSpelArgumentTypeViaParameterPlaceholderWhenValueContainsDoubleQuotes() { + + String source = "\\\" + new java.lang.Object() + \\\""; + Document target = parse("{ arg0 : :#{?0} }", source); + assertThat(target.get("arg0")).isEqualTo(source); + } + + @Test // GH-3750 + void shouldParseUUIDasStandardRepresentation() { + + String json = "{ 'value' : UUID(\"b5f21e0c-2a0d-42d6-ad03-d827008d8ab6\") }"; + + BsonBinary value = parse(json).get("value", BsonBinary.class); + assertThat(value.getType()).isEqualTo(BsonBinarySubType.UUID_STANDARD.getValue()); + } + + @Test // GH-3750 + public void shouldParse$uuidAsStandardRepresentation() { + + String json = "{ 'value' : { '$uuid' : \"73ff-d26444b-34c6-990e8e-7d1dfc035d4\" } } }"; + BsonBinary value = parse(json).get("value", BsonBinary.class); + assertThat(value.getType()).isEqualTo(BsonBinarySubType.UUID_STANDARD.getValue()); + } + + private static Document parse(String json, Object... 
args) { + + ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, args); + return new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build()); + } + + // DATAMONGO-2545 + public static boolean isBatman() { + return false; + } + + public static String applyFilterByUser(String _class, String username) { + switch (username) { + case "batman": + return "{'_class': { '$eq' : '" + _class + "' }}"; + default: + return "{ '$and' : [ {'_class': { '$eq' : '" + _class + "' } }, {'user.supervisor': '" + username + "' } ] }"; + } + } + + public static class DummySecurityObject { + + DummyWithId principal; + + public DummySecurityObject(DummyWithId principal) { + this.principal = principal; + } + + public DummyWithId getPrincipal() { + return this.principal; + } + + public void setPrincipal(DummyWithId principal) { + this.principal = principal; + } + + public String toString() { + return "ParameterBindingJsonReaderUnitTests.DummySecurityObject(principal=" + this.getPrincipal() + ")"; + } + } + + public static class DummyWithId { + + String id; + + public DummyWithId(String id) { + this.id = id; + } + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + public String toString() { + return "ParameterBindingJsonReaderUnitTests.DummyWithId(id=" + this.getId() + ")"; + } + } + +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/BulkOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/BulkOperationExtensionsTests.kt new file mode 100644 index 0000000000..e5c08c74dc --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/BulkOperationExtensionsTests.kt @@ -0,0 +1,79 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import io.mockk.mockk +import io.mockk.verify +import org.junit.jupiter.api.Test +import org.springframework.data.mongodb.core.query.Criteria +import org.springframework.data.mongodb.core.query.Query +import org.springframework.data.mongodb.core.query.Update +import org.springframework.data.mongodb.core.query.UpdateDefinition +import org.springframework.data.util.Pair.of + +/** + * Unit tests for BulkOperationExtensions. + * @author 2tsumo-hitori + */ +class BulkOperationExtensionsTests { + + private val bulkOperation = mockk(relaxed = true) + + @Test // GH-4911 + fun `BulkOperation#updateMulti using kotlin#Pair should call its Java counterpart`() { + + val list: MutableList> = mutableListOf() + list.add(where("value", "v2") to set("value", "v3")) + + bulkOperation.updateMulti(list) + + val expected = list.map { (query, update) -> of(query, update) } + verify { bulkOperation.updateMulti(expected) } + } + + @Test // GH-4911 + fun `BulkOperation#upsert using kotlin#Pair should call its Java counterpart`() { + + val list: MutableList> = mutableListOf() + list.add(where("value", "v2") to set("value", "v3")) + + bulkOperation.upsert(list) + + val expected = list.map { (query, update) -> of(query, update) } + verify { bulkOperation.upsert(expected) } + } + + @Test // GH-4911 + fun `BulkOperation#updateOne using kotlin#Pair should call its Java counterpart`() { + + val list: MutableList> = mutableListOf() + list.add(where("value", "v2") to set("value", "v3")) + + bulkOperation.updateOne(list) + + val 
expected = list.map { (query, update) -> of(query, update) } + verify { bulkOperation.updateOne(expected) } + } + + private fun where(field: String, value: String): Query { + return Query().addCriteria(Criteria.where(field).`is`(value)) + } + + private fun set(field: String, value: String): Update { + return Update().set(field, value) + } + +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/Entities.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/Entities.kt new file mode 100644 index 0000000000..f29df408b2 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/Entities.kt @@ -0,0 +1,32 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import org.springframework.data.annotation.Id +import org.springframework.data.annotation.LastModifiedDate +import org.springframework.data.annotation.Version +import org.springframework.data.mongodb.core.mapping.Document +import java.time.Instant + +@Document("versioned-auditable") +data class KAuditableVersionedEntity( + @Id val id: String?, + val value: String, + @Version val version: Long?, + @LastModifiedDate val modificationDate: Instant? 
+) { + fun withValue(value: String) = copy(value = value) +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensionsTests.kt index 305fa06701..01804af55f 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableAggregationOperationExtensionsTests.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,36 +15,23 @@ */ package org.springframework.data.mongodb.core -import com.nhaarman.mockito_kotlin.verify import example.first.First +import io.mockk.mockk +import io.mockk.verify import org.junit.Test -import org.junit.runner.RunWith -import org.mockito.Answers -import org.mockito.Mock -import org.mockito.Mockito -import org.mockito.junit.MockitoJUnitRunner /** * @author Sebastien Deleuze * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner::class) class ExecutableAggregationOperationExtensionsTests { - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operation: ExecutableAggregationOperation - - @Test // DATAMONGO-1689 - fun `aggregateAndReturn(KClass) extension should call its Java counterpart`() { - - operation.aggregateAndReturn(First::class) - verify(operation).aggregateAndReturn(First::class.java) - } + val 
operation = mockk(relaxed = true) @Test // DATAMONGO-1689 fun `aggregateAndReturn() with reified type parameter extension should call its Java counterpart`() { operation.aggregateAndReturn() - verify(operation).aggregateAndReturn(First::class.java) + verify { operation.aggregateAndReturn(First::class.java) } } } diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensionsTests.kt index 4f8a60d34a..516b01793a 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableFindOperationExtensionsTests.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,62 +15,67 @@ */ package org.springframework.data.mongodb.core -import com.nhaarman.mockito_kotlin.verify import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify import org.junit.Test -import org.junit.runner.RunWith -import org.mockito.Answers -import org.mockito.Mock -import org.mockito.junit.MockitoJUnitRunner /** * @author Sebastien Deleuze * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner::class) class ExecutableFindOperationExtensionsTests { - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operation: ExecutableFindOperation + val operation = mockk(relaxed = true) - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operationWithProjection: ExecutableFindOperation.FindWithProjection + val operationWithProjection = mockk>(relaxed = true) - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var distinctWithProjection: ExecutableFindOperation.DistinctWithProjection + val distinctWithProjection = mockk(relaxed = true) - @Test // DATAMONGO-1689 - fun `ExecutableFindOperation#query(KClass) extension should call its Java counterpart`() { + val findDistinct = mockk(relaxed = true) - operation.query(First::class) - verify(operation).query(First::class.java) - } + val executableFind = mockk>(relaxed = true) - @Test // DATAMONGO-1689 - fun `ExecutableFindOperation#query() with reified type parameter extension should call its Java counterpart`() { + @Test // DATAMONGO-1689 + fun `ExecutableFindOperation#query() with reified type parameter extension should call its Java counterpart`() { - operation.query() - verify(operation).query(First::class.java) - } + operation.query() + verify { operation.query(First::class.java) } + } - @Test // 
DATAMONGO-1689 - fun `ExecutableFindOperation#FindOperationWithProjection#asType(KClass) extension should call its Java counterpart`() { + @Test // DATAMONGO-1689, DATAMONGO-2086 + fun `ExecutableFindOperation#FindOperationWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { - operationWithProjection.asType(First::class) - verify(operationWithProjection).`as`(First::class.java) - } + operationWithProjection.asType() + verify { operationWithProjection.`as`(User::class.java) } + } - @Test // DATAMONGO-1689 - fun `ExecutableFindOperation#FindOperationWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { + @Test // DATAMONGO-2086 + fun `ExecutableFindOperation#DistinctWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { - operationWithProjection.asType() - verify(operationWithProjection).`as`(First::class.java) - } + distinctWithProjection.asType() + verify { distinctWithProjection.`as`(User::class.java) } + } - @Test // DATAMONGO-1761 - fun `ExecutableFindOperation#DistinctWithProjection#asType(KClass) extension should call its Java counterpart`() { + @Test // DATAMONGO-2417 + fun `ExecutableFindOperation#distrinct() using KProperty1 should call its Java counterpart`() { - distinctWithProjection.asType(First::class) - verify(distinctWithProjection).`as`(First::class.java) - } + every { operation.query(KotlinUser::class.java) } returns executableFind + + operation.distinct(KotlinUser::username) + verify { + operation.query(KotlinUser::class.java) + executableFind.distinct("username") + } + } + + @Test // DATAMONGO-2417 + fun `ExecutableFindOperation#FindDistinct#field() using KProperty should call its Java counterpart`() { + + findDistinct.distinct(KotlinUser::username) + verify { findDistinct.distinct("username") } + } + + data class KotlinUser(val username: String) } diff --git 
a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensionsTests.kt index c47a792d02..27b8063958 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableInsertOperationExtensionsTests.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,36 +15,24 @@ */ package org.springframework.data.mongodb.core -import com.nhaarman.mockito_kotlin.verify import example.first.First +import io.mockk.mockk +import io.mockk.verify import org.junit.Test -import org.junit.runner.RunWith -import org.mockito.Answers -import org.mockito.Mock -import org.mockito.junit.MockitoJUnitRunner /** * @author Sebastien Deleuze * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner::class) class ExecutableInsertOperationExtensionsTests { - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operation: ExecutableInsertOperation + val operation = mockk(relaxed = true) - @Test // DATAMONGO-1689 - fun `insert(KClass) extension should call its Java counterpart`() { + @Test // DATAMONGO-1689 + fun `insert() with reified type parameter extension should call its Java counterpart`() { - operation.insert(First::class) - verify(operation).insert(First::class.java) - } - - @Test // 
DATAMONGO-1689 - fun `insert() with reified type parameter extension should call its Java counterpart`() { - - operation.insert() - verify(operation).insert(First::class.java) - } + operation.insert() + verify { operation.insert(First::class.java) } + } } diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableMapReduceOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableMapReduceOperationExtensionsTests.kt new file mode 100644 index 0000000000..8cda511a54 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableMapReduceOperationExtensionsTests.kt @@ -0,0 +1,46 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core + +import example.first.First +import io.mockk.mockk +import io.mockk.verify +import org.junit.Test + +/** + * @author Christoph Strobl + * @author Sebastien Deleuze + */ +class ExecutableMapReduceOperationExtensionsTests { + + val operation = mockk(relaxed = true) + + val operationWithProjection = mockk>(relaxed = true) + + @Test // DATAMONGO-1929 + fun `ExecutableMapReduceOperation#mapReduce() with reified type parameter extension should call its Java counterpart`() { + + operation.mapReduce() + verify { operation.mapReduce(First::class.java) } + } + + @Test // DATAMONGO-1929, DATAMONGO-2086 + fun `ExecutableMapReduceOperation#MapReduceWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { + + operationWithProjection.asType() + verify { operationWithProjection.`as`(User::class.java) } + } +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensionsTests.kt index 8ecf94192a..81e1702114 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableRemoveOperationExtensionsTests.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,36 +15,24 @@ */ package org.springframework.data.mongodb.core -import com.nhaarman.mockito_kotlin.verify import example.first.First +import io.mockk.mockk +import io.mockk.verify import org.junit.Test -import org.junit.runner.RunWith -import org.mockito.Answers -import org.mockito.Mock -import org.mockito.junit.MockitoJUnitRunner /** * @author Sebastien Deleuze * @author Mark Paluch */ -@RunWith(MockitoJUnitRunner::class) class ExecutableRemoveOperationExtensionsTests { - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operation: ExecutableRemoveOperation + val operation = mockk(relaxed = true) - @Test // DATAMONGO-1689 - fun `remove(KClass) extension should call its Java counterpart`() { + @Test // DATAMONGO-1689 + fun `remove() with reified type parameter extension should call its Java counterpart`() { - operation.remove(First::class) - verify(operation).remove(First::class.java) - } - - @Test // DATAMONGO-1689 - fun `remove() with reified type parameter extension should call its Java counterpart`() { - - operation.remove() - verify(operation).remove(First::class.java) - } + operation.remove() + verify { operation.remove(First::class.java) } + } } diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensionsTests.kt index 18a69b265b..86bddda4c8 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ExecutableUpdateOperationExtensionsTests.kt @@ 
-1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,36 +15,25 @@ */ package org.springframework.data.mongodb.core -import com.nhaarman.mockito_kotlin.verify import example.first.First +import io.mockk.mockk +import io.mockk.verify import org.junit.Test -import org.junit.runner.RunWith -import org.mockito.Answers -import org.mockito.Mock -import org.mockito.junit.MockitoJUnitRunner /** - * Unit tests for [ExecutableUpdateOperationExtensions]. + * Unit tests for `ExecutableUpdateOperationExtensions.kt`. * * @author Christoph Strobl + * @author Sebastien Deleuze */ -@RunWith(MockitoJUnitRunner::class) class ExecutableUpdateOperationExtensionsTests { - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operation: ExecutableUpdateOperation - - @Test // DATAMONGO-1719 - fun `update(KClass) extension should call its Java counterpart`() { - - operation.update(First::class) - verify(operation).update(First::class.java) - } + val operation = mockk(relaxed = true) @Test // DATAMONGO-1719 fun `update() with reified type parameter extension should call its Java counterpart`() { operation.update() - verify(operation).update(First::class.java) + verify { operation.update(First::class.java) } } -} \ No newline at end of file +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensionsTests.kt index 13125c6a45..1fc68c85c5 100644 --- 
a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/MongoOperationsExtensionsTests.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,21 +15,14 @@ */ package org.springframework.data.mongodb.core -import com.nhaarman.mockito_kotlin.mock import example.first.First import example.second.Second +import io.mockk.mockk +import io.mockk.verify import org.junit.Test -import org.junit.runner.RunWith -import org.mockito.Answers -import org.mockito.Mock -import org.mockito.Mockito.* -import org.mockito.junit.MockitoJUnitRunner import org.springframework.data.mongodb.core.BulkOperations.BulkMode import org.springframework.data.mongodb.core.aggregation.Aggregation -import org.springframework.data.mongodb.core.mapreduce.GroupBy import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions -import org.springframework.data.mongodb.core.query.Criteria -import org.springframework.data.mongodb.core.query.NearQuery import org.springframework.data.mongodb.core.query.Query import org.springframework.data.mongodb.core.query.Update @@ -38,141 +31,76 @@ import org.springframework.data.mongodb.core.query.Update * @author Mark Paluch * @author Christoph Strobl */ -@RunWith(MockitoJUnitRunner::class) class MongoOperationsExtensionsTests { - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operations: MongoOperations - - @Test // DATAMONGO-1689 - fun 
`getCollectionName(KClass) extension should call its Java counterpart`() { - - operations.getCollectionName(First::class) - verify(operations).getCollectionName(First::class.java) - } + val operations = mockk(relaxed = true) @Test // DATAMONGO-1689 fun `getCollectionName() with reified type parameter extension should call its Java counterpart`() { operations.getCollectionName() - verify(operations).getCollectionName(First::class.java) + verify { operations.getCollectionName(First::class.java) } } @Test // DATAMONGO-1689 fun `execute(CollectionCallback) with reified type parameter extension should call its Java counterpart`() { - val collectionCallback = mock>() + val collectionCallback = mockk>() operations.execute(collectionCallback) - verify(operations).execute(First::class.java, collectionCallback) + verify { operations.execute(First::class.java, collectionCallback) } } @Test // DATAMONGO-1689 fun `stream(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.stream(query) - verify(operations).stream(query, First::class.java) + verify { operations.stream(query, First::class.java) } } @Test // DATAMONGO-1689 fun `stream(Query, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() val collectionName = "foo" operations.stream(query, collectionName) - verify(operations).stream(query, First::class.java, collectionName) - } - - @Test // DATAMONGO-1689 - fun `createCollection(KClass) extension should call its Java counterpart`() { - - operations.createCollection(First::class) - verify(operations).createCollection(First::class.java) - } - - @Test // DATAMONGO-1689 - fun `createCollection(KClass, CollectionOptions) extension should call its Java counterpart`() { - - val collectionOptions = mock() - operations.createCollection(First::class, collectionOptions) - verify(operations).createCollection(First::class.java, 
collectionOptions) + verify { operations.stream(query, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `createCollection() with reified type parameter extension should call its Java counterpart`() { operations.createCollection() - verify(operations).createCollection(First::class.java) + verify { operations.createCollection(First::class.java) } } @Test // DATAMONGO-1689 fun `createCollection(CollectionOptions) with reified type parameter extension should call its Java counterpart`() { - val collectionOptions = mock() + val collectionOptions = mockk() operations.createCollection(collectionOptions) - verify(operations).createCollection(First::class.java, collectionOptions) - } - - - @Test // DATAMONGO-1689 - fun `collectionExists(KClass) extension should call its Java counterpart`() { - - operations.collectionExists(First::class) - verify(operations).collectionExists(First::class.java) + verify { operations.createCollection(First::class.java, collectionOptions) } } @Test // DATAMONGO-1689 fun `collectionExists() with reified type parameter extension should call its Java counterpart`() { operations.collectionExists() - verify(operations).collectionExists(First::class.java) - } - - @Test // DATAMONGO-1689 - fun `dropCollection(KClass) extension should call its Java counterpart`() { - - operations.dropCollection(First::class) - verify(operations).dropCollection(First::class.java) + verify { operations.collectionExists(First::class.java) } } @Test // DATAMONGO-1689 fun `dropCollection() with reified type parameter extension should call its Java counterpart`() { operations.dropCollection() - verify(operations).dropCollection(First::class.java) - } - - @Test // DATAMONGO-1689 - fun `indexOps(KClass) extension should call its Java counterpart`() { - - operations.indexOps(First::class) - verify(operations).indexOps(First::class.java) + verify { operations.dropCollection(First::class.java) } } @Test // DATAMONGO-1689 fun `indexOps() with reified type 
parameter extension should call its Java counterpart`() { operations.indexOps() - verify(operations).indexOps(First::class.java) - } - - @Test // DATAMONGO-1689 - fun `bulkOps(BulkMode, KClass) extension should call its Java counterpart`() { - - val bulkMode = BulkMode.ORDERED - - operations.bulkOps(bulkMode, First::class) - verify(operations).bulkOps(bulkMode, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `bulkOps(BulkMode, KClass, String) extension should call its Java counterpart`() { - - val bulkMode = BulkMode.ORDERED - val collectionName = "foo" - - operations.bulkOps(bulkMode, First::class, collectionName) - verify(operations).bulkOps(bulkMode, First::class.java, collectionName) + verify { operations.indexOps(First::class.java) } } @Test // DATAMONGO-1689 @@ -181,7 +109,7 @@ class MongoOperationsExtensionsTests { val bulkMode = BulkMode.ORDERED operations.bulkOps(bulkMode) - verify(operations).bulkOps(bulkMode, First::class.java) + verify { operations.bulkOps(bulkMode, First::class.java) } } @Test // DATAMONGO-1689 @@ -191,14 +119,14 @@ class MongoOperationsExtensionsTests { val collectionName = "foo" operations.bulkOps(bulkMode, collectionName) - verify(operations).bulkOps(bulkMode, First::class.java, collectionName) + verify { operations.bulkOps(bulkMode, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `findAll() with reified type parameter extension should call its Java counterpart`() { operations.findAll() - verify(operations).findAll(First::class.java) + verify { operations.findAll(First::class.java) } } @Test // DATAMONGO-1689 @@ -207,66 +135,63 @@ class MongoOperationsExtensionsTests { val collectionName = "foo" operations.findAll(collectionName) - verify(operations).findAll(First::class.java, collectionName) - } - - @Test // DATAMONGO-1689 - fun `group(String, GroupBy) with reified type parameter extension should call its Java counterpart`() { - - val collectionName = "foo" - val groupBy = mock() - - 
operations.group(collectionName, groupBy) - verify(operations).group(collectionName, groupBy, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `group(Criteria, String, GroupBy) with reified type parameter extension should call its Java counterpart`() { - - val criteria = mock() - val collectionName = "foo" - val groupBy = mock() - - operations.group(criteria, collectionName, groupBy) - verify(operations).group(criteria, collectionName, groupBy, First::class.java) + verify { operations.findAll(First::class.java, collectionName) } } - @Test // DATAMONGO-1689 - fun `aggregate(Aggregation, KClass) with reified type parameter extension should call its Java counterpart`() { + @Test // #3508 + fun `aggregate(Aggregation) with reified type parameter extension should call its Java counterpart`() { - val aggregation = mock() + val aggregation = mockk() - operations.aggregate(aggregation, Second::class) - verify(operations).aggregate(aggregation, Second::class.java, First::class.java) + operations.aggregate(aggregation) + verify { + operations.aggregate( + aggregation, + Second::class.java, + First::class.java + ) + } } @Test // DATAMONGO-1689 fun `aggregate(Aggregation, String) with reified type parameter extension should call its Java counterpart`() { - val aggregation = mock() + val aggregation = mockk() val collectionName = "foo" operations.aggregate(aggregation, collectionName) - verify(operations).aggregate(aggregation, collectionName, First::class.java) + verify { operations.aggregate(aggregation, collectionName, First::class.java) } } - @Test // DATAMONGO-1689 - fun `aggregateStream(Aggregation, KClass) with reified type parameter extension should call its Java counterpart`() { + @Test // #3508 + fun `aggregateStream(Aggregation) with reified type parameter extension should call its Java counterpart`() { - val aggregation = mock() + val aggregation = mockk() - operations.aggregateStream(aggregation, Second::class) - 
verify(operations).aggregateStream(aggregation, Second::class.java, First::class.java) + operations.aggregateStream(aggregation) + verify { + operations.aggregateStream( + aggregation, + Second::class.java, + First::class.java + ) + } } @Test // DATAMONGO-1689 fun `aggregateStream(Aggregation, String) with reified type parameter extension should call its Java counterpart`() { - val aggregation = mock() + val aggregation = mockk() val collectionName = "foo" operations.aggregateStream(aggregation, collectionName) - verify(operations).aggregateStream(aggregation, collectionName, First::class.java) + verify { + operations.aggregateStream( + aggregation, + collectionName, + First::class.java + ) + } } @Test // DATAMONGO-1689 @@ -277,7 +202,7 @@ class MongoOperationsExtensionsTests { val reduceFunction = "baz" operations.mapReduce(collectionName, mapFunction, reduceFunction) - verify(operations).mapReduce(collectionName, mapFunction, reduceFunction, First::class.java) + verify { operations.mapReduce(collectionName, mapFunction, reduceFunction, First::class.java) } } @Test // DATAMONGO-1689 @@ -286,110 +211,82 @@ class MongoOperationsExtensionsTests { val collectionName = "foo" val mapFunction = "bar" val reduceFunction = "baz" - val options = mock() + val options = mockk() operations.mapReduce(collectionName, mapFunction, reduceFunction, options) - verify(operations).mapReduce(collectionName, mapFunction, reduceFunction, options, First::class.java) + verify { operations.mapReduce(collectionName, mapFunction, reduceFunction, options, First::class.java) } } @Test // DATAMONGO-1689 fun `mapReduce(Query, String, String, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() val collectionName = "foo" val mapFunction = "bar" val reduceFunction = "baz" operations.mapReduce(query, collectionName, mapFunction, reduceFunction) - verify(operations).mapReduce(query, collectionName, mapFunction, reduceFunction, 
First::class.java) + verify { operations.mapReduce(query, collectionName, mapFunction, reduceFunction, First::class.java) } } @Test // DATAMONGO-1689 fun `mapReduce(Query, String, String, String, MapReduceOptions) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() val collectionName = "foo" val mapFunction = "bar" val reduceFunction = "baz" - val options = mock() + val options = mockk() operations.mapReduce(query, collectionName, mapFunction, reduceFunction, options) - verify(operations).mapReduce(query, collectionName, mapFunction, reduceFunction, options, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `geoNear(Query) with reified type parameter extension should call its Java counterpart`() { - - val query = NearQuery.near(0.0, 0.0) - - operations.geoNear(query) - verify(operations).geoNear(query, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `geoNear(Query, String) with reified type parameter extension should call its Java counterpart`() { - - val collectionName = "foo" - val query = NearQuery.near(0.0, 0.0) - - operations.geoNear(query, collectionName) - verify(operations).geoNear(query, First::class.java, collectionName) + verify { operations.mapReduce(query, collectionName, mapFunction, reduceFunction, options, First::class.java) } } @Test // DATAMONGO-1689 fun `findOne(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.findOne(query) - verify(operations).findOne(query, First::class.java) + verify { operations.findOne(query, First::class.java) } } @Test // DATAMONGO-1689 fun `findOne(Query, String) with reified type parameter extension should call its Java counterpart`() { val collectionName = "foo" - val query = mock() + val query = mockk() operations.findOne(query, collectionName) - verify(operations).findOne(query, First::class.java, collectionName) - } - - @Test // DATAMONGO-1689 - 
fun `exists(Query, KClass) extension should call its Java counterpart`() { - - val query = mock() - - operations.exists(query, First::class) - verify(operations).exists(query, First::class.java) + verify { operations.findOne(query, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `exists(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.exists(query) - verify(operations).exists(query, First::class.java) + verify { operations.exists(query, First::class.java) } } @Test // DATAMONGO-1689 fun `find(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.find(query) - verify(operations).find(query, First::class.java) + verify { operations.find(query, First::class.java) } } @Test // DATAMONGO-1689 fun `find(Query, String) with reified type parameter extension should call its Java counterpart`() { val collectionName = "foo" - val query = mock() + val query = mockk() operations.find(query, collectionName) - verify(operations).find(query, First::class.java, collectionName) + verify { operations.find(query, First::class.java, collectionName) } } @Test // DATAMONGO-1689 @@ -398,7 +295,7 @@ class MongoOperationsExtensionsTests { val id = 1L operations.findById(id) - verify(operations).findById(id, First::class.java) + verify { operations.findById(id, First::class.java) } } @Test // DATAMONGO-1689 @@ -408,327 +305,192 @@ class MongoOperationsExtensionsTests { val id = 1L operations.findById(id, collectionName) - verify(operations).findById(id, First::class.java, collectionName) + verify { operations.findById(id, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `findAndModify(Query, Update, FindAndModifyOptions) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() - val options = mock() + val query = mockk() + val 
update = mockk() + val options = mockk() operations.findAndModify(query, update, options) - verify(operations).findAndModify(query, update, options, First::class.java) + verify { operations.findAndModify(query, update, options, First::class.java) } } @Test // DATAMONGO-1689 fun `findAndModify(Query, Update, FindAndModifyOptions, String) with reified type parameter extension should call its Java counterpart`() { val collectionName = "foo" - val query = mock() - val update = mock() - val options = mock() + val query = mockk() + val update = mockk() + val options = mockk() operations.findAndModify(query, update, options, collectionName) - verify(operations).findAndModify(query, update, options, First::class.java, collectionName) + verify { operations.findAndModify(query, update, options, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `findAndRemove(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.findAndRemove(query) - verify(operations).findAndRemove(query, First::class.java) + verify { operations.findAndRemove(query, First::class.java) } } @Test // DATAMONGO-1689 fun `findAndRemove(Query, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() val collectionName = "foo" operations.findAndRemove(query, collectionName) - verify(operations).findAndRemove(query, First::class.java, collectionName) + verify { operations.findAndRemove(query, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `count() with reified type parameter extension should call its Java counterpart`() { operations.count() - verify(operations).count(any(), eq(First::class.java)) + verify { operations.count(any(), eq(First::class.java)) } } @Test // DATAMONGO-1689 fun `count(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() 
operations.count(query) - verify(operations).count(query, First::class.java) + verify { operations.count(query, First::class.java) } } @Test // DATAMONGO-1689 fun `count(Query, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() val collectionName = "foo" operations.count(query, collectionName) - verify(operations).count(query, First::class.java, collectionName) - } - - @Test // DATAMONGO-1689 - fun `count(Query, KClass) with reified type parameter extension should call its Java counterpart`() { - - val query = mock() - - operations.count(query, First::class) - verify(operations).count(query, First::class.java) + verify { operations.count(query, First::class.java, collectionName) } } - @Test // DATAMONGO-1689 - fun `count(Query, KClass, String) with reified type parameter extension should call its Java counterpart`() { - - val query = mock() - val collectionName = "foo" - - operations.count(query, First::class, collectionName) - verify(operations).count(query, First::class.java, collectionName) - } - - @Test // DATAMONGO-1689 - fun `insert(Collection, KClass) extension should call its Java counterpart`() { + @Test // DATAMONGO-2208 + fun `insert(Collection) with reified type parameter extension should call its Java counterpart`() { val collection = listOf(First(), First()) - operations.insert(collection, First::class) - verify(operations).insert(collection, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `upsert(Query, Update, KClass) extension should call its Java counterpart`() { - - val query = mock() - val update = mock() - - operations.upsert(query, update, First::class) - verify(operations).upsert(query, update, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `upsert(Query, Update, KClass, String) extension should call its Java counterpart`() { - - val query = mock() - val update = mock() - val collectionName = "foo" - - operations.upsert(query, update, First::class, 
collectionName) - verify(operations).upsert(query, update, First::class.java, collectionName) + operations.insert(collection) + verify { operations.insert(collection, First::class.java) } } @Test // DATAMONGO-1689 fun `upsert(Query, Update) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() + val query = mockk() + val update = mockk() operations.upsert(query, update) - verify(operations).upsert(query, update, First::class.java) + verify { operations.upsert(query, update, First::class.java) } } @Test // DATAMONGO-1689 fun `upsert(Query, Update, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() + val query = mockk() + val update = mockk() val collectionName = "foo" operations.upsert(query, update, collectionName) - verify(operations).upsert(query, update, First::class.java, collectionName) - } - - @Test // DATAMONGO-1689 - fun `updateFirst(Query, Update, KClass) extension should call its Java counterpart`() { - - val query = mock() - val update = mock() - - operations.updateFirst(query, update, First::class) - verify(operations).updateFirst(query, update, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `updateFirst(Query, Update, KClass, String) extension should call its Java counterpart`() { - - val query = mock() - val update = mock() - val collectionName = "foo" - - operations.updateFirst(query, update, First::class, collectionName) - verify(operations).updateFirst(query, update, First::class.java, collectionName) + verify { operations.upsert(query, update, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `updateFirst(Query, Update) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() + val query = mockk() + val update = mockk() operations.updateFirst(query, update) - verify(operations).updateFirst(query, update, 
First::class.java) + verify { operations.updateFirst(query, update, First::class.java) } } @Test // DATAMONGO-1689 fun `updateFirst(Query, Update, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() + val query = mockk() + val update = mockk() val collectionName = "foo" operations.updateFirst(query, update, collectionName) - verify(operations).updateFirst(query, update, First::class.java, collectionName) - } - - @Test // DATAMONGO-1689 - fun `updateMulti(Query, Update, KClass) extension should call its Java counterpart`() { - - val query = mock() - val update = mock() - - operations.updateMulti(query, update, First::class) - verify(operations).updateMulti(query, update, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `updateMulti(Query, Update, KClass, String) extension should call its Java counterpart`() { - - val query = mock() - val update = mock() - val collectionName = "foo" - - operations.updateMulti(query, update, First::class, collectionName) - verify(operations).updateMulti(query, update, First::class.java, collectionName) + verify { operations.updateFirst(query, update, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `updateMulti(Query, Update) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() + val query = mockk() + val update = mockk() operations.updateMulti(query, update) - verify(operations).updateMulti(query, update, First::class.java) + verify { operations.updateMulti(query, update, First::class.java) } } @Test // DATAMONGO-1689 fun `updateMulti(Query, Update, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() + val query = mockk() + val update = mockk() val collectionName = "foo" operations.updateMulti(query, update, collectionName) - verify(operations).updateMulti(query, update, First::class.java, 
collectionName) - } - - @Test // DATAMONGO-1689 - fun `remove(Query, KClass) extension should call its Java counterpart`() { - - val query = mock() - - operations.remove(query, First::class) - verify(operations).remove(query, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `remove(Query, KClass, String) extension should call its Java counterpart`() { - - val query = mock() - val collectionName = "foo" - - operations.remove(query, First::class, collectionName) - verify(operations).remove(query, First::class.java, collectionName) + verify { operations.updateMulti(query, update, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `remove(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.remove(query) - verify(operations).remove(query, First::class.java) + verify { operations.remove(query, First::class.java) } } @Test // DATAMONGO-1689 fun `remove(Query, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() val collectionName = "foo" operations.remove(query, collectionName) - verify(operations).remove(query, First::class.java, collectionName) + verify { operations.remove(query, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `findAllAndRemove(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.findAllAndRemove(query) - verify(operations).findAllAndRemove(query, First::class.java) - } - - @Test // DATAMONGO-1761 - fun `findDistinct(String, KClass) should call java counterpart`() { - - operations.findDistinct("field", First::class) - verify(operations).findDistinct("field", First::class.java, String::class.java) - } - - @Test // DATAMONGO-1761 - fun `findDistinct(Query, String, KClass) should call java counterpart`() { - - val query = mock() - - operations.findDistinct(query, "field", 
First::class) - verify(operations).findDistinct(query, "field", First::class.java, String::class.java) - } - - @Test // DATAMONGO-1761 - fun `findDistinct(Query, String, String, KClass) should call java counterpart`() { - - val query = mock() - - operations.findDistinct(query, "field", "collection", First::class) - verify(operations).findDistinct(query, "field", "collection", First::class.java, String::class.java) + verify { operations.findAllAndRemove(query, First::class.java) } } @Test // DATAMONGO-1761 fun `findDistinctImplicit(Query, String) should call java counterpart`() { - val query = mock() + val query = mockk() operations.findDistinct(query, "field") - verify(operations).findDistinct(query, "field", First::class.java, String::class.java) + verify { operations.findDistinct(query, "field", First::class.java, String::class.java) } } @Test // DATAMONGO-1761 fun `findDistinct(Query, String, String) should call java counterpart`() { - val query = mock() + val query = mockk() operations.findDistinct(query, "field", "collection") - verify(operations).findDistinct(query, "field", "collection", First::class.java, String::class.java) - } - - @Test // DATAMONGO-1761 - fun `findDistinct(Query, String, KClass) should call java counterpart`() { - - val query = mock() - - operations.findDistinct(query, "field", First::class) - verify(operations).findDistinct(query, "field", First::class.java, String::class.java) + verify { operations.findDistinct(query, "field", "collection", First::class.java, String::class.java) } } } diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensionsTests.kt index 5eee26ab9d..4249b58468 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensionsTests.kt +++ 
b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveAggregationOperationExtensionsTests.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,35 +15,44 @@ */ package org.springframework.data.mongodb.core -import com.nhaarman.mockito_kotlin.verify import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.flow.toList +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat import org.junit.Test -import org.junit.runner.RunWith -import org.mockito.Answers -import org.mockito.Mock -import org.mockito.junit.MockitoJUnitRunner +import reactor.core.publisher.Flux /** * @author Mark Paluch + * @author Sebastien Deleuze */ -@RunWith(MockitoJUnitRunner::class) class ReactiveAggregationOperationExtensionsTests { - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operation: ReactiveAggregationOperation + val operation = mockk(relaxed = true) - @Test // DATAMONGO-1719 - fun `aggregateAndReturn(KClass) extension should call its Java counterpart`() { + @Test // DATAMONGO-1719 + fun `aggregateAndReturn() with reified type parameter extension should call its Java counterpart`() { - operation.aggregateAndReturn(First::class) - verify(operation).aggregateAndReturn(First::class.java) - } + operation.aggregateAndReturn() + verify { operation.aggregateAndReturn(First::class.java) } + } - @Test // DATAMONGO-1719 - fun `aggregateAndReturn() with reified type parameter 
extension should call its Java counterpart`() { + @Test // DATAMONGO-2255 + fun terminatingAggregationOperationAllAsFlow() { - operation.aggregateAndReturn() - verify(operation).aggregateAndReturn(First::class.java) - } + val spec = mockk>() + every { spec.all() } returns Flux.just("foo", "bar", "baz") + + runBlocking { + assertThat(spec.flow().toList()).contains("foo", "bar", "baz") + } + + verify { + spec.all() + } + } } diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationExtensionsTests.kt new file mode 100644 index 0000000000..58c071ccb6 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationExtensionsTests.kt @@ -0,0 +1,65 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core + +import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.flow.toList +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat +import org.bson.Document +import org.junit.Test +import reactor.core.publisher.Flux + +/** + * @author Christoph Strobl + * @soundtrack Rage Against The Machine - Take the Power Back + */ +class ReactiveChangeStreamOperationExtensionsTests { + + val operation = mockk(relaxed = true) + val changestream = mockk>(relaxed = true) + + @Test // DATAMONGO-2089 + fun `ReactiveChangeStreamOperation#changeStream() with reified type parameter extension should call its Java counterpart`() { + + operation.changeStream() + verify { operation.changeStream(First::class.java) } + } + + @Test // DATAMONGO-2089 + fun `TerminatingChangeStream#listen() flow extension`() { + + val doc1 = mockk>() + val doc2 = mockk>() + val doc3 = mockk>() + + val spec = mockk>() + every { spec.listen() } returns Flux.just(doc1, doc2, doc3) + + runBlocking { + assertThat(spec.flow().toList()).contains(doc1, doc2, doc3) + } + + verify { + spec.listen() + } + } + + data class Last(val id: String) +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensionsTests.kt index 7e0ee6c97f..cbb7ae46f3 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveFindOperationExtensionsTests.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,61 +15,289 @@ */ package org.springframework.data.mongodb.core -import com.nhaarman.mockito_kotlin.verify import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.flow.take +import kotlinx.coroutines.flow.toList +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat +import org.assertj.core.api.Assertions.assertThatExceptionOfType import org.junit.Test -import org.junit.runner.RunWith -import org.mockito.Answers -import org.mockito.Mock -import org.mockito.junit.MockitoJUnitRunner +import org.springframework.data.geo.Distance +import org.springframework.data.geo.GeoResult +import reactor.core.publisher.Flux +import reactor.core.publisher.Mono /** * @author Mark Paluch + * @author Sebastien Deleuze */ -@RunWith(MockitoJUnitRunner::class) class ReactiveFindOperationExtensionsTests { - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operation: ReactiveFindOperation + val operation = mockk(relaxed = true) - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operationWithProjection: ReactiveFindOperation.FindWithProjection + val operationWithProjection = mockk>(relaxed = true) - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var distinctWithProjection: ReactiveFindOperation.DistinctWithProjection + val distinctWithProjection = mockk(relaxed = true) - @Test // DATAMONGO-1719 - fun `ReactiveFind#query(KClass) extension should call its Java counterpart`() { + val findDistinct = mockk(relaxed = true) - operation.query(First::class) - 
verify(operation).query(First::class.java) - } + val reactiveFind = mockk>(relaxed = true) @Test // DATAMONGO-1719 fun `ReactiveFind#query() with reified type parameter extension should call its Java counterpart`() { operation.query() - verify(operation).query(First::class.java) + verify { operation.query(First::class.java) } } - @Test // DATAMONGO-1719 - fun `ReactiveFind#FindOperatorWithProjection#asType(KClass) extension should call its Java counterpart`() { + @Test // DATAMONGO-1719, DATAMONGO-2086 + fun `ReactiveFind#FindOperatorWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { - operationWithProjection.asType(First::class) - verify(operationWithProjection).`as`(First::class.java) + operationWithProjection.asType() + verify { operationWithProjection.`as`(User::class.java) } } - @Test // DATAMONGO-1719 - fun `ReactiveFind#FindOperatorWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { + @Test // DATAMONGO-2086 + fun `ReactiveFind#DistinctWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { - operationWithProjection.asType() - verify(operationWithProjection).`as`(First::class.java) + distinctWithProjection.asType() + verify { distinctWithProjection.`as`(User::class.java) } } - @Test // DATAMONGO-1761 - fun `ReactiveFind#DistinctWithProjection#asType(KClass) extension should call its Java counterpart`() { + @Test // DATAMONGO-2417 + fun `ReactiveFind#distrinct() using KProperty1 should call its Java counterpart`() { - distinctWithProjection.asType(First::class) - verify(distinctWithProjection).`as`(First::class.java) + every { operation.query(KotlinUser::class.java) } returns reactiveFind + + operation.distinct(KotlinUser::username) + verify { + operation.query(KotlinUser::class.java) + reactiveFind.distinct("username") + } } + + @Test // DATAMONGO-2417 + fun `ReactiveFind#FindDistinct#field() using KProperty should call 
its Java counterpart`() { + + findDistinct.distinct(KotlinUser::username) + verify { findDistinct.distinct("username") } + } + + @Test // DATAMONGO-2209 + fun terminatingFindAwaitOneWithValue() { + + val find = mockk>() + every { find.one() } returns Mono.just("foo") + + runBlocking { + assertThat(find.awaitOne()).isEqualTo("foo") + } + + verify { + find.one() + } + } + + @Test // DATAMONGO-2247 + fun terminatingFindAwaitOneWithNull() { + + val find = mockk>() + every { find.one() } returns Mono.empty() + + assertThatExceptionOfType(NoSuchElementException::class.java).isThrownBy { + runBlocking { find.awaitOne() } + } + + verify { + find.one() + } + } + + @Test // DATAMONGO-2247 + fun terminatingFindAwaitOneOrNullWithValue() { + + val find = mockk>() + every { find.one() } returns Mono.just("foo") + + runBlocking { + assertThat(find.awaitOneOrNull()).isEqualTo("foo") + } + + verify { + find.one() + } + } + + @Test // DATAMONGO-2247 + fun terminatingFindAwaitOneOrNullWithNull() { + + val find = mockk>() + every { find.one() } returns Mono.empty() + + runBlocking { + assertThat(find.awaitOneOrNull()).isNull() + } + + verify { + find.one() + } + } + + @Test // DATAMONGO-2209 + fun terminatingFindAwaitFirstWithValue() { + + val find = mockk>() + every { find.first() } returns Mono.just("foo") + + runBlocking { + assertThat(find.awaitFirst()).isEqualTo("foo") + } + + verify { + find.first() + } + } + + @Test // DATAMONGO-2247 + fun terminatingFindAwaitFirstWithNull() { + + val find = mockk>() + every { find.first() } returns Mono.empty() + + assertThatExceptionOfType(NoSuchElementException::class.java).isThrownBy { + runBlocking { find.awaitFirst() } + } + + verify { + find.first() + } + } + + @Test // DATAMONGO-2247 + fun terminatingFindAwaitFirstOrNullWithValue() { + + val find = mockk>() + every { find.first() } returns Mono.just("foo") + + runBlocking { + assertThat(find.awaitFirstOrNull()).isEqualTo("foo") + } + + verify { + find.first() + } + } + + @Test // 
DATAMONGO-2247 + fun terminatingFindAwaitFirstOrNullWithNull() { + + val find = mockk>() + every { find.first() } returns Mono.empty() + + runBlocking { + assertThat(find.awaitFirstOrNull()).isNull() + } + + verify { + find.first() + } + } + + @Test // DATAMONGO-2209 + fun terminatingFindAwaitCount() { + + val find = mockk>() + every { find.count() } returns Mono.just(1) + + runBlocking { + assertThat(find.awaitCount()).isEqualTo(1) + } + + verify { + find.count() + } + } + + @Test // DATAMONGO-2209 + fun terminatingFindAwaitExists() { + + val find = mockk>() + every { find.exists() } returns Mono.just(true) + + runBlocking { + assertThat(find.awaitExists()).isTrue() + } + + verify { + find.exists() + } + } + + @Test // DATAMONGO-2255 + fun terminatingFindAllAsFlow() { + + val spec = mockk>() + every { spec.all() } returns Flux.just("foo", "bar", "baz") + + runBlocking { + assertThat(spec.flow().toList()).contains("foo", "bar", "baz") + } + + verify { + spec.all() + } + } + + @Test // DATAMONGO-2255 + fun terminatingFindTailAsFlow() { + + val spec = mockk>() + every { spec.tail() } returns Flux.just("foo", "bar", "baz").concatWith(Flux.never()) + + runBlocking { + assertThat(spec.tailAsFlow().take(3).toList()).contains("foo", "bar", "baz") + } + + verify { + spec.tail() + } + } + + @Test // DATAMONGO-2255 + fun terminatingFindNearAllAsFlow() { + + val spec = mockk>() + val foo = GeoResult("foo", Distance(0.0)) + val bar = GeoResult("bar", Distance(0.0)) + val baz = GeoResult("baz", Distance(0.0)) + every { spec.all() } returns Flux.just(foo, bar, baz) + + runBlocking { + assertThat(spec.flow().toList()).contains(foo, bar, baz) + } + + verify { + spec.all() + } + } + + @Test // DATAMONGO-2255 + fun terminatingDistinctAllAsFlow() { + + val spec = mockk>() + every { spec.all() } returns Flux.just("foo", "bar", "baz") + + runBlocking { + assertThat(spec.flow().toList()).contains("foo", "bar", "baz") + } + + verify { + spec.all() + } + } + + data class KotlinUser(val 
username: String) } diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensionsTests.kt index 236dd06d5d..888d890655 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveInsertOperationExtensionsTests.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,34 +15,60 @@ */ package org.springframework.data.mongodb.core -import com.nhaarman.mockito_kotlin.verify import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.flow.toList +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat import org.junit.Test -import org.junit.runner.RunWith -import org.mockito.Answers -import org.mockito.Mock -import org.mockito.junit.MockitoJUnitRunner +import reactor.core.publisher.Flux +import reactor.core.publisher.Mono /** * @author Mark Paluch + * @author Sebastien Deleuze */ -@RunWith(MockitoJUnitRunner::class) class ReactiveInsertOperationExtensionsTests { - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operation: ReactiveInsertOperation + val operation = mockk(relaxed = true) @Test // DATAMONGO-1719 - fun `insert(KClass) extension should call its 
Java counterpart`() { + fun `insert() with reified type parameter extension should call its Java counterpart`() { - operation.insert(First::class) - verify(operation).insert(First::class.java) + operation.insert() + verify { operation.insert(First::class.java) } } - @Test // DATAMONGO-1719 - fun `insert() with reified type parameter extension should call its Java counterpart`() { + @Test // DATAMONGO-2209 + fun terminatingInsertOneAndAwait() { - operation.insert() - verify(operation).insert(First::class.java) + val insert = mockk>() + every { insert.one("foo") } returns Mono.just("foo") + + runBlocking { + assertThat(insert.oneAndAwait("foo")).isEqualTo("foo") + } + + verify { + insert.one("foo") + } + } + + @Test // DATAMONGO-2255 + fun terminatingInsertAllAsFlow() { + + val insert = mockk>() + val list = listOf("foo", "bar") + every { insert.all(any()) } returns Flux.fromIterable(list) + + runBlocking { + assertThat(insert.flow(list).toList()).containsAll(list) + } + + verify { + insert.all(list) + } } } diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveMapReduceOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveMapReduceOperationExtensionsTests.kt new file mode 100644 index 0000000000..a71f9d5a70 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveMapReduceOperationExtensionsTests.kt @@ -0,0 +1,66 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core + +import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.flow.toList +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat +import org.junit.Test +import reactor.core.publisher.Flux + +/** + * @author Christoph Strobl + * @author Sebastien Deleuze + */ +class ReactiveMapReduceOperationExtensionsTests { + + val operation = mockk(relaxed = true) + + val operationWithProjection = mockk>(relaxed = true) + + @Test // DATAMONGO-1929 + fun `ReactiveMapReduceOperation#mapReduce() with reified type parameter extension should call its Java counterpart`() { + + operation.mapReduce() + verify { operation.mapReduce(First::class.java) } + } + + @Test // DATAMONGO-1929, DATAMONGO-2086 + fun `ReactiveMapReduceOperation#MapReduceWithProjection#asType() with reified type parameter extension should call its Java counterpart`() { + + operationWithProjection.asType() + verify { operationWithProjection.`as`(User::class.java) } + } + + @Test // DATAMONGO-2255 + fun terminatingMapReduceAllAsFlow() { + + val spec = mockk>() + every { spec.all() } returns Flux.just("foo", "bar", "baz") + + runBlocking { + assertThat(spec.flow().toList()).contains("foo", "bar", "baz") + } + + verify { + spec.all() + } + } +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensionsTests.kt 
b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensionsTests.kt index 2a3bd7d7f3..386e1aae56 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveMongoOperationsExtensionsTests.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,118 +15,76 @@ */ package org.springframework.data.mongodb.core -import com.nhaarman.mockito_kotlin.mock import example.first.First +import io.mockk.mockk +import io.mockk.verify import org.junit.Test -import org.junit.runner.RunWith -import org.mockito.Answers -import org.mockito.Mock -import org.mockito.Mockito.* -import org.mockito.junit.MockitoJUnitRunner -import org.springframework.data.mongodb.core.query.NearQuery +import org.springframework.data.mongodb.core.aggregation.Aggregation +import org.springframework.data.mongodb.core.aggregation.TypedAggregation import org.springframework.data.mongodb.core.query.Query import org.springframework.data.mongodb.core.query.Update -import reactor.core.publisher.Mono /** * @author Sebastien Deleuze * @author Christoph Strobl * @author Mark Paluch + * @author Wonwoo Lee */ -@RunWith(MockitoJUnitRunner::class) class ReactiveMongoOperationsExtensionsTests { - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operations: ReactiveMongoOperations - - @Test // DATAMONGO-1689 - fun `indexOps(KClass) 
extension should call its Java counterpart`() { - - operations.indexOps(First::class) - verify(operations).indexOps(First::class.java) - } + val operations = mockk(relaxed = true) @Test // DATAMONGO-1689 fun `indexOps() with reified type parameter extension should call its Java counterpart`() { operations.indexOps() - verify(operations).indexOps(First::class.java) + verify { operations.indexOps(First::class.java) } } @Test // DATAMONGO-1689 fun `execute(ReactiveCollectionCallback) with reified type parameter extension should call its Java counterpart`() { - val collectionCallback = mock>() + val collectionCallback = mockk>() operations.execute(collectionCallback) - verify(operations).execute(First::class.java, collectionCallback) - } - - @Test // DATAMONGO-1689 - fun `createCollection(KClass) extension should call its Java counterpart`() { - - operations.createCollection(First::class) - verify(operations).createCollection(First::class.java) - } - - @Test // DATAMONGO-1689 - fun `createCollection(KClass, CollectionOptions) extension should call its Java counterpart`() { - - val collectionOptions = mock() - - operations.createCollection(First::class, collectionOptions) - verify(operations).createCollection(First::class.java, collectionOptions) + verify { operations.execute(First::class.java, collectionCallback) } } @Test // DATAMONGO-1689 fun `createCollection() with reified type parameter extension should call its Java counterpart`() { operations.createCollection() - verify(operations).createCollection(First::class.java) + verify { operations.createCollection(First::class.java) } } @Test // DATAMONGO-1689 fun `createCollection(CollectionOptions) with reified type parameter extension should call its Java counterpart`() { - val collectionOptions = mock() + val collectionOptions = mockk() operations.createCollection(collectionOptions) - verify(operations).createCollection(First::class.java, collectionOptions) - } - - @Test // DATAMONGO-1689 - fun 
`collectionExists(KClass) extension should call its Java counterpart`() { - - operations.collectionExists(First::class) - verify(operations).collectionExists(First::class.java) + verify { operations.createCollection(First::class.java, collectionOptions) } } @Test // DATAMONGO-1689 fun `collectionExists() with reified type parameter extension should call its Java counterpart`() { operations.collectionExists() - verify(operations).collectionExists(First::class.java) - } - - @Test // DATAMONGO-1689 - fun `dropCollection(KClass) extension should call its Java counterpart`() { - - operations.dropCollection(First::class) - verify(operations).dropCollection(First::class.java) + verify { operations.collectionExists(First::class.java) } } @Test // DATAMONGO-1689 fun `dropCollection() with reified type parameter extension should call its Java counterpart`() { operations.dropCollection() - verify(operations).dropCollection(First::class.java) + verify { operations.dropCollection(First::class.java) } } @Test // DATAMONGO-1689 fun `findAll() with reified type parameter extension should call its Java counterpart`() { operations.findAll() - verify(operations).findAll(First::class.java) + verify { operations.findAll(First::class.java) } } @Test // DATAMONGO-1689 @@ -135,63 +93,54 @@ class ReactiveMongoOperationsExtensionsTests { val collectionName = "foo" operations.findAll(collectionName) - verify(operations).findAll(First::class.java, collectionName) + verify { operations.findAll(First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `findOne(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.findOne(query) - verify(operations).findOne(query, First::class.java) + verify { operations.findOne(query, First::class.java) } } @Test // DATAMONGO-1689 fun `findOne(Query, String) with reified type parameter extension should call its Java counterpart`() { val collectionName = "foo" - val query 
= mock() + val query = mockk() operations.findOne(query, collectionName) - verify(operations).findOne(query, First::class.java, collectionName) - } - - @Test // DATAMONGO-1689 - fun `exists(Query, KClass) extension should call its Java counterpart`() { - - val query = mock() - - operations.exists(query, First::class) - verify(operations).exists(query, First::class.java) + verify { operations.findOne(query, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `exists(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.exists(query) - verify(operations).exists(query, First::class.java) + verify { operations.exists(query, First::class.java) } } @Test // DATAMONGO-1689 fun `find(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.find(query) - verify(operations).find(query, First::class.java) + verify { operations.find(query, First::class.java) } } @Test // DATAMONGO-1689 fun `find(Query, String) with reified type parameter extension should call its Java counterpart`() { val collectionName = "foo" - val query = mock() + val query = mockk() operations.find(query, collectionName) - verify(operations).find(query, First::class.java, collectionName) + verify { operations.find(query, First::class.java, collectionName) } } @Test // DATAMONGO-1689 @@ -200,7 +149,7 @@ class ReactiveMongoOperationsExtensionsTests { val id = 1L operations.findById(id) - verify(operations).findById(id, First::class.java) + verify { operations.findById(id, First::class.java) } } @Test // DATAMONGO-1689 @@ -210,375 +159,253 @@ class ReactiveMongoOperationsExtensionsTests { val id = 1L operations.findById(id, collectionName) - verify(operations).findById(id, First::class.java, collectionName) - } - - @Test // DATAMONGO-1689 - fun `geoNear(Query) with reified type parameter extension should call its Java counterpart`() { 
- - val query = NearQuery.near(0.0, 0.0) - - operations.geoNear(query) - verify(operations).geoNear(query, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `geoNear(Query, String) with reified type parameter extension should call its Java counterpart`() { - - val collectionName = "foo" - val query = NearQuery.near(0.0, 0.0) - - operations.geoNear(query, collectionName) - verify(operations).geoNear(query, First::class.java, collectionName) + verify { operations.findById(id, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `findAndModify(Query, Update, FindAndModifyOptions) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() - val options = mock() + val query = mockk() + val update = mockk() + val options = mockk() operations.findAndModify(query, update, options) - verify(operations).findAndModify(query, update, options, First::class.java) + verify { operations.findAndModify(query, update, options, First::class.java) } } @Test // DATAMONGO-1689 fun `findAndModify(Query, Update, FindAndModifyOptions, String) with reified type parameter extension should call its Java counterpart`() { val collectionName = "foo" - val query = mock() - val update = mock() - val options = mock() + val query = mockk() + val update = mockk() + val options = mockk() operations.findAndModify(query, update, options, collectionName) - verify(operations).findAndModify(query, update, options, First::class.java, collectionName) + verify { operations.findAndModify(query, update, options, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `findAndRemove(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.findAndRemove(query) - verify(operations).findAndRemove(query, First::class.java) + verify { operations.findAndRemove(query, First::class.java) } } @Test // DATAMONGO-1689 fun `findAndRemove(Query, String) 
with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() val collectionName = "foo" operations.findAndRemove(query, collectionName) - verify(operations).findAndRemove(query, First::class.java, collectionName) + verify { operations.findAndRemove(query, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `count() with reified type parameter extension should call its Java counterpart`() { operations.count() - verify(operations).count(any(), eq(First::class.java)) + verify { operations.count(any(), eq(First::class.java)) } } @Test // DATAMONGO-1689 fun `count(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.count(query) - verify(operations).count(query, First::class.java) + verify { operations.count(query, First::class.java) } } @Test // DATAMONGO-1689 fun `count(Query, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() val collectionName = "foo" operations.count(query, collectionName) - verify(operations).count(query, First::class.java, collectionName) - } - - @Test // DATAMONGO-1689 - fun `count(Query, KClass) with reified type parameter extension should call its Java counterpart`() { - - val query = mock() - - operations.count(query, First::class) - verify(operations).count(query, First::class.java) + verify { operations.count(query, First::class.java, collectionName) } } - @Test // DATAMONGO-1689 - fun `count(Query, KClass, String) with reified type parameter extension should call its Java counterpart`() { - - val query = mock() - val collectionName = "foo" - - operations.count(query, First::class, collectionName) - verify(operations).count(query, First::class.java, collectionName) - } - - @Test // DATAMONGO-1689 - fun `insert(Collection, KClass) extension should call its Java counterpart`() { + @Test // DATAMONGO-2208 + fun 
`insert(Collection) with reified type parameter extension should call its Java counterpart`() { val collection = listOf(First(), First()) - operations.insert(collection, First::class) - verify(operations).insert(collection, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `insertAll(Mono, KClass) extension should call its Java counterpart`() { - - val collection = Mono.just(listOf(First(), First())) - - operations.insertAll(collection, First::class) - verify(operations).insertAll(collection, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `upsert(Query, Update, KClass) extension should call its Java counterpart`() { - - val query = mock() - val update = mock() - - operations.upsert(query, update, First::class) - verify(operations).upsert(query, update, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `upsert(Query, Update, KClass, String) extension should call its Java counterpart`() { - - val query = mock() - val update = mock() - val collectionName = "foo" - - operations.upsert(query, update, First::class, collectionName) - verify(operations).upsert(query, update, First::class.java, collectionName) + operations.insert(collection) + verify { operations.insert(collection, First::class.java) } } @Test // DATAMONGO-1689 fun `upsert(Query, Update) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() + val query = mockk() + val update = mockk() operations.upsert(query, update) - verify(operations).upsert(query, update, First::class.java) + verify { operations.upsert(query, update, First::class.java) } } @Test // DATAMONGO-1689 fun `upsert(Query, Update, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() + val query = mockk() + val update = mockk() val collectionName = "foo" operations.upsert(query, update, collectionName) - verify(operations).upsert(query, update, First::class.java, collectionName) - } - 
- @Test // DATAMONGO-1689 - fun `updateFirst(Query, Update, KClass) extension should call its Java counterpart`() { - - val query = mock() - val update = mock() - - operations.updateFirst(query, update, First::class) - verify(operations).updateFirst(query, update, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `updateFirst(Query, Update, KClass, String) extension should call its Java counterpart`() { - - val query = mock() - val update = mock() - val collectionName = "foo" - - operations.updateFirst(query, update, First::class, collectionName) - verify(operations).updateFirst(query, update, First::class.java, collectionName) + verify { operations.upsert(query, update, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `updateFirst(Query, Update) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() + val query = mockk() + val update = mockk() operations.updateFirst(query, update) - verify(operations).updateFirst(query, update, First::class.java) + verify { operations.updateFirst(query, update, First::class.java) } } @Test // DATAMONGO-1689 fun `updateFirst(Query, Update, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() + val query = mockk() + val update = mockk() val collectionName = "foo" operations.updateFirst(query, update, collectionName) - verify(operations).updateFirst(query, update, First::class.java, collectionName) - } - - @Test // DATAMONGO-1689 - fun `updateMulti(Query, Update, KClass) extension should call its Java counterpart`() { - - val query = mock() - val update = mock() - - operations.updateMulti(query, update, First::class) - verify(operations).updateMulti(query, update, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `updateMulti(Query, Update, KClass, String) extension should call its Java counterpart`() { - - val query = mock() - val update = mock() - val 
collectionName = "foo" - - operations.updateMulti(query, update, First::class, collectionName) - verify(operations).updateMulti(query, update, First::class.java, collectionName) + verify { operations.updateFirst(query, update, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `updateMulti(Query, Update) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() + val query = mockk() + val update = mockk() operations.updateMulti(query, update) - verify(operations).updateMulti(query, update, First::class.java) + verify { operations.updateMulti(query, update, First::class.java) } } @Test // DATAMONGO-1689 fun `updateMulti(Query, Update, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() - val update = mock() + val query = mockk() + val update = mockk() val collectionName = "foo" operations.updateMulti(query, update, collectionName) - verify(operations).updateMulti(query, update, First::class.java, collectionName) - } - - @Test // DATAMONGO-1689 - fun `remove(Query, KClass) extension should call its Java counterpart`() { - - val query = mock() - - operations.remove(query, First::class) - verify(operations).remove(query, First::class.java) - } - - @Test // DATAMONGO-1689 - fun `remove(Query, KClass, String) extension should call its Java counterpart`() { - - val query = mock() - val collectionName = "foo" - - operations.remove(query, First::class, collectionName) - verify(operations).remove(query, First::class.java, collectionName) + verify { operations.updateMulti(query, update, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `remove(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.remove(query) - verify(operations).remove(query, First::class.java) + verify { operations.remove(query, First::class.java) } } @Test // DATAMONGO-1689 fun 
`remove(Query, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() val collectionName = "foo" operations.remove(query, collectionName) - verify(operations).remove(query, First::class.java, collectionName) + verify { operations.remove(query, First::class.java, collectionName) } } @Test // DATAMONGO-1689 fun `findAllAndRemove(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.findAllAndRemove(query) - verify(operations).findAllAndRemove(query, First::class.java) + verify { operations.findAllAndRemove(query, First::class.java) } } @Test // DATAMONGO-1689 fun `tail(Query) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() operations.tail(query) - verify(operations).tail(query, First::class.java) + verify { operations.tail(query, First::class.java) } } @Test // DATAMONGO-1689 fun `tail(Query, String) with reified type parameter extension should call its Java counterpart`() { - val query = mock() + val query = mockk() val collectionName = "foo" operations.tail(query, collectionName) - verify(operations).tail(query, First::class.java, collectionName) + verify { operations.tail(query, First::class.java, collectionName) } } @Test // DATAMONGO-1761 - fun `findDistinct(String, KClass) should call java counterpart`() { + fun `findDistinctImplicit(Query, String) should call java counterpart`() { + + val query = mockk() - operations.findDistinct("field", First::class) - verify(operations).findDistinct("field", First::class.java, String::class.java) + operations.findDistinct(query, "field") + verify { operations.findDistinct(query, "field", First::class.java, String::class.java) } } @Test // DATAMONGO-1761 - fun `findDistinct(Query, String, KClass) should call java counterpart`() { + fun `findDistinct(Query, String, String) should call java counterpart`() 
{ - val query = mock() + val query = mockk() - operations.findDistinct(query, "field", First::class) - verify(operations).findDistinct(query, "field", First::class.java, String::class.java) + operations.findDistinct(query, "field", "collection") + verify { operations.findDistinct(query, "field", "collection", First::class.java, String::class.java) } } - @Test // DATAMONGO-1761 - fun `findDistinct(Query, String, String, KClass) should call java counterpart`() { + @Test // #893 + fun `aggregate(TypedAggregation, String, KClass) should call java counterpart`() { - val query = mock() + val aggregation = mockk>() - operations.findDistinct(query, "field", "collection", First::class) - verify(operations).findDistinct(query, "field", "collection", First::class.java, String::class.java) + operations.aggregate(aggregation, "foo") + verify { operations.aggregate(aggregation, "foo", First::class.java) } } - @Test // DATAMONGO-1761 - fun `findDistinctImplicit(Query, String) should call java counterpart`() { + @Test // #893 + fun `aggregate(TypedAggregation, KClass) should call java counterpart`() { - val query = mock() + val aggregation = mockk>() - operations.findDistinct(query, "field") - verify(operations).findDistinct(query, "field", First::class.java, String::class.java) + operations.aggregate(aggregation) + verify { operations.aggregate(aggregation, First::class.java) } } - @Test // DATAMONGO-1761 - fun `findDistinct(Query, String, String) should call java counterpart`() { + @Test // #893 + fun `aggregate(Aggregation, KClass) should call java counterpart`() { - val query = mock() + val aggregation = mockk() - operations.findDistinct(query, "field", "collection") - verify(operations).findDistinct(query, "field", "collection", First::class.java, String::class.java) + operations.aggregate(aggregation) + verify { + operations.aggregate( + aggregation, + String::class.java, + First::class.java + ) + } } - - @Test // DATAMONGO-1761 - fun `findDistinct(Query, String, KClass) 
should call java counterpart`() { + @Test // #893 + fun `aggregate(Aggregation, String) should call java counterpart`() { - val query = mock() + val aggregation = mockk() - operations.findDistinct(query, "field", First::class) - verify(operations).findDistinct(query, "field", First::class.java, String::class.java) + operations.aggregate(aggregation, "foo") + verify { operations.aggregate(aggregation, "foo", First::class.java) } } } diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensionsTests.kt index a8650ce5c5..c824568418 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveRemoveOperationExtensionsTests.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,34 +15,61 @@ */ package org.springframework.data.mongodb.core -import com.nhaarman.mockito_kotlin.verify +import com.mongodb.client.result.DeleteResult import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.flow.toList +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat import org.junit.Test -import org.junit.runner.RunWith -import org.mockito.Answers -import org.mockito.Mock -import org.mockito.junit.MockitoJUnitRunner +import reactor.core.publisher.Flux +import reactor.core.publisher.Mono /** * @author Mark Paluch + * @author Sebastien Deleuze */ -@RunWith(MockitoJUnitRunner::class) class ReactiveRemoveOperationExtensionsTests { - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operation: ReactiveRemoveOperation + val operation = mockk(relaxed = true) @Test // DATAMONGO-1719 - fun `remove(KClass) extension should call its Java counterpart`() { + fun `remove() with reified type parameter extension should call its Java counterpart`() { - operation.remove(First::class) - verify(operation).remove(First::class.java) + operation.remove() + verify { operation.remove(First::class.java) } } - @Test // DATAMONGO-1719 - fun `remove() with reified type parameter extension should call its Java counterpart`() { + @Test // DATAMONGO-2209 + fun allAndAwait() { - operation.remove() - verify(operation).remove(First::class.java) + val remove = mockk>() + val result = mockk() + every { remove.all() } returns Mono.just(result) + + runBlocking { + assertThat(remove.allAndAwait()).isEqualTo(result) + } + + verify { + remove.all() + } + } + + @Test // DATAMONGO-2255 + fun 
terminatingRemoveFindAndRemoveAsFlow() { + + val spec = mockk>() + every { spec.findAndRemove() } returns Flux.just("foo", "bar", "baz") + + runBlocking { + assertThat(spec.findAndRemoveAsFlow().toList()).contains("foo", "bar", "baz") + } + + verify { + spec.findAndRemove() + } } } diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensionsTests.kt index 725c48ea47..1c376389ea 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/ReactiveUpdateOperationExtensionsTests.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,36 +15,213 @@ */ package org.springframework.data.mongodb.core -import com.nhaarman.mockito_kotlin.verify +import com.mongodb.client.result.UpdateResult import example.first.First +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import kotlinx.coroutines.runBlocking +import org.assertj.core.api.Assertions.assertThat +import org.assertj.core.api.Assertions.assertThatExceptionOfType import org.junit.Test -import org.junit.runner.RunWith -import org.mockito.Answers -import org.mockito.Mock -import org.mockito.junit.MockitoJUnitRunner +import reactor.core.publisher.Mono /** - * Unit tests for [ReactiveExecutableUpdateOperationExtensions]. + * Unit tests for `ReactiveExecutableUpdateOperationExtensions.kt`. 
* * @author Mark Paluch + * @author Sebastien Deleuze */ -@RunWith(MockitoJUnitRunner::class) class ReactiveUpdateOperationExtensionsTests { - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var operation: ReactiveUpdateOperation + val operation = mockk(relaxed = true) @Test // DATAMONGO-1719 - fun `update(KClass) extension should call its Java counterpart`() { + fun `update() with reified type parameter extension should call its Java counterpart`() { - operation.update(First::class) - verify(operation).update(First::class.java) + operation.update() + verify { operation.update(First::class.java) } } - @Test // DATAMONGO-1719 - fun `update() with reified type parameter extension should call its Java counterpart`() { + @Test // DATAMONGO-2209 + fun findModifyAndAwaitWithValue() { - operation.update() - verify(operation).update(First::class.java) + val find = mockk>() + every { find.findAndModify() } returns Mono.just("foo") + + runBlocking { + assertThat(find.findModifyAndAwait()).isEqualTo("foo") + } + + verify { + find.findAndModify() + } + } + + @Test // DATAMONGO-2247 + fun findModifyAndAwaitWithNull() { + + val find = mockk>() + every { find.findAndModify() } returns Mono.empty() + + assertThatExceptionOfType(NoSuchElementException::class.java).isThrownBy { + runBlocking { find.findModifyAndAwait() } + } + + verify { + find.findAndModify() + } + } + + @Test // DATAMONGO-2247 + fun findModifyAndAwaitOrNullWithValue() { + + val find = mockk>() + every { find.findAndModify() } returns Mono.just("foo") + + runBlocking { + assertThat(find.findModifyAndAwaitOrNull()).isEqualTo("foo") + } + + verify { + find.findAndModify() + } + } + + @Test // DATAMONGO-2247 + fun findModifyAndAwaitOrNullWithNull() { + + val find = mockk>() + every { find.findAndModify() } returns Mono.empty() + + runBlocking { + assertThat(find.findModifyAndAwaitOrNull()).isNull() + } + + verify { + find.findAndModify() + } + } + + @Test // DATAMONGO-2209 + fun findReplaceAndAwaitWithValue() { + + 
val find = mockk>() + every { find.findAndReplace() } returns Mono.just("foo") + + runBlocking { + assertThat(find.findReplaceAndAwait()).isEqualTo("foo") + } + + verify { + find.findAndReplace() + } + } + + @Test // DATAMONGO-2247 + fun findReplaceAndAwaitWithNull() { + + val find = mockk>() + every { find.findAndReplace() } returns Mono.empty() + + assertThatExceptionOfType(NoSuchElementException::class.java).isThrownBy { + runBlocking { find.findReplaceAndAwait() } + } + + verify { + find.findAndReplace() + } + } + + @Test // DATAMONGO-2247 + fun findReplaceAndAwaitOrNullWithValue() { + + val find = mockk>() + every { find.findAndReplace() } returns Mono.just("foo") + + runBlocking { + assertThat(find.findReplaceAndAwaitOrNull()).isEqualTo("foo") + } + + verify { + find.findAndReplace() + } + } + + @Test // DATAMONGO-2247 + fun findReplaceAndAwaitOrNullWithNull() { + + val find = mockk>() + every { find.findAndReplace() } returns Mono.empty() + + runBlocking { + assertThat(find.findReplaceAndAwaitOrNull()).isNull() + } + + verify { + find.findAndReplace() + } + } + + @Test // DATAMONGO-2209 + fun allAndAwait() { + + val update = mockk>() + val result = mockk() + every { update.all() } returns Mono.just(result) + + runBlocking { + assertThat(update.allAndAwait()).isEqualTo(result) + } + + verify { + update.all() + } + } + + @Test // DATAMONGO-2209 + fun firstAndAwait() { + + val update = mockk>() + val result = mockk() + every { update.first() } returns Mono.just(result) + + runBlocking { + assertThat(update.firstAndAwait()).isEqualTo(result) + } + + verify { + update.first() + } + } + + @Test // DATAMONGO-2209 + fun upsertAndAwait() { + + val update = mockk>() + val result = mockk() + every { update.upsert() } returns Mono.just(result) + + runBlocking { + assertThat(update.upsertAndAwait()).isEqualTo(result) + } + + verify { + update.upsert() + } + } + + @Test // DATAMONGO-2209 + fun findAndReplaceWithProjectionAsType() { + + val update = mockk>() + val result = 
mockk>() + every { update.`as`(String::class.java) } returns result + + assertThat(update.asType()).isEqualTo(result) + + verify { + update.`as`(String::class.java) + } } } diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/convert/MappingMongoConverterKtUnitTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/convert/MappingMongoConverterKtUnitTests.kt new file mode 100644 index 0000000000..797c9d41ff --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/convert/MappingMongoConverterKtUnitTests.kt @@ -0,0 +1,42 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert + +import org.assertj.core.api.Assertions.assertThat +import org.bson.Document +import org.junit.jupiter.api.Test +import org.springframework.data.mongodb.core.mapping.MongoMappingContext + +/** + * Kotlin unit tests for [MappingMongoConverter]. 
+ * + * @author Mark Paluch + */ +class MappingMongoConverterKtUnitTests { + + @Test // GH-4485 + fun shouldIgnoreNonReadableProperties() { + + val document = Document.parse("{_id: 'baz', type: 'SOME_VALUE'}") + val converter = + MappingMongoConverter(NoOpDbRefResolver.INSTANCE, MongoMappingContext()) + + val tx = converter.read(SpecialTransaction::class.java, document) + + assertThat(tx.id).isEqualTo("baz") + assertThat(tx.type).isEqualTo("SOME_DEFAULT_VALUE") + } +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/convert/SpecialTransaction.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/convert/SpecialTransaction.kt new file mode 100644 index 0000000000..95643dfa70 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/convert/SpecialTransaction.kt @@ -0,0 +1,26 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert + +abstract class SomeTransaction() { + + abstract val id: String + abstract val type: String +} + +data class SpecialTransaction(override val id: String) : SomeTransaction() { + override val type: String = "SOME_DEFAULT_VALUE" +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensionsTests.kt index cbacb1b11f..a5e20487ff 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/CriteriaExtensionsTests.kt @@ -1,11 +1,11 @@ /* - * Copyright 2017 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,21 +15,19 @@ */ package org.springframework.data.mongodb.core.query +import io.mockk.mockk +import io.mockk.verify +import org.assertj.core.api.Assertions.assertThat import org.junit.Test -import org.junit.runner.RunWith -import org.mockito.Answers -import org.mockito.Mock -import org.mockito.Mockito -import org.mockito.junit.MockitoJUnitRunner +import org.springframework.data.mapping.div /** * @author Sebastien Deleuze + * @author Tjeu Kayim */ -@RunWith(MockitoJUnitRunner::class) class CriteriaExtensionsTests { - @Mock(answer = Answers.RETURNS_MOCKS) - lateinit var criteria: Criteria + val criteria = mockk(relaxed = true) @Test fun `isEqualTo() extension should call its Java counterpart`() { @@ -38,7 +36,7 @@ class CriteriaExtensionsTests { criteria.isEqualTo(foo) - Mockito.verify(criteria, Mockito.times(1)).`is`(foo) + verify(exactly = 1) { criteria.`is`(foo) } } @Test @@ -46,7 +44,7 @@ class CriteriaExtensionsTests { criteria.isEqualTo(null) - Mockito.verify(criteria, Mockito.times(1)).`is`(null) + verify(exactly = 1) { criteria.`is`(null) } } @Test @@ -57,7 +55,7 @@ class CriteriaExtensionsTests { criteria.inValues(foo, bar) - Mockito.verify(criteria, Mockito.times(1)).`in`(foo, bar) + verify(exactly = 1) { criteria.`in`(foo, bar) } } @Test @@ -65,7 +63,7 @@ class CriteriaExtensionsTests { criteria.inValues(null, null) - Mockito.verify(criteria, Mockito.times(1)).`in`(null, null) + verify(exactly = 1) { criteria.`in`(null, null) } } @Test @@ -75,7 +73,7 @@ class CriteriaExtensionsTests { criteria.inValues(c) - Mockito.verify(criteria, Mockito.times(1)).`in`(c) + verify(exactly = 1) { criteria.`in`(c) } } @Test @@ -85,6 +83,45 @@ class CriteriaExtensionsTests { 
criteria.inValues(c) - Mockito.verify(criteria, Mockito.times(1)).`in`(c) + verify(exactly = 1) { criteria.`in`(c) } } + + @Test + fun `and(KProperty) extension should call its Java counterpart`() { + + criteria.and(Book::title) + + verify(exactly = 1) { criteria.and("title") } + } + + @Test + fun `and(KProperty) extension should support nested properties`() { + + criteria.and(Book::author / Author::name) + + verify(exactly = 1) { criteria.and("author.name") } + } + + @Test + fun `where(KProperty) should equal Criteria where()`() { + + class Book(val title: String) + + val typedCriteria = where(Book::title) + val classicCriteria = Criteria.where("title") + + assertThat(typedCriteria).isEqualTo(classicCriteria) + } + + @Test + fun `where(KProperty) should support nested properties`() { + + val typedCriteria = where(Book::author / Author::name) + val classicCriteria = Criteria.where("author.name") + + assertThat(typedCriteria).isEqualTo(classicCriteria) + } + + class Book(val title: String, val author: Author) + class Author(val name: String) } diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt new file mode 100644 index 0000000000..88b2bcf050 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt @@ -0,0 +1,462 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query + +import org.assertj.core.api.Assertions.assertThat +import org.bson.BsonRegularExpression +import org.junit.Test +import org.springframework.data.geo.Circle +import org.springframework.data.geo.Point +import org.springframework.data.mapping.div +import org.springframework.data.mongodb.core.geo.GeoJsonPoint +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type +import java.util.regex.Pattern + +/** + * Unit tests for [Criteria] extensions. + * + * @author Tjeu Kayim + * @author Mark Paluch + * @author Sangyong Choi + */ +class TypedCriteriaExtensionsTests { + + @Test + fun `isEqualTo() should equal expected criteria`() { + + val typed = Book::title isEqualTo "Moby-Dick" + val expected = Criteria("title").isEqualTo("Moby-Dick") + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `ne() should equal expected criteria`() { + + val typed = Book::title ne "Moby-Dick" + val expected = Criteria("title").ne("Moby-Dick") + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `lt() should equal expected criteria`() { + + val typed = Book::price lt 100 + val expected = Criteria("price").lt(100) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `lte() should equal expected criteria`() { + + val typed = Book::price lte 100 + val expected = Criteria("price").lte(100) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `gt() should equal expected criteria`() { + + val typed = Book::price gt 100 + val expected = Criteria("price").gt(100) + + 
assertThat(typed).isEqualTo(expected) + } + + @Test + fun `gte() should equal expected criteria`() { + + val typed = Book::price gte 100 + val expected = Criteria("price").gte(100) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `inValues(vararg) should equal expected criteria`() { + + val typed = Book::price.inValues(1, 2, 3) + val expected = Criteria("price").inValues(1, 2, 3) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `inValues(list) should equal expected criteria`() { + + val typed = Book::price inValues listOf(1, 2, 3) + val expected = Criteria("price").inValues(listOf(1, 2, 3)) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `nin(vararg) should equal expected criteria`() { + + val typed = Book::price.nin(1, 2, 3) + val expected = Criteria("price").nin(1, 2, 3) + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `nin(list) should equal expected criteria`() { + + val typed = Book::price nin listOf(1, 2, 3) + val expected = Criteria("price").nin(listOf(1, 2, 3)) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `mod() should equal expected criteria`() { + + val typed = Book::price.mod(2, 3) + val expected = Criteria("price").mod(2, 3) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `all(vararg) should equal expected criteria`() { + + val typed = Book::categories.all(1, 2, 3) + val expected = Criteria("categories").all(1, 2, 3) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `all(list) should equal expected criteria`() { + + val typed = Book::categories all listOf(1, 2, 3) + val expected = Criteria("categories").all(listOf(1, 2, 3)) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `size() should equal expected criteria`() { + + val typed = Book::categories size 4 + val expected = Criteria("categories").size(4) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `exists() should equal expected criteria`() { + + val typed = Book::title exists true + 
val expected = Criteria("title").exists(true) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `type(Int) should equal expected criteria`() { + + val typed = Book::title type 2 + val expected = Criteria("title").type(2) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `type(List) should equal expected criteria`() { + + val typed = Book::title type listOf(Type.STRING, Type.BOOLEAN) + val expected = Criteria("title").type(Type.STRING, Type.BOOLEAN) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `type(vararg) should equal expected criteria`() { + + val typed = Book::title.type(Type.STRING, Type.BOOLEAN) + val expected = Criteria("title").type(Type.STRING, Type.BOOLEAN) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `not() should equal expected criteria`() { + + val typed = Book::price.not().lt(123) + val expected = Criteria("price").not().lt(123) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `regex(string) should equal expected criteria`() { + + val typed = Book::title regex "ab+c" + val expected = Criteria("title").regex("ab+c") + assertEqualCriteriaByJson(typed, expected) + } + + @Test + fun `regex(string, options) should equal expected criteria`() { + + val typed = Book::title.regex("ab+c", "g") + val expected = Criteria("title").regex("ab+c", "g") + + assertEqualCriteriaByJson(typed, expected) + } + + @Test + fun `regex(Regex) should equal expected criteria`() { + + val typed = Book::title regex Regex("ab+c") + val expected = Criteria("title").regex(Pattern.compile("ab+c")) + + assertEqualCriteriaByJson(typed, expected) + } + + private fun assertEqualCriteriaByJson(typed: Criteria, expected: Criteria) { + assertThat(typed.criteriaObject.toJson()).isEqualTo(expected.criteriaObject.toJson()) + } + + @Test + fun `regex(Pattern) should equal expected criteria`() { + + val value = Pattern.compile("ab+c") + val typed = Book::title regex value + val expected = Criteria("title").regex(value) + + 
assertThat(typed).isEqualTo(expected) + } + + @Test + fun `regex(BsonRegularExpression) should equal expected criteria`() { + + val expression = BsonRegularExpression("ab+c") + val typed = Book::title regex expression + val expected = Criteria("title").regex(expression) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `withinSphere() should equal expected criteria`() { + + val value = Circle(Point(928.76, 28.345), 65.243) + val typed = Building::location withinSphere value + val expected = Criteria("location").withinSphere(value) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `within() should equal expected criteria`() { + + val value = Circle(Point(5.43421, 12.456), 52.67) + val typed = Building::location within value + val expected = Criteria("location").within(value) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `near() should equal expected criteria`() { + + val value = Point(57.431, 71.345) + val typed = Building::location near value + val expected = Criteria("location").near(value) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `nearSphere() should equal expected criteria`() { + + val value = Point(5.4321, 12.345) + val typed = Building::location nearSphere value + val expected = Criteria("location").nearSphere(value) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `intersects() should equal expected criteria`() { + + val value = GeoJsonPoint(5.481573, 51.451726) + val typed = Building::location intersects value + val expected = Criteria("location").intersects(value) + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `maxDistance() should equal expected criteria`() { + + val typed = Building::location maxDistance 3.0 + val expected = Criteria("location").maxDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `minDistance() should equal expected criteria`() { + + val typed = Building::location minDistance 3.0 + val expected = 
Criteria("location").minDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `maxDistance() should equal expected criteria with nearSphere`() { + + val point = Point(0.0, 0.0) + val typed = Building::location nearSphere point maxDistance 3.0 + val expected = Criteria("location") + .nearSphere(point) + .maxDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `minDistance() should equal expected criteria with nearSphere`() { + + val point = Point(0.0, 0.0) + val typed = Building::location nearSphere point minDistance 3.0 + val expected = Criteria("location") + .nearSphere(point) + .minDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `maxDistance() should equal expected criteria with near`() { + + val point = Point(0.0, 0.0) + val typed = Building::location near point maxDistance 3.0 + val expected = Criteria("location") + .near(point) + .maxDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `minDistance() should equal expected criteria with near`() { + + val point = Point(0.0, 0.0) + val typed = Building::location near point minDistance 3.0 + val expected = Criteria("location") + .near(point) + .minDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `elemMatch() should equal expected criteria`() { + + val value = Criteria("price").lt(950) + val typed = Book::title elemMatch value + val expected = Criteria("title").elemMatch(value) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `elemMatch(TypedCriteria) should equal expected criteria`() { + + val typed = Book::title elemMatch (Book::price lt 950) + val expected = Criteria("title").elemMatch(Criteria("price").lt(950)) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `bits() should equal expected criteria`() { + + val typed = Book::title bits { allClear(123) } + val expected = Criteria("title").bits().allClear(123) + + assertThat(typed).isEqualTo(expected) + } + + @Test + 
fun `One level nested should equal expected criteria`() { + + val typed = Book::author / Author::name isEqualTo "Herman Melville" + val expected = Criteria("author.name").isEqualTo("Herman Melville") + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `Two levels nested should equal expected criteria`() { + + data class Entity(val book: Book) + + val typed = Entity::book / Book::author / Author::name isEqualTo "Herman Melville" + val expected = Criteria("book.author.name").isEqualTo("Herman Melville") + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `typed criteria inside orOperator() should equal expected criteria`() { + + val typed = (Book::title isEqualTo "Moby-Dick").orOperator( + Book::price lt 1200, + Book::price gt 240 + ) + val expected = Criteria("title").isEqualTo("Moby-Dick") + .orOperator( + Criteria("price").lt(1200), + Criteria("price").gt(240) + ) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `chaining gt & isEqualTo() should equal expected criteria`() { + + val typed = (Book::title isEqualTo "Moby-Dick") + .and(Book::price).lt(950) + val expected = Criteria("title").isEqualTo("Moby-Dick") + .and("price").lt(950) + + assertThat(typed).isEqualTo(expected) + } + + data class Book( + val title: String = "Moby-Dick", + val price: Int = 123, + val available: Boolean = true, + val categories: List = emptyList(), + val author: Author = Author() + ) + + data class Author( + val name: String = "Herman Melville" + ) + + data class Building( + val location: GeoJsonPoint + ) +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedUpdateExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedUpdateExtensionsTests.kt new file mode 100644 index 0000000000..2d2b2c6847 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedUpdateExtensionsTests.kt @@ -0,0 +1,251 @@ +/* + * Copyright 
2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.query + +import org.assertj.core.api.Assertions.assertThat +import org.junit.Test +import org.springframework.data.mapping.div +import java.time.Instant + +/** + * Unit tests for [Update] extensions. + * + * @author Pawel Matysek + */ +class TypedUpdateExtensionsTests { + + @Test // GH-3028 + fun `update() should equal expected Update`() { + + val typed = update(Book::title, "Moby-Dick") + val expected = Update.update("title", "Moby-Dick") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `set() should equal expected Update`() { + + val typed = Update().set(Book::title, "Moby-Dick") + val expected = Update().set("title", "Moby-Dick") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `setOnInsert() should equal expected Update`() { + + val typed = Update().setOnInsert(Book::title, "Moby-Dick") + val expected = Update().setOnInsert("title", "Moby-Dick") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `unset() should equal expected Update`() { + + val typed = Update().unset(Book::title) + val expected = Update().unset("title") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `inc(key, inc) should equal expected Update`() { + + val typed = Update().inc(Book::price, 5) + val expected = Update().inc("price", 5) + + 
assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `inc(key) should equal expected Update`() { + + val typed = Update().inc(Book::price) + val expected = Update().inc("price") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `push(key, value) should equal expected Update`() { + + val typed = Update().push(Book::categories, "someCategory") + val expected = Update().push("categories", "someCategory") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `push(key) should equal expected Update`() { + + val typed = Update().push(Book::categories) + val expected = Update().push("categories") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `addToSet(key) should equal expected Update`() { + + val typed = Update().addToSet(Book::categories).each("category", "category2") + val expected = Update().addToSet("categories").each("category", "category2") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `addToSet(key, value) should equal expected Update`() { + + val typed = Update().addToSet(Book::categories, "someCategory") + val expected = Update().addToSet("categories", "someCategory") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `pop() should equal expected Update`() { + + val typed = Update().pop(Book::categories, Update.Position.FIRST) + val expected = Update().pop("categories", Update.Position.FIRST) + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `pull() should equal expected Update`() { + + val typed = Update().pull(Book::categories, "someCategory") + val expected = Update().pull("categories", "someCategory") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `pullAll() should equal expected Update`() { + + val typed = Update().pullAll(Book::categories, arrayOf("someCategory", "someCategory2")) + val expected = Update().pullAll("categories", arrayOf("someCategory", "someCategory2")) + + 
assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `currentDate() should equal expected Update`() { + + val typed = Update().currentDate(Book::releaseDate) + val expected = Update().currentDate("releaseDate") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `currentTimestamp() should equal expected Update`() { + + val typed = Update().currentTimestamp(Book::releaseDate) + val expected = Update().currentTimestamp("releaseDate") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `multiply() should equal expected Update`() { + + val typed = Update().multiply(Book::price, 2) + val expected = Update().multiply("price", 2) + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `max() should equal expected Update`() { + + val typed = Update().max(Book::price, 200) + val expected = Update().max("price", 200) + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `min() should equal expected Update`() { + + val typed = Update().min(Book::price, 100) + val expected = Update().min("price", 100) + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `bitwise() should equal expected Update`() { + + val typed = Update().bitwise(Book::price).and(2) + val expected = Update().bitwise("price").and(2) + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `filterArray() should equal expected Update`() { + + val typed = Update().filterArray(Book::categories, "someCategory") + val expected = Update().filterArray("categories", "someCategory") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `typed modifies() should equal expected modifies()`() { + + val typed = update(Book::title, "Moby-Dick") + + assertThat(typed.modifies(Book::title)).isEqualTo(typed.modifies("title")) + assertThat(typed.modifies(Book::price)).isEqualTo(typed.modifies("price")) + } + + @Test // GH-3028 + fun `One level nested should equal expected Update`() { + 
+ val typed = update(Book::author / Author::name, "Herman Melville") + val expected = Update.update("author.name", "Herman Melville") + + assertThat(typed).isEqualTo(expected) + } + + @Test // GH-3028 + fun `Two levels nested should equal expected Update`() { + + data class Entity(val book: Book) + + val typed = update(Entity::book / Book::author / Author::name, "Herman Melville") + val expected = Update.update("book.author.name", "Herman Melville") + + assertThat(typed).isEqualTo(expected) + } + + data class Book( + val title: String = "Moby-Dick", + val price: Int = 123, + val available: Boolean = true, + val categories: List = emptyList(), + val author: Author = Author(), + val releaseDate: Instant, + ) + + data class Author( + val name: String = "Herman Melville", + ) +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/CoroutineRepositoryUnitTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/CoroutineRepositoryUnitTests.kt new file mode 100644 index 0000000000..af3744a900 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/CoroutineRepositoryUnitTests.kt @@ -0,0 +1,68 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository + +import com.mongodb.client.result.DeleteResult +import io.mockk.every +import io.mockk.mockk +import kotlinx.coroutines.runBlocking +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.springframework.data.annotation.Id +import org.springframework.data.mongodb.core.ReactiveMongoOperations +import org.springframework.data.mongodb.core.convert.MappingMongoConverter +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver +import org.springframework.data.mongodb.core.mapping.MongoMappingContext +import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory +import org.springframework.data.repository.kotlin.CoroutineCrudRepository +import reactor.core.publisher.Mono + +/** + * Unit tests for Kotlin Coroutine repositories. + * + * @author Mark Paluch + */ +class CoroutineRepositoryUnitTests { + + val operations = mockk(relaxed = true) + lateinit var repositoryFactory: ReactiveMongoRepositoryFactory + + @BeforeEach + fun before() { + + every { operations.getConverter() } returns MappingMongoConverter(NoOpDbRefResolver.INSTANCE, MongoMappingContext()) + repositoryFactory = ReactiveMongoRepositoryFactory(operations) + } + + @Test // DATAMONGO-2601 + fun `should discard result of suspended query method without result`() { + + every { operations.remove(any(), any(), any()) } returns Mono.just(DeleteResult.acknowledged(1)) + + val repository = repositoryFactory.getRepository(PersonRepository::class.java) + + runBlocking { + repository.deleteAllByName("foo") + } + } + + interface PersonRepository : CoroutineCrudRepository { + + suspend fun deleteAllByName(name: String) + } + + data class Person(@Id var id: Long, var name: String) +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/KotlinRepositoryUnitTests.kt 
b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/KotlinRepositoryUnitTests.kt new file mode 100644 index 0000000000..96e1b679d2 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/KotlinRepositoryUnitTests.kt @@ -0,0 +1,61 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository + +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.springframework.data.annotation.Id +import org.springframework.data.mongodb.core.MongoOperations +import org.springframework.data.mongodb.core.convert.MappingMongoConverter +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver +import org.springframework.data.mongodb.core.mapping.MongoMappingContext +import org.springframework.data.mongodb.repository.support.MongoRepositoryFactory +import org.springframework.data.repository.CrudRepository + +/** + * Unit tests for Kotlin repositories. 
+ * + * @author Mark Paluch + */ +class KotlinRepositoryUnitTests { + + val operations = mockk(relaxed = true) + lateinit var repositoryFactory: MongoRepositoryFactory + + @BeforeEach + fun before() { + + every { operations.getConverter() } returns MappingMongoConverter(NoOpDbRefResolver.INSTANCE, MongoMappingContext()) + repositoryFactory = MongoRepositoryFactory(operations) + } + + @Test // DATAMONGO-2601 + fun shouldSupportDeleteMethods() { + + val repository = repositoryFactory.getRepository(PersonRepository::class.java) + + repository.deleteAllByName("foo") + } + + interface PersonRepository : CrudRepository { + + fun deleteAllByName(name: String) + } + + data class Person(@Id var id: Long, var name: String) +} diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodCoroutineUnitTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodCoroutineUnitTests.kt new file mode 100644 index 0000000000..b3e5c013b9 --- /dev/null +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/repository/query/ReactiveMongoQueryMethodCoroutineUnitTests.kt @@ -0,0 +1,86 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.query + +import kotlinx.coroutines.flow.Flow +import org.assertj.core.api.Assertions.assertThat +import org.assertj.core.api.Assertions.assertThatNoException +import org.junit.jupiter.api.Test +import org.springframework.data.mongodb.core.mapping.MongoMappingContext +import org.springframework.data.mongodb.repository.Person +import org.springframework.data.mongodb.repository.Update +import org.springframework.data.projection.SpelAwareProxyProjectionFactory +import org.springframework.data.repository.core.support.DefaultRepositoryMetadata +import org.springframework.data.repository.kotlin.CoroutineCrudRepository +import kotlin.coroutines.Continuation + +/** + * Unit tests for [ReactiveMongoQueryMethod] using Coroutine repositories. + * + * @author Mark Paluch + */ +class ReactiveMongoQueryMethodCoroutineUnitTests { + + val projectionFactory = SpelAwareProxyProjectionFactory() + + interface PersonRepository : CoroutineCrudRepository { + + suspend fun findSuspendAllByName(): Flow + + fun findAllByName(): Flow + + suspend fun findSuspendByName(): List + + @Update("{ \$inc: { age: 1 } }") + suspend fun findAndIncrementAgeByName(name: String) + } + + @Test // DATAMONGO-2562 + internal fun `should consider methods returning Flow as collection queries`() { + + val method = PersonRepository::class.java.getMethod("findAllByName") + val queryMethod = ReactiveMongoQueryMethod(method, DefaultRepositoryMetadata(PersonRepository::class.java), projectionFactory, MongoMappingContext()) + + assertThat(queryMethod.isCollectionQuery).isTrue() + } + + @Test // DATAMONGO-2562 + internal fun `should consider suspended methods returning Flow as collection queries`() { + + val method = PersonRepository::class.java.getMethod("findSuspendAllByName", Continuation::class.java) + val queryMethod = ReactiveMongoQueryMethod(method, DefaultRepositoryMetadata(PersonRepository::class.java), projectionFactory, MongoMappingContext()) + + 
assertThat(queryMethod.isCollectionQuery).isTrue() + } + + @Test // DATAMONGO-2630 + internal fun `should consider suspended methods returning List as collection queries`() { + + val method = PersonRepository::class.java.getMethod("findSuspendByName", Continuation::class.java) + val queryMethod = ReactiveMongoQueryMethod(method, DefaultRepositoryMetadata(PersonRepository::class.java), projectionFactory, MongoMappingContext()) + + assertThat(queryMethod.isCollectionQuery).isTrue() + } + + @Test // GH-4772 + internal fun `should consider suspended update queries`() { + + val method = PersonRepository::class.java.getMethod("findAndIncrementAgeByName", String::class.java, Continuation::class.java) + val queryMethod = ReactiveMongoQueryMethod(method, DefaultRepositoryMetadata(PersonRepository::class.java), projectionFactory, MongoMappingContext()) + + assertThatNoException().isThrownBy { queryMethod.verify() } + } +} diff --git a/spring-data-mongodb/src/test/resources/geospatial.xml b/spring-data-mongodb/src/test/resources/geospatial.xml index 13a01ac350..8937e8f753 100644 --- a/spring-data-mongodb/src/test/resources/geospatial.xml +++ b/spring-data-mongodb/src/test/resources/geospatial.xml @@ -2,23 +2,24 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-3.0.xsd + http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd"> - - - + + + - + - + + - + diff --git a/spring-data-mongodb/src/test/resources/gridfs/another-resource.xml b/spring-data-mongodb/src/test/resources/gridfs/another-resource.xml new file mode 100644 index 0000000000..7217ac4743 --- /dev/null +++ b/spring-data-mongodb/src/test/resources/gridfs/another-resource.xml @@ -0,0 +1,2 @@ + + diff --git a/spring-data-mongodb/src/test/resources/gridfs/gridfs.xml b/spring-data-mongodb/src/test/resources/gridfs/gridfs.xml index 274878ec27..a7691c97ae 100644 --- 
a/spring-data-mongodb/src/test/resources/gridfs/gridfs.xml +++ b/spring-data-mongodb/src/test/resources/gridfs/gridfs.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/gridfs/reactive-gridfs.xml b/spring-data-mongodb/src/test/resources/gridfs/reactive-gridfs.xml new file mode 100644 index 0000000000..bcba3dfb38 --- /dev/null +++ b/spring-data-mongodb/src/test/resources/gridfs/reactive-gridfs.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spring-data-mongodb/src/test/resources/infrastructure.xml b/spring-data-mongodb/src/test/resources/infrastructure.xml index 0107de40a3..500c44e2bf 100644 --- a/spring-data-mongodb/src/test/resources/infrastructure.xml +++ b/spring-data-mongodb/src/test/resources/infrastructure.xml @@ -1,20 +1,21 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-3.0.xsd"> - + + diff --git a/spring-data-mongodb/src/test/resources/logback.xml b/spring-data-mongodb/src/test/resources/logback.xml index 3c6add8152..64550c957c 100644 --- a/spring-data-mongodb/src/test/resources/logback.xml +++ b/spring-data-mongodb/src/test/resources/logback.xml @@ -7,14 +7,20 @@ - - - + + + + + + - \ No newline at end of file + diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-custom-fieldnamingstrategy.xml b/spring-data-mongodb/src/test/resources/namespace/converter-custom-fieldnamingstrategy.xml index 1c3e248efb..a16849877b 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-custom-fieldnamingstrategy.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-custom-fieldnamingstrategy.xml @@ -2,8 +2,8 @@ + 
xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-custom-typeMapper.xml b/spring-data-mongodb/src/test/resources/namespace/converter-custom-typeMapper.xml index bb1037c5f7..580fe87272 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-custom-typeMapper.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-custom-typeMapper.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-default.xml b/spring-data-mongodb/src/test/resources/namespace/converter-default.xml index 6692e304db..9da3af36d7 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-default.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-default.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-invalid.xml b/spring-data-mongodb/src/test/resources/namespace/converter-invalid.xml index 7d72040076..e69006a0f7 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-invalid.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-invalid.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo 
https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-nested-bean-definition.xml b/spring-data-mongodb/src/test/resources/namespace/converter-nested-bean-definition.xml index 1ef0063abd..7fef48d5dd 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-nested-bean-definition.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-nested-bean-definition.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-validation-disabled.xml b/spring-data-mongodb/src/test/resources/namespace/converter-validation-disabled.xml index 345e7d12bc..8d7415f46b 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-validation-disabled.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-validation-disabled.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter-validation-enabled.xml b/spring-data-mongodb/src/test/resources/namespace/converter-validation-enabled.xml index 2b7e4519a0..fe6df5bae2 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter-validation-enabled.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter-validation-enabled.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo 
https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/converter.xml b/spring-data-mongodb/src/test/resources/namespace/converter.xml index 91842765d9..1a0106d7bf 100644 --- a/spring-data-mongodb/src/test/resources/namespace/converter.xml +++ b/spring-data-mongodb/src/test/resources/namespace/converter.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> @@ -13,6 +13,8 @@ + + diff --git a/spring-data-mongodb/src/test/resources/namespace/db-factory-bean-custom-write-concern.xml b/spring-data-mongodb/src/test/resources/namespace/db-factory-bean-custom-write-concern.xml index 469489df98..66dba8540a 100644 --- a/spring-data-mongodb/src/test/resources/namespace/db-factory-bean-custom-write-concern.xml +++ b/spring-data-mongodb/src/test/resources/namespace/db-factory-bean-custom-write-concern.xml @@ -2,16 +2,16 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + - + - + - \ No newline at end of file + diff --git a/spring-data-mongodb/src/test/resources/namespace/db-factory-bean.xml b/spring-data-mongodb/src/test/resources/namespace/db-factory-bean.xml index 245004df86..6f8c1ae0cf 100644 --- a/spring-data-mongodb/src/test/resources/namespace/db-factory-bean.xml +++ b/spring-data-mongodb/src/test/resources/namespace/db-factory-bean.xml @@ -2,13 +2,21 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo 
https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + - + + + + + + + + + diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-bean.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-bean.xml index 0a2b5bc470..478f95daf0 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-bean.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongo-bean.xml @@ -2,13 +2,15 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + - + + + diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-and-id.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-and-id.xml index 567585f1c5..4bd9158356 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-and-id.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri-and-id.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri.xml index 3fa5daff85..e12b585237 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongo-client-uri.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans 
https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-and-id.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-uri-and-id.xml index 567585f1c5..4bd9158356 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-and-id.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongo-uri-and-id.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-no-credentials.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-uri-no-credentials.xml index cee72b9832..5969d84e9f 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-no-credentials.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongo-uri-no-credentials.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-write-concern-and-details.xml b/spring-data-mongodb/src/test/resources/namespace/mongo-uri-write-concern-and-details.xml index 7319f4ddcb..a279bd83e4 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongo-uri-write-concern-and-details.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongo-uri-write-concern-and-details.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff 
--git a/spring-data-mongodb/src/test/resources/namespace/mongoClient-bean.xml b/spring-data-mongodb/src/test/resources/namespace/mongoClient-bean.xml index 4b157b561e..0659bfb973 100644 --- a/spring-data-mongodb/src/test/resources/namespace/mongoClient-bean.xml +++ b/spring-data-mongodb/src/test/resources/namespace/mongoClient-bean.xml @@ -2,22 +2,22 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> - + - - + + - + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/MongoBeanPropertyDocumentMapper-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/MongoBeanPropertyDocumentMapper-context.xml index 4f0883a6f6..76f271b4da 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/MongoBeanPropertyDocumentMapper-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/MongoBeanPropertyDocumentMapper-context.xml @@ -1,7 +1,7 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml new file mode 100644 index 0000000000..79e5ac40a0 --- /dev/null +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml @@ -0,0 +1,49 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests-context.xml 
b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests-context.xml index 74d6fe0083..dc86edce2f 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoDbFactoryNoDatabaseRunningTests-context.xml @@ -3,16 +3,16 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:mongo="http://www.springframework.org/schema/data/mongo" xmlns:context="http://www.springframework.org/schema/context" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd"> - + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests-context.xml index 7582cecfb1..5575248498 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoNamespaceReplicaSetTests-context.xml @@ -3,16 +3,20 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
xmlns:mongo="http://www.springframework.org/schema/data/mongo" xmlns:context="http://www.springframework.org/schema/context" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-3.0.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context-3.0.xsd"> - + + + - + + + - + - - @@ -61,7 +55,7 @@ - + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/auditing.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/auditing.xml index b91883d3cf..8466692f83 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/auditing.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/auditing.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/mongo.properties b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/mongo.properties index 4ac86e45af..a79c2c685c 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/mongo.properties +++ 
b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/mongo.properties @@ -8,7 +8,8 @@ mongo.socketTimeout=1500 mongo.threadsAllowedToBlockForConnectionMultiplier=4 mongo.socketKeepAlive=true mongo.fsync=true -mongo.slaveOk=true mongoSsl.ssl=true +replicaSetName=rs0 +credential=jon:warg@snow?uri.authMechanism=PLAIN diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests-context.xml index ce49033e76..e3635826c7 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexCreatorIntegrationTests-context.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests-context.xml index 97797b9847..66d9aed368 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/core/mapping/event/ValidatingMongoEventListenerTests-context.xml @@ -3,13 +3,13 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:mongo="http://www.springframework.org/schema/data/mongo" 
xmlns:context="http://www.springframework.org/schema/context" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd"> - + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests-context.xml index d967bf8148..e9c04b79c9 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/monitor/MongoMonitorIntegrationTests-context.xml @@ -1,7 +1,7 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd"> diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests-context.xml index 119940c359..264d474b56 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests-context.xml +++ 
b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/PersonRepositoryIntegrationTests-context.xml @@ -3,16 +3,19 @@ xmlns:mongo="http://www.springframework.org/schema/data/mongo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:util="http://www.springframework.org/schema/util" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/util https://www.springframework.org/schema/util/spring-util.xsd"> - + + + + - + @@ -26,13 +29,8 @@ - - - - - - + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests-context.xml index 72ef773b67..2e88cda928 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/RepositoryIndexCreationIntegrationTests-context.xml @@ -3,9 +3,9 @@ xmlns:mongo="http://www.springframework.org/schema/data/mongo" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo 
http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd"> diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests-context.xml index e2595507dc..b70efb607c 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/MongoNamespaceIntegrationTests-context.xml @@ -4,20 +4,25 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:repository="http://www.springframework.org/schema/data/repository" xmlns:util="http://www.springframework.org/schema/util" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/data/repository http://www.springframework.org/schema/data/repository/spring-repository.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo 
https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/data/repository https://www.springframework.org/schema/data/repository/spring-repository.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/util https://www.springframework.org/schema/util/spring-util.xsd"> + + + - + + + diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests-context.xml index 41002c4d3a..4817cacf03 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/repository/config/lazy/AllowNestedMongoRepositoriesRepositoryConfigTests-context.xml @@ -4,10 +4,10 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:repository="http://www.springframework.org/schema/data/repository" xmlns:util="http://www.springframework.org/schema/util" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/data/repository http://www.springframework.org/schema/data/repository/spring-repository.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/data/repository 
https://www.springframework.org/schema/data/repository/spring-repository.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/util https://www.springframework.org/schema/util/spring-util.xsd"> diff --git a/spring-data-mongodb/src/test/resources/reactive-infrastructure.xml b/spring-data-mongodb/src/test/resources/reactive-infrastructure.xml index 09059b8fc2..896bb26812 100644 --- a/spring-data-mongodb/src/test/resources/reactive-infrastructure.xml +++ b/spring-data-mongodb/src/test/resources/reactive-infrastructure.xml @@ -1,7 +1,7 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-3.0.xsd"> @@ -15,6 +15,7 @@ + diff --git a/spring-data-mongodb/src/test/resources/server-jmx.xml b/spring-data-mongodb/src/test/resources/server-jmx.xml index 7b24ed7aba..54f985f4cb 100644 --- a/spring-data-mongodb/src/test/resources/server-jmx.xml +++ b/spring-data-mongodb/src/test/resources/server-jmx.xml @@ -4,9 +4,9 @@ xmlns:p="http://www.springframework.org/schema/p" xmlns:mongo="http://www.springframework.org/schema/data/mongo" xmlns:context="http://www.springframework.org/schema/context" - xsi:schemaLocation="http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd - http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd - http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd"> + xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd + http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context.xsd"> diff --git 
a/spring-data-mongodb/src/test/resources/template-mapping.xml b/spring-data-mongodb/src/test/resources/template-mapping.xml index f9b8073b20..5f571f7241 100644 --- a/spring-data-mongodb/src/test/resources/template-mapping.xml +++ b/spring-data-mongodb/src/test/resources/template-mapping.xml @@ -2,8 +2,8 @@ + xsi:schemaLocation="http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans.xsd + http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd"> diff --git a/src/main/antora/antora-playbook.yml b/src/main/antora/antora-playbook.yml new file mode 100644 index 0000000000..9f842fe401 --- /dev/null +++ b/src/main/antora/antora-playbook.yml @@ -0,0 +1,40 @@ +# PACKAGES antora@3.2.0-alpha.2 @antora/atlas-extension:1.0.0-alpha.1 @antora/collector-extension@1.0.0-alpha.3 @springio/antora-extensions@1.1.0-alpha.2 @asciidoctor/tabs@1.0.0-alpha.12 @opendevise/antora-release-line-extension@1.0.0-alpha.2 +# +# The purpose of this Antora playbook is to build the docs in the current branch. +antora: + extensions: + - require: '@springio/antora-extensions' + root_component_name: 'data-mongodb' +site: + title: Spring Data MongoDB + url: https://docs.spring.io/spring-data/mongo/reference +content: + sources: + - url: ./../../.. 
+ branches: HEAD + start_path: src/main/antora + worktrees: true + - url: https://github.com/spring-projects/spring-data-commons + # Refname matching: + # https://docs.antora.org/antora/latest/playbook/content-refname-matching/ + branches: [ main, 3.3.x, 3.2.x] + start_path: src/main/antora +asciidoc: + attributes: + hide-uri-scheme: '@' + tabs-sync-option: '@' + extensions: + - '@asciidoctor/tabs' + - '@springio/asciidoctor-extensions' + - '@springio/asciidoctor-extensions/javadoc-extension' + sourcemap: true +urls: + latest_version_segment: '' +runtime: + log: + failure_level: warn + format: pretty +ui: + bundle: + url: https://github.com/spring-io/antora-ui-spring/releases/download/v0.4.18/ui-bundle.zip + snapshot: true diff --git a/src/main/antora/antora.yml b/src/main/antora/antora.yml new file mode 100644 index 0000000000..1a23500e21 --- /dev/null +++ b/src/main/antora/antora.yml @@ -0,0 +1,17 @@ +name: data-mongodb +version: true +title: Spring Data MongoDB +nav: + - modules/ROOT/nav.adoc +ext: + collector: + - run: + command: ./mvnw validate process-resources -pl :spring-data-mongodb-distribution -am -Pantora-process-resources + local: true + scan: + dir: spring-data-mongodb-distribution/target/classes/ + - run: + command: ./mvnw package -Pdistribute + local: true + scan: + dir: target/antora diff --git a/src/main/asciidoc/images/jconsole.png b/src/main/antora/modules/ROOT/assets/images/jconsole.png similarity index 100% rename from src/main/asciidoc/images/jconsole.png rename to src/main/antora/modules/ROOT/assets/images/jconsole.png diff --git a/src/main/antora/modules/ROOT/examples/example b/src/main/antora/modules/ROOT/examples/example new file mode 120000 index 0000000000..3195fe72aa --- /dev/null +++ b/src/main/antora/modules/ROOT/examples/example @@ -0,0 +1 @@ +../../../../../../spring-data-mongodb/src/test/java/org/springframework/data/mongodb/example \ No newline at end of file diff --git a/src/main/antora/modules/ROOT/nav.adoc 
b/src/main/antora/modules/ROOT/nav.adoc new file mode 100644 index 0000000000..221f47c011 --- /dev/null +++ b/src/main/antora/modules/ROOT/nav.adoc @@ -0,0 +1,71 @@ +* xref:index.adoc[Overview] +** xref:commons/upgrade.adoc[] +** xref:migration-guides.adoc[] +*** xref:migration-guide/migration-guide-2.x-to-3.x.adoc[] +*** xref:migration-guide/migration-guide-3.x-to-4.x.adoc[] + +* xref:mongodb.adoc[] +** xref:preface.adoc[] +** xref:mongodb/getting-started.adoc[] +** xref:mongodb/configuration.adoc[] + +** xref:mongodb/template-api.adoc[] +*** xref:mongodb/template-config.adoc[] +*** xref:mongodb/template-collection-management.adoc[] +*** xref:mongodb/template-crud-operations.adoc[] +*** xref:mongodb/template-query-operations.adoc[] +*** xref:mongodb/template-document-count.adoc[] +*** xref:mongodb/aggregation-framework.adoc[] + +** xref:mongodb/template-gridfs.adoc[] +** xref:mongodb/mapping/mapping.adoc[] +*** xref:mongodb/mapping/mapping-schema.adoc[] +*** xref:mongodb/mapping/custom-conversions.adoc[Type based Converter] +*** xref:mongodb/mapping/property-converters.adoc[] +*** xref:mongodb/mapping/unwrapping-entities.adoc[] +*** xref:mongodb/mapping/document-references.adoc[Object References] +*** xref:mongodb/mapping/mapping-index-management.adoc[] + +** xref:mongodb/value-expressions.adoc[] +** xref:mongodb/lifecycle-events.adoc[] +** xref:mongodb/auditing.adoc[] +** xref:mongodb/client-session-transactions.adoc[] +** xref:mongodb/change-streams.adoc[] +** xref:mongodb/tailable-cursors.adoc[] +** xref:mongodb/sharding.adoc[] +** xref:mongodb/mongo-search-indexes.adoc[] +** xref:mongodb/mongo-encryption.adoc[] + +// Repository +* xref:repositories.adoc[] +** xref:repositories/core-concepts.adoc[] +** xref:repositories/definition.adoc[] +** xref:mongodb/repositories/repositories.adoc[] +** xref:repositories/core-extensions.adoc[] +** xref:repositories/create-instances.adoc[] +** xref:repositories/query-methods-details.adoc[] +** 
xref:mongodb/repositories/query-methods.adoc[] +** xref:mongodb/repositories/modifying-methods.adoc[] +** xref:repositories/projections.adoc[] +** xref:repositories/custom-implementations.adoc[] +** xref:repositories/core-domain-events.adoc[] +** xref:repositories/null-handling.adoc[] +** xref:mongodb/repositories/cdi-integration.adoc[] +** xref:repositories/query-keywords-reference.adoc[] +** xref:repositories/query-return-types-reference.adoc[] + +// Observability +* xref:observability/observability.adoc[] +** xref:observability/conventions.adoc[] +** xref:observability/metrics.adoc[] +** xref:observability/spans.adoc[] + +* xref:kotlin.adoc[] +** xref:kotlin/requirements.adoc[] +** xref:kotlin/null-safety.adoc[] +** xref:kotlin/extensions.adoc[] +** xref:kotlin/coroutines.adoc[] + +* xref:attachment$api/java/index.html[Javadoc,role=link-external,window=_blank] +* https://github.com/spring-projects/spring-data-commons/wiki[Wiki,role=link-external,window=_blank] + diff --git a/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc b/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc new file mode 100644 index 0000000000..51a9189aa0 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/commons/upgrade.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$upgrade.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/index.adoc b/src/main/antora/modules/ROOT/pages/index.adoc new file mode 100644 index 0000000000..2a22bd56b4 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/index.adoc @@ -0,0 +1,22 @@ +[[spring-data-mongodb-reference-documentation]] += Spring Data MongoDB +:revnumber: {version} +:revdate: {localdate} +:feature-scroll: true + +_Spring Data MongoDB provides support for the MongoDB database. 
+It uses familiar Spring concepts such as a template classes for core API usage and lightweight repository style data access to ease development of applications with a consistent programming model._ + +[horizontal] +xref:mongodb.adoc[MongoDB] :: MongoDB support and connectivity +xref:repositories.adoc[Repositories] :: Mongo Repositories +xref:observability/observability.adoc[Observability] :: Observability Integration +xref:kotlin.adoc[Kotlin] :: Kotlin support +// xref:migration-guides.adoc[Migration] :: Migration Guides +https://github.com/spring-projects/spring-data-commons/wiki[Wiki] :: What's New, Upgrade Notes, Supported Versions, additional cross-version information. + +Mark Pollack; Thomas Risberg; Oliver Gierke; Costin Leau; Jon Brisbin; Thomas Darimont; Christoph Strobl; Mark Paluch; Jay Bryant + +(C) 2008-{copyright-year} VMware Inc. + +Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically. 
diff --git a/src/main/antora/modules/ROOT/pages/kotlin.adoc b/src/main/antora/modules/ROOT/pages/kotlin.adoc new file mode 100644 index 0000000000..4f01678d84 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/kotlin.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$kotlin.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/kotlin/coroutines.adoc b/src/main/antora/modules/ROOT/pages/kotlin/coroutines.adoc new file mode 100644 index 0000000000..8f578961cf --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/kotlin/coroutines.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$kotlin/coroutines.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/kotlin/extensions.adoc b/src/main/antora/modules/ROOT/pages/kotlin/extensions.adoc new file mode 100644 index 0000000000..381a48be13 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/kotlin/extensions.adoc @@ -0,0 +1,85 @@ +include::{commons}@data-commons::page$kotlin/extensions.adoc[] + +To retrieve a list of `SWCharacter` objects in Java, you would normally write the following: + +[source,java] +---- +Flux characters = template.query(SWCharacter.class).inTable("star-wars").all() +---- + +With Kotlin and the Spring Data extensions, you can instead write the following: + +[source,kotlin] +---- +val characters = template.query().inTable("star-wars").all() +// or (both are equivalent) +val characters : Flux = template.query().inTable("star-wars").all() +---- + +As in Java, `characters` in Kotlin is strongly typed, but Kotlin's clever type inference allows for shorter syntax. + +[[mongo.query.kotlin-support]] +== Type-safe Queries for Kotlin + +Kotlin embraces domain-specific language creation through its language syntax and its extension system. +Spring Data MongoDB ships with a Kotlin Extension for `Criteria` using https://kotlinlang.org/docs/reference/reflection.html#property-references[Kotlin property references] to build type-safe queries. 
+Queries using this extension are typically benefit from improved readability. +Most keywords on `Criteria` have a matching Kotlin extension, such as `inValues` and `regex`. + +Consider the following example explaining Type-safe Queries: + +==== +[source,kotlin] +---- +import org.springframework.data.mongodb.core.query.* + +mongoOperations.find( + Query(Book::title isEqualTo "Moby-Dick") <1> +) + +mongoOperations.find( + Query(titlePredicate = Book::title exists true) +) + +mongoOperations.find( + Query( + Criteria().andOperator( + Book::price gt 5, + Book::price lt 10 + )) +) + +// Binary operators +mongoOperations.find( + Query(BinaryMessage::payload bits { allClear(0b101) }) <2> +) + +// Nested Properties (i.e. refer to "book.author") +mongoOperations.find( + Query(Book::author / Author::name regex "^H") <3> +) +---- +<1> `isEqualTo()` is an infix extension function with receiver type `KProperty` that returns `Criteria`. +<2> For bitwise operators, pass a lambda argument where you call one of the methods of `Criteria.BitwiseCriteriaOperators`. +<3> To construct nested properties, use the `/` character (overloaded operator `div`). +==== + +[[mongo.update.kotlin-support]] +== Type-safe Updates for Kotlin + +A syntax similar to <> can be used to update documents: + +==== +[source,kotlin] +---- +mongoOperations.updateMulti( + Query(Book::title isEqualTo "Moby-Dick"), + update(Book:title, "The Whale") <1> + .inc(Book::price, 100) <2> + .addToSet(Book::authors, "Herman Melville") <3> +) +---- +<1> `update()` is a factory function with receiver type `KProperty` that returns `Update`. +<2> Most methods from `Update` have a matching Kotlin extension. 
+<3> Functions with `KProperty` can be used as well on collections types +==== diff --git a/src/main/antora/modules/ROOT/pages/kotlin/null-safety.adoc b/src/main/antora/modules/ROOT/pages/kotlin/null-safety.adoc new file mode 100644 index 0000000000..6967ddb3f6 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/kotlin/null-safety.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$kotlin/null-safety.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/kotlin/requirements.adoc b/src/main/antora/modules/ROOT/pages/kotlin/requirements.adoc new file mode 100644 index 0000000000..bb209ab6a4 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/kotlin/requirements.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$kotlin/requirements.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/migration-guide/migration-guide-2.x-to-3.x.adoc b/src/main/antora/modules/ROOT/pages/migration-guide/migration-guide-2.x-to-3.x.adoc new file mode 100644 index 0000000000..c002c1fee5 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guide/migration-guide-2.x-to-3.x.adoc @@ -0,0 +1,61 @@ +[[mongodb.migration.2.x-3.x]] += Migration Guide from 2.x to 3.x + +Spring Data MongoDB 3.x requires the MongoDB Java Driver 4.x + +To learn more about driver versions please visit the https://www.mongodb.com/docs/drivers/java/sync/current/upgrade/[MongoDB Documentation]. + +[[dependency-changes]] +== Dependency Changes + +* `org.mongodb:mongo-java-driver` (uber jar) got replaced with: +** bson-jar +** core-jar +** sync-jar + +The change in dependencies allows usage of the reactive support without having to pull the synchronous driver. +NOTE: The new sync driver does no longer support `com.mongodb.DBObject`. Please use `org.bson.Document` instead. + +[[signature-changes]] +== Signature Changes + +* `MongoTemplate` no longer supports `com.mongodb.MongoClient` and `com.mongodb.MongoClientOptions`. 
+Please use `com.mongodb.client.MongoClient` and `com.mongodb.MongoClientSettings` instead. + +In case you're using `AbstractMongoConfiguration` please switch to `AbstractMongoClientConfiguration`. + +[[namespace-changes]] +== Namespace Changes + +The switch to `com.mongodb.client.MongoClient` requires an update of your configuration XML if you have one. +The best way to provide required connection information is by using a connection string. +Please see the https://docs.mongodb.com/manual/reference/connection-string/[MongoDB Documentation] for details. + + +==== +[source,xml] +---- + +---- + +[source,xml] +---- + + + + + +---- + +[source,xml] +---- + + + +---- +==== diff --git a/src/main/antora/modules/ROOT/pages/migration-guide/migration-guide-3.x-to-4.x.adoc b/src/main/antora/modules/ROOT/pages/migration-guide/migration-guide-3.x-to-4.x.adoc new file mode 100644 index 0000000000..aa340a4f24 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guide/migration-guide-3.x-to-4.x.adoc @@ -0,0 +1,5 @@ +[[mongodb.migration.3.x-4.x]] += Migration Guide from 3.x to 4.x + +Spring Data MongoDB 4.x requires the MongoDB Java Driver 4.8.x + +To learn more about driver versions please visit the https://www.mongodb.com/docs/drivers/java/sync/current/upgrade/[MongoDB Documentation]. diff --git a/src/main/antora/modules/ROOT/pages/migration-guides.adoc b/src/main/antora/modules/ROOT/pages/migration-guides.adoc new file mode 100644 index 0000000000..3f8e783a16 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/migration-guides.adoc @@ -0,0 +1,8 @@ +[[mongodb.migration]] += Migration Guides +:page-section-summary-toc: 1 + +This section contains version-specific migration guides explaining how to upgrade between two versions. 
+ + + diff --git a/src/main/antora/modules/ROOT/pages/mongodb.adoc b/src/main/antora/modules/ROOT/pages/mongodb.adoc new file mode 100644 index 0000000000..907cbf06be --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb.adoc @@ -0,0 +1,23 @@ +[[mongodb.core]] += MongoDB Support +:page-section-summary-toc: 1 + +Spring Data support for MongoDB contains a wide range of features: + +* xref:mongodb/template-config.adoc[Spring configuration support] with Java-based `@Configuration` classes or an XML namespace for a Mongo driver instance and replica sets. +* xref:mongodb/template-api.adoc[`MongoTemplate` helper class] that increases productivity when performing common Mongo operations. +Includes integrated object mapping between documents and POJOs. +* xref:mongodb/template-api.adoc#mongo-template.exception-translation[Exception translation] into Spring's portable Data Access Exception hierarchy. +* Feature-rich xref:mongodb/mapping/mapping.adoc[Object Mapping] integrated with Spring's Conversion Service. +* xref:mongodb/mapping/mapping.adoc#mapping-usage-annotations[Annotation-based mapping metadata] that is extensible to support other metadata formats. +* xref:mongodb/lifecycle-events.adoc[Persistence and mapping lifecycle events]. +* xref:mongodb/template-query-operations.adoc[Java-based Query, Criteria, and Update DSLs]. +* Automatic implementation of xref:repositories.adoc[Repository interfaces], including support for custom query methods. +* xref:repositories/core-extensions.adoc#mongodb.repositories.queries.type-safe[QueryDSL integration] to support type-safe queries. +* xref:mongodb/client-session-transactions.adoc[Multi-Document Transactions]. +* xref:mongodb/template-query-operations.adoc#mongo.geo-json[GeoSpatial integration]. + +For most tasks, you should use `MongoTemplate` or the Repository support, which both leverage the rich mapping functionality. 
+`MongoTemplate` is the place to look for accessing functionality such as incrementing counters or ad-hoc CRUD operations. +`MongoTemplate` also provides callback methods so that it is easy for you to get the low-level API artifacts, such as `com.mongodb.client.MongoDatabase`, to communicate directly with MongoDB. +The goal with naming conventions on various API artifacts is to copy those in the base MongoDB Java driver so you can easily map your existing knowledge onto the Spring APIs. diff --git a/src/main/antora/modules/ROOT/pages/mongodb/aggregation-framework.adoc b/src/main/antora/modules/ROOT/pages/mongodb/aggregation-framework.adoc new file mode 100644 index 0000000000..81a00683c6 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/aggregation-framework.adoc @@ -0,0 +1,691 @@ +[[mongo.aggregation]] += Aggregation Framework Support + +Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2. + +For further information, see the full https://docs.mongodb.org/manual/aggregation/[reference documentation] of the aggregation framework and other data aggregation tools for MongoDB. + +[[mongo.aggregation.basic-concepts]] +== Basic Concepts + +The Aggregation Framework support in Spring Data MongoDB is based on the following key abstractions: javadoc:org.springframework.data.mongodb.core.aggregation.Aggregation[] and javadoc:org.springframework.data.mongodb.core.aggregation.AggregationResults[]. + +* `Aggregation` ++ +An `Aggregation` represents a MongoDB `aggregate` operation and holds the description of the aggregation pipeline instructions. Aggregations are created by invoking the appropriate `newAggregation(…)` static factory method of the `Aggregation` class, which takes a list of `AggregateOperation` and an optional input class. ++ +The actual aggregate operation is run by the `aggregate` method of the `MongoTemplate`, which takes the desired output class as a parameter. 
++ +* `TypedAggregation` ++ +A `TypedAggregation`, just like an `Aggregation`, holds the instructions of the aggregation pipeline and a reference to the input type, that is used for mapping domain properties to actual document fields. ++ +At runtime, field references get checked against the given input type, considering potential `@Field` annotations. +[NOTE] +==== +Changed in 3.2 referencing non-existent properties does no longer raise errors. To restore the previous behaviour use the `strictMapping` option of `AggregationOptions`. +==== +* `AggregationDefinition` ++ +An `AggregationDefinition` represents a MongoDB aggregation pipeline operation and describes the processing that should be performed in this aggregation step. Although you could manually create an `AggregationDefinition`, we recommend using the static factory methods provided by the `Aggregate` class to construct an `AggregateOperation`. ++ +* `AggregationResults` ++ +`AggregationResults` is the container for the result of an aggregate operation. It provides access to the raw aggregation result, in the form of a `Document` to the mapped objects and other information about the aggregation. ++ +The following listing shows the canonical example for using the Spring Data MongoDB support for the MongoDB Aggregation Framework: ++ +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +Aggregation agg = newAggregation( + pipelineOP1(), + pipelineOP2(), + pipelineOPn() +); + +AggregationResults results = mongoTemplate.aggregate(agg, "INPUT_COLLECTION_NAME", OutputType.class); +List mappedResult = results.getMappedResults(); +---- + +Note that, if you provide an input class as the first parameter to the `newAggregation` method, the `MongoTemplate` derives the name of the input collection from this class. Otherwise, if you do not not specify an input class, you must provide the name of the input collection explicitly. 
If both an input class and an input collection are provided, the latter takes precedence. + +[[mongo.aggregation.supported-aggregation-operations]] +[[aggregation-stages]] +.Supported Aggregation Operations & Stages +[%collapsible] +==== +The MongoDB Aggregation Framework provides the following types of aggregation stages and operations: + +* addFields - `AddFieldsOperation` +* bucket / bucketAuto - `BucketOperation` / `BucketAutoOperation` +* count - `CountOperation` +* densify - `DensifyOperation` +* facet - `FacetOperation` +* geoNear - `GeoNearOperation` +* graphLookup - `GraphLookupOperation` +* group - `GroupOperation` +* limit - `LimitOperation` +* lookup - `LookupOperation` +* match - `MatchOperation` +* merge - `MergeOperation` +* project - `ProjectionOperation` +* redact - `RedactOperation` +* replaceRoot - `ReplaceRootOperation` +* sample - `SampleOperation` +* set - `SetOperation` +* setWindowFields - `SetWindowFieldsOperation` +* skip - `SkipOperation` +* sort / sortByCount - `SortOperation` / `SortByCountOperation` +* unionWith - `UnionWithOperation` +* unset - `UnsetOperation` +* unwind - `UnwindOperation` +==== + +[TIP] +==== +Unsupported aggregation stages (like https://www.mongodb.com/docs/atlas/atlas-search/query-syntax/[$search] for MongoDB Atlas) can be provided by implementing either `AggregationOperation`. +`Aggregation.stage` is a shortcut for registering a pipeline stage by providing its JSON or `Bson` representation. + +[source,java] +---- +Aggregation.stage(""" + { $search : { + "near": { + "path": "released", + "origin": { "$date": { "$numberLong": "..." 
} } , + "pivot": 7 + } + } + } +"""); +---- +==== + +At the time of this writing, we provide support for the following Aggregation Operators in Spring Data MongoDB: + +.Aggregation Operators currently supported by Spring Data MongoDB +[cols="2*"] +|=== +| Set Aggregation Operators +| `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue` + +| Group/Accumulator Aggregation Operators +| `addToSet`, `bottom`, `bottomN`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `firstN`, `last`, `lastN` `max`, `maxN`, `min`, `minN`, `avg`, `push`, `sum`, `top`, `topN`, `count` (+++*+++), `median`, `percentile`, `stdDevPop`, `stdDevSamp` + +| Arithmetic Aggregation Operators +| `abs`, `acos`, `acosh`, `add` (+++*+++ via `plus`), `asin`, `asin`, `atan`, `atan2`, `atanh`, `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` + +| String Aggregation Operators +| `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `regexFind`, `regexFindAll`, `regexMatch`, `replaceAll`, `replaceOne`, split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` + +| Comparison Aggregation Operators +| `eq` (+++*+++ via `is`), `gt`, `gte`, `lt`, `lte`, `ne` + +| Array Aggregation Operators +| `arrayElementAt`, `arrayToObject`, `concatArrays`, `filter`, `first`, `in`, `indexOfArray`, `isArray`, `last`, range`, `reverseArray`, `reduce`, `size`, `sortArray`, `slice`, `zip` + +| Literal Operators +| `literal` + +| Date Aggregation Operators +| `dateSubstract`, `dateTrunc`, `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateAdd`, `dateDiff`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear`, `tsIncrement`, `tsSecond` + +| 
Variable Operators +| `map` + +| Conditional Aggregation Operators +| `cond`, `ifNull`, `switch` + +| Type Aggregation Operators +| `type` + +| Convert Aggregation Operators +| `convert`, `degreesToRadians`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString` + +| Object Aggregation Operators +| `objectToArray`, `mergeObjects`, `getField`, `setField` + +| Script Aggregation Operators +| `function`, `accumulator` + +|=== + ++++*+++ The operation is mapped or added by Spring Data MongoDB. + +Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions. + +[[mongo.aggregation.projection]] +== Projection Expressions + +Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined through the `project` method of the `Aggregation` class, either by passing a list of `String` objects or an aggregation framework `Fields` object. The projection can be extended with additional fields through a fluent API by using the `and(String)` method and aliased by using the `as(String)` method. +Note that you can also define fields with aliases by using the `Fields.field` static factory method of the aggregation framework, which you can then use to construct a new `Fields` instance. References to projected fields in later aggregation stages are valid only for the field names of included fields or their aliases (including newly defined fields and their aliases). Fields not included in the projection cannot be referenced in later aggregation stages. 
The following listings show examples of projection expression: + +.Projection expression examples +==== +[source,java] +---- +// generates {$project: {name: 1, netPrice: 1}} +project("name", "netPrice") + +// generates {$project: {thing1: $thing2}} +project().and("thing1").as("thing2") + +// generates {$project: {a: 1, b: 1, thing2: $thing1}} +project("a","b").and("thing1").as("thing2") +---- +==== + +.Multi-Stage Aggregation using Projection and Sorting +==== +[source,java] +---- +// generates {$project: {name: 1, netPrice: 1}}, {$sort: {name: 1}} +project("name", "netPrice"), sort(ASC, "name") + +// generates {$project: {name: $firstname}}, {$sort: {name: 1}} +project().and("firstname").as("name"), sort(ASC, "name") + +// does not work +project().and("firstname").as("name"), sort(ASC, "firstname") +---- +==== + +More examples for project operations can be found in the `AggregationTests` class. Note that further details regarding the projection expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation. + +[[mongo.aggregation.facet]] +== Faceted Classification + +As of Version 3.4, MongoDB supports faceted classification by using the Aggregation Framework. A faceted classification uses semantic categories (either general or subject-specific) that are combined to create the full classification entry. Documents flowing through the aggregation pipeline are classified into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times. + +[[buckets]] +=== Buckets + +Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries. Bucket operations require a grouping field or a grouping expression. 
You can define them by using the `bucket()` and `bucketAuto()` methods of the `Aggregation` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. You can extend the bucket operation with additional parameters through a fluent API by using the `with…()` methods and the `andOutput(String)` method. You can alias the operation by using the `as(String)` method. Each bucket is represented as a document in the output. + +`BucketOperation` takes a defined set of boundaries to group incoming documents into these categories. Boundaries are required to be sorted. The following listing shows some examples of bucket operations: + +.Bucket operation examples +==== +[source,java] +---- +// generates {$bucket: {groupBy: $price, boundaries: [0, 100, 400]}} +bucket("price").withBoundaries(0, 100, 400); + +// generates {$bucket: {groupBy: $price, default: "Other", boundaries: [0, 100]}} +bucket("price").withBoundaries(0, 100).withDefault("Other"); + +// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { count: { $sum: 1}}}} +bucket("price").withBoundaries(0, 100).andOutputCount().as("count"); + +// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { titles: { $push: "$title"}}} +bucket("price").withBoundaries(0, 100).andOutput("title").push().as("titles"); +---- +==== + +`BucketAutoOperation` determines boundaries in an attempt to evenly distribute documents into a specified number of buckets. `BucketAutoOperation` optionally takes a granularity value that specifies the https://en.wikipedia.org/wiki/Preferred_number[preferred number] series to use to ensure that the calculated boundary edges end on preferred round numbers or on powers of 10. 
The following listing shows examples of bucket operations: + +.Bucket operation examples +==== +[source,java] +---- +// generates {$bucketAuto: {groupBy: $price, buckets: 5}} +bucketAuto("price", 5) + +// generates {$bucketAuto: {groupBy: $price, buckets: 5, granularity: "E24"}} +bucketAuto("price", 5).withGranularity(Granularities.E24).withDefault("Other"); + +// generates {$bucketAuto: {groupBy: $price, buckets: 5, output: { titles: { $push: "$title"}}} +bucketAuto("price", 5).andOutput("title").push().as("titles"); +---- +==== + +To create output fields in buckets, bucket operations can use `AggregationExpression` through `andOutput()` and xref:mongodb/aggregation-framework.adoc#mongo.aggregation.projection.expressions[SpEL expressions] through `andOutputExpression()`. + +Note that further details regarding bucket expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and +https://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation. + +[[multi-faceted-aggregation]] +=== Multi-faceted Aggregation + +Multiple aggregation pipelines can be used to create multi-faceted aggregations that characterize data across multiple dimensions (or facets) within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is how many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, and other factors. + +You can define a `FacetOperation` by using the `facet()` method of the `Aggregation` class. You can customize it with multiple aggregation pipelines by using the `and()` method. Each sub-pipeline has its own field in the output document where its results are stored as an array of documents. 
+ +Sub-pipelines can project and filter input documents prior to grouping. Common use cases include extraction of date parts or calculations before categorization. The following listing shows facet operation examples: + +.Facet operation examples +==== +[source,java] +---- +// generates {$facet: {categorizedByPrice: [ { $match: { price: {$exists : true}}}, { $bucketAuto: {groupBy: $price, buckets: 5}}]}} +facet(match(Criteria.where("price").exists(true)), bucketAuto("price", 5)).as("categorizedByPrice")) + +// generates {$facet: {categorizedByCountry: [ { $match: { country: {$exists : true}}}, { $sortByCount: "$country"}]}} +facet(match(Criteria.where("country").exists(true)), sortByCount("country")).as("categorizedByCountry")) + +// generates {$facet: {categorizedByYear: [ +// { $project: { title: 1, publicationYear: { $year: "publicationDate"}}}, +// { $bucketAuto: {groupBy: $price, buckets: 5, output: { titles: {$push:"$title"}}} +// ]}} +facet(project("title").and("publicationDate").extractYear().as("publicationYear"), + bucketAuto("publicationYear", 5).andOutput("title").push().as("titles")) + .as("categorizedByYear")) +---- +==== + +Note that further details regarding facet operation can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation. + +[[mongo.aggregation.sort-by-count]] +=== Sort By Count + +Sort by count operations group incoming documents based on the value of a specified expression, compute the count of documents in each distinct group, and sort the results by count. It offers a handy shortcut to apply sorting when using xref:mongodb/aggregation-framework.adoc#mongo.aggregation.facet[Faceted Classification]. Sort by count operations require a grouping field or grouping expression. 
The following listing shows a sort by count example: + +.Sort by count example +==== +[source,java] +---- +// generates { $sortByCount: "$country" } +sortByCount("country"); +---- +==== + +A sort by count operation is equivalent to the following BSON (Binary JSON): + +---- +{ $group: { _id: , count: { $sum: 1 } } }, +{ $sort: { count: -1 } } +---- + +[[mongo.aggregation.projection.expressions]] +=== Spring Expression Support in Projection Expressions + +We support the use of SpEL expressions in projection expressions through the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This feature lets you define the desired expression as a SpEL expression. On running a query, the SpEL expression is translated into a corresponding MongoDB projection expression part. This arrangement makes it much easier to express complex calculations. + +[[complex-calculations-with-spel-expressions]] +==== Complex Calculations with SpEL expressions + +Consider the following SpEL expression: + +[source,java] +---- +1 + (q + 1) / (q - 1) +---- + +The preceding expression is translated into the following projection expression part: + +[source,javascript] +---- +{ "$add" : [ 1, { + "$divide" : [ { + "$add":["$q", 1]}, { + "$subtract":[ "$q", 1]} + ] +}]} +---- + +You can see examples in more context in xref:mongodb/aggregation-framework.adoc#mongo.aggregation.examples.example5[Aggregation Framework Example 5] and xref:mongodb/aggregation-framework.adoc#mongo.aggregation.examples.example6[Aggregation Framework Example 6]. +You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`. 
+ +.Supported SpEL transformations +[%collapsible] +==== +[%header,cols="2"] +|=== +| SpEL Expression +| Mongo Expression Part +| a == b +| { $eq : [$a, $b] } +| a != b +| { $ne : [$a , $b] } +| a > b +| { $gt : [$a, $b] } +| a >= b +| { $gte : [$a, $b] } +| a < b +| { $lt : [$a, $b] } +| a <= b +| { $lte : [$a, $b] } +| a + b +| { $add : [$a, $b] } +| a - b +| { $subtract : [$a, $b] } +| a * b +| { $multiply : [$a, $b] } +| a / b +| { $divide : [$a, $b] } +| a^b +| { $pow : [$a, $b] } +| a % b +| { $mod : [$a, $b] } +| a && b +| { $and : [$a, $b] } +| a \|\| b +| { $or : [$a, $b] } +| !a +| { $not : [$a] } +|=== +==== + +In addition to the transformations shown in the preceding table, you can use standard SpEL operations such as `new` to (for example) create arrays and reference expressions through their names (followed by the arguments to use in brackets). The following example shows how to create an array in this fashion: + +[source,java] +---- +// { $setEquals : [$a, [5, 8, 13] ] } +.andExpression("setEquals(a, new int[]{5, 8, 13})"); +---- + +[[mongo.aggregation.examples]] +=== Aggregation Framework Examples + +The examples in this section demonstrate the usage patterns for the MongoDB Aggregation Framework with Spring Data MongoDB. + +[[mongo.aggregation.examples.example1]] +==== Aggregation Framework Example 1 + +In this introductory example, we want to aggregate a list of tags to get the occurrence count of a particular tag from a MongoDB collection (called `tags`) sorted by the occurrence count in descending order. This example demonstrates the usage of grouping, sorting, projections (selection), and unwinding (result splitting). 
 + +[source,java] +---- +class TagCount { + String tag; + int n; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +Aggregation agg = newAggregation( + project("tags"), + unwind("tags"), + group("tags").count().as("n"), + project("n").and("tag").previousOperation(), + sort(DESC, "n") +); + +AggregationResults<TagCount> results = mongoTemplate.aggregate(agg, "tags", TagCount.class); +List<TagCount> tagCount = results.getMappedResults(); +---- + +The preceding listing uses the following algorithm: + +. Create a new aggregation by using the `newAggregation` static factory method, to which we pass a list of aggregation operations. These aggregate operations define the aggregation pipeline of our `Aggregation`. +. Use the `project` operation to select the `tags` field (which is an array of strings) from the input collection. +. Use the `unwind` operation to generate a new document for each tag within the `tags` array. +. Use the `group` operation to define a group for each `tags` value for which we aggregate the occurrence count (by using the `count` aggregation operator and collecting the result in a new field called `n`). +. Select the `n` field and create an alias for the ID field generated from the previous group operation (hence the call to `previousOperation()`) with a name of `tag`. +. Use the `sort` operation to sort the resulting list of tags by their occurrence count in descending order. +. Call the `aggregate` method on `MongoTemplate` to let MongoDB perform the actual aggregation operation, with the created `Aggregation` as an argument. + +Note that the input collection is explicitly specified as the `tags` parameter to the `aggregate` method. If the name of the input collection is not specified explicitly, it is derived from the input class passed as the first parameter to the `newAggregation` method. 
+ +[[mongo.aggregation.examples.example2]] +==== Aggregation Framework Example 2 + +This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state[Largest and Smallest Cities by State] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return the smallest and largest cities by population for each state by using the aggregation framework. This example demonstrates grouping, sorting, and projections (selection). + +[source,java] +---- +class ZipInfo { + String id; + String city; + String state; + @Field("pop") int population; + @Field("loc") double[] location; +} + +class City { + String name; + int population; +} + +class ZipInfoStats { + String id; + String state; + City biggestCity; + City smallestCity; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation aggregation = newAggregation(ZipInfo.class, + group("state", "city") + .sum("population").as("pop"), + sort(ASC, "pop", "state", "city"), + group("state") + .last("city").as("biggestCity") + .last("pop").as("biggestPop") + .first("city").as("smallestCity") + .first("pop").as("smallestPop"), + project() + .and("state").previousOperation() + .and("biggestCity") + .nested(bind("name", "biggestCity").and("population", "biggestPop")) + .and("smallestCity") + .nested(bind("name", "smallestCity").and("population", "smallestPop")), + sort(ASC, "state") +); + +AggregationResults result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class); +ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0); +---- + +Note that the `ZipInfo` class maps the structure of the given input-collection. The `ZipInfoStats` class defines the structure in the desired output format. + +The preceding listings use the following algorithm: + +. 
Use the `group` operation to define a group from the input-collection. The grouping criteria is the combination of the `state` and `city` fields, which forms the ID structure of the group. We aggregate the value of the `population` property from the grouped elements by using the `sum` operator and save the result in the `pop` field. +. Use the `sort` operation to sort the intermediate-result by the `pop`, `state` and `city` fields, in ascending order, such that the smallest city is at the top and the biggest city is at the bottom of the result. Note that the sorting on `state` and `city` is implicitly performed against the group ID fields (which Spring Data MongoDB handled). +. Use a `group` operation again to group the intermediate result by `state`. Note that `state` again implicitly references a group ID field. We select the name and the population count of the biggest and smallest city with calls to the `last(…)` and `first(...)` operators, respectively, in the `project` operation. +. Select the `state` field from the previous `group` operation. Note that `state` again implicitly references a group ID field. Because we do not want an implicitly generated ID to appear, we exclude the ID from the previous operation by using `and(previousOperation()).exclude()`. Because we want to populate the nested `City` structures in our output class, we have to emit appropriate sub-documents by using the nested method. +. Sort the resulting list of `StateStats` by their state name in ascending order in the `sort` operation. + +Note that we derive the name of the input collection from the `ZipInfo` class passed as the first parameter to the `newAggregation` method. 
+ +[[mongo.aggregation.examples.example3]] +==== Aggregation Framework Example 3 + +This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#states-with-populations-over-10-million[States with Populations Over 10 Million] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return all states with a population greater than 10 million, using the aggregation framework. This example demonstrates grouping, sorting, and matching (filtering). + +[source,java] +---- +class StateStats { + @Id String id; + String state; + @Field("totalPop") int totalPopulation; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation agg = newAggregation(ZipInfo.class, + group("state").sum("population").as("totalPop"), + sort(ASC, previousOperation(), "totalPop"), + match(where("totalPop").gte(10 * 1000 * 1000)) +); + +AggregationResults result = mongoTemplate.aggregate(agg, StateStats.class); +List stateStatsList = result.getMappedResults(); +---- + +The preceding listings use the following algorithm: + +. Group the input collection by the `state` field and calculate the sum of the `population` field and store the result in the new field `"totalPop"`. +. Sort the intermediate result by the id-reference of the previous group operation in addition to the `"totalPop"` field in ascending order. +. Filter the intermediate result by using a `match` operation which accepts a `Criteria` query as an argument. + +Note that we derive the name of the input collection from the `ZipInfo` class passed as first parameter to the `newAggregation` method. + +[[mongo.aggregation.examples.example4]] +==== Aggregation Framework Example 4 + +This example demonstrates the use of simple arithmetic operations in the projection operation. 
+ +[source,java] +---- +class Product { + String id; + String name; + double netPrice; + int spaceUnits; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation agg = newAggregation(Product.class, + project("name", "netPrice") + .and("netPrice").plus(1).as("netPricePlus1") + .and("netPrice").minus(1).as("netPriceMinus1") + .and("netPrice").multiply(1.19).as("grossPrice") + .and("netPrice").divide(2).as("netPriceDiv2") + .and("spaceUnits").mod(2).as("spaceUnitsMod2") +); + +AggregationResults result = mongoTemplate.aggregate(agg, Document.class); +List resultList = result.getMappedResults(); +---- + +Note that we derive the name of the input collection from the `Product` class passed as first parameter to the `newAggregation` method. + +[[mongo.aggregation.examples.example5]] +==== Aggregation Framework Example 5 + +This example demonstrates the use of simple arithmetic operations derived from SpEL Expressions in the projection operation. 
 + +[source,java] +---- +class Product { + String id; + String name; + double netPrice; + int spaceUnits; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation<Product> agg = newAggregation(Product.class, + project("name", "netPrice") + .andExpression("netPrice + 1").as("netPricePlus1") + .andExpression("netPrice - 1").as("netPriceMinus1") + .andExpression("netPrice / 2").as("netPriceDiv2") + .andExpression("netPrice * 1.19").as("grossPrice") + .andExpression("spaceUnits % 2").as("spaceUnitsMod2") + .andExpression("(netPrice * 0.8 + 1.2) * 1.19").as("grossPriceIncludingDiscountAndCharge") + +); + +AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class); +List<Document> resultList = result.getMappedResults(); +---- + +[[mongo.aggregation.examples.example6]] +==== Aggregation Framework Example 6 + +This example demonstrates the use of complex arithmetic operations derived from SpEL Expressions in the projection operation. + +Note: The additional parameters passed to the `andExpression` method can be referenced with indexer expressions according to their position. In this example, we reference the first parameter of the parameters array with `[0]`. When the SpEL expression is transformed into a MongoDB aggregation framework expression, external parameter expressions are replaced with their respective values. 
+ +[source,java] +---- +class Product { + String id; + String name; + double netPrice; + int spaceUnits; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +double shippingCosts = 1.2; + +TypedAggregation agg = newAggregation(Product.class, + project("name", "netPrice") + .andExpression("(netPrice * (1-discountRate) + [0]) * (1+taxRate)", shippingCosts).as("salesPrice") +); + +AggregationResults result = mongoTemplate.aggregate(agg, Document.class); +List resultList = result.getMappedResults(); +---- + +Note that we can also refer to other fields of the document within the SpEL expression. + +[[mongo.aggregation.examples.example7]] +==== Aggregation Framework Example 7 + +This example uses conditional projection. It is derived from the https://docs.mongodb.com/manual/reference/operator/aggregation/cond/[$cond reference documentation]. + +[source,java] +---- +public class InventoryItem { + + @Id int id; + String item; + String description; + int qty; +} + +public class InventoryItemProjection { + + @Id int id; + String item; + String description; + int qty; + int discount +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation agg = newAggregation(InventoryItem.class, + project("item").and("discount") + .applyCondition(ConditionalOperator.newBuilder().when(Criteria.where("qty").gte(250)) + .then(30) + .otherwise(20)) + .and(ifNull("description", "Unspecified")).as("description") +); + +AggregationResults result = mongoTemplate.aggregate(agg, "inventory", InventoryItemProjection.class); +List stateStatsList = result.getMappedResults(); +---- + +This one-step aggregation uses a projection operation with the `inventory` collection. We project the `discount` field by using a conditional operation for all inventory items that have a `qty` greater than or equal to `250`. 
A second conditional projection is performed for the `description` field. We apply the `Unspecified` description to all items that either do not have a `description` field or items that have a `null` description. + +As of MongoDB 3.6, it is possible to exclude fields from the projection by using a conditional expression. + +.Conditional aggregation projection +==== +[source,java] +---- +TypedAggregation agg = Aggregation.newAggregation(Book.class, + project("title") + .and(ConditionalOperators.when(ComparisonOperators.valueOf("author.middle") <1> + .equalToValue("")) <2> + .then("$$REMOVE") <3> + .otherwiseValueOf("author.middle") <4> + ) + .as("author.middle")); +---- +<1> If the value of the field `author.middle` +<2> does not contain a value, +<3> then use https://docs.mongodb.com/manual/reference/aggregation-variables/#variable.REMOVE[``$$REMOVE``] to exclude the field. +<4> Otherwise, add the field value of `author.middle`. +==== diff --git a/src/main/antora/modules/ROOT/pages/mongodb/auditing.adoc b/src/main/antora/modules/ROOT/pages/mongodb/auditing.adoc new file mode 100644 index 0000000000..014dde02ca --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/auditing.adoc @@ -0,0 +1,47 @@ +[[mongo.auditing]] += Auditing + +Since Spring Data MongoDB 1.4, auditing can be enabled by annotating a configuration class with the `@EnableMongoAuditing` annotation, as the following example shows: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +@EnableMongoAuditing +class Config { + + @Bean + public AuditorAware myAuditorProvider() { + return new AuditorAwareImpl(); + } +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +@Configuration +@EnableReactiveMongoAuditing +class Config { + + @Bean + public ReactiveAuditorAware myAuditorProvider() { + return new ReactiveAuditorAwareImpl(); + } +} +---- + +XML:: ++ 
+[source,xml,indent=0,subs="verbatim,quotes",role="secondary"] +---- + +---- +====== + +If you expose a bean of type `AuditorAware` / `ReactiveAuditorAware` to the `ApplicationContext`, the auditing infrastructure picks it up automatically and uses it to determine the current user to be set on domain types. +If you have multiple implementations registered in the `ApplicationContext`, you can select the one to be used by explicitly setting the `auditorAwareRef` attribute of `@EnableMongoAuditing`. diff --git a/src/main/antora/modules/ROOT/pages/mongodb/change-streams.adoc b/src/main/antora/modules/ROOT/pages/mongodb/change-streams.adoc new file mode 100644 index 0000000000..1f999500bf --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/change-streams.adoc @@ -0,0 +1,94 @@ +[[change-streams]] += Change Streams + +As of MongoDB 3.6, https://docs.mongodb.com/manual/changeStreams/[Change Streams] let applications get notified about changes without having to tail the oplog. + +NOTE: Change Stream support is only possible for replica sets or for a sharded cluster. + +Change Streams can be consumed with both, the imperative and the reactive MongoDB Java driver. It is highly recommended to use the reactive variant, as it is less resource-intensive. However, if you cannot use the reactive API, you can still obtain change events by using the messaging concept that is already prevalent in the Spring ecosystem. + +It is possible to watch both on a collection as well as database level, whereas the database level variant publishes +changes from all collections within the database. When subscribing to a database change stream, make sure to use a + suitable type for the event type as conversion might not apply correctly across different entity types. +In doubt, use `Document`. 
+ +[[change-streams-with-messagelistener]] +== Change Streams with `MessageListener` + +Listening to a https://docs.mongodb.com/manual/tutorial/change-streams-example/[Change Stream by using a Sync Driver] creates a long running, blocking task that needs to be delegated to a separate component. +In this case, we need to first create a javadoc:org.springframework.data.mongodb.core.messaging.MessageListenerContainer[] which will be the main entry point for running the specific `SubscriptionRequest` tasks. +Spring Data MongoDB already ships with a default implementation that operates on `MongoTemplate` and is capable of creating and running `Task` instances for a javadoc:org.springframework.data.mongodb.core.messaging.ChangeStreamRequest[]. + +The following example shows how to use Change Streams with `MessageListener` instances: + +.Change Streams with `MessageListener` instances +==== +[source,java] +---- +MessageListenerContainer container = new DefaultMessageListenerContainer(template); +container.start(); <1> + +MessageListener, User> listener = System.out::println; <2> +ChangeStreamRequestOptions options = new ChangeStreamRequestOptions("db", "user", ChangeStreamOptions.empty()); <3> + +Subscription subscription = container.register(new ChangeStreamRequest<>(listener, options), User.class); <4> + +// ... + +container.stop(); <5> +---- +<1> Starting the container initializes the resources and starts `Task` instances for already registered `SubscriptionRequest` instances. Requests added after startup are ran immediately. +<2> Define the listener called when a `Message` is received. The `Message#getBody()` is converted to the requested domain type. Use `Document` to receive raw results without conversion. +<3> Set the collection to listen to and provide additional options through `ChangeStreamOptions`. +<4> Register the request. The returned `Subscription` can be used to check the current `Task` state and cancel it to free resources. 
+<5> Do not forget to stop the container once you are sure you no longer need it. Doing so stops all running `Task` instances within the container. +==== + +[NOTE] +==== +Errors while processing are passed on to an `org.springframework.util.ErrorHandler`. If not stated otherwise a log appending `ErrorHandler` gets applied by default. + +Please use `register(request, body, errorHandler)` to provide additional functionality. +==== + +[[reactive-change-streams]] +== Reactive Change Streams + +Subscribing to Change Streams with the reactive API is a more natural approach to work with streams. Still, the essential building blocks, such as `ChangeStreamOptions`, remain the same. The following example shows how to use Change Streams emitting ``ChangeStreamEvent``s: + +.Change Streams emitting `ChangeStreamEvent` +==== +[source,java] +---- +Flux> flux = reactiveTemplate.changeStream(User.class) <1> + .watchCollection("people") + .filter(where("age").gte(38)) <2> + .listen(); <3> +---- +<1> The event target type the underlying document should be converted to. Leave this out to receive raw results without conversion. +<2> Use an aggregation pipeline or just a query `Criteria` to filter events. +<3> Obtain a `Flux` of change stream events. The `ChangeStreamEvent#getBody()` is converted to the requested domain type from (2). +==== + +[[resuming-change-streams]] +== Resuming Change Streams + +Change Streams can be resumed and resume emitting events where you left. To resume the stream, you need to supply either a resume +token or the last known server time (in UTC). Use javadoc:org.springframework.data.mongodb.core.ChangeStreamOptions[] to set the value accordingly. 
+ +The following example shows how to set the resume offset using server time: + +.Resume a Change Stream +==== +[source,java] +---- +Flux> resumed = template.changeStream(User.class) + .watchCollection("people") + .resumeAt(Instant.now().minusSeconds(1)) <1> + .listen(); +---- +<1> You may obtain the server time of an `ChangeStreamEvent` through the `getTimestamp` method or use the `resumeToken` +exposed through `getResumeToken`. +==== + +TIP: In some cases an `Instant` might not be a precise enough measure when resuming a Change Stream. Use a MongoDB native +https://docs.mongodb.com/manual/reference/bson-types/#timestamps[BsonTimestamp] for that purpose. diff --git a/src/main/antora/modules/ROOT/pages/mongodb/client-session-transactions.adoc b/src/main/antora/modules/ROOT/pages/mongodb/client-session-transactions.adoc new file mode 100644 index 0000000000..f825690d7a --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/client-session-transactions.adoc @@ -0,0 +1,483 @@ +[[mongo.sessions]] += Sessions & Transactions + +As of version 3.6, MongoDB supports the concept of sessions. +The use of sessions enables MongoDB's https://docs.mongodb.com/manual/core/read-isolation-consistency-recency/#causal-consistency[Causal Consistency] model, which guarantees running operations in an order that respects their causal relationships. +Those are split into `ServerSession` instances and `ClientSession` instances. +In this section, when we speak of a session, we refer to `ClientSession`. + +WARNING: Operations within a client session are not isolated from operations outside the session. + +Both `MongoOperations` and `ReactiveMongoOperations` provide gateway methods for tying a `ClientSession` to the operations. +`MongoCollection` and `MongoDatabase` use session proxy objects that implement MongoDB's collection and database interfaces, so you need not add a session on each call. 
+This means that a potential call to `MongoCollection#find()` is delegated to `MongoCollection#find(ClientSession)`. + +NOTE: Methods such as `(Reactive)MongoOperations#getCollection` return native MongoDB Java Driver gateway objects (such as `MongoCollection`) that themselves offer dedicated methods for `ClientSession`. +These methods are *NOT* session-proxied. +You should provide the `ClientSession` where needed when interacting directly with a `MongoCollection` or `MongoDatabase` and not through one of the `#execute` callbacks on `MongoOperations`. + +[[mongo.sessions.sync]] +[[mongo.sessions.reactive]] +== ClientSession support + +The following example shows the usage of a session: + +[tabs] +====== +Imperative:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +ClientSessionOptions sessionOptions = ClientSessionOptions.builder() + .causallyConsistent(true) + .build(); + +ClientSession session = client.startSession(sessionOptions); <1> + +template.withSession(() -> session) + .execute(action -> { + + Query query = query(where("name").is("Durzo Blint")); + Person durzo = action.findOne(query, Person.class); <2> + + Person azoth = new Person("Kylar Stern"); + azoth.setMaster(durzo); + + action.insert(azoth); <3> + + return azoth; + }); + +session.close() <4> +---- + +<1> Obtain a new session from the server. +<2> Use `MongoOperation` methods as before. +The `ClientSession` gets applied automatically. +<3> Make sure to close the `ClientSession`. +<4> Close the session. + +WARNING: When dealing with `DBRef` instances, especially lazily loaded ones, it is essential to *not* close the `ClientSession` before all data is loaded. +Otherwise, lazy fetch fails. 
+==== + +Reactive:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +ClientSessionOptions sessionOptions = ClientSessionOptions.builder() +.causallyConsistent(true) +.build(); + +Publisher session = client.startSession(sessionOptions); <1> + +template.withSession(session) +.execute(action -> { + + Query query = query(where("name").is("Durzo Blint")); + return action.findOne(query, Person.class) + .flatMap(durzo -> { + + Person azoth = new Person("Kylar Stern"); + azoth.setMaster(durzo); + + return action.insert(azoth); <2> + }); + }, ClientSession::close) <3> + .subscribe(); <4> +---- + +<1> Obtain a `Publisher` for new session retrieval. +<2> Use `ReactiveMongoOperation` methods as before. +The `ClientSession` is obtained and applied automatically. +<3> Make sure to close the `ClientSession`. +<4> Nothing happens until you subscribe. +See https://projectreactor.io/docs/core/release/reference/#reactive.subscribe[the Project Reactor Reference Guide] for details. + +By using a `Publisher` that provides the actual session, you can defer session acquisition to the point of actual subscription. +Still, you need to close the session when done, so as to not pollute the server with stale sessions. +Use the `doFinally` hook on `execute` to call `ClientSession#close()` when you no longer need the session. +If you prefer having more control over the session itself, you can obtain the `ClientSession` through the driver and provide it through a `Supplier`. + +NOTE: Reactive use of `ClientSession` is limited to Template API usage. +There's currently no session integration with reactive repositories. +==== +====== + +[[mongo.transactions]] +== MongoDB Transactions + +As of version 4, MongoDB supports https://www.mongodb.com/transactions[Transactions]. +Transactions are built on top of xref:mongodb/client-session-transactions.adoc[Sessions] and, consequently, require an active `ClientSession`. 
+ +NOTE: Unless you specify a `MongoTransactionManager` within your application context, transaction support is *DISABLED*. +You can use `setSessionSynchronization(ALWAYS)` to participate in ongoing non-native MongoDB transactions. + +To get full programmatic control over transactions, you may want to use the session callback on `MongoOperations`. + +The following example shows programmatic transaction control: + +.Programmatic transactions +[tabs] +====== +Imperative:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +ClientSession session = client.startSession(options); <1> + +template.withSession(session) + .execute(action -> { + + session.startTransaction(); <2> + + try { + + Step step = // ...; + action.insert(step); + + process(step); + + action.update(Step.class).apply(Update.set("state", // ... + + session.commitTransaction(); <3> + + } catch (RuntimeException e) { + session.abortTransaction(); <4> + } + }, ClientSession::close) <5> +---- + +<1> Obtain a new `ClientSession`. +<2> Start the transaction. +<3> If everything works out as expected, commit the changes. +<4> Something broke, so roll back everything. +<5> Do not forget to close the session when done. + +The preceding example lets you have full control over transactional behavior while using the session scoped `MongoOperations` instance within the callback to ensure the session is passed on to every server call. +To avoid some of the overhead that comes with this approach, you can use a `TransactionTemplate` to take away some of the noise of manual transaction flow. 
+====
+
+Reactive::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+Mono<DeleteResult> result = Mono
+    .from(client.startSession()) <1>
+
+    .flatMap(session -> {
+        session.startTransaction(); <2>
+
+        return Mono.from(collection.deleteMany(session, ...)) <3>
+
+            .onErrorResume(e -> Mono.from(session.abortTransaction()).then(Mono.error(e))) <4>
+
+            .flatMap(val -> Mono.from(session.commitTransaction()).then(Mono.just(val))) <5>
+
+            .doFinally(signal -> session.close()); <6>
+    });
+----
+
+<1> First we obviously need to initiate the session.
+<2> Once we have the `ClientSession` at hand, start the transaction.
+<3> Operate within the transaction by passing on the `ClientSession` to the operation.
+<4> If the operation completes exceptionally, we need to stop the transaction and preserve the error.
+<5> Or of course, commit the changes in case of success.
+Still preserving the operation's result.
+<6> Lastly, we need to make sure to close the session.
+
+The tricky part of the above operation is keeping the main flow's `DeleteResult` instead of the transaction outcome published via either `commitTransaction()` or `abortTransaction()`, which leads to a rather complicated setup.
+
+NOTE: Unless you specify a `ReactiveMongoTransactionManager` within your application context, transaction support is *DISABLED*.
+You can use `setSessionSynchronization(ALWAYS)` to participate in ongoing non-native MongoDB transactions.
+====
+======
+
+[[mongo.transactions.transaction-template]]
+[[mongo.transactions.reactive-operator]]
+== Transactions with TransactionTemplate / TransactionalOperator
+
+Spring Data MongoDB transactions support both `TransactionTemplate` and `TransactionalOperator`.
+
+.Transactions with `TransactionTemplate` / `TransactionalOperator`
[tabs]
+======
+Imperative::
++
+====
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+template.setSessionSynchronization(ALWAYS); <1>
+
+// ...
+ +TransactionTemplate txTemplate = new TransactionTemplate(anyTxManager); <2> + +txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus status) { <3> + + Step step = // ...; + template.insert(step); + + process(step); + + template.update(Step.class).apply(Update.set("state", // ... + } +}); +---- + +<1> Enable transaction synchronization during Template API configuration. +<2> Create the `TransactionTemplate` using the provided `PlatformTransactionManager`. +<3> Within the callback the `ClientSession` and transaction are already registered. + +CAUTION: Changing state of `MongoTemplate` during runtime (as you might think would be possible in item 1 of the preceding listing) can cause threading and visibility issues. +==== + +Reactive:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +template.setSessionSynchronization(ALWAYS); <1> + +// ... + +TransactionalOperator rxtx = TransactionalOperator.create(anyTxManager, + new DefaultTransactionDefinition()); <2> + + +Step step = // ...; +template.insert(step); + +Mono process(step) + .then(template.update(Step.class).apply(Update.set("state", …)) + .as(rxtx::transactional) <3> + .then(); +---- + +<1> Enable transaction synchronization for Transactional participation. +<2> Create the `TransactionalOperator` using the provided `ReactiveTransactionManager`. +<3> `TransactionalOperator.transactional(…)` provides transaction management for all upstream operations. +==== +====== + +[[mongo.transactions.tx-manager]] +[[mongo.transactions.reactive-tx-manager]] +== Transactions with MongoTransactionManager & ReactiveMongoTransactionManager + +`MongoTransactionManager` / `ReactiveMongoTransactionManager` is the gateway to the well known Spring transaction support. +It lets applications use link:{springDocsUrl}/data-access.html#transaction[the managed transaction features of Spring]. 
+The `MongoTransactionManager` binds a `ClientSession` to the thread whereas the `ReactiveMongoTransactionManager` is using the `ReactorContext` for this. +`MongoTemplate` detects the session and operates on these resources which are associated with the transaction accordingly. +`MongoTemplate` can also participate in other, ongoing transactions. +The following example shows how to create and use transactions with a `MongoTransactionManager`: + +.Transactions with `MongoTransactionManager` / `ReactiveMongoTransactionManager` +[tabs] +====== +Imperative:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +static class Config extends AbstractMongoClientConfiguration { + + @Bean + MongoTransactionManager transactionManager(MongoDatabaseFactory dbFactory) { <1> + return new MongoTransactionManager(dbFactory); + } + + // ... +} + +@Component +public class StateService { + + @Transactional + void someBusinessFunction(Step step) { <2> + + template.insert(step); + + process(step); + + template.update(Step.class).apply(Update.set("state", // ... + }; +}); + +---- + +<1> Register `MongoTransactionManager` in the application context. +<2> Mark methods as transactional. + +NOTE: `@Transactional(readOnly = true)` advises `MongoTransactionManager` to also start a transaction that adds the +`ClientSession` to outgoing requests. +==== + +Reactive:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +@Configuration +public class Config extends AbstractReactiveMongoConfiguration { + + @Bean + ReactiveMongoTransactionManager transactionManager(ReactiveMongoDatabaseFactory factory) { <1> + return new ReactiveMongoTransactionManager(factory); + } + + // ... 
+} + +@Service +public class StateService { + + @Transactional + Mono someBusinessFunction(Step step) { <2> + + return template.insert(step) + .then(process(step)) + .then(template.update(Step.class).apply(Update.set("state", …)); + }; +}); + +---- + +<1> Register `ReactiveMongoTransactionManager` in the application context. +<2> Mark methods as transactional. + +NOTE: `@Transactional(readOnly = true)` advises `ReactiveMongoTransactionManager` to also start a transaction that adds the `ClientSession` to outgoing requests. +==== +====== + +[[mongo.transaction.options]] +=== Controlling MongoDB-specific Transaction Options + +Transactional service methods can require specific transaction options to run a transaction. +Spring Data MongoDB's transaction managers support evaluation of transaction labels such as `@Transactional(label = { "mongo:readConcern=available" })`. + +By default, the label namespace using the `mongo:` prefix is evaluated by `MongoTransactionOptionsResolver` that is configured by default. +Transaction labels are provided by `TransactionAttribute` and available to programmatic transaction control through `TransactionTemplate` and `TransactionalOperator`. +Due to their declarative nature, `@Transactional(label = …)` provides a good starting point that also can serve as documentation. + +Currently, the following options are supported: + +Max Commit Time:: + +Controls the maximum execution time on the server for the commitTransaction operation. +The format of the value corresponds with ISO-8601 duration format as used with `Duration.parse(…)`. ++ +Usage: +`mongo:maxCommitTime=PT1S` + +Read Concern:: + +Sets the read concern for the transaction. ++ +Usage: +`mongo:readConcern=LOCAL|MAJORITY|LINEARIZABLE|SNAPSHOT|AVAILABLE` + +Read Preference:: + +Sets the read preference for the transaction. ++ +Usage: +`mongo:readPreference=PRIMARY|SECONDARY|SECONDARY_PREFERRED|PRIMARY_PREFERRED|NEAREST` + +Write Concern:: + +Sets the write concern for the transaction. 
++
+Usage:
+`mongo:writeConcern=ACKNOWLEDGED|W1|W2|W3|UNACKNOWLEDGED|JOURNALED|MAJORITY`
+
+NOTE: Nested transactions that join the outer transaction do not affect the initial transaction options as the transaction is already started.
+Transaction options are only applied when a new transaction is started.
+
+[[mongo.transactions.behavior]]
+== Special behavior inside transactions
+
+Inside transactions, the MongoDB server has a slightly different behavior.
+
+*Connection Settings*
+
+The MongoDB drivers offer a dedicated replica set name configuration option turning the driver into auto-detection mode.
+This option helps identify the primary replica set nodes and command routing during a transaction.
+
+NOTE: Make sure to add `replicaSet` to the MongoDB URI.
+Please refer to https://docs.mongodb.com/manual/reference/connection-string/#connections-connection-options[connection string options] for further details.
+
+*Collection Operations*
+
+MongoDB does *not* support collection operations, such as collection creation, within a transaction.
+This also affects the on-the-fly collection creation that happens on first usage.
+Therefore make sure to have all required structures in place.
+
+*Transient Errors*
+
+MongoDB can add special labels to errors raised during transactional operations.
+Those may indicate transient failures that might vanish by merely retrying the operation.
+We highly recommend https://github.com/spring-projects/spring-retry[Spring Retry] for those purposes.
+Nevertheless one may override `MongoTransactionManager#doCommit(MongoTransactionObject)` to implement a https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation[Retry Commit Operation]
+behavior as outlined in the MongoDB reference manual.
+
+*Count*
+
+MongoDB `count` operates upon collection statistics which may not reflect the actual situation within a transaction.
+The server responds with _error 50851_ when issuing a `count` command inside of a multi-document transaction.
+Once `MongoTemplate` detects an active transaction, all exposed `count()` methods are converted and delegated to the aggregation framework using `$match` and `$count` operators, preserving `Query` settings, such as `collation`. + +Restrictions apply when using geo commands inside of the aggregation count helper. +The following operators cannot be used and must be replaced with a different operator: + +* `$where` -> `$expr` +* `$near` -> `$geoWithin` with `$center` +* `$nearSphere` -> `$geoWithin` with `$centerSphere` + +Queries using `Criteria.near(…)` and `Criteria.nearSphere(…)` must be rewritten to `Criteria.within(…)` respective `Criteria.withinSphere(…)`. +Same applies for the `near` query keyword in repository query methods that must be changed to `within`. +See also MongoDB JIRA ticket https://jira.mongodb.org/browse/DRIVERS-518[DRIVERS-518] for further reference. + +The following snippet shows `count` usage inside the session-bound closure: + +==== +[source,javascript] +---- +session.startTransaction(); + +template.withSession(session) + .execute(action -> { + action.count(query(where("state").is("active")), Step.class) + ... +---- +==== + +The snippet above materializes in the following command: + +==== +[source,javascript] +---- +db.collection.aggregate( + [ + { $match: { state: "active" } }, + { $count: "totalEntityCount" } + ] +) +---- +==== + +instead of: + +==== +[source,javascript] +---- +db.collection.find( { state: "active" } ).count() +---- +==== diff --git a/src/main/antora/modules/ROOT/pages/mongodb/collation.adoc b/src/main/antora/modules/ROOT/pages/mongodb/collation.adoc new file mode 100644 index 0000000000..0f99995b92 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/collation.adoc @@ -0,0 +1,90 @@ +[[mongo.collation]] += Collations + +Since version 3.4, MongoDB supports collations for collection and index creation and various query operations. 
+Collations define string comparison rules based on the http://userguide.icu-project.org/collation/concepts[ICU collations].
+A collation document consists of various properties that are encapsulated in `Collation`, as the following listing shows:
+
+====
+[source,java]
+----
+Collation collation = Collation.of("fr") <1>
+
+  .strength(ComparisonLevel.secondary() <2>
+    .includeCase())
+
+  .numericOrderingEnabled() <3>
+
+  .alternate(Alternate.shifted().punct()) <4>
+
+  .forwardDiacriticSort() <5>
+
+  .normalizationEnabled(); <6>
+----
+<1> `Collation` requires a locale for creation. This can be either a string representation of the locale, a `Locale` (considering language, country, and variant) or a `CollationLocale`. The locale is mandatory for creation.
+<2> Collation strength defines comparison levels that denote differences between characters. You can configure various options (case-sensitivity, case-ordering, and others), depending on the selected strength.
+<3> Specify whether to compare numeric strings as numbers or as strings.
+<4> Specify whether the collation should consider whitespace and punctuation as base characters for purposes of comparison.
+<5> Specify whether strings with diacritics sort from back of the string, such as with some French dictionary ordering.
+<6> Specify whether to check whether text requires normalization and whether to perform normalization.
+====
+
+Collations can be used to create collections and indexes. If you create a collection that specifies a collation, the
+collation is applied to index creation and queries unless you specify a different collation. A collation is valid for a
+whole operation and cannot be specified on a per-field basis.
+
+Like other metadata, collations can be derived from the domain type via the `collation` attribute of the `@Document`
+annotation and will be applied directly when running queries, creating collections or indexes.
+
+NOTE: Annotated collations will not be used when a collection is auto created by MongoDB on first interaction. This would
+require additional store interaction delaying the entire process. Please use `MongoOperations.createCollection` for those cases.
+
+[source,java]
+----
+Collation french = Collation.of("fr");
+Collation german = Collation.of("de");
+
+template.createCollection(Person.class, CollectionOptions.just(french));
+
+template.indexOps(Person.class).ensureIndex(new Index("name", Direction.ASC).collation(german));
+----
+
+NOTE: MongoDB uses simple binary comparison if no collation is specified (`Collation.simple()`).
+
+Using collations with collection operations is a matter of specifying a `Collation` instance in your query or operation options, as the following two examples show:
+
+.Using collation with `find`
+====
+[source,java]
+----
+Collation collation = Collation.of("de");
+
+Query query = new Query(Criteria.where("firstName").is("Amél")).collation(collation);
+
+List<Person> results = template.find(query, Person.class);
+----
+====
+
+.Using collation with `aggregate`
+====
+[source,java]
+----
+Collation collation = Collation.of("de");
+
+AggregationOptions options = AggregationOptions.builder().collation(collation).build();
+
+Aggregation aggregation = newAggregation(
+  project("tags"),
+  unwind("tags"),
+  group("tags")
+    .count().as("count")
+).withOptions(options);
+
+AggregationResults<TagCount> results = template.aggregate(aggregation, "tags", TagCount.class);
+----
+====
+
+WARNING: Indexes are only used if the collation used for the operation matches the index collation.
+
+xref:mongodb/repositories/repositories.adoc[MongoDB Repositories] support `Collations` via the `collation` attribute of the `@Query` annotation.
+ diff --git a/src/main/antora/modules/ROOT/pages/mongodb/configuration.adoc b/src/main/antora/modules/ROOT/pages/mongodb/configuration.adoc new file mode 100644 index 0000000000..1034acdd79 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/configuration.adoc @@ -0,0 +1,336 @@ +[[mongodb-connectors]] += Connecting to MongoDB + +One of the first tasks when using MongoDB and Spring is to create a `MongoClient` object using the IoC container. +There are two main ways to do this, either by using Java-based bean metadata or by using XML-based bean metadata. + +NOTE: For those not familiar with how to configure the Spring container using Java-based bean metadata instead of XML-based metadata, see the high-level introduction in the reference docs https://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/new-in-3.0.html#new-java-configuration[here] as well as the detailed documentation https://docs.spring.io/spring-framework/docs/{springVersion}/reference/html/core.html#beans-java-instantiating-container[here]. + +[[mongo.mongo-java-config]] +== Registering a Mongo Instance + +The following example shows an example to register an instance of a `MongoClient`: + +.Registering `MongoClient` +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +public class AppConfig { + + /* + * Use the standard Mongo driver API to create a com.mongodb.client.MongoClient instance. + */ + public @Bean com.mongodb.client.MongoClient mongoClient() { + return com.mongodb.client.MongoClients.create("mongodb://localhost:27017"); + } +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +@Configuration +public class AppConfig { + + /* + * Use the standard Mongo driver API to create a com.mongodb.client.MongoClient instance. 
+ */ + public @Bean com.mongodb.reactivestreams.client.MongoClient mongoClient() { + return com.mongodb.reactivestreams.client.MongoClients.create("mongodb://localhost:27017"); + } +} +---- + +XML:: ++ +[source,xml,indent=0,subs="verbatim,quotes",role="third"] +---- + + + + + + + +---- +====== + +This approach lets you use the standard `MongoClient` instance, with the container using Spring's `MongoClientFactoryBean`/`ReactiveMongoClientFactoryBean`. +As compared to instantiating a `MongoClient` instance directly, the `FactoryBean` has the added advantage of also providing the container with an `ExceptionTranslator` implementation that translates MongoDB exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation. +This hierarchy and the use of `@Repository` is described in link:{springDocsUrl}/data-access.html[Spring's DAO support features]. + +The following example shows an example of a Java-based bean metadata that supports exception translation on `@Repository` annotated classes: + +.Registering a `MongoClient` via `MongoClientFactoryBean` / `ReactiveMongoClientFactoryBean` +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +public class AppConfig { + + /* + * Factory bean that creates the com.mongodb.client.MongoClient instance + */ + public @Bean MongoClientFactoryBean mongo() { + MongoClientFactoryBean mongo = new MongoClientFactoryBean(); + mongo.setHost("localhost"); + return mongo; + } +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +@Configuration +public class AppConfig { + + /* + * Factory bean that creates the com.mongodb.reactivestreams.client.MongoClient instance + */ + public @Bean ReactiveMongoClientFactoryBean mongo() { + ReactiveMongoClientFactoryBean mongo = new ReactiveMongoClientFactoryBean(); + mongo.setHost("localhost"); + return mongo; + } +} 
+---- +====== + +To access the `MongoClient` object created by the `FactoryBean` in other `@Configuration` classes or your own classes, use a `private @Autowired MongoClient mongoClient;` field. + +[[mongo.mongo-db-factory]] +== The MongoDatabaseFactory Interface + +While `MongoClient` is the entry point to the MongoDB driver API, connecting to a specific MongoDB database instance requires additional information, such as the database name and an optional username and password. +With that information, you can obtain a `MongoDatabase` object and access all the functionality of a specific MongoDB database instance. +Spring provides the `org.springframework.data.mongodb.core.MongoDatabaseFactory` & `org.springframework.data.mongodb.core.ReactiveMongoDatabaseFactory` interfaces, shown in the following listing, to bootstrap connectivity to the database: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public interface MongoDatabaseFactory { + + MongoDatabase getDatabase() throws DataAccessException; + + MongoDatabase getDatabase(String dbName) throws DataAccessException; +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +public interface ReactiveMongoDatabaseFactory { + + Mono getDatabase() throws DataAccessException; + + Mono getDatabase(String dbName) throws DataAccessException; +} +---- +====== + +The following sections show how you can use the container with either Java-based or XML-based metadata to configure an instance of the `MongoDatabaseFactory` interface. +In turn, you can use the `MongoDatabaseFactory` / `ReactiveMongoDatabaseFactory` instance to configure `MongoTemplate` / `ReactiveMongoTemplate`. 
+ +Instead of using the IoC container to create an instance of the template, you can use them in standard Java code, as follows: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public class MongoApplication { + + public static void main(String[] args) throws Exception { + + MongoOperations mongoOps = new MongoTemplate(new SimpleMongoClientDatabaseFactory(MongoClients.create(), "database")); + + // ... + } +} +---- +The code in bold highlights the use of `SimpleMongoClientDbFactory` and is the only difference between the listing shown in the xref:mongodb/getting-started.adoc[getting started section]. +Use `SimpleMongoClientDbFactory` when choosing `com.mongodb.client.MongoClient` as the entrypoint of choice. + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +public class ReactiveMongoApplication { + + public static void main(String[] args) throws Exception { + + ReactiveMongoOperations mongoOps = new MongoTemplate(new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database")); + + // ... + } +} +---- +====== + +[[mongo.mongo-db-factory-java]] +[[mongo.mongo-db-factory.config]] +== Registering a `MongoDatabaseFactory` / `ReactiveMongoDatabaseFactory` + +To register a `MongoDatabaseFactory`/ `ReactiveMongoDatabaseFactory` instance with the container, you write code much like what was highlighted in the previous section. 
+The following listing shows a simple example: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +public class MongoConfiguration { + + @Bean + public MongoDatabaseFactory mongoDatabaseFactory() { + return new SimpleMongoClientDatabaseFactory(MongoClients.create(), "database"); + } +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +@Configuration +public class ReactiveMongoConfiguration { + + @Bean + public ReactiveMongoDatabaseFactory mongoDatabaseFactory() { + return new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database"); + } +} +---- +====== + +MongoDB Server generation 3 changed the authentication model when connecting to the DB. +Therefore, some of the configuration options available for authentication are no longer valid. +You should use the `MongoClient`-specific options for setting credentials through `MongoCredential` to provide authentication data, as shown in the following example: + +[tabs] +====== +Java:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +public class MongoAppConfig extends AbstractMongoClientConfiguration { + + @Override + public String getDatabaseName() { + return "database"; + } + + @Override + protected void configureClientSettings(Builder builder) { + + builder + .credential(MongoCredential.createCredential("name", "db", "pwd".toCharArray())) + .applyToClusterSettings(settings -> { + settings.hosts(singletonList(new ServerAddress("127.0.0.1", 27017))); + }); + } +} +---- + +XML:: ++ +[source,xml,indent=0,subs="verbatim,quotes",role="secondary"] +---- + +---- +Username and password credentials used in XML-based configuration must be URL-encoded when these contain reserved characters, such as `:`, `%`, `@`, or `,`. 
+The following example shows encoded credentials: +`m0ng0@dmin:mo_res:bw6},Qsdxx@admin@database` -> `m0ng0%40dmin:mo_res%3Abw6%7D%2CQsdxx%40admin@database` +See https://tools.ietf.org/html/rfc3986#section-2.2[section 2.2 of RFC 3986] for further details. +====== + +If you need to configure additional options on the `com.mongodb.client.MongoClient` instance that is used to create a `SimpleMongoClientDbFactory`, you can refer to an existing bean as shown in the following example. To show another common usage pattern, the following listing shows the use of a property placeholder, which lets you parametrize the configuration and the creation of a `MongoTemplate`: + +[tabs] +====== +Java:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +@PropertySource("classpath:/com/myapp/mongodb/config/mongo.properties") +public class MongoAppConfig extends AbstractMongoClientConfiguration { + + @Autowired + Environment env; + + @Override + public String getDatabaseName() { + return "database"; + } + + @Override + protected void configureClientSettings(Builder builder) { + + builder.applyToClusterSettings(settings -> { + settings.hosts(singletonList( + new ServerAddress(env.getProperty("mongo.host"), env.getProperty("mongo.port", Integer.class)))); + }); + + builder.applyToConnectionPoolSettings(settings -> { + + settings.maxConnectionLifeTime(env.getProperty("mongo.pool-max-life-time", Integer.class), TimeUnit.MILLISECONDS) + .minSize(env.getProperty("mongo.pool-min-size", Integer.class)) + .maxSize(env.getProperty("mongo.pool-max-size", Integer.class)) + .maintenanceFrequency(10, TimeUnit.MILLISECONDS) + .maintenanceInitialDelay(11, TimeUnit.MILLISECONDS) + .maxConnectionIdleTime(30, TimeUnit.SECONDS) + .maxWaitTime(15, TimeUnit.MILLISECONDS); + }); + } +} +---- + +XML:: ++ +[source,xml,indent=0,subs="verbatim,quotes",role="secondary"] +---- + + + + + + + + + + + +---- +====== diff --git 
a/src/main/antora/modules/ROOT/pages/mongodb/converters-type-mapping.adoc b/src/main/antora/modules/ROOT/pages/mongodb/converters-type-mapping.adoc
new file mode 100644
index 0000000000..ea0876aa54
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/converters-type-mapping.adoc
@@ -0,0 +1,125 @@
+[[mongo-template.type-mapping]]
+== Type Mapping
+
+MongoDB collections can contain documents that represent instances of a variety of types.
+This feature can be useful if you store a hierarchy of classes or have a class with a property of type `Object`. In the latter case, the values held inside that property have to be read in correctly when retrieving the object. Thus, we need a mechanism to store type information alongside the actual document.
+
+To achieve that, the `MappingMongoConverter` uses a `MongoTypeMapper` abstraction with `DefaultMongoTypeMapper` as its main implementation. Its default behavior is to store the fully qualified classname under `_class` inside the document. Type hints are written for top-level documents as well as for every value (if it is a complex type and a subtype of the declared property type). The following example (with a JSON representation at the end) shows how the mapping works:
+
+.Type mapping
+====
+[source,java]
+----
+class Sample {
+  Contact value;
+}
+
+abstract class Contact { … }
+
+class Person extends Contact { … }
+
+Sample sample = new Sample();
+sample.value = new Person();
+
+mongoTemplate.save(sample);
+
+{
+  "value" : { "_class" : "com.acme.Person" },
+  "_class" : "com.acme.Sample"
+}
+----
+====
+
+Spring Data MongoDB stores the type information as the last field for the actual root class as well as for the nested type (because it is complex and a subtype of `Contact`). So, if you now use `mongoTemplate.findAll(Object.class, "sample")`, you can find out that the document stored is a `Sample` instance. You can also find out that the value property is actually a `Person`.
+ +[[customizing-type-mapping]] +=== Customizing Type Mapping + +If you want to avoid writing the entire Java class name as type information but would rather like to use a key, you can use the `@TypeAlias` annotation on the entity class.If you need to customize the mapping even more, have a look at the `TypeInformationMapper` interface.An instance of that interface can be configured at the `DefaultMongoTypeMapper`, which can, in turn, be configured on `MappingMongoConverter`.The following example shows how to define a type alias for an entity: + +.Defining a type alias for an Entity +==== +[source,java] +---- +@TypeAlias("pers") +class Person { + +} +---- +==== + +Note that the resulting document contains `pers` as the value in the `_class` Field. + +[WARNING] +==== +Type aliases only work if the mapping context is aware of the actual type. +The required entity metadata is determined either on first save or has to be provided via the configurations initial entity set. +By default, the configuration class scans the base package for potential candidates. + +[source,java] +---- +@Configuration +class AppConfig extends AbstractMongoClientConfiguration { + + @Override + protected Set> getInitialEntitySet() { + return Collections.singleton(Person.class); + } + + // ... 
+} +---- +==== + +[[configuring-custom-type-mapping]] +=== Configuring Custom Type Mapping + +The following example shows how to configure a custom `MongoTypeMapper` in `MappingMongoConverter`: + +[source,java] +---- +class CustomMongoTypeMapper extends DefaultMongoTypeMapper { + //implement custom type mapping here +} +---- + +.Configuring a custom `MongoTypeMapper` +==== +.Java +[source,java,role="primary"] +---- +@Configuration +class SampleMongoConfiguration extends AbstractMongoClientConfiguration { + + @Override + protected String getDatabaseName() { + return "database"; + } + + @Bean + @Override + public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory, + MongoCustomConversions customConversions, MongoMappingContext mappingContext) { + MappingMongoConverter mmc = super.mappingMongoConverter(databaseFactory, customConversions, mappingContext); + mmc.setTypeMapper(customTypeMapper()); + return mmc; + } + + @Bean + public MongoTypeMapper customTypeMapper() { + return new CustomMongoTypeMapper(); + } +} +---- + +.XML +[source,xml,role="secondary"] +---- + + + +---- +==== + +Note that the preceding example extends the `AbstractMongoClientConfiguration` class and overrides the bean definition of the `MappingMongoConverter` where we configured our custom `MongoTypeMapper`. + diff --git a/src/main/antora/modules/ROOT/pages/mongodb/geo-json.adoc b/src/main/antora/modules/ROOT/pages/mongodb/geo-json.adoc new file mode 100644 index 0000000000..a850604594 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/geo-json.adoc @@ -0,0 +1,40 @@ +TODO: add the following section somewhere + +[[mongo.geo-json.jackson-modules]] +== GeoJSON Jackson Modules + +By using the <>, Spring Data registers additional Jackson ``Modules``s to the `ObjectMapper` for de-/serializing common Spring Data domain types. +Please refer to the <> section to learn more about the infrastructure setup of this feature. 
+ +The MongoDB module additionally registers ``JsonDeserializer``s for the following GeoJSON types via its `GeoJsonConfiguration` exposing the `GeoJsonModule`. +---- +org.springframework.data.mongodb.core.geo.GeoJsonPoint +org.springframework.data.mongodb.core.geo.GeoJsonMultiPoint +org.springframework.data.mongodb.core.geo.GeoJsonLineString +org.springframework.data.mongodb.core.geo.GeoJsonMultiLineString +org.springframework.data.mongodb.core.geo.GeoJsonPolygon +org.springframework.data.mongodb.core.geo.GeoJsonMultiPolygon +---- + +[NOTE] +==== +The `GeoJsonModule` only registers ``JsonDeserializer``s! + +To equip the `ObjectMapper` with a symmetric set of ``JsonSerializer``s you need to either manually configure those for the `ObjectMapper` or provide a custom `SpringDataJacksonModules` configuration exposing `GeoJsonModule.serializers()` as a Spring Bean. + +[source,java] +---- +class GeoJsonConfiguration implements SpringDataJacksonModules { + + @Bean + public Module geoJsonSerializers() { + return GeoJsonModule.serializers(); + } +} +---- +==== + +[WARNING] +==== +The next major version (`4.0`) will register both, ``JsonDeserializer``s and ``JsonSerializer``s for GeoJSON types by default. +==== + diff --git a/src/main/antora/modules/ROOT/pages/mongodb/getting-started.adoc b/src/main/antora/modules/ROOT/pages/mongodb/getting-started.adoc new file mode 100644 index 0000000000..a52e336ea3 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/getting-started.adoc @@ -0,0 +1,62 @@ +[[mongodb-getting-started]] += Getting Started + +An easy way to bootstrap setting up a working environment is to create a Spring-based project via https://start.spring.io/#!type=maven-project&dependencies=data-mongodb[start.spring.io] or create a Spring project in https://spring.io/tools[Spring Tools]. 
+ +[[mongo.examples-repo]] +== Examples Repository + +The GitHub https://github.com/spring-projects/spring-data-examples[spring-data-examples repository] hosts several examples that you can download and play around with to get a feel for how the library works. + +[[mongodb.hello-world]] +== Hello World + +First, you need to set up a running MongoDB server. Refer to the https://docs.mongodb.org/manual/core/introduction/[MongoDB Quick Start guide] for an explanation on how to startup a MongoDB instance. +Once installed, starting MongoDB is typically a matter of running the following command: `/bin/mongod` + +Then you can create a `Person` class to persist: + +==== +[source,java] +---- +include::example$example/Person.java[tags=file] +---- +==== + +You also need a main application to run: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +include::example$example/MongoApplication.java[tags=file] +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +include::example$example/ReactiveMongoApplication.java[tags=file] +---- +====== + +When you run the main program, the preceding examples produce the following output: + +[source] +---- +10:01:32,265 DEBUG o.s.data.mongodb.core.MongoTemplate - insert Document containing fields: [_class, age, name] in collection: Person +10:01:32,765 DEBUG o.s.data.mongodb.core.MongoTemplate - findOne using query: { "name" : "Joe"} in db.collection: database.Person +Person [id=4ddbba3c0be56b7e1b210166, name=Joe, age=34] +10:01:32,984 DEBUG o.s.data.mongodb.core.MongoTemplate - Dropped collection [database.person] +---- + +Even in this simple example, there are few things to notice: + +* You can instantiate the central helper class of Spring Mongo, xref:mongodb/template-api.adoc[`MongoTemplate`], by using the standard or reactive `MongoClient` object and the name of the database to use. 
+* The mapper works against standard POJO objects without the need for any additional metadata (though you can optionally provide that information. See xref:mongodb/mapping/mapping.adoc[here]). +* Conventions are used for handling the `id` field, converting it to be an `ObjectId` when stored in the database. +* Mapping conventions can use field access. Notice that the `Person` class has only getters. +* If the constructor argument names match the field names of the stored document, they are used to instantiate the object + diff --git a/src/main/antora/modules/ROOT/pages/mongodb/jmx.adoc b/src/main/antora/modules/ROOT/pages/mongodb/jmx.adoc new file mode 100644 index 0000000000..8b98bcebd2 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/jmx.adoc @@ -0,0 +1,70 @@ +[[mongo.jmx]] += JMX support + +[NOTE] +==== +JMX support has been deprecated in 4.5 and will be removed in 5.0. + +We recommend switching to Spring Boot https://docs.spring.io/spring-boot/reference/actuator/endpoints.html[Actuator Endpoints] and expose those over JMX if needed. +==== + +The JMX support for MongoDB exposes the results of running the 'serverStatus' command on the admin database for a single MongoDB server instance. It also exposes an administrative MBean, `MongoAdmin`, that lets you perform administrative operations, such as dropping or creating a database. The JMX features build upon the JMX feature set available in the Spring Framework. See link:{springDocsUrl}/integration.html#jmx[here] for more details. 
+ +[[mongodb:jmx-configuration]] +== MongoDB JMX Configuration + +Spring's Mongo namespace lets you enable JMX functionality, as the following example shows: + +.XML schema to configure MongoDB +==== +[source,xml] +---- + + + + + + + + + + + + + + + + + + + + +---- +==== + +The preceding code exposes several MBeans: + +* `AssertMetrics` +* `BackgroundFlushingMetrics` +* `BtreeIndexCounters` +* `ConnectionMetrics` +* `GlobalLockMetrics` +* `MemoryMetrics` +* `OperationCounters` +* `ServerInfo` +* `MongoAdmin` + +The following screenshot from JConsole shows the resulting configuration: + +image::jconsole.png[] diff --git a/src/main/antora/modules/ROOT/pages/mongodb/lifecycle-events.adoc b/src/main/antora/modules/ROOT/pages/mongodb/lifecycle-events.adoc new file mode 100644 index 0000000000..e7a41a3426 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/lifecycle-events.adoc @@ -0,0 +1,147 @@ +[[mongodb.mapping-usage.events]] += Lifecycle Events + +The MongoDB mapping framework includes several `org.springframework.context.ApplicationEvent` events that your application can respond to by registering special beans in the `ApplicationContext`. +Being based on Spring's `ApplicationContext` event infrastructure enables other products, such as Spring Integration, to easily receive these events, as they are a well known eventing mechanism in Spring-based applications. + +Entity lifecycle events can be costly and you may notice a change in the performance profile when loading large result sets. +You can disable lifecycle events on the javadoc:org.springframework.data.mongodb.core.MongoTemplate#setEntityLifecycleEventsEnabled(boolean)[Template API]. + +To intercept an object before it goes through the conversion process (which turns your domain object into a `org.bson.Document`), you can register a subclass of `AbstractMongoEventListener` that overrides the `onBeforeConvert` method. 
+When the event is dispatched, your listener is called and passed the domain object before it goes into the converter. +The following example shows how to do so: + +==== +[source,java] +---- +public class BeforeConvertListener extends AbstractMongoEventListener { + @Override + public void onBeforeConvert(BeforeConvertEvent event) { + ... does some auditing manipulation, set timestamps, whatever ... + } +} +---- +==== + +To intercept an object before it goes into the database, you can register a subclass of javadoc:org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener[] that overrides the `onBeforeSave` method. When the event is dispatched, your listener is called and passed the domain object and the converted `com.mongodb.Document`. The following example shows how to do so: + +==== +[source,java] +---- +public class BeforeSaveListener extends AbstractMongoEventListener { + @Override + public void onBeforeSave(BeforeSaveEvent event) { + … change values, delete them, whatever … + } +} +---- +==== + +Declaring these beans in your Spring ApplicationContext causes them to be invoked whenever the event is dispatched. + +.Callbacks on `AbstractMappingEventListener`: +[%collapsible] +==== +* `onBeforeConvert`: Called in `MongoTemplate` `insert`, `insertList`, and `save` operations before the object is converted to a `Document` by a `MongoConverter`. +* `onBeforeSave`: Called in `MongoTemplate` `insert`, `insertList`, and `save` operations *before* inserting or saving the `Document` in the database. +* `onAfterSave`: Called in `MongoTemplate` `insert`, `insertList`, and `save` operations *after* inserting or saving the `Document` in the database. +* `onAfterLoad`: Called in `MongoTemplate` `find`, `findAndRemove`, `findOne`, and `getCollection` methods after the `Document` has been retrieved from the database. 
+* `onAfterConvert`: Called in `MongoTemplate` `find`, `findAndRemove`, `findOne`, and `getCollection` methods after the `Document` has been retrieved from the database was converted to a POJO. +==== + +NOTE: Lifecycle events are only emitted for root level types. +Complex types used as properties within a document root are not subject to event publication unless they are document references annotated with `@DBRef`. + +WARNING: Lifecycle events depend on an `ApplicationEventMulticaster`, which in case of the `SimpleApplicationEventMulticaster` can be configured with a `TaskExecutor`, and therefore gives no guarantees when an Event is processed. + +include::{commons}@data-commons::page$entity-callbacks.adoc[leveloffset=+1] + +[[mongo.entity-callbacks]] +== Store specific EntityCallbacks + +Spring Data MongoDB uses the `EntityCallback` API for its auditing support and reacts on the following callbacks. + +.Supported Entity Callbacks +[%header,cols="4"] +|=== +| Callback +| Method +| Description +| Order + +| `ReactiveBeforeConvertCallback` +`BeforeConvertCallback` +| `onBeforeConvert(T entity, String collection)` +| Invoked before a domain object is converted to `org.bson.Document`. +| `Ordered.LOWEST_PRECEDENCE` + +| `ReactiveAfterConvertCallback` +`AfterConvertCallback` +| `onAfterConvert(T entity, org.bson.Document target, String collection)` +| Invoked after a domain object is loaded. + +Can modify the domain object after reading it from a `org.bson.Document`. +| `Ordered.LOWEST_PRECEDENCE` + +| `ReactiveAuditingEntityCallback` +`AuditingEntityCallback` +| `onBeforeConvert(Object entity, String collection)` +| Marks an auditable entity _created_ or _modified_ +| 100 + +| `ReactiveBeforeSaveCallback` +`BeforeSaveCallback` +| `onBeforeSave(T entity, org.bson.Document target, String collection)` +| Invoked before a domain object is saved. + +Can modify the target, to be persisted, `Document` containing all mapped entity information. 
+| `Ordered.LOWEST_PRECEDENCE` + +| `ReactiveAfterSaveCallback` +`AfterSaveCallback` +| `onAfterSave(T entity, org.bson.Document target, String collection)` +| Invoked after a domain object is saved. + +Can modify the domain object, to be returned after save, `Document` containing all mapped entity information. +| `Ordered.LOWEST_PRECEDENCE` + +|=== + +=== Bean Validation + +Spring Data MongoDB supports Bean Validation for MongoDB entities annotated with https://beanvalidation.org/[Jakarta Validation annotations]. + +You can enable Bean Validation by registering `ValidatingEntityCallback` respectively `ReactiveValidatingEntityCallback` for reactive driver usage in your Spring `ApplicationContext` as shown in the following example: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +class Config { + + @Bean + public ValidatingEntityCallback validatingEntityCallback(Validator validator) { + return new ValidatingEntityCallback(validator); + } +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +@Configuration +class Config { + + @Bean + public ReactiveValidatingEntityCallback validatingEntityCallback(Validator validator) { + return new ReactiveValidatingEntityCallback(validator); + } +} +---- +====== + +If you're using both, imperative and reactive, then you can also enable both callbacks. + +NOTE: When using XML-based configuration, historically, `ValidatingMongoEventListener` is registered through our namespace handlers when configuring ``. +If you want to use the newer Entity Callback variant, make sure to not use ``, otherwise you'll end up with both, the `ValidatingMongoEventListener` and the `ValidatingEntityCallback` being registered. 
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mapping/custom-conversions.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mapping/custom-conversions.adoc new file mode 100644 index 0000000000..4553be1d43 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/mapping/custom-conversions.adoc @@ -0,0 +1,114 @@ +include::{commons}@data-commons::page$custom-conversions.adoc[] + +[[mongo.custom-converters]] +== Type based Converter + +The most trivial way of influencing the mapping result is by specifying the desired native MongoDB target type via the `@Field` annotation. +This allows you to work with non MongoDB types like `BigDecimal` in the domain model while persisting values in native `org.bson.types.Decimal128` format. + +.Explicit target type mapping +==== +[source,java] +---- +public class Payment { + + @Id String id; <1> + + @Field(targetType = FieldType.DECIMAL128) <2> + BigDecimal value; + + Date date; <3> + +} +---- + +[source,java] +---- +{ + "_id" : ObjectId("5ca4a34fa264a01503b36af8"), <1> + "value" : NumberDecimal(2.099), <2> + "date" : ISODate("2019-04-03T12:11:01.870Z") <3> +} +---- +<1> String _id_ values that represent a valid `ObjectId` are converted automatically. See xref:mongodb/template-crud-operations.adoc#mongo-template.id-handling[How the `_id` Field is Handled in the Mapping Layer] +for details. +<2> The desired target type is explicitly defined as `Decimal128` which translates to `NumberDecimal`. +Otherwise, the +`BigDecimal` value would have been turned into a `String`. +<3> `Date` values are handled by the MongoDB driver itself and are stored as `ISODate`. +==== + +The snippet above is handy for providing simple type hints. To gain more fine-grained control over the mapping process, + you can register Spring converters with the `MongoConverter` implementations, such as the `MappingMongoConverter`. 
+ +The `MappingMongoConverter` checks to see if any Spring converters can handle a specific class before attempting to map the object itself. To 'hijack' the normal mapping strategies of the `MappingMongoConverter`, perhaps for increased performance or other custom mapping needs, you first need to create an implementation of the Spring `Converter` interface and then register it with the `MappingConverter`. + +NOTE: For more information on the Spring type conversion service, see the reference docs link:{springDocsUrl}/core.html#validation[here]. + +[[mongo.custom-converters.writer]] +=== Writing Converter + +The following example shows an implementation of the `Converter` that converts from a `Person` object to a `org.bson.Document`: + +[source,java] +---- +import org.springframework.core.convert.converter.Converter; + +import org.bson.Document; + +public class PersonWriteConverter implements Converter { + + public Document convert(Person source) { + Document document = new Document(); + document.put("_id", source.getId()); + document.put("name", source.getFirstName()); + document.put("age", source.getAge()); + return document; + } +} +---- + +[[mongo.custom-converters.reader]] +=== Reading Converter + +The following example shows an implementation of a `Converter` that converts from a `Document` to a `Person` object: + +[source,java] +---- +public class PersonReadConverter implements Converter { + + public Person convert(Document source) { + Person p = new Person((ObjectId) source.get("_id"), (String) source.get("name")); + p.setAge((Integer) source.get("age")); + return p; + } +} +---- + +[[mongo.custom-converters.xml]] +=== Registering Converters + +[source,java] +---- +class MyMongoConfiguration extends AbstractMongoClientConfiguration { + + @Override + public String getDatabaseName() { + return "database"; + } + + @Override + protected void configureConverters(MongoConverterConfigurationAdapter adapter) { + adapter.registerConverter(new 
+ com.example.PersonReadConverter()); + adapter.registerConverter(new com.example.PersonWriteConverter()); + } +} +---- + +[[mongo.numeric-conversion]] +== Big Number Format + +MongoDB in its early days did not have support for large numeric values such as `BigDecimal`. +To persist `BigDecimal` and `BigInteger` values, Spring Data MongoDB converted values to their `String` representation. +With MongoDB Server 3.4, `org.bson.types.Decimal128` offers a native representation for `BigDecimal` and `BigInteger`. +You can switch to the native representation by either annotating your properties with `@Field(targetType=DECIMAL128)` or by configuring the big decimal representation in `MongoCustomConversions` through `MongoCustomConversions.create(config -> config.bigDecimal(…))`. diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mapping/document-references.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mapping/document-references.adoc new file mode 100644 index 0000000000..1dec452dcf --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/mapping/document-references.adoc @@ -0,0 +1,505 @@ +[[mapping-usage-references]] += Using DBRefs + +The mapping framework does not have to store child objects embedded within the document. +You can also store them separately and use a `DBRef` to refer to that document. +When the object is loaded from MongoDB, those references are eagerly resolved so that you get back a mapped object that looks the same as if it had been stored embedded within your top-level document. 
+ +The following example uses a DBRef to refer to a specific document that exists independently of the object in which it is referenced (both classes are shown in-line for brevity's sake): + +==== +[source,java] +---- +@Document +public class Account { + + @Id + private ObjectId id; + private Float total; +} + +@Document +public class Person { + + @Id + private ObjectId id; + @Indexed + private Integer ssn; + @DBRef + private List accounts; +} +---- +==== + +You need not use `@OneToMany` or similar mechanisms because the List of objects tells the mapping framework that you want a one-to-many relationship. +When the object is stored in MongoDB, there is a list of DBRefs rather than the `Account` objects themselves. +When it comes to loading collections of ``DBRef``s it is advisable to restrict references held in collection types to a specific MongoDB collection. +This allows bulk loading of all references, whereas references pointing to different MongoDB collections need to be resolved one by one. + +IMPORTANT: The mapping framework does not handle cascading saves. +If you change an `Account` object that is referenced by a `Person` object, you must save the `Account` object separately. +Calling `save` on the `Person` object does not automatically save the `Account` objects in the `accounts` property. + +``DBRef``s can also be resolved lazily. +In this case the actual `Object` or `Collection` of references is resolved on first access of the property. +Use the `lazy` attribute of `@DBRef` to specify this. +Required properties that are also defined as lazy loading ``DBRef`` and used as constructor arguments are also decorated with the lazy loading proxy making sure to put as little pressure on the database and network as possible. + +TIP: Lazily loaded ``DBRef``s can be hard to debug. +Make sure tooling does not accidentally trigger proxy resolution by e.g. calling `toString()` or some inline debug rendering invoking property getters. 
+Please consider to enable _trace_ logging for `org.springframework.data.mongodb.core.convert.DefaultDbRefResolver` to gain insight on `DBRef` resolution. + +CAUTION: Lazy loading may require class proxies, that in turn, might need access to jdk internals, that are not open, starting with Java 16+, due to https://openjdk.java.net/jeps/396[JEP 396: Strongly Encapsulate JDK Internals by Default]. +For those cases please consider falling back to an interface type (eg. switch from `ArrayList` to `List`) or provide the required `--add-opens` argument. + +[[mapping-usage.document-references]] +== Using Document References + +Using `@DocumentReference` offers a flexible way of referencing entities in MongoDB. +While the goal is the same as when using xref:mongodb/mapping/document-references.adoc[DBRefs], the store representation is different. +`DBRef` resolves to a document with a fixed structure as outlined in the https://docs.mongodb.com/manual/reference/database-references/[MongoDB Reference documentation]. + +Document references, do not follow a specific format. +They can be literally anything, a single value, an entire document, basically everything that can be stored in MongoDB. +By default, the mapping layer will use the referenced entities _id_ value for storage and retrieval, like in the sample below. + +==== +[source,java] +---- +@Document +class Account { + + @Id + String id; + Float total; +} + +@Document +class Person { + + @Id + String id; + + @DocumentReference <1> + List accounts; +} +---- + +[source,java] +---- +Account account = … + +template.insert(account); <2> + +template.update(Person.class) + .matching(where("id").is(…)) + .apply(new Update().push("accounts").value(account)) <3> + .first(); +---- + +[source,json] +---- +{ + "_id" : …, + "accounts" : [ "6509b9e" … ] <4> +} +---- +<1> Mark the collection of `Account` values to be referenced. 
+<2> The mapping framework does not handle cascading saves, so make sure to persist the referenced entity individually. +<3> Add the reference to the existing entity. +<4> Referenced `Account` entities are represented as an array of their `_id` values. +==== + +The sample above uses an ``_id``-based fetch query (`{ '_id' : ?#{#target} }`) for data retrieval and resolves linked entities eagerly. +It is possible to alter resolution defaults (listed below) using the attributes of `@DocumentReference` + +.@DocumentReference defaults +[cols="2,3,5",options="header"] +|=== +| Attribute | Description | Default + +| `db` +| The target database name for collection lookup. +| `MongoDatabaseFactory.getMongoDatabase()` + +| `collection` +| The target collection name. +| The annotated property's domain type, respectively the value type in case of `Collection` like or `Map` properties, collection name. + +| `lookup` +| The single document lookup query evaluating placeholders via SpEL expressions using `#target` as the marker for a given source value. `Collection` like or `Map` properties combine individual lookups via an `$or` operator. +| An `_id` field based query (`{ '_id' : ?#{#target} }`) using the loaded source value. + +| `sort` +| Used for sorting result documents on server side. +| None by default. +Result order of `Collection` like properties is restored based on the used lookup query on a best-effort basis. + +| `lazy` +| If set to `true` value resolution is delayed upon first access of the property. +| Resolves properties eagerly by default. +|=== + +CAUTION: Lazy loading may require class proxies, that in turn, might need access to jdk internals, that are not open, starting with Java 16+, due to https://openjdk.java.net/jeps/396[JEP 396: Strongly Encapsulate JDK Internals by Default]. +For those cases please consider falling back to an interface type (eg. switch from `ArrayList` to `List`) or provide the required `--add-opens` argument. 
+ +`@DocumentReference(lookup)` allows defining filter queries that can be different from the `_id` field and therefore offer a flexible way of defining references between entities as demonstrated in the sample below, where the `Publisher` of a book is referenced by its acronym instead of the internal `id`. + +==== +[source,java] +---- +@Document +class Book { + + @Id + ObjectId id; + String title; + List author; + + @Field("publisher_ac") + @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") <1> + Publisher publisher; +} + +@Document +class Publisher { + + @Id + ObjectId id; + String acronym; <1> + String name; + + @DocumentReference(lazy = true) <2> + List books; + +} +---- + +.`Book` document +[source,json] +---- +{ + "_id" : 9a48e32, + "title" : "The Warded Man", + "author" : ["Peter V. Brett"], + "publisher_ac" : "DR" +} +---- + +.`Publisher` document +[source,json] +---- +{ + "_id" : 1a23e45, + "acronym" : "DR", + "name" : "Del Rey", + … +} +---- +<1> Use the `acronym` field to query for entities in the `Publisher` collection. +<2> Lazy load back references to the `Book` collection. +==== + +The above snippet shows the reading side of things when working with custom referenced objects. +Writing requires a bit of additional setup as the mapping information do not express where `#target` stems from. +The mapping layer requires registration of a `Converter` between the target document and `DocumentPointer`, like the one below: + +==== +[source,java] +---- +@WritingConverter +class PublisherReferenceConverter implements Converter> { + + @Override + public DocumentPointer convert(Publisher source) { + return () -> source.getAcronym(); + } +} +---- +==== + +If no `DocumentPointer` converter is provided the target reference document can be computed based on the given lookup query. +In this case the association target properties are evaluated as shown in the following sample. 
+ +==== +[source,java] +---- +@Document +class Book { + + @Id + ObjectId id; + String title; + List author; + + @DocumentReference(lookup = "{ 'acronym' : ?#{acc} }") <1> <2> + Publisher publisher; +} + +@Document +class Publisher { + + @Id + ObjectId id; + String acronym; <1> + String name; + + // ... +} +---- + +[source,json] +---- +{ + "_id" : 9a48e32, + "title" : "The Warded Man", + "author" : ["Peter V. Brett"], + "publisher" : { + "acc" : "DOC" + } +} +---- +<1> Use the `acronym` field to query for entities in the `Publisher` collection. +<2> The field value placeholders of the lookup query (like `acc`) is used to form the reference document. +==== + +It is also possible to model relational style _One-To-Many_ references using a combination of `@ReadonlyProperty` and `@DocumentReference`. +This approach allows link types without storing the linking values within the owning document but rather on the referencing document as shown in the example below. + +==== +[source,java] +---- +@Document +class Book { + + @Id + ObjectId id; + String title; + List author; + + ObjectId publisherId; <1> +} + +@Document +class Publisher { + + @Id + ObjectId id; + String acronym; + String name; + + @ReadOnlyProperty <2> + @DocumentReference(lookup="{'publisherId':?#{#self._id} }") <3> + List books; +} +---- + +.`Book` document +[source,json] +---- +{ + "_id" : 9a48e32, + "title" : "The Warded Man", + "author" : ["Peter V. Brett"], + "publisherId" : 8cfb002 +} +---- + +.`Publisher` document +[source,json] +---- +{ + "_id" : 8cfb002, + "acronym" : "DR", + "name" : "Del Rey" +} +---- +<1> Set up the link from `Book` (reference) to `Publisher` (owner) by storing the `Publisher.id` within the `Book` document. +<2> Mark the property holding the references to be readonly. +This prevents storing references to individual ``Book``s with the `Publisher` document. 
+<3> Use the `#self` variable to access values within the `Publisher` document and in this retrieve `Books` with matching `publisherId`. +==== + +With all the above in place it is possible to model all kind of associations between entities. +Have a look at the non-exhaustive list of samples below to get feeling for what is possible. + +.Simple Document Reference using _id_ field +==== +[source,java] +---- +class Entity { + @DocumentReference + ReferencedObject ref; +} +---- + +[source,json] +---- +// entity +{ + "_id" : "8cfb002", + "ref" : "9a48e32" <1> +} + +// referenced object +{ + "_id" : "9a48e32" <1> +} +---- +<1> MongoDB simple type can be directly used without further configuration. +==== + +.Simple Document Reference using _id_ field with explicit lookup query +==== +[source,java] +---- +class Entity { + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") <1> + ReferencedObject ref; +} +---- + +[source,json] +---- +// entity +{ + "_id" : "8cfb002", + "ref" : "9a48e32" <1> +} + +// referenced object +{ + "_id" : "9a48e32" +} +---- +<1> _target_ defines the reference value itself. +==== + +.Document Reference extracting the `refKey` field for the lookup query +==== +[source,java] +---- +class Entity { + @DocumentReference(lookup = "{ '_id' : '?#{refKey}' }") <1> <2> + private ReferencedObject ref; +} +---- + +[source,java] +---- +@WritingConverter +class ToDocumentPointerConverter implements Converter> { + public DocumentPointer convert(ReferencedObject source) { + return () -> new Document("refKey", source.id); <1> + } +} +---- + +[source,json] +---- +// entity +{ + "_id" : "8cfb002", + "ref" : { + "refKey" : "9a48e32" <1> + } +} + +// referenced object +{ + "_id" : "9a48e32" +} +---- +<1> The key used for obtaining the reference value must be the one used during write. +<2> `refKey` is short for `target.refKey`. 
+==== + +.Document Reference with multiple values forming the lookup query +==== +[source,java] +---- +class Entity { + @DocumentReference(lookup = "{ 'firstname' : '?#{fn}', 'lastname' : '?#{ln}' }") <1> <2> + ReferencedObject ref; +} +---- + +[source,json] +---- +// entity +{ + "_id" : "8cfb002", + "ref" : { + "fn" : "Josh", <1> + "ln" : "Long" <1> + } +} + +// referenced object +{ + "_id" : "9a48e32", + "firstname" : "Josh", <2> + "lastname" : "Long", <2> +} +---- +<1> Read/write the keys `fn` & `ln` from/to the linkage document based on the lookup query. +<2> Use non _id_ fields for the lookup of the target documents. +==== + +.Document Reference reading from a target collection +==== +[source,java] +---- +class Entity { + @DocumentReference(lookup = "{ '_id' : '?#{id}' }", collection = "?#{collection}") <2> + private ReferencedObject ref; +} +---- + +[source,java] +---- +@WritingConverter +class ToDocumentPointerConverter implements Converter> { + public DocumentPointer convert(ReferencedObject source) { + return () -> new Document("id", source.id) <1> + .append("collection", … ); <2> + } +} +---- + +[source,json] +---- +// entity +{ + "_id" : "8cfb002", + "ref" : { + "id" : "9a48e32", <1> + "collection" : "…" <2> + } +} +---- +<1> Read/write the keys `_id` from/to the reference document to use them in the lookup query. +<2> The collection name can be read from the reference document using its key. +==== + +[WARNING] +==== +We know it is tempting to use all kinds of MongoDB query operators in the lookup query and this is fine. +But there a few aspects to consider: + +* Make sure to have indexes in place that support your lookup. +* Make sure to use the same data types: `@DocumentReference(lookup="{'someRef':?#{#self._id} }")` can easily fail when using `@Id String id` and `String someRef` as ``String @Id``'s are subject to automatic ObjectId conversion (but not other `String` properties containing `ObjectId.toString()`). 
+Reference lookup uses values from the resulting `Document` and in that case, it would query a String field using an `ObjectId` yielding no results. +* Mind that resolution requires a server roundtrip inducing latency, consider a lazy strategy. +* A collection of document references is bulk loaded using the `$or` operator. + +The original element order is restored in memory on a best-effort basis. +Restoring the order is only possible when using equality expressions and cannot be done when using MongoDB query operators. +In this case results will be ordered as they are received from the store or via the provided `@DocumentReference(sort)` attribute. + +A few more general remarks: + +* Do you use cyclic references? +Ask your self if you need them. +* Lazy document references are hard to debug. +Make sure tooling does not accidentally trigger proxy resolution by e.g. calling `toString()`. +* There is no support for reading document references using reactive infrastructure. +==== diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping-index-management.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping-index-management.adoc new file mode 100644 index 0000000000..b094b925d0 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping-index-management.adoc @@ -0,0 +1,340 @@ +[[mapping.index-creation]] += Index Creation + +Spring Data MongoDB can automatically create indexes for entity types annotated with `@Document`. +Index creation must be explicitly enabled since version 3.0 to prevent undesired effects with collection lifecyle and performance impact. +Indexes are automatically created for the initial entity set on application startup and when accessing an entity type for the first time while the application runs. 
+ +We generally recommend explicit index creation for application-based control of indexes as Spring Data cannot automatically create indexes for collections that were recreated while the application was running. + +`IndexResolver` provides an abstraction for programmatic index definition creation if you want to make use of `@Indexed` annotations such as `@GeoSpatialIndexed`, `@TextIndexed`, `@CompoundIndex` and `@WildcardIndexed`. +You can use index definitions with `IndexOperations` to create indexes. +A good point in time for index creation is on application startup, specifically after the application context was refreshed, triggered by observing `ContextRefreshedEvent`. +This event guarantees that the context is fully initialized. +Note that at this time other components, especially bean factories might have access to the MongoDB database. + +[WARNING] +==== +``Map``-like properties are skipped by the `IndexResolver` unless annotated with `@WildcardIndexed` because the _map key_ must be part of the index definition. Since the purpose of maps is the usage of dynamic keys and values, the keys cannot be resolved from static mapping metadata. 
+==== + +.Programmatic Index Creation for a single Domain Type +==== +[source,java] +---- +class MyListener { + + @EventListener(ContextRefreshedEvent.class) + public void initIndicesAfterStartup() { + + MappingContext, MongoPersistentProperty> mappingContext = mongoTemplate + .getConverter().getMappingContext(); + + IndexResolver resolver = new MongoPersistentEntityIndexResolver(mappingContext); + + IndexOperations indexOps = mongoTemplate.indexOps(DomainType.class); + resolver.resolveIndexFor(DomainType.class).forEach(indexOps::ensureIndex); + } +} +---- +==== + +.Programmatic Index Creation for all Initial Entities +==== +[source,java] +---- +class MyListener{ + + @EventListener(ContextRefreshedEvent.class) + public void initIndicesAfterStartup() { + + MappingContext, MongoPersistentProperty> mappingContext = mongoTemplate + .getConverter().getMappingContext(); + + // consider only entities that are annotated with @Document + mappingContext.getPersistentEntities() + .stream() + .filter(it -> it.isAnnotationPresent(Document.class)) + .forEach(it -> { + + IndexOperations indexOps = mongoTemplate.indexOps(it.getType()); + resolver.resolveIndexFor(it.getType()).forEach(indexOps::ensureIndex); + }); + } +} +---- +==== + +Alternatively, if you want to ensure index and collection presence before any component is able to access your database from your application, declare a `@Bean` method for `MongoTemplate` and include the code from above before returning the `MongoTemplate` object. + +[NOTE] +==== +To turn automatic index creation _ON_ please override `autoIndexCreation()` in your configuration. +[source,java] +---- +@Configuration +public class Config extends AbstractMongoClientConfiguration { + + @Override + public boolean autoIndexCreation() { + return true; + } + +// ... +} +---- +==== + +IMPORTANT: Automatic index creation is turned _OFF_ by default as of version 3.0. 
+ +[[mapping-usage-indexes.compound-index]] +== Compound Indexes + +Compound indexes are also supported. They are defined at the class level, rather than on individual properties. + +NOTE: Compound indexes are very important to improve the performance of queries that involve criteria on multiple fields + +Here's an example that creates a compound index of `lastName` in ascending order and `age` in descending order: + +.Example Compound Index Usage +==== +[source,java] +---- +package com.mycompany.domain; + +@Document +@CompoundIndex(name = "age_idx", def = "{'lastName': 1, 'age': -1}") +public class Person { + + @Id + private ObjectId id; + private Integer age; + private String firstName; + private String lastName; + +} +---- +==== + +[TIP] +==== +`@CompoundIndex` is repeatable using `@CompoundIndexes` as its container. + +[source,java] +---- +@Document +@CompoundIndex(name = "cmp-idx-one", def = "{'firstname': 1, 'lastname': -1}") +@CompoundIndex(name = "cmp-idx-two", def = "{'address.city': -1, 'address.street': 1}") +public class Person { + + String firstname; + String lastname; + + Address address; + + // ... +} +---- +==== + +[[mapping-usage-indexes.hashed-index]] +== Hashed Indexes + +Hashed indexes allow hash based sharding within a sharded cluster. +Using hashed field values to shard collections results in a more random distribution. +For details, refer to the https://docs.mongodb.com/manual/core/index-hashed/[MongoDB Documentation]. + +Here's an example that creates a hashed index for `_id`: + +.Example Hashed Index Usage +==== +[source,java] +---- +@Document +public class DomainType { + + @HashIndexed @Id String id; + + // ... +} +---- +==== + +Hashed indexes can be created next to other index definitions like shown below, in that case both indices are created: + +.Example Hashed Index Usage togehter with simple index +==== +[source,java] +---- +@Document +public class DomainType { + + @Indexed + @HashIndexed + String value; + + // ... 
+} +---- +==== + +In case the example above is too verbose, a compound annotation allows to reduce the number of annotations that need to be declared on a property: + +.Example Composed Hashed Index Usage +==== +[source,java] +---- +@Document +public class DomainType { + + @IndexAndHash(name = "idx...") <1> + String value; + + // ... +} + +@Indexed +@HashIndexed +@Retention(RetentionPolicy.RUNTIME) +public @interface IndexAndHash { + + @AliasFor(annotation = Indexed.class, attribute = "name") <1> + String name() default ""; +} +---- +<1> Potentially register an alias for certain attributes of the meta annotation. +==== + +[NOTE] +==== +Although index creation via annotations comes in handy for many scenarios cosider taking over more control by setting up indices manually via `IndexOperations`. + +[source,java] +---- +mongoOperations.indexOpsFor(Jedi.class) + .ensureIndex(HashedIndex.hashed("useTheForce")); +---- +==== + +[[mapping-usage-indexes.wildcard-index]] +== Wildcard Indexes + +A `WildcardIndex` is an index that can be used to include all fields or specific ones based a given (wildcard) pattern. +For details, refer to the https://docs.mongodb.com/manual/core/index-wildcard/[MongoDB Documentation]. + +The index can be set up programmatically using `WildcardIndex` via `IndexOperations`. + +.Programmatic WildcardIndex setup +==== +[source,java] +---- +mongoOperations + .indexOps(User.class) + .ensureIndex(new WildcardIndex("userMetadata")); +---- +[source,javascript] +---- +db.user.createIndex({ "userMetadata.$**" : 1 }, {}) +---- +==== + +The `@WildcardIndex` annotation allows a declarative index setup that can used either with a document type or property. + +If placed on a type that is a root level domain entity (one annotated with `@Document`) , the index resolver will create a +wildcard index for it. 
+ +.Wildcard index on domain type +==== +[source,java] +---- +@Document +@WildcardIndexed +public class Product { + // … +} +---- +[source,javascript] +---- +db.product.createIndex({ "$**" : 1 },{}) +---- +==== + +The `wildcardProjection` can be used to specify keys to in-/exclude in the index. + +.Wildcard index with `wildcardProjection` +==== +[source,java] +---- +@Document +@WildcardIndexed(wildcardProjection = "{ 'userMetadata.age' : 0 }") +public class User { + private @Id String id; + private UserMetadata userMetadata; +} +---- +[source,javascript] +---- +db.user.createIndex( + { "$**" : 1 }, + { "wildcardProjection" : + { "userMetadata.age" : 0 } + } +) +---- +==== + +Wildcard indexes can also be expressed by adding the annotation directly to the field. +Please note that `wildcardProjection` is not allowed on nested paths such as properties. +Projections on types annotated with `@WildcardIndexed` are omitted during index creation. + +.Wildcard index on property +==== +[source,java] +---- +@Document +public class User { + private @Id String id; + + @WildcardIndexed + private UserMetadata userMetadata; +} +---- +[source,javascript] +---- +db.user.createIndex({ "userMetadata.$**" : 1 }, {}) +---- +==== + +[[mapping-usage-indexes.text-index]] +== Text Indexes + +NOTE: The text index feature is disabled by default for MongoDB v.2.4. + +Creating a text index allows accumulating several fields into a searchable full-text index. +It is only possible to have one text index per collection, so all fields marked with `@TextIndexed` are combined into this index. +Properties can be weighted to influence the document score for ranking results. +The default language for the text index is English.To change the default language, set the `language` attribute to whichever language you want (for example,`@Document(language="spanish")`). +Using a property called `language` or `@Language` lets you define a language override on a per-document base. 
The following example shows how to create a text index and set the language to Spanish:
It contains property-specific schema constraints. +<4> `firstname` specifies constraints for the `firstname` field inside the document. Here, it is a string-based `properties` element declaring + possible field values. +<5> `address` is a subdocument defining a schema for values in its `postCode` field. +==== + +You can provide a schema either by specifying a schema document (that is, by using the `Document` API to parse or build a document object) or by building it with Spring Data's JSON schema utilities in `org.springframework.data.mongodb.core.schema`. `MongoJsonSchema` is the entry point for all JSON schema-related operations. The following example shows how use `MongoJsonSchema.builder()` to create a JSON schema: + +.Creating a JSON schema +==== +[source,java] +---- +MongoJsonSchema.builder() <1> + .required("lastname") <2> + + .properties( + required(string("firstname").possibleValues("luke", "han")), <3> + + object("address") + .properties(string("postCode").minLength(4).maxLength(5))) + + .build(); <4> +---- +<1> Obtain a schema builder to configure the schema with a fluent API. +<2> Configure required properties either directly as shown here or with more details as in 3. +<3> Configure the required String-typed `firstname` field, allowing only `luke` and `han` values. Properties can be typed or untyped. Use a static import of `JsonSchemaProperty` to make the syntax slightly more compact and to get entry points such as `string(…)`. +<4> Build the schema object. +==== + +There are already some predefined and strongly typed schema objects (`JsonSchemaObject` and `JsonSchemaProperty`) available +through static methods on the gateway interfaces. 
+However, you may need to build custom property validation rules, which can be created through the builder API, as the following example shows: + +[source,java] +---- +// "birthdate" : { "bsonType": "date" } +JsonSchemaProperty.named("birthdate").ofType(Type.dateType()); + +// "birthdate" : { "bsonType": "date", "description", "Must be a date" } +JsonSchemaProperty.named("birthdate").with(JsonSchemaObject.of(Type.dateType()).description("Must be a date")); +---- + +`CollectionOptions` provides the entry point to schema support for collections, as the following example shows: + +.Create collection with `$jsonSchema` +==== +[source,java] +---- +MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build(); + +template.createCollection(Person.class, CollectionOptions.empty().schema(schema)); +---- +==== + +[[mongo.jsonSchema.generated]] +== Generating a Schema + +Setting up a schema can be a time consuming task and we encourage everyone who decides to do so, to really take the time it takes. +It's important, schema changes can be hard. +However, there might be times when one does not want to balked with it, and that is where `JsonSchemaCreator` comes into play. + +`JsonSchemaCreator` and its default implementation generates a `MongoJsonSchema` out of domain types metadata provided by the mapping infrastructure. +This means, that xref:mongodb/mapping/mapping.adoc#mapping-usage-annotations[annotated properties] as well as potential xref:mongodb/mapping/mapping.adoc#mapping-configuration[custom conversions] are considered. 
+ +.Generate Json Schema from domain type +==== +[source,java] +---- +public class Person { + + private final String firstname; <1> + private final int age; <2> + private Species species; <3> + private Address address; <4> + private @Field(fieldType=SCRIPT) String theForce; <5> + private @Transient Boolean useTheForce; <6> + + public Person(String firstname, int age) { <1> <2> + + this.firstname = firstname; + this.age = age; + } + + // gettter / setter omitted +} + +MongoJsonSchema schema = MongoJsonSchemaCreator.create(mongoOperations.getConverter()) + .createSchemaFor(Person.class); + +template.createCollection(Person.class, CollectionOptions.empty().schema(schema)); +---- + +[source,json] +---- +{ + 'type' : 'object', + 'required' : ['age'], <2> + 'properties' : { + 'firstname' : { 'type' : 'string' }, <1> + 'age' : { 'bsonType' : 'int' } <2> + 'species' : { <3> + 'type' : 'string', + 'enum' : ['HUMAN', 'WOOKIE', 'UNKNOWN'] + } + 'address' : { <4> + 'type' : 'object' + 'properties' : { + 'postCode' : { 'type': 'string' } + } + }, + 'theForce' : { 'type' : 'javascript'} <5> + } +} +---- +<1> Simple object properties are consideres regular properties. +<2> Primitive types are considered required properties +<3> Enums are restricted to possible values. +<4> Object type properties are inspected and represented as nested documents. +<5> `String` type property that is converted to `Code` by the converter. +<6> `@Transient` properties are omitted when generating the schema. +==== + +NOTE: `_id` properties using types that can be converted into `ObjectId` like `String` are mapped to `{ type : 'object' }` +unless there is more specific information available via the `@MongoId` annotation. + +[cols="2,2,6", options="header"] +.Sepcial Schema Generation rules +|=== +| Java +| Schema Type +| Notes + +| `Object` +| `type : object` +| with `properties` if metadata available. 
+ +| `Collection` +| `type : array` +| - + +| `Map` +| `type : object` +| - + +| `Enum` +| `type : string` +| with `enum` property holding the possible enumeration values. + +| `array` +| `type : array` +| simple type array unless it's a `byte[]` + +| `byte[]` +| `bsonType : binData` +| - + +|=== + +The above example demonstrated how to derive the schema from a very precise typed source. +Using polymorphic elements within the domain model can lead to inaccurate schema representation for `Object` and generic `` types, which are likely to represented as `{ type : 'object' }` without further specification. +`MongoJsonSchemaCreator.property(…)` allows defining additional details such as nested document types that should be considered when rendering the schema. + +.Specify additional types for properties +==== +[source,java] +---- +class Root { + Object value; +} + +class A { + String aValue; +} + +class B { + String bValue; +} +MongoJsonSchemaCreator.create() + .property("value").withTypes(A.class, B.class) <1> +---- + +[source,json] +---- +{ + 'type' : 'object', + 'properties' : { + 'value' : { + 'type' : 'object', + 'properties' : { <1> + 'aValue' : { 'type' : 'string' }, + 'bValue' : { 'type' : 'string' } + } + } + } +} +---- +<1> Properties of the given types are merged into one element. +==== + +MongoDBs schema-free approach allows storing documents of different structure in one collection. +Those may be modeled having a common base class. +Regardless of the chosen approach, `MongoJsonSchemaCreator.merge(…)` can help circumvent the need of merging multiple schema into one. 
+ +.Merging multiple Schemas into a single Schema definition +==== +[source,java] +---- +abstract class Root { + String rootValue; +} + +class A extends Root { + String aValue; +} + +class B extends Root { + String bValue; +} + +MongoJsonSchemaCreator.mergedSchemaFor(A.class, B.class) <1> +---- + +[source,json] +---- +{ + 'type' : 'object', + 'properties' : { <1> + 'rootValue' : { 'type' : 'string' }, + 'aValue' : { 'type' : 'string' }, + 'bValue' : { 'type' : 'string' } + } + } +} +---- +<1> Properties (and their inherited ones) of the given types are combined into one schema. +==== + +[NOTE] +==== +Properties with the same name need to refer to the same JSON schema in order to be combined. +The following example shows a definition that cannot be merged automatically because of a data type mismatch. +In this case a `ConflictResolutionFunction` must be provided to `MongoJsonSchemaCreator`. + +[source,java] +---- +class A extends Root { + String value; +} + +class B extends Root { + Integer value; +} +---- +==== + +[[mongo.jsonSchema.encrypted-fields]] +== Encrypted Fields + +MongoDB 4.2 https://docs.mongodb.com/master/core/security-client-side-encryption/[Field Level Encryption] allows to directly encrypt individual properties. + +Properties can be wrapped within an encrypted property when setting up the JSON Schema as shown in the example below. + +.Client-Side Field Level Encryption via Json Schema +==== +[source,java] +---- +MongoJsonSchema schema = MongoJsonSchema.builder() + .properties( + encrypted(string("ssn")) + .algorithm("AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") + .keyId("*key0_id") + ).build(); +---- +==== + +Instead of defining encrypted fields manually it is possible leverage the `@Encrypted` annotation as shown in the snippet below. 
+ +.Client-Side Field Level Encryption via Json Schema +==== +[source,java] +---- +@Document +@Encrypted(keyId = "xKVup8B1Q+CkHaVRx+qa+g==", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") <1> +static class Patient { + + @Id String id; + String name; + + @Encrypted <2> + String bloodType; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") <3> + Integer ssn; +} +---- +<1> Default encryption settings that will be set for `encryptMetadata`. +<2> Encrypted field using default encryption settings. +<3> Encrypted field overriding the default encryption algorithm. +==== + +[TIP] +==== +The `@Encrypted` Annotation supports resolving keyIds via SpEL Expressions. +To do so additional environment metadata (via the `MappingContext`) is required and must be provided. + +[source,java] +---- +@Document +@Encrypted(keyId = "#{mongocrypt.keyId(#target)}") +static class Patient { + + @Id String id; + String name; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") + String bloodType; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") + Integer ssn; +} + +MongoJsonSchemaCreator schemaCreator = MongoJsonSchemaCreator.create(mappingContext); +MongoJsonSchema patientSchema = schemaCreator + .filter(MongoJsonSchemaCreator.encryptedOnly()) + .createSchemaFor(Patient.class); +---- + +The `mongocrypt.keyId` function is defined via an `EvaluationContextExtension` as shown in the snippet below. +Providing a custom extension provides the most flexible way of computing keyIds. + +[source,java] +---- +public class EncryptionExtension implements EvaluationContextExtension { + + @Override + public String getExtensionId() { + return "mongocrypt"; + } + + @Override + public Map getFunctions() { + return Collections.singletonMap("keyId", new Function(getMethod("computeKeyId", String.class), this)); + } + + public String computeKeyId(String target) { + // ... 
lookup via target element name + } +} +---- +==== + +[[mongo.jsonSchema.types]] +== JSON Schema Types + +The following table shows the supported JSON schema types: + +[cols="3,1,6", options="header"] +.Supported JSON schema types +|=== +| Schema Type +| Java Type +| Schema Properties + +| `untyped` +| - +| `description`, generated `description`, `enum`, `allOf`, `anyOf`, `oneOf`, `not` + +| `object` +| `Object` +| `required`, `additionalProperties`, `properties`, `minProperties`, `maxProperties`, `patternProperties` + +| `array` +| any array except `byte[]` +| `uniqueItems`, `additionalItems`, `items`, `minItems`, `maxItems` + +| `string` +| `String` +| `minLength`, `maxLentgth`, `pattern` + +| `int` +| `int`, `Integer` +| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` + +| `long` +| `long`, `Long` +| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` + +| `double` +| `float`, `Float`, `double`, `Double` +| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` + +| `decimal` +| `BigDecimal` +| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` + +| `number` +| `Number` +| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` + +| `binData` +| `byte[]` +| (none) + +| `boolean` +| `boolean`, `Boolean` +| (none) + +| `null` +| `null` +| (none) + +| `objectId` +| `ObjectId` +| (none) + +| `date` +| `java.util.Date` +| (none) + +| `timestamp` +| `BsonTimestamp` +| (none) + +| `regex` +| `java.util.regex.Pattern` +| (none) + +|=== + +NOTE: `untyped` is a generic type that is inherited by all typed schema types. It provides all `untyped` schema properties to typed schema types. + +For more information, see https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#op._S_jsonSchema[$jsonSchema]. 
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping.adoc new file mode 100644 index 0000000000..d76266c36a --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/mapping/mapping.adoc @@ -0,0 +1,717 @@ +[[mapping-chapter]] += Object Mapping + +Rich mapping support is provided by the `MappingMongoConverter`. +The converter holds a metadata model that provides a full feature set to map domain objects to MongoDB documents. +The mapping metadata model is populated by using annotations on your domain objects. +However, the infrastructure is not limited to using annotations as the only source of metadata information. +The `MappingMongoConverter` also lets you map objects to documents without providing any additional metadata, by following a set of conventions. + +This section describes the features of the `MappingMongoConverter`, including fundamentals, how to use conventions for mapping objects to documents and how to override those conventions with annotation-based mapping metadata. + +include::{commons}@data-commons::page$object-mapping.adoc[leveloffset=+1] + +[[mapping-conventions]] +== Convention-based Mapping + +`MappingMongoConverter` has a few conventions for mapping objects to documents when no additional mapping metadata is provided. +The conventions are: + +* The short Java class name is mapped to the collection name in the following manner. +The class `com.bigbank.SavingsAccount` maps to the `savingsAccount` collection name. +* All nested objects are stored as nested objects in the document and *not* as DBRefs. +* The converter uses any Spring Converters registered with it to override the default mapping of object properties to document fields and values. +* The fields of an object are used to convert to and from fields in the document. +Public `JavaBean` properties are not used. 
* If you have a single non-zero-argument constructor whose constructor argument names match top-level field names of the document, that constructor is used. Otherwise, the zero-argument constructor is used. If there is more than one non-zero-argument constructor, an exception will be thrown.
+If you specify a value for `id` in your application, the conversion to an ObjectId is done by the MongoDB driver. +If the specified `id` value cannot be converted to an ObjectId, then the value will be stored as is in the document's `_id` field. +This also applies if the field is annotated with `@Id`. +* If a field is annotated with `@MongoId` in the Java class it will be converted to and stored as using its actual type. +No further conversion happens unless `@MongoId` declares a desired field type. +If no value is provided for the `id` field, a new `ObjectId` will be created and converted to the properties type. +* If a field is annotated with `@MongoId(FieldType.…)` in the Java class it will be attempted to convert the value to the declared `FieldType`. +If no value is provided for the `id` field, a new `ObjectId` will be created and converted to the declared type. +* If a field named `id` is not declared as a String, BigInteger, or ObjectID in the Java class then you should assign it a value in your application so it can be stored 'as-is' in the document's `_id` field. +* If no field named `id` is present in the Java class then an implicit `_id` file will be generated by the driver but not mapped to a property or field of the Java class. + +When querying and updating `MongoTemplate` will use the converter to handle conversions of the `Query` and `Update` objects that correspond to the above rules for saving documents so field names and types used in your queries will be able to match what is in your domain classes. + +[[mapping-conversion]] +== Data Mapping and Type Conversion + +Spring Data MongoDB supports all types that can be represented as BSON, MongoDB's internal document format. +In addition to these types, Spring Data MongoDB provides a set of built-in converters to map additional types. +You can provide your own converters to adjust type conversion. 
+See xref:mongodb/mapping/custom-conversions.adoc[Custom Conversions - Overriding Default Mapping] for further details. + +.Built in Type conversions: +[%collapsible] +==== +[cols="3,1,6",options="header"] +.Type +|=== +| Type +| Type conversion +| Sample + +| `String` +| native +| `{"firstname" : "Dave"}` + +| `double`, `Double`, `float`, `Float` +| native +| `{"weight" : 42.5}` + +| `int`, `Integer`, `short`, `Short` +| native + +32-bit integer +| `{"height" : 42}` + +| `long`, `Long` +| native + +64-bit integer +| `{"height" : 42}` + +| `Date`, `Timestamp` +| native +| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}` + +| `byte[]` +| native +| `{"bin" : { "$binary" : "AQIDBA==", "$type" : "00" }}` + +| `java.util.UUID` (Legacy UUID) +| native +| `{"uuid" : { "$binary" : "MEaf1CFQ6lSphaa3b9AtlA==", "$type" : "03" }}` + +| `Date` +| native +| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}` + +| `ObjectId` +| native +| `{"_id" : ObjectId("5707a2690364aba3136ab870")}` + +| Array, `List`, `BasicDBList` +| native +| `{"cookies" : [ … ]}` + +| `boolean`, `Boolean` +| native +| `{"active" : true}` + +| `null` +| native +| `{"value" : null}` + +| `Document` +| native +| `{"value" : { … }}` + +| `Decimal128` +| native +| `{"value" : NumberDecimal(…)}` + +| `AtomicInteger` + +calling `get()` before the actual conversion +| converter + +32-bit integer +| `{"value" : "741" }` + +| `AtomicLong` + +calling `get()` before the actual conversion +| converter + +64-bit integer +| `{"value" : "741" }` + +| `BigInteger` +| converter + +`NumberDecimal`, `String` +| `{"value" : NumberDecimal(741) }`, `{"value" : "741" }` + +| `BigDecimal` +| converter + +`NumberDecimal`, `String` +| `{"value" : NumberDecimal(741.99) }`, `{"value" : "741.99" }` + +| `URL` +| converter +| `{"website" : "https://spring.io/projects/spring-data-mongodb/" }` + +| `Locale` +| converter +| `{"locale : "en_US" }` + +| `char`, `Character` +| converter +| `{"char" : "a" }` + +| `NamedMongoScript` +| converter + 
+`Code` +| `{"_id" : "script name", value: (some javascript code)`} + +| `java.util.Currency` +| converter +| `{"currencyCode" : "EUR"}` + +| `Instant` + +(Java 8) +| native +| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}` + +| `Instant` + +(Joda, JSR310-BackPort) +| converter +| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}` + +| `LocalDate` + +(Joda, Java 8, JSR310-BackPort) +| converter / native (Java8)footnote:[Uses UTC zone offset. Configure via xref:mongodb/mapping/mapping.adoc#mapping-configuration[MongoConverterConfigurationAdapter]] +| `{"date" : ISODate("2019-11-12T00:00:00.000Z")}` + +| `LocalDateTime`, `LocalTime` + +(Joda, Java 8, JSR310-BackPort) +| converter / native (Java8)footnote:[Uses UTC zone offset. Configure via xref:mongodb/mapping/mapping.adoc#mapping-configuration[MongoConverterConfigurationAdapter]] +| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}` + +| `DateTime` (Joda) +| converter +| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}` + +| `ZoneId` (Java 8, JSR310-BackPort) +| converter +| `{"zoneId" : "ECT - Europe/Paris"}` + +| `Box` +| converter +| `{"box" : { "first" : { "x" : 1.0 , "y" : 2.0} , "second" : { "x" : 3.0 , "y" : 4.0}}` + +| `Polygon` +| converter +| `{"polygon" : { "points" : [ { "x" : 1.0 , "y" : 2.0} , { "x" : 3.0 , "y" : 4.0} , { "x" : 4.0 , "y" : 5.0}]}}` + +| `Circle` +| converter +| `{"circle" : { "center" : { "x" : 1.0 , "y" : 2.0} , "radius" : 3.0 , "metric" : "NEUTRAL"}}` + +| `Point` +| converter +| `{"point" : { "x" : 1.0 , "y" : 2.0}}` + +| `GeoJsonPoint` +| converter +| `{"point" : { "type" : "Point" , "coordinates" : [3.0 , 4.0] }}` + +| `GeoJsonMultiPoint` +| converter +| `{"geoJsonLineString" : {"type":"MultiPoint", "coordinates": [ [ 0 , 0 ], [ 0 , 1 ], [ 1 , 1 ] ] }}` + +| `Sphere` +| converter +| `{"sphere" : { "center" : { "x" : 1.0 , "y" : 2.0} , "radius" : 3.0 , "metric" : "NEUTRAL"}}` + +| `GeoJsonPolygon` +| converter +| `{"polygon" : { "type" : "Polygon", "coordinates" : [[ [ 0 , 0 ], [ 3 , 
6 ], [ 6 , 1 ], [ 0 , 0 ] ]] }}` + +| `GeoJsonMultiPolygon` +| converter +| `{"geoJsonMultiPolygon" : { "type" : "MultiPolygon", "coordinates" : [ +[ [ [ -73.958 , 40.8003 ] , [ -73.9498 , 40.7968 ] ] ], +[ [ [ -73.973 , 40.7648 ] , [ -73.9588 , 40.8003 ] ] ] +] }}` + +| `GeoJsonLineString` +| converter +| `{ "geoJsonLineString" : { "type" : "LineString", "coordinates" : [ [ 40 , 5 ], [ 41 , 6 ] ] }}` + +| `GeoJsonMultiLineString` +| converter +| `{"geoJsonLineString" : { "type" : "MultiLineString", coordinates: [ +[ [ -73.97162 , 40.78205 ], [ -73.96374 , 40.77715 ] ], +[ [ -73.97880 , 40.77247 ], [ -73.97036 , 40.76811 ] ] +] }}` +|=== +==== + +.Collection Handling +[NOTE] +==== +Collection handling depends on the actual values returned by MongoDB. + +* If a document does **not** contain a field mapped to a collection, the mapping will not update the property. +Which means the value will remain `null`, a java default or any value set during object creation. +* If a document contains a field to be mapped, but the field holds a `null` value (like: `{ 'list' : null }`), the property value is set to `null`. +* If a document contains a field to be mapped to a collection which is **not** `null` (like: `{ 'list' : [ ... ] }`), the collection is populated with the mapped values. + +Generally, if you use constructor creation, then you can get hold of the value to be set. +Property population can make use of default initialization values if a property value is not being provided by a query response. +==== + +[[mapping-configuration]] +== Mapping Configuration + +Unless explicitly configured, an instance of `MappingMongoConverter` is created by default when you create a `MongoTemplate`. +You can create your own instance of the `MappingMongoConverter`. +Doing so lets you dictate where in the classpath your domain classes can be found, so that Spring Data MongoDB can extract metadata and construct indexes. 
+Also, by creating your own instance, you can register Spring converters to map specific classes to and from the database. + +You can configure the `MappingMongoConverter` as well as `com.mongodb.client.MongoClient` and MongoTemplate by using either Java-based or XML-based metadata. +The following example shows the configuration: + +[tabs] +====== +Java:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +public class MongoConfig extends AbstractMongoClientConfiguration { + + @Override + public String getDatabaseName() { + return "database"; + } + + // the following are optional + + @Override + public String getMappingBasePackage() { <1> + return "com.bigbank.domain"; + } + + @Override + void configureConverters(MongoConverterConfigurationAdapter adapter) { <2> + + adapter.registerConverter(new org.springframework.data.mongodb.test.PersonReadConverter()); + adapter.registerConverter(new org.springframework.data.mongodb.test.PersonWriteConverter()); + } + + @Bean + public LoggingEventListener mappingEventsListener() { + return new LoggingEventListener(); + } +} +---- + +<1> The mapping base package defines the root path used to scan for entities used to pre initialize the `MappingContext`. +By default the configuration classes package is used. +<2> Configure additional custom converters for specific domain types that replace the default mapping procedure for those types with your custom implementation. +==== + +XML:: ++ +[source,xml,indent=0,subs="verbatim,quotes",role="secondary"] +---- + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +---- +====== + +`AbstractMongoClientConfiguration` requires you to implement methods that define a `com.mongodb.client.MongoClient` as well as provide a database name. +`AbstractMongoClientConfiguration` also has a method named `getMappingBasePackage(…)` that you can override to tell the converter where to scan for classes annotated with the `@Document` annotation. 
+
+You can add additional converters to the converter by overriding the `customConversionsConfiguration` method.
+MongoDB's native JSR-310 support can be enabled through `MongoConverterConfigurationAdapter.useNativeDriverJavaTimeCodecs()`.
+Also shown in the preceding example is a `LoggingEventListener`, which logs `MongoMappingEvent` instances that are posted onto Spring's `ApplicationContextEvent` infrastructure.
+
+[TIP]
+====
+.Java Time Types
+We recommend using MongoDB's native JSR-310 support via `MongoConverterConfigurationAdapter.useNativeDriverJavaTimeCodecs()` as described above as it is using a `UTC`-based approach.
+The default JSR-310 support for `java.time` types inherited from Spring Data Commons uses the local machine timezone as reference and should only be used for backwards compatibility.
+====
+
+NOTE: `AbstractMongoClientConfiguration` creates a `MongoTemplate` instance and registers it with the container under the name `mongoTemplate`.
+
+The `base-package` property tells it where to scan for classes annotated with the `@org.springframework.data.mongodb.core.mapping.Document` annotation.
+
+[TIP]
+====
+If you want to rely on https://spring.io/projects/spring-boot[Spring Boot] to bootstrap Data MongoDB, but still want to override certain aspects of the configuration, you may want to expose beans of that type.
+For custom conversions you may e.g. choose to register a bean of type `MongoCustomConversions` that will be picked up by the Boot infrastructure.
+To learn more about this please make sure to read the Spring Boot https://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#data.nosql.mongodb[Reference Documentation].
+====
+
+[[mapping-usage]]
+== Metadata-based Mapping
+
+To take full advantage of the object mapping functionality inside the Spring Data MongoDB support, you should annotate your mapped objects with the `@Document` annotation.
+Although it is not necessary for the mapping framework to have this annotation (your POJOs are mapped correctly, even without any annotations), it lets the classpath scanner find and pre-process your domain objects to extract the necessary metadata. +If you do not use this annotation, your application takes a slight performance hit the first time you store a domain object, because the mapping framework needs to build up its internal metadata model so that it knows about the properties of your domain object and how to persist them. +The following example shows a domain object: + +.Example domain object +==== +[source,java] +---- +package com.mycompany.domain; + +@Document +public class Person { + + @Id + private ObjectId id; + + @Indexed + private Integer ssn; + + private String firstName; + + @Indexed + private String lastName; +} +---- +==== + +IMPORTANT: The `@Id` annotation tells the mapper which property you want to use for the MongoDB `_id` property, and the `@Indexed` annotation tells the mapping framework to call `createIndex(…)` on that property of your document, making searches faster. +Automatic index creation is only done for types annotated with `@Document`. + +WARNING: Auto index creation is **disabled** by default and needs to be enabled through the configuration (see xref:mongodb/mapping/mapping.adoc#mapping.index-creation[Index Creation]). + +[[mapping-usage-annotations]] +=== Mapping Annotation Overview + +The MappingMongoConverter can use metadata to drive the mapping of objects to documents. +The following annotations are available: + +* `@Id`: Applied at the field level to mark the field used for identity purpose. +* `@MongoId`: Applied at the field level to mark the field used for identity purpose. +Accepts an optional `FieldType` to customize id conversion. +* `@Document`: Applied at the class level to indicate this class is a candidate for mapping to the database. +You can specify the name of the collection where the data will be stored. 
+* `@DBRef`: Applied at the field to indicate it is to be stored using a com.mongodb.DBRef.
+* `@DocumentReference`: Applied at the field to indicate it is to be stored as a pointer to another document.
+This can be a single value (the _id_ by default), or a `Document` provided via a converter.
+* `@Indexed`: Applied at the field level to describe how to index the field.
+* `@CompoundIndex` (repeatable): Applied at the type level to declare Compound Indexes.
+* `@GeoSpatialIndexed`: Applied at the field level to describe how to geoindex the field.
+* `@TextIndexed`: Applied at the field level to mark the field to be included in the text index.
+* `@HashIndexed`: Applied at the field level for usage within a hashed index to partition data across a sharded cluster.
+* `@Language`: Applied at the field level to set the language override property for text index.
+* `@Transient`: By default, all fields are mapped to the document.
+This annotation excludes the field where it is applied from being stored in the database.
+Transient properties cannot be used within a persistence constructor as the converter cannot materialize a value for the constructor argument.
+* `@PersistenceConstructor`: Marks a given constructor - even a package protected one - to use when instantiating the object from the database.
+Constructor arguments are mapped by name to the key values in the retrieved Document.
+* `@Value`: This annotation is part of the Spring Framework. Within the mapping framework it can be applied to constructor arguments.
+This lets you use a Spring Expression Language statement to transform a key's value retrieved in the database before it is used to construct a domain object.
+In order to reference a property of a given document one has to use expressions like: `@Value("#root.myProperty")` where `root` refers to the root of the given document.
+* `@Field`: Applied at the field level it allows to describe the name and type of the field as it will be represented in the MongoDB BSON document thus allowing the name and type to be different than the fieldname of the class as well as the property type. +* `@Version`: Applied at field level is used for optimistic locking and checked for modification on save operations. +The initial value is `zero` (`one` for primitive types) which is bumped automatically on every update. + +The mapping metadata infrastructure is defined in a separate spring-data-commons project that is technology agnostic. +Specific subclasses are using in the MongoDB support to support annotation based metadata. +Other strategies are also possible to put in place if there is demand. + +.Here is an example of a more complex mapping +[%collapsible] +==== +[source,java] +---- +@Document +@CompoundIndex(name = "age_idx", def = "{'lastName': 1, 'age': -1}") +public class Person { + + @Id + private String id; + + @Indexed(unique = true) + private Integer ssn; + + @Field("fName") + private String firstName; + + @Indexed + private String lastName; + + private Integer age; + + @Transient + private Integer accountTotal; + + @DBRef + private List accounts; + + private T address; + + public Person(Integer ssn) { + this.ssn = ssn; + } + + @PersistenceConstructor + public Person(Integer ssn, String firstName, String lastName, Integer age, T address) { + this.ssn = ssn; + this.firstName = firstName; + this.lastName = lastName; + this.age = age; + this.address = address; + } + + public String getId() { + return id; + } + + // no setter for Id. (getter is only exposed for some unit testing) + + public Integer getSsn() { + return ssn; + } + +// other getters/setters omitted +} +---- +==== + +[TIP] +==== +`@Field(targetType=...)` can come in handy when the native MongoDB type inferred by the mapping infrastructure does not match the expected one. 
+Like for `BigDecimal`, which is represented as `String` instead of `Decimal128`, just because earlier versions of MongoDB Server did not have support for it. + +[source,java] +---- +public class Balance { + + @Field(targetType = DECIMAL128) + private BigDecimal value; + + // ... +} +---- + +You may even consider your own, custom annotation. + +[source,java] +---- +@Target(ElementType.FIELD) +@Retention(RetentionPolicy.RUNTIME) +@Field(targetType = FieldType.DECIMAL128) +public @interface Decimal128 { } + +// ... + +public class Balance { + + @Decimal128 + private BigDecimal value; + + // ... +} +---- +==== + +=== Special Field Names + +Generally speaking MongoDB uses the dot (`.`) character as a path separator for nested documents or arrays. +This means that in a query (or update statement) a key like `a.b.c` targets an object structure as outlined below: + +[source,json] +---- +{ + 'a' : { + 'b' : { + 'c' : … + } + } +} +---- + +Therefore, up until MongoDB 5.0 field names must not contain dots (`.`). + +Using a `MappingMongoConverter#setMapKeyDotReplacement` allowed circumvent some of the limitations when storing `Map` structures by substituting dots on write with another character. + +[source,java] +---- +converter.setMapKeyDotReplacement("-"); +// ... + +source.map = Map.of("key.with.dot", "value") +converter.write(source,...) // -> map : { 'key-with-dot', 'value' } +---- + +With the release of MongoDB 5.0 this restriction on `Document` field names containing special characters was lifted. +We highly recommend reading more about limitations on using dots in field names in the https://www.mongodb.com/docs/manual/core/dot-dollar-considerations/[MongoDB Reference]. + +To allow dots in `Map` structures please set `preserveMapKeys` on the `MappingMongoConverter`. + +Using `@Field` allows customizing the field name to consider dots in two ways. + +. `@Field(name = "a.b")`: The name is considered to be a path. 
+Operations expect a structure of nested objects such as `{ a : { b : … } }`. +. `@Field(name = "a.b", fieldNameType = KEY)`: The names is considered a name as-is. +Operations expect a field with the given value as `{ 'a.b' : ….. }` + +[WARNING] +==== +Due to the special nature of the dot character in both MongoDB query and update statements field names containing dots cannot be targeted directly and therefore are excluded from being used in derived query methods. +Consider the following `Item` having a `categoryId` property that is mapped to the field named `cat.id`. + +[source,java] +---- +public class Item { + + @Field(name = "cat.id", fieldNameType = KEY) + String categoryId; + + // ... +} +---- + +Its raw representation will look like + +[source,json] +---- +{ + 'cat.id' : "5b28b5e7-52c2", + ... +} +---- + +Since we cannot target the `cat.id` field directly (as this would be interpreted as a path) we need the help of the xref:mongodb/aggregation-framework.adoc#mongo.aggregation[Aggregation Framework]. + +.Query fields with a dot in its name +[source,java] +---- +template.query(Item.class) + // $expr : { $eq : [ { $getField : { input : '$$CURRENT', 'cat.id' }, '5b28b5e7-52c2' ] } + .matching(expr(ComparisonOperators.valueOf(ObjectOperators.getValueOf("value")).equalToValue("5b28b5e7-52c2"))) <1> + .all(); +---- + +<1> The mapping layer takes care of translating the property name `value` into the actual field name. +It is absolutely valid to use the target field name here as well. 
+ +.Update fields with a dot in its name +[source,java] +---- +template.update(Item.class) + .matching(where("id").is("r2d2")) + // $replaceWith: { $setField : { input: '$$CURRENT', field : 'cat.id', value : 'af29-f87f4e933f97' } } + .apply(AggregationUpdate.newUpdate(ReplaceWithOperation.replaceWithValue(ObjectOperators.setValueTo("value", "af29-f87f4e933f97")))) <1> + .first(); +---- + +<1> The mapping layer takes care of translating the property name `value` into the actual field name. +It is absolutely valid to use the target field name here as well. + +The above shows a simple example where the special field is present on the top document level. +Increased levels of nesting increase the complexity of the aggregation expression required to interact with the field. +==== + +[[mapping-custom-object-construction]] +=== Customized Object Construction + +The mapping subsystem allows the customization of the object construction by annotating a constructor with the `@PersistenceConstructor` annotation. +The values to be used for the constructor parameters are resolved in the following way: + +* If a parameter is annotated with the `@Value` annotation, the given expression is evaluated and the result is used as the parameter value. +* If the Java type has a property whose name matches the given field of the input document, then it's property information is used to select the appropriate constructor parameter to pass the input field value to. +This works only if the parameter name information is present in the java `.class` files which can be achieved by compiling the source with debug information or using the new `-parameters` command-line switch for javac in Java 8. +* Otherwise, a `MappingException` will be thrown indicating that the given constructor parameter could not be bound. 
+ +[source,java] +---- +class OrderItem { + + private @Id String id; + private int quantity; + private double unitPrice; + + OrderItem(String id, @Value("#root.qty ?: 0") int quantity, double unitPrice) { + this.id = id; + this.quantity = quantity; + this.unitPrice = unitPrice; + } + + // getters/setters ommitted +} + +Document input = new Document("id", "4711"); +input.put("unitPrice", 2.5); +input.put("qty",5); +OrderItem item = converter.read(OrderItem.class, input); +---- + +NOTE: The SpEL expression in the `@Value` annotation of the `quantity` parameter falls back to the value `0` if the given property path cannot be resolved. + +Additional examples for using the `@PersistenceConstructor` annotation can be found in the https://github.com/spring-projects/spring-data-mongodb/blob/master/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java[MappingMongoConverterUnitTests] test suite. + +[[mapping-usage-events]] +=== Mapping Framework Events + +Events are fired throughout the lifecycle of the mapping process. +This is described in the xref:mongodb/lifecycle-events.adoc[Lifecycle Events] section. + +Declaring these beans in your Spring ApplicationContext causes them to be invoked whenever the event is dispatched. diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mapping/property-converters.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mapping/property-converters.adoc new file mode 100644 index 0000000000..fed1f4c33e --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/mapping/property-converters.adoc @@ -0,0 +1,106 @@ +[[mongo.property-converters]] += Property Converters + +While xref:mongodb/mapping/custom-conversions.adoc[type-based conversion] already offers ways to influence the conversion and representation of certain types within the target store, it has limitations when only certain values or properties of a particular type should be considered for conversion. 
+Property-based converters allow configuring conversion rules on a per-property basis, either declaratively (via `@ValueConverter`) or programmatically (by registering a `PropertyValueConverter` for a specific property). + +A `PropertyValueConverter` can transform a given value into its store representation (write) and back (read) as the following listing shows. +The additional `ValueConversionContext` provides additional information, such as mapping metadata and direct `read` and `write` methods. + +.A simple PropertyValueConverter +==== +[source,java] +---- +class ReversingValueConverter implements PropertyValueConverter { + + @Override + public String read(String value, ValueConversionContext context) { + return reverse(value); + } + + @Override + public String write(String value, ValueConversionContext context) { + return reverse(value); + } +} +---- +==== + +You can obtain `PropertyValueConverter` instances from `CustomConversions#getPropertyValueConverter(…)` by delegating to `PropertyValueConversions`, typically by using a `PropertyValueConverterFactory` to provide the actual converter. +Depending on your application's needs, you can chain or decorate multiple instances of `PropertyValueConverterFactory` -- for example, to apply caching. +By default, Spring Data MongoDB uses a caching implementation that can serve types with a default constructor or enum values. +A set of predefined factories is available through the factory methods in `PropertyValueConverterFactory`. +You can use `PropertyValueConverterFactory.beanFactoryAware(…)` to obtain a `PropertyValueConverter` instance from an `ApplicationContext`. + +You can change the default behavior through `ConverterConfiguration`. 
+
+[[mongo.property-converters.declarative]]
+== Declarative Value Converter
+
+The most straightforward usage of a `PropertyValueConverter` is by annotating properties with the `@ValueConverter` annotation that defines the converter type:
+
+.Declarative PropertyValueConverter
+====
+[source,java]
+----
+class Person {
+
+    @ValueConverter(ReversingValueConverter.class)
+    String ssn;
+}
+----
+====
+
+[[mongo.property-converters.programmatic]]
+== Programmatic Value Converter Registration
+
+Programmatic registration registers `PropertyValueConverter` instances for properties within an entity model by using a `PropertyValueConverterRegistrar`, as the following example shows.
+The difference between declarative registration and programmatic registration is that programmatic registration happens entirely outside of the entity model.
+Such an approach is useful if you cannot or do not want to annotate the entity model.
+
+.Programmatic PropertyValueConverter registration
+====
+[source,java]
+----
+PropertyValueConverterRegistrar registrar = new PropertyValueConverterRegistrar();
+
+registrar.registerConverter(Address.class, "street", new PropertyValueConverter() { … }); <1>
+
+// type safe registration
+registrar.registerConverter(Person.class, Person::getSsn()) <2>
+    .writing(value -> encrypt(value))
+    .reading(value -> decrypt(value));
+----
+
+<1> Register a converter for the field identified by its name.
+<2> Type safe variant that allows to register a converter and its conversion functions.
+This method uses class proxies to determine the property.
+Make sure that neither the class nor the accessors are `final` as otherwise this approach doesn't work.
+====
+
+WARNING: Dot notation (such as `registerConverter(Person.class, "address.street", …)`) for navigating across properties into subdocuments is *not* supported when registering converters.
+
+TIP: `MongoValueConverter` offers a pre-typed `PropertyValueConverter` interface that uses `MongoConversionContext`.
+ +[[mongocustomconversions-configuration]] +== MongoCustomConversions configuration + +By default, `MongoCustomConversions` can handle declarative value converters, depending on the configured `PropertyValueConverterFactory`. +`MongoConverterConfigurationAdapter` helps to set up programmatic value conversions or define the `PropertyValueConverterFactory` to be used. + +.Configuration Sample +==== +[source,java] +---- +MongoCustomConversions.create(configurationAdapter -> { + + SimplePropertyValueConversions valueConversions = new SimplePropertyValueConversions(); + valueConversions.setConverterFactory(…); + valueConversions.setValueConverterRegistry(new PropertyValueConverterRegistrar() + .registerConverter(…) + .buildRegistry()); + + configurationAdapter.setPropertyValueConversions(valueConversions); +}); +---- +==== diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mapping/unwrapping-entities.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mapping/unwrapping-entities.adoc new file mode 100644 index 0000000000..cff702f179 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/mapping/unwrapping-entities.adoc @@ -0,0 +1,382 @@ +[[unwrapped-entities]] += Unwrapping Types + +Unwrapped entities are used to design value objects in your Java domain model whose properties are flattened out into the parent's MongoDB Document. + +[[unwrapped-entities.mapping]] +== Unwrapped Types Mapping + +Consider the following domain model where `User.name` is annotated with `@Unwrapped`. +The `@Unwrapped` annotation signals that all properties of `UserName` should be flattened out into the `user` document that owns the `name` property. 
+
+.Sample Code of unwrapping objects
+====
+[source,java]
+----
+class User {
+
+    @Id
+    String userId;
+
+    @Unwrapped(onEmpty = USE_NULL) <1>
+    UserName name;
+}
+
+class UserName {
+
+    String firstname;
+
+    String lastname;
+
+}
+----
+
+[source,json]
+----
+{
+  "_id" : "1da2ba06-3ba7",
+  "firstname" : "Emma",
+  "lastname" : "Frost"
+}
+----
+<1> When loading the `name` property its value is set to `null` if both `firstname` and `lastname` are either `null` or not present.
+By using `onEmpty=USE_EMPTY` an empty `UserName`, with potential `null` value for its properties, will be created.
+====
+
+For less verbose embeddable type declarations use `@Unwrapped.Nullable` and `@Unwrapped.Empty` instead of `@Unwrapped(onEmpty = USE_NULL)` and `@Unwrapped(onEmpty = USE_EMPTY)`.
+Both annotations are meta-annotated with JSR-305 `@javax.annotation.Nonnull` to aid with nullability inspections.
+
+[WARNING]
+====
+It is possible to use complex types within an unwrapped object.
+However, those must not be, nor contain unwrapped fields themselves.
+====
+
+[[unwrapped-entities.mapping.field-names]]
+== Unwrapped Types field names
+
+A value object can be unwrapped multiple times by using the optional `prefix` attribute of the `@Unwrapped` annotation.
+By doing so the chosen prefix is prepended to each property or `@Field("…")` name in the unwrapped object.
+Please note that values will overwrite each other if multiple properties render to the same field name.
+ +.Sample Code of unwrapped object with name prefix +==== +[source,java] +---- +class User { + + @Id + String userId; + + @Unwrapped.Nullable(prefix = "u_") <1> + UserName name; + + @Unwrapped.Nullable(prefix = "a_") <2> + UserName name; +} + +class UserName { + + String firstname; + + String lastname; +} +---- + +[source,json] +---- +{ + "_id" : "a6a805bd-f95f", + "u_firstname" : "Jean", <1> + "u_lastname" : "Grey", + "a_firstname" : "Something", <2> + "a_lastname" : "Else" +} +---- +<1> All properties of `UserName` are prefixed with `u_`. +<2> All properties of `UserName` are prefixed with `a_`. +==== + +While combining the `@Field` annotation with `@Unwrapped` on the very same property does not make sense and therefore leads to an error. +It is a totally valid approach to use `@Field` on any of the unwrapped types properties. + +.Sample Code unwrapping objects with `@Field` annotation +==== +[source,java] +---- +public class User { + + @Id + private String userId; + + @Unwrapped.Nullable(prefix = "u-") <1> + UserName name; +} + +public class UserName { + + @Field("first-name") <2> + private String firstname; + + @Field("last-name") + private String lastname; +} +---- + +[source,json] +---- +{ + "_id" : "2647f7b9-89da", + "u-first-name" : "Barbara", <2> + "u-last-name" : "Gordon" +} +---- +<1> All properties of `UserName` are prefixed with `u-`. +<2> Final field names are a result of concatenating `@Unwrapped(prefix)` and `@Field(name)`. +==== + +[[unwrapped-entities.queries]] +== Query on Unwrapped Objects + +Defining queries on unwrapped properties is possible on type- as well as field-level as the provided `Criteria` is matched against the domain type. +Prefixes and potential custom field names will be considered when rendering the actual query. +Use the property name of the unwrapped object to match against all contained fields as shown in the sample below. 
+ +.Query on unwrapped object +==== +[source,java] +---- +UserName userName = new UserName("Carol", "Danvers") +Query findByUserName = query(where("name").is(userName)); +User user = template.findOne(findByUserName, User.class); +---- + +[source,json] +---- +db.collection.find({ + "firstname" : "Carol", + "lastname" : "Danvers" +}) +---- +==== + +It is also possible to address any field of the unwrapped object directly using its property name as shown in the snippet below. + +.Query on field of unwrapped object +==== +[source,java] +---- +Query findByUserFirstName = query(where("name.firstname").is("Shuri")); +List users = template.findAll(findByUserFirstName, User.class); +---- + +[source,json] +---- +db.collection.find({ + "firstname" : "Shuri" +}) +---- +==== + +[[unwrapped-entities.queries.sort]] +=== Sort by unwrapped field. + +Fields of unwrapped objects can be used for sorting via their property path as shown in the sample below. + +.Sort on unwrapped field +==== +[source,java] +---- +Query findByUserLastName = query(where("name.lastname").is("Romanoff")); +List user = template.findAll(findByUserName.withSort(Sort.by("name.firstname")), User.class); +---- + +[source,json] +---- +db.collection.find({ + "lastname" : "Romanoff" +}).sort({ "firstname" : 1 }) +---- +==== + +[NOTE] +==== +Though possible, using the unwrapped object itself as sort criteria includes all of its fields in unpredictable order and may result in inaccurate ordering. +==== + +[[unwrapped-entities.queries.project]] +=== Field projection on unwrapped objects + +Fields of unwrapped objects can be subject for projection either as a whole or via single fields as shown in the samples below. + +.Project on unwrapped object. 
+==== +[source,java] +---- +Query findByUserLastName = query(where("name.firstname").is("Gamora")); +findByUserLastName.fields().include("name"); <1> +List user = template.findAll(findByUserName, User.class); +---- + +[source,json] +---- +db.collection.find({ + "lastname" : "Gamora" +}, +{ + "firstname" : 1, + "lastname" : 1 +}) +---- +<1> A field projection on an unwrapped object includes all of its properties. +==== + +.Project on a field of an unwrapped object. +==== +[source,java] +---- +Query findByUserLastName = query(where("name.lastname").is("Smoak")); +findByUserLastName.fields().include("name.firstname"); <1> +List user = template.findAll(findByUserName, User.class); +---- + +[source,json] +---- +db.collection.find({ + "lastname" : "Smoak" +}, +{ + "firstname" : 1 +}) +---- +<1> A field projection on an unwrapped object includes all of its properties. +==== + +[[unwrapped-entities.queries.by-example]] +=== Query By Example on unwrapped object. + +Unwrapped objects can be used within an `Example` probe just as any other type. +Please review the xref:mongodb/template-query-operations.adoc#mongo.query-by-example[Query By Example] section, to learn more about this feature. + +[[unwrapped-entities.queries.repository]] +=== Repository Queries on unwrapped objects. + +The `Repository` abstraction allows deriving queries on fields of unwrapped objects as well as the entire object. + +.Repository queries on unwrapped objects. +==== +[source,java] +---- +interface UserRepository extends CrudRepository { + + List findByName(UserName username); <1> + + List findByNameFirstname(String firstname); <2> +} +---- +<1> Matches against all fields of the unwrapped object. +<2> Matches against the `firstname`. +==== + +[NOTE] +==== +Index creation for unwrapped objects is suspended even if the repository `create-query-indexes` namespace attribute is set to `true`. 
+==== + +[[unwrapped-entities.update]] +== Update on Unwrapped Objects + +Unwrapped objects can be updated as any other object that is part of the domain model. +The mapping layer takes care of flattening structures into their surroundings. +It is possible to update single attributes of the unwrapped object as well as the entire value as shown in the examples below. + +.Update a single field of an unwrapped object. +==== +[source,java] +---- +Update update = new Update().set("name.firstname", "Janet"); +template.update(User.class).matching(where("id").is("Wasp")) + .apply(update).first() +---- + +[source,json] +---- +db.collection.update({ + "_id" : "Wasp" +}, +{ + "$set" { "firstname" : "Janet" } +}, +{ ... } +) +---- +==== + +.Update an unwrapped object. +==== +[source,java] +---- +Update update = new Update().set("name", new Name("Janet", "van Dyne")); +template.update(User.class).matching(where("id").is("Wasp")) + .apply(update).first() +---- + +[source,json] +---- +db.collection.update({ + "_id" : "Wasp" +}, +{ + "$set" { + "firstname" : "Janet", + "lastname" : "van Dyne", + } +}, +{ ... } +) +---- +==== + +[[unwrapped-entities.aggregations]] +== Aggregations on Unwrapped Objects + +The xref:mongodb/aggregation-framework.adoc[Aggregation Framework] will attempt to map unwrapped values of typed aggregations. +Please make sure to work with the property path including the wrapper object when referencing one of its values. +Other than that no special action is required. + +[[unwrapped-entities.indexes]] +== Index on Unwrapped Objects + +It is possible to attach the `@Indexed` annotation to properties of an unwrapped type just as it is done with regular objects. +It is not possible to use `@Indexed` along with the `@Unwrapped` annotation on the owning property. 
+
+====
+[source,java]
+----
+public class User {
+
+  @Id
+  private String userId;
+
+  @Unwrapped(onEmpty = USE_NULL)
+  UserName name; <1>
+
+  // Invalid -> InvalidDataAccessApiUsageException
+  @Indexed <2>
+  @Unwrapped(onEmpty = USE_EMPTY)
+  Address address;
+}
+
+public class UserName {
+
+  private String firstname;
+
+  @Indexed
+  private String lastname; <1>
+}
+----
+<1> Index created for `lastname` in `users` collection.
+<2> Invalid `@Indexed` usage along with `@Unwrapped`
+====
+
+
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mongo-encryption.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mongo-encryption.adoc
new file mode 100644
index 0000000000..14e866cf14
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mongo-encryption.adoc
@@ -0,0 +1,313 @@
+[[mongo.encryption]]
+= Encryption
+
+Client Side Encryption is a feature that encrypts data in your application before it is sent to MongoDB.
+We recommend you get familiar with the concepts, ideally from the https://www.mongodb.com/docs/manual/core/security-in-use-encryption/[MongoDB Documentation] to learn more about its capabilities and restrictions before you continue applying Encryption through Spring Data.
+
+[NOTE]
+====
+Make sure to set the driver's `com.mongodb.AutoEncryptionSettings` to use client-side encryption.
+MongoDB does not support encryption for all field types.
+Specific data types require deterministic encryption to preserve equality comparison functionality.
+====
+
+== Client Side Field Level Encryption (CSFLE)
+
+Choosing CSFLE gives you full flexibility and allows you to use different keys for a single field, e.g. in a one-key-per-tenant scenario.
+
+Please make sure to consult the https://www.mongodb.com/docs/manual/core/csfle/[MongoDB CSFLE Documentation] before you continue reading.
+
+[[mongo.encryption.automatic]]
+=== Automatic Encryption (CSFLE)
+
+MongoDB supports https://www.mongodb.com/docs/manual/core/csfle/[Client-Side Field Level Encryption] out of the box using the MongoDB driver with its Automatic Encryption feature.
+Automatic Encryption requires a xref:mongodb/mapping/mapping-schema.adoc[JSON Schema] that allows to perform encrypted read and write operations without the need to provide an explicit en-/decryption step.
+
+Please refer to the xref:mongodb/mapping/mapping-schema.adoc#mongo.jsonSchema.encrypted-fields[JSON Schema] section for more information on defining a JSON Schema that holds encryption information.
+
+To make use of the `MongoJsonSchema` it needs to be combined with `AutoEncryptionSettings` which can be done e.g. via a `MongoClientSettingsBuilderCustomizer`.
+
+[source,java]
+----
+@Bean
+MongoClientSettingsBuilderCustomizer customizer(MappingContext mappingContext) {
+    return (builder) -> {
+
+        // ... keyVaultCollection, kmsProvider, ...
+
+        MongoJsonSchemaCreator schemaCreator = MongoJsonSchemaCreator.create(mappingContext);
+        MongoJsonSchema patientSchema = schemaCreator
+            .filter(MongoJsonSchemaCreator.encryptedOnly())
+            .createSchemaFor(Patient.class);
+
+        AutoEncryptionSettings autoEncryptionSettings = AutoEncryptionSettings.builder()
+            .keyVaultNamespace(keyVaultCollection)
+            .kmsProviders(kmsProviders)
+            .extraOptions(extraOpts)
+            .schemaMap(Collections.singletonMap("db.patient", patientSchema.schemaDocument().toBsonDocument()))
+            .build();
+
+        builder.autoEncryptionSettings(autoEncryptionSettings);
+    };
+}
+----
+
+[[mongo.encryption.explicit]]
+=== Explicit Encryption (CSFLE)
+
+Explicit encryption uses the MongoDB driver's encryption library (`org.mongodb:mongodb-crypt`) to perform encryption and decryption tasks.
+The `@ExplicitEncrypted` annotation is a combination of the `@Encrypted` annotation used for xref:mongodb/mapping/mapping-schema.adoc#mongo.jsonSchema.encrypted-fields[JSON Schema creation] and a xref:mongodb/mapping/property-converters.adoc[Property Converter]. +In other words, `@ExplicitEncrypted` uses existing building blocks to combine them for simplified explicit encryption support. + +[NOTE] +==== +Fields annotated with `@ExplicitEncrypted` are always encrypted as whole. +Consider the following example: + +[source,java] +---- +@ExplicitEncrypted(…) +String simpleValue; <1> + +@ExplicitEncrypted(…) +Address address; <2> + +@ExplicitEncrypted(…) +List<...> list; <3> + +@ExplicitEncrypted(…) +Map<..., ...> mapOfString; <4> +---- + +<1> Encrypts the value of the simple type such as a `String` if not `null`. +<2> Encrypts the entire `Address` object and all its nested fields as `Document`. +To only encrypt parts of the `Address`, like `Address#street` the `street` field within `Address` needs to be annotated with `@ExplicitEncrypted`. +<3> ``Collection``-like fields are encrypted as single value and not per entry. +<4> ``Map``-like fields are encrypted as single value and not as a key/value entry. +==== + +Client-Side Field Level Encryption allows you to choose between a deterministic and a randomized algorithm. Depending on the https://www.mongodb.com/docs/v5.0/reference/security-client-side-automatic-json-schema/#std-label-field-level-encryption-json-schema/[chosen algorithm], https://www.mongodb.com/docs/manual/core/csfle/reference/supported-operations/[different operations] may be supported. +To pick a certain algorithm use `@ExplicitEncrypted(algorithm)`, see `EncryptionAlgorithms` for algorithm constants. +Please read the https://www.mongodb.com/docs/manual/core/csfle/fundamentals/encryption-algorithms[Encryption Types] manual for more information on algorithms and their usage. + +To perform the actual encryption we require a Data Encryption Key (DEK). 
+Please refer to the https://www.mongodb.com/docs/manual/core/csfle/quick-start/#create-a-data-encryption-key[MongoDB Documentation] for more information on how to set up key management and create a Data Encryption Key. +The DEK can be referenced directly via its `id` or a defined _alternative name_. +The `@EncryptedField` annotation only allows referencing a DEK via an alternative name. +It is possible to provide an `EncryptionKeyResolver`, which will be discussed later, to any DEK. + +.Reference the Data Encryption Key +==== +[source,java] +---- +@EncryptedField(algorithm=…, altKeyName = "secret-key") <1> +String ssn; +---- + +[source,java] +---- +@EncryptedField(algorithm=…, altKeyName = "/name") <2> +String ssn; +---- + +<1> Use the DEK stored with the alternative name `secret-key`. +<2> Uses a field reference that will read the actual field value and use that for key lookup. +Always requires the full document to be present for save operations. +Fields cannot be used in queries/aggregations. +==== + +By default, the `@ExplicitEncrypted(value=…)` attribute references a `MongoEncryptionConverter`. +It is possible to change the default implementation and exchange it with any `PropertyValueConverter` implementation by providing the according type reference. +To learn more about custom `PropertyValueConverters` and the required configuration, please refer to the xref:mongodb/mapping/property-converters.adoc[Property Converters - Mapping specific fields] section. + +[[mongo.encryption.queryable]] +== Queryable Encryption (QE) + +Choosing QE enables you to run different types of queries, like _range_ or _equality_, against encrypted fields. + +Please make sure to consult the https://www.mongodb.com/docs/manual/core/queryable-encryption/[MongoDB QE Documentation] before you continue reading to learn more about QE features and limitations. 
+ +=== Collection Setup + +Queryable Encryption requires upfront declaration of certain aspects allowed within an actual query against an encrypted field. +The information covers the algorithm in use as well as allowed query types along with their attributes and must be provided when creating the collection. + +`MongoOperations#createCollection(...)` can be used to do the initial setup for collections utilizing QE. +The configuration for QE via Spring Data uses the same building blocks (a xref:mongodb/mapping/mapping-schema.adoc#mongo.jsonSchema.encrypted-fields[JSON Schema creation]) as CSFLE, converting the schema/properties into the configuration format required by MongoDB. + +[tabs] +====== +Manual Collection Setup:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +CollectionOptions collectionOptions = CollectionOptions.encryptedCollection(options -> options + .queryable(encrypted(string("ssn")).algorithm("Indexed"), equality().contention(0)) + .queryable(encrypted(int32("age")).algorithm("Range"), range().contention(8).min(0).max(150)) + .queryable(encrypted(int64("address.sign")).algorithm("Range"), range().contention(2).min(-10L).max(10L)) +); + +mongoTemplate.createCollection(Patient.class, collectionOptions); <1> +---- +<1> Using the template to create the collection may prevent capturing generated keyIds. In this case render the `Document` from the options and use the `createEncryptedCollection(...)` method via the encryption library. 
+==== + +Derived Collection Setup:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +class Patient { + + @Id String id; + + @Encrypted(algorithm = "Indexed") // + @Queryable(queryType = "equality", contentionFactor = 0) + String ssn; + + @RangeEncrypted(contentionFactor = 8, rangeOptions = "{ 'min' : 0, 'max' : 150 }") + Integer age; + + Address address; +} + +MongoJsonSchema patientSchema = MongoJsonSchemaCreator.create(mappingContext) + .filter(MongoJsonSchemaCreator.encryptedOnly()) + .createSchemaFor(Patient.class); + +CollectionOptions collectionOptions = CollectionOptions.encryptedCollection(patientSchema); + +mongoTemplate.createCollection(Patient.class, collectionOptions); <1> +---- +<1> Using the template to create the collection may prevent capturing generated keyIds. In this case render the `Document` from the options and use the `createEncryptedCollection(...)` method via the encryption library. + +The `Queryable` annotation allows to define allowed query types for encrypted fields. +`@RangeEncrypted` is a combination of `@Encrypted` and `@Queryable` for fields allowing `range` queries. +It is possible to create custom annotations out of the provided ones. 
+====
+
+MongoDB Collection Info::
++
+====
+[source,json,indent=0,subs="verbatim,quotes",role="third"]
+----
+{
+  name: 'patient',
+  type: 'collection',
+  options: {
+    encryptedFields: {
+      escCollection: 'enxcol_.test.esc',
+      ecocCollection: 'enxcol_.test.ecoc',
+      fields: [
+        {
+          keyId: ...,
+          path: 'ssn',
+          bsonType: 'string',
+          queries: [ { queryType: 'equality', contention: Long('0') } ]
+        },
+        {
+          keyId: ...,
+          path: 'age',
+          bsonType: 'int',
+          queries: [ { queryType: 'range', contention: Long('8'), min: 0, max: 150 } ]
+        },
+        {
+          keyId: ...,
+          path: 'address.sign',
+          bsonType: 'long',
+          queries: [ { queryType: 'range', contention: Long('2'), min: Long('-10'), max: Long('10') } ]
+        }
+      ]
+    }
+  }
+}
+----
+====
+======
+
+[NOTE]
+====
+- It is not possible to use both QE and CSFLE within the same collection.
+- It is not possible to query a `range` indexed field with an `equality` operator.
+- It is not possible to query an `equality` indexed field with a `range` operator.
+- It is not possible to set `bypassAutoEncryption(true)`.
+- It is not possible to use self maintained encryption keys via `@Encrypted` in combination with Queryable Encryption.
+- Contention is only optional on the server side; the client requires you to set the value (default is `8`).
+- Additional options such as `min` and `max` need to match the actual field type. Make sure to use `$numberLong` etc. to ensure target types when parsing bson String.
+- Queryable Encryption will add an extra field `__safeContent__` to each of your documents.
+Unless explicitly excluded the field will be loaded into memory when retrieving results.
+====
+
+[[mongo.encryption.queryable.automatic]]
+=== Automatic Encryption (QE)
+
+MongoDB supports Queryable Encryption out of the box using the MongoDB driver with its Automatic Encryption feature.
+Automatic Encryption requires a xref:mongodb/mapping/mapping-schema.adoc[JSON Schema] that allows to perform encrypted read and write operations without the need to provide an explicit en-/decryption step. + +All you need to do is create the collection according to the MongoDB documentation. +You may utilize techniques to create the required configuration outlined in the section above. + +[[mongo.encryption.queryable.manual]] +=== Explicit Encryption (QE) + +Explicit encryption uses the MongoDB driver's encryption library (`org.mongodb:mongodb-crypt`) to perform encryption and decryption tasks based on the meta information provided by annotation within the domain model. + +[NOTE] +==== +There is no official support for using Explicit Queryable Encryption. +The audacious user may combine `@Encrypted` and `@Queryable` with `@ValueConverter(MongoEncryptionConverter.class)` at their own risk. +==== + +[[mongo.encryption.explicit-setup]] +[[mongo.encryption.converter-setup]] +== MongoEncryptionConverter Setup + +The converter setup for `MongoEncryptionConverter` requires a few steps as several components are involved. +The bean setup consists of the following: + +1. The `ClientEncryption` engine +2. A `MongoEncryptionConverter` instance configured with `ClientEncryption` and a `EncryptionKeyResolver`. +3. A `PropertyValueConverterFactory` that uses the registered `MongoEncryptionConverter` bean. + +The `EncryptionKeyResolver` uses an `EncryptionContext` providing access to the property allowing for dynamic DEK resolution. 
+ +.Sample MongoEncryptionConverter Configuration +==== +[source,java] +---- +class Config extends AbstractMongoClientConfiguration { + + @Autowired ApplicationContext appContext; + + @Bean + ClientEncryption clientEncryption() { <1> + ClientEncryptionSettings encryptionSettings = ClientEncryptionSettings.builder(); + // … + + return ClientEncryptions.create(encryptionSettings); + } + + @Bean + MongoEncryptionConverter encryptingConverter(ClientEncryption clientEncryption) { + + Encryption encryption = MongoClientEncryption.just(clientEncryption); + EncryptionKeyResolver keyResolver = EncryptionKeyResolver.annotated((ctx) -> …); <2> + + return new MongoEncryptionConverter(encryption, keyResolver); <3> + } + + @Override + protected void configureConverters(MongoConverterConfigurationAdapter adapter) { + + adapter + .registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(appContext)); <4> + } +} +---- + +<1> Set up a `Encryption` engine using `com.mongodb.client.vault.ClientEncryption`. +The instance is stateful and must be closed after usage. +Spring takes care of this because `ClientEncryption` is ``Closeable``. +<2> Set up an annotation-based `EncryptionKeyResolver` to determine the `EncryptionKey` from annotations. +<3> Create the `MongoEncryptionConverter`. +<4> Enable for a `PropertyValueConverter` lookup from the `BeanFactory`. +==== diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mongo-group.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mongo-group.adoc new file mode 100644 index 0000000000..e8265b9837 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/mongo-group.adoc @@ -0,0 +1,97 @@ +[[mongo.group]] += Group Operations + +As an alternative to using Map-Reduce to perform data aggregation, you can use the https://www.mongodb.org/display/DOCS/Aggregation#Aggregation-Group[`group` operation] which feels similar to using SQL's group by query style, so it may feel more approachable vs. using Map-Reduce. 
Using the group operations does have some limitations, for example it is not supported in a sharded environment and it returns the full result set in a single BSON object, so the result should be small, less than 10,000 keys.
+
+Spring provides integration with MongoDB's group operation by providing methods on MongoOperations to simplify the creation and running of group operations. It can convert the results of the group operation to a POJO and also integrates with Spring's https://docs.spring.io/spring-framework/docs/{springVersion}/reference/html/core.html#resources[Resource abstraction]. This will let you place your JavaScript files on the file system, classpath, http server or any other Spring Resource implementation and then reference the JavaScript resources via an easy URI style syntax, e.g. 'classpath:reduce.js'. Externalizing JavaScript code in files is often preferable to embedding them as Java strings in your code. Note that you can still pass JavaScript code as Java strings if you prefer.
+
+[[mongo.group.example]]
+== Example Usage
+
+In order to understand how group operations work the following example is used, which is somewhat artificial. For a more realistic example consult the book 'MongoDB - The definitive guide'. A collection named `group_test_collection` was created with the following rows.
+
+[source]
+----
+{ "_id" : ObjectId("4ec1d25d41421e2015da64f1"), "x" : 1 }
+{ "_id" : ObjectId("4ec1d25d41421e2015da64f2"), "x" : 1 }
+{ "_id" : ObjectId("4ec1d25d41421e2015da64f3"), "x" : 2 }
+{ "_id" : ObjectId("4ec1d25d41421e2015da64f4"), "x" : 3 }
+{ "_id" : ObjectId("4ec1d25d41421e2015da64f5"), "x" : 3 }
+{ "_id" : ObjectId("4ec1d25d41421e2015da64f6"), "x" : 3 }
+----
+
+We would like to group by the only field in each row, the `x` field and aggregate the number of times each specific value of `x` occurs.
To do this we need to create an initial document that contains our count variable and also a reduce function which will increment it each time it is encountered. The Java code to run the group operation is shown below
+
+[source,java]
+----
+GroupByResults results = mongoTemplate.group("group_test_collection",
+    GroupBy.key("x").initialDocument("{ count: 0 }").reduceFunction("function(doc, prev) { prev.count += 1 }"),
+    XObject.class);
+----
+
+The first argument is the name of the collection to run the group operation over, the second is a fluent API that specifies properties of the group operation via a `GroupBy` class. In this example we are using just the `initialDocument` and `reduceFunction` methods. You can also specify a key-function, as well as a finalizer as part of the fluent API. If you have multiple keys to group by, you can pass in a comma separated list of keys.
+
+The raw result of the group operation is a JSON document that looks like this
+
+[source]
+----
+{
+  "retval" : [ { "x" : 1.0 , "count" : 2.0} ,
+               { "x" : 2.0 , "count" : 1.0} ,
+               { "x" : 3.0 , "count" : 3.0} ] ,
+  "count" : 6.0 ,
+  "keys" : 3 ,
+  "ok" : 1.0
+}
+----
+
+The document under the "retval" field is mapped onto the third argument in the group method, in this case XObject which is shown below.
+
+[source,java]
+----
+public class XObject {
+
+  private float x;
+
+  private float count;
+
+
+  public float getX() {
+    return x;
+  }
+
+  public void setX(float x) {
+    this.x = x;
+  }
+
+  public float getCount() {
+    return count;
+  }
+
+  public void setCount(float count) {
+    this.count = count;
+  }
+
+  @Override
+  public String toString() {
+    return "XObject [x=" + x + " count = " + count + "]";
+  }
+}
+----
+
+You can also obtain the raw result as a `Document` by calling the method `getRawResults` on the `GroupByResults` class.
+
+There is an additional method overload of the group method on `MongoOperations` which lets you specify a `Criteria` object for selecting a subset of the rows.
An example which uses a `Criteria` object, with some syntax sugar using static imports, as well as referencing a key-function and reduce function javascript files via a Spring Resource string is shown below.
+
+[source]
+----
+import static org.springframework.data.mongodb.core.mapreduce.GroupBy.keyFunction;
+import static org.springframework.data.mongodb.core.query.Criteria.where;
+
+GroupByResults results = mongoTemplate.group(where("x").gt(0),
+    "group_test_collection",
+    keyFunction("classpath:keyFunction.js").initialDocument("{ count: 0 }").reduceFunction("classpath:groupReduce.js"), XObject.class);
+----
+
+include::aggregation-framework.adoc[]
+
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mongo-mapreduce.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mongo-mapreduce.adoc
new file mode 100644
index 0000000000..bfccec44fa
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mongo-mapreduce.adoc
@@ -0,0 +1,127 @@
+[[mongo.mapreduce]]
+= Map-Reduce Operations
+
+You can query MongoDB by using Map-Reduce, which is useful for batch processing, for data aggregation, and for when the query language does not fulfill your needs.
+
+Spring provides integration with MongoDB's Map-Reduce by providing methods on `MongoOperations` to simplify the creation and running of Map-Reduce operations. It can convert the results of a Map-Reduce operation to a POJO and integrates with Spring's link:{springDocsUrl}/core.html#resources[Resource abstraction]. This lets you place your JavaScript files on the file system, classpath, HTTP server, or any other Spring Resource implementation and then reference the JavaScript resources through an easy URI style syntax -- for example, `classpath:reduce.js`. Externalizing JavaScript code in files is often preferable to embedding them as Java strings in your code. Note that you can still pass JavaScript code as Java strings if you prefer.
+
+[[mongo.mapreduce.example]]
+== Example Usage
+
+To understand how to perform Map-Reduce operations, we use an example from the book, _MongoDB - The Definitive Guide_ footnote:[Kristina Chodorow. _MongoDB - The Definitive Guide_. O'Reilly Media, 2013]. In this example, we create three documents that have the values [a,b], [b,c], and [c,d], respectively. The values in each document are associated with the key, 'x', as the following example shows (assume these documents are in a collection named `jmr1`):
+
+[source]
+----
+{ "_id" : ObjectId("4e5ff893c0277826074ec533"), "x" : [ "a", "b" ] }
+{ "_id" : ObjectId("4e5ff893c0277826074ec534"), "x" : [ "b", "c" ] }
+{ "_id" : ObjectId("4e5ff893c0277826074ec535"), "x" : [ "c", "d" ] }
+----
+
+The following map function counts the occurrence of each letter in the array for each document:
+
+[source,java]
+----
+function () {
+    for (var i = 0; i < this.x.length; i++) {
+        emit(this.x[i], 1);
+    }
+}
+----
+
+The following reduce function sums up the occurrence of each letter across all the documents:
+
+[source,java]
+----
+function (key, values) {
+    var sum = 0;
+    for (var i = 0; i < values.length; i++)
+        sum += values[i];
+    return sum;
+}
+----
+
+Running the preceding functions results in the following collection:
+
+[source]
+----
+{ "_id" : "a", "value" : 1 }
+{ "_id" : "b", "value" : 2 }
+{ "_id" : "c", "value" : 2 }
+{ "_id" : "d", "value" : 1 }
+----
+
+Assuming that the map and reduce functions are located in `map.js` and `reduce.js` and bundled in your jar so they are available on the classpath, you can run a Map-Reduce operation as follows:
+
+[source,java]
+----
+MapReduceResults results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js", ValueObject.class);
+for (ValueObject valueObject : results) {
+  System.out.println(valueObject);
+}
+----
+
+The preceding example produces the following output:
+
+[source]
+----
+ValueObject [id=a, value=1.0]
+ValueObject [id=b, value=2.0]
+ValueObject
[id=c, value=2.0] +ValueObject [id=d, value=1.0] +---- + +The `MapReduceResults` class implements `Iterable` and provides access to the raw output and timing and count statistics.The following listing shows the `ValueObject` class: + +[source,java] +---- +public class ValueObject { + + private String id; + private float value; + + public String getId() { + return id; + } + + public float getValue() { + return value; + } + + public void setValue(float value) { + this.value = value; + } + + @Override + public String toString() { + return "ValueObject [id=" + id + ", value=" + value + "]"; + } +} +---- + +By default, the output type of `INLINE` is used so that you need not specify an output collection.To specify additional Map-Reduce options, use an overloaded method that takes an additional `MapReduceOptions` argument.The class `MapReduceOptions` has a fluent API, so adding additional options can be done in a compact syntax.The following example sets the output collection to `jmr1_out` (note that setting only the output collection assumes a default output type of `REPLACE`): + +[source,java] +---- +MapReduceResults results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js", + new MapReduceOptions().outputCollection("jmr1_out"), ValueObject.class); +---- + +There is also a static import (`import static org.springframework.data.mongodb.core.mapreduce.MapReduceOptions.options;`) that can be used to make the syntax slightly more compact, as the following example shows: + +[source,java] +---- +MapReduceResults results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js", + options().outputCollection("jmr1_out"), ValueObject.class); +---- + +You can also specify a query to reduce the set of data that is fed into the Map-Reduce operation.The following example removes the document that contains [a,b] from consideration for Map-Reduce operations: + +[source,java] +---- +Query query = new Query(where("x").ne(new String[] { 
"a", "b" })); +MapReduceResults results = mongoOperations.mapReduce(query, "jmr1", "classpath:map.js", "classpath:reduce.js", + options().outputCollection("jmr1_out"), ValueObject.class); +---- + +Note that you can specify additional limit and sort values on the query, but you cannot skip values. + diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mongo-search-indexes.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mongo-search-indexes.adoc new file mode 100644 index 0000000000..345b5dbb6c --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/mongo-search-indexes.adoc @@ -0,0 +1,122 @@ +[[mongo.search]] += MongoDB Search + +MongoDB enables users to do keyword or lexical search as well as vector search data using dedicated search indexes. + +[[mongo.search.vector]] +== Vector Search + +MongoDB Vector Search uses the `$vectorSearch` aggregation stage to run queries against specialized indexes. +Please refer to the MongoDB documentation to learn more about requirements and restrictions of `vectorSearch` indexes. + +[[mongo.search.vector.index]] +=== Managing Vector Indexes + +`SearchIndexOperationsProvider` implemented by `MongoTemplate` are the entrypoint to `SearchIndexOperations` offering various methods for managing vector indexes. + +The following snippet shows how to create a vector index for a collection + +.Create a Vector Index +[tabs] +====== +Java:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +VectorIndex index = new VectorIndex("vector_index") + .addVector("plotEmbedding"), vector -> vector.dimensions(1536).similarity(COSINE)) <1> + .addFilter("year"); <2> + +mongoTemplate.searchIndexOps(Movie.class) <3> + .createIndex(index); +---- +<1> A vector index may cover multiple vector embeddings that can be added via the `addVector` method. +<2> Vector indexes can contain additional fields to narrow down search results when running queries. 
+<3> Obtain `SearchIndexOperations` bound to the `Movie` type which is used for field name mapping. +==== + +Mongo Shell:: ++ +==== +[source,console,indent=0,subs="verbatim,quotes",role="secondary"] +---- +db.movie.createSearchIndex("movie", "vector_index", + { + "fields": [ + { + "type": "vector", + "numDimensions": 1536, + "path": "plot_embedding", <1> + "similarity": "cosine" + }, + { + "type": "filter", + "path": "year" + } + ] + } +) +---- +<1> Field name `plotEmbedding` got mapped to `plot_embedding` considering a `@Field(name = "...")` annotation. +==== +====== + +Once created, vector indexes are not immediately ready to use although the `exists` check returns `true`. +The actual status of a search index can be obtained via `SearchIndexOperations#status(...)`. +The `READY` state indicates the index is ready to accept queries. + +[[mongo.search.vector.query]] +=== Querying Vector Indexes + +Vector indexes can be queried by issuing an aggregation using a `VectorSearchOperation` via `MongoOperations` as shown in the following example + +.Query a Vector Index +[tabs] +====== +Java:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +VectorSearchOperation search = VectorSearchOperation.search("vector_index") <1> + .path("plotEmbedding") <2> + .vector( ... ) + .numCandidates(150) + .limit(10) + .withSearchScore("score"); <3> + +AggregationResults results = mongoTemplate + .aggregate(newAggregation(Movie.class, search), MovieWithSearchScore.class); +---- +<1> Provide the name of the vector index to query since a collection may hold multiple ones. +<2> The name of the path used for comparison. +<3> Optionally add the search score with given name to the result document. +==== + +Mongo Shell:: ++ +==== +[source,console,indent=0,subs="verbatim,quotes",role="secondary"] +---- +db.embedded_movies.aggregate([ + { + "$vectorSearch": { + "index": "vector_index", + "path": "plot_embedding", <1> + "queryVector": [ ... 
],
+        "numCandidates": 150,
+        "limit": 10
+      }
+  },
+  {
+    "$addFields": {
+      "score": { $meta: "vectorSearchScore" }
+    }
+  }
+])
+----
+<1> Field name `plotEmbedding` got mapped to `plot_embedding` considering a `@Field(name = "...")` annotation.
+====
+======
+
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mongo-server-side-scripts.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mongo-server-side-scripts.adoc
new file mode 100644
index 0000000000..0cac130b63
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/mongo-server-side-scripts.adoc
@@ -0,0 +1,28 @@
+[[mongo.server-side-scripts]]
+= Script Operations
+
+[WARNING]
+====
+https://docs.mongodb.com/master/release-notes/4.2-compatibility/[MongoDB 4.2] removed support for the `eval` command used
+by `ScriptOperations`.
+
+There is no replacement for the removed functionality.
+====
+
+MongoDB allows running JavaScript functions on the server by either directly sending the script or calling a stored one. `ScriptOperations` can be accessed through `MongoTemplate` and provides basic abstraction for `JavaScript` usage. The following example shows how to use the `ScriptOperations` class:
+
+====
+[source,java]
+----
+ScriptOperations scriptOps = template.scriptOps();
+
+ExecutableMongoScript echoScript = new ExecutableMongoScript("function(x) { return x; }");
+scriptOps.execute(echoScript, "directly execute script"); <1>
+
+scriptOps.register(new NamedMongoScript("echo", echoScript)); <2>
+scriptOps.call("echo", "execute script via name"); <3>
+----
+<1> Run the script directly without storing the function on server side.
+<2> Store the script using 'echo' as its name. The given name identifies the script and allows calling it later.
+<3> Run the script with name 'echo' using the provided parameters.
+==== + diff --git a/src/main/antora/modules/ROOT/pages/mongodb/repositories/cdi-integration.adoc b/src/main/antora/modules/ROOT/pages/mongodb/repositories/cdi-integration.adoc new file mode 100644 index 0000000000..06b2a42dc8 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/repositories/cdi-integration.adoc @@ -0,0 +1,38 @@ +[[mongodb.repositories.misc.cdi-integration]] += CDI Integration + +Instances of the repository interfaces are usually created by a container, and Spring is the most natural choice when working with Spring Data. +As of version 1.3.0, Spring Data MongoDB ships with a custom CDI extension that lets you use the repository abstraction in CDI environments. +The extension is part of the JAR. +To activate it, drop the Spring Data MongoDB JAR into your classpath. +You can now set up the infrastructure by implementing a CDI Producer for the `MongoTemplate`, as the following example shows: + +[source,java] +---- +class MongoTemplateProducer { + + @Produces + @ApplicationScoped + public MongoOperations createMongoTemplate() { + + MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(MongoClients.create(), "database"); + return new MongoTemplate(factory); + } +} +---- + +The Spring Data MongoDB CDI extension picks up the `MongoTemplate` available as a CDI bean and creates a proxy for a Spring Data repository whenever a bean of a repository type is requested by the container. 
+Thus, obtaining an instance of a Spring Data repository is a matter of declaring an `@Inject`-ed property, as the following example shows: + +[source,java] +---- +class RepositoryClient { + + @Inject + PersonRepository repository; + + public void businessMethod() { + List people = repository.findAll(); + } +} +---- diff --git a/src/main/antora/modules/ROOT/pages/mongodb/repositories/modifying-methods.adoc b/src/main/antora/modules/ROOT/pages/mongodb/repositories/modifying-methods.adoc new file mode 100644 index 0000000000..3d195ca0a9 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/repositories/modifying-methods.adoc @@ -0,0 +1,100 @@ +[[mongodb.repositories.queries]] += MongoDB-specific Data Manipulation Methods + +Next to the xref:mongodb/repositories/query-methods.adoc[query methods] it is possible to update data with specialized methods. + +[[mongodb.repositories.queries.update]] +== Update Methods + +You can also use the keywords in the preceding table to create queries that identify matching documents for running updates on them. +The actual update action is defined by the `@Update` annotation on the method itself, as the following listing shows. +Note that the naming schema for derived queries starts with `find`. +Using `update` (as in `updateAllByLastname(...)`) is allowed only in combination with `@Query`. + +The update is applied to *all* matching documents and it is *not* possible to limit the scope by passing in a `Page` or by using any of the <>. +The return type can be either `void` or a _numeric_ type, such as `long`, to hold the number of modified documents. 
+ +.Update Methods +==== +[source,java] +---- +public interface PersonRepository extends CrudRepository { + + @Update("{ '$inc' : { 'visits' : 1 } }") + long findAndIncrementVisitsByLastname(String lastname); <1> + + @Update("{ '$inc' : { 'visits' : ?1 } }") + void findAndIncrementVisitsByLastname(String lastname, int increment); <2> + + @Update("{ '$inc' : { 'visits' : ?#{[1]} } }") + long findAndIncrementVisitsUsingSpELByLastname(String lastname, int increment); <3> + + @Update(pipeline = {"{ '$set' : { 'visits' : { '$add' : [ '$visits', ?1 ] } } }"}) + void findAndIncrementVisitsViaPipelineByLastname(String lastname, int increment); <4> + + @Update("{ '$push' : { 'shippingAddresses' : ?1 } }") + long findAndPushShippingAddressByEmail(String email, Address address); <5> + + @Query("{ 'lastname' : ?0 }") + @Update("{ '$inc' : { 'visits' : ?1 } }") + void updateAllByLastname(String lastname, int increment); <6> +} +---- + +<1> The filter query for the update is derived from the method name. +The update is "`as is`" and does not bind any parameters. +<2> The actual increment value is defined by the `increment` method argument that is bound to the `?1` placeholder. +<3> Use the Spring Expression Language (SpEL) for parameter binding. +<4> Use the `pipeline` attribute to issue xref:mongodb/template-crud-operations.adoc#mongo-template.aggregation-update[aggregation pipeline updates]. +<5> The update may contain complex objects. +<6> Combine a xref:mongodb/repositories/repositories.adoc#mongodb.repositories.queries.json-based[string based query] with an update. +==== + +WARNING: Repository updates do not emit persistence nor mapping lifecycle events. + +[[mongodb.repositories.queries.delete]] +== Delete Methods + +The keywords in the preceding table can be used in conjunction with `delete…By` or `remove…By` to create queries that delete matching documents. 
+ +.`Delete…By` Query +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public interface PersonRepository extends MongoRepository { + + List deleteByLastname(String lastname); <1> + + Long deletePersonByLastname(String lastname); <2> + + @Nullable + Person deleteSingleByLastname(String lastname); <3> + + Optional deleteByBirthdate(Date birthdate); <4> +} +---- +<1> Using a return type of `List` retrieves and returns all matching documents before actually deleting them. +<2> A numeric return type directly removes the matching documents, returning the total number of documents removed. +<3> A single domain type result retrieves and removes the first matching document. +<4> Same as in 3 but wrapped in an `Optional` type. + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +public interface PersonRepository extends ReactiveMongoRepository { + + Flux deleteByLastname(String lastname); <1> + + Mono deletePersonByLastname(String lastname); <2> + + Mono deleteSingleByLastname(String lastname); <3> +} +---- +<1> Using a return type of `Flux` retrieves and returns all matching documents before actually deleting them. +<2> A numeric return type directly removes the matching documents, returning the total number of documents removed. +<3> A single domain type result retrieves and removes the first matching document. +====== diff --git a/src/main/antora/modules/ROOT/pages/mongodb/repositories/query-methods.adoc b/src/main/antora/modules/ROOT/pages/mongodb/repositories/query-methods.adoc new file mode 100644 index 0000000000..adb2392f04 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/repositories/query-methods.adoc @@ -0,0 +1,869 @@ +[[mongodb.repositories.queries]] += MongoDB-specific Query Methods + +Most of the data access operations you usually trigger on a repository result in a query being executed against the MongoDB databases. 
+Defining such a query is a matter of declaring a method on the repository interface, as the following example shows: + +.PersonRepository with query methods +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public interface PersonRepository extends PagingAndSortingRepository { + + List findByLastname(String lastname); <1> + + Page findByFirstname(String firstname, Pageable pageable); <2> + + Person findByShippingAddresses(Address address); <3> + + Person findFirstByLastname(String lastname); <4> + + Stream findAllBy(); <5> +} +---- +<1> The `findByLastname` method shows a query for all people with the given last name. +The query is derived by parsing the method name for constraints that can be concatenated with `And` and `Or`. +Thus, the method name results in a query expression of `{"lastname" : lastname}`. +<2> Applies pagination to a query. +You can equip your method signature with a `Pageable` parameter and let the method return a `Page` instance and Spring Data automatically pages the query accordingly. +<3> Shows that you can query based on properties that are not primitive types. +Throws `IncorrectResultSizeDataAccessException` if more than one match is found. +<4> Uses the `First` keyword to restrict the query to only the first result. +Unlike <3>, this method does not throw an exception if more than one match is found. +<5> Uses a Java 8 `Stream` that reads and converts individual elements while iterating the stream. 
+ +Reactive:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +public interface ReactivePersonRepository extends ReactiveSortingRepository { + + Flux findByFirstname(String firstname); <1> + + Flux findByFirstname(Publisher firstname); <2> + + Flux findByFirstnameOrderByLastname(String firstname, Pageable pageable); <3> + + Mono findByFirstnameAndLastname(String firstname, String lastname); <4> + + Mono findFirstByLastname(String lastname); <5> +} +---- +<1> The method shows a query for all people with the given `lastname`. The query is derived by parsing the method name for constraints that can be concatenated with `And` and `Or`. Thus, the method name results in a query expression of `{"lastname" : lastname}`. +<2> The method shows a query for all people with the given `firstname` once the `firstname` is emitted by the given `Publisher`. +<3> Use `Pageable` to pass offset and sorting parameters to the database. +<4> Find a single entity for the given criteria. It completes with `IncorrectResultSizeDataAccessException` on non-unique results. +<5> Unless <4>, the first entity is always emitted even if the query yields more result documents. + +WARNING: The `Page` return type (as in `Mono`) is not supported by reactive repositories. + +It is possible to use `Pageable` in derived finder methods, to pass on `sort`, `limit` and `offset` parameters to the query to reduce load and network traffic. +The returned `Flux` will only emit data within the declared range. + +[source,java] +---- +Pageable page = PageRequest.of(1, 10, Sort.by("lastname")); +Flux persons = repository.findByFirstnameOrderByLastname("luke", page); +---- +==== +====== + +NOTE: We do not support referring to parameters that are mapped as `DBRef` in the domain class. 
+ +.Supported keywords for query methods +[%collapsible] +==== +[cols="1,2,3",options="header"] +|=== +| Keyword +| Sample +| Logical result + +| `After` +| `findByBirthdateAfter(Date date)` +| `{"birthdate" : {"$gt" : date}}` + +| `GreaterThan` +| `findByAgeGreaterThan(int age)` +| `{"age" : {"$gt" : age}}` + +| `GreaterThanEqual` +| `findByAgeGreaterThanEqual(int age)` +| `{"age" : {"$gte" : age}}` + +| `Before` +| `findByBirthdateBefore(Date date)` +| `{"birthdate" : {"$lt" : date}}` + +| `LessThan` +| `findByAgeLessThan(int age)` +| `{"age" : {"$lt" : age}}` + +| `LessThanEqual` +| `findByAgeLessThanEqual(int age)` +| `{"age" : {"$lte" : age}}` + +| `Between` +| `findByAgeBetween(int from, int to)` + +`findByAgeBetween(Range range)` +| `{"age" : {"$gt" : from, "$lt" : to}}` + +lower / upper bounds (`$gt` / `$gte` & `$lt` / `$lte`) according to `Range` + +| `In` +| `findByAgeIn(Collection ages)` +| `{"age" : {"$in" : [ages...]}}` + +| `NotIn` +| `findByAgeNotIn(Collection ages)` +| `{"age" : {"$nin" : [ages...]}}` + +| `IsNotNull`, `NotNull` +| `findByFirstnameNotNull()` +| `{"firstname" : {"$ne" : null}}` + +| `IsNull`, `Null` +| `findByFirstnameNull()` +| `{"firstname" : null}` + +| `Like`, `StartingWith`, `EndingWith` +| `findByFirstnameLike(String name)` +| `{"firstname" : name} (name as regex)` + +| `NotLike`, `IsNotLike` +| `findByFirstnameNotLike(String name)` +| `{"firstname" : { "$not" : name }} (name as regex)` + +| `Containing` on String +| `findByFirstnameContaining(String name)` +| `{"firstname" : name} (name as regex)` + +| `NotContaining` on String +| `findByFirstnameNotContaining(String name)` +| `{"firstname" : { "$not" : name}} (name as regex)` + +| `Containing` on Collection +| `findByAddressesContaining(Address address)` +| `{"addresses" : { "$in" : address}}` + +| `NotContaining` on Collection +| `findByAddressesNotContaining(Address address)` +| `{"addresses" : { "$not" : { "$in" : address}}}` + +| `Regex` +| `findByFirstnameRegex(String 
firstname)` +| `{"firstname" : {"$regex" : firstname }}` + +| `(No keyword)` +| `findByFirstname(String name)` +| `{"firstname" : name}` + +| `Not` +| `findByFirstnameNot(String name)` +| `{"firstname" : {"$ne" : name}}` + +| `Near` +| `findByLocationNear(Point point)` +| `{"location" : {"$near" : [x,y]}}` + +| `Near` +| `findByLocationNear(Point point, Distance max)` +| `{"location" : {"$near" : [x,y], "$maxDistance" : max}}` + +| `Near` +| `findByLocationNear(Point point, Distance min, Distance max)` +| `{"location" : {"$near" : [x,y], "$minDistance" : min, "$maxDistance" : max}}` + +| `Within` +| `findByLocationWithin(Circle circle)` +| `{"location" : {"$geoWithin" : {"$center" : [ [x, y], distance]}}}` + +| `Within` +| `findByLocationWithin(Box box)` +| `{"location" : {"$geoWithin" : {"$box" : [ [x1, y1], x2, y2]}}}` + +| `IsTrue`, `True` +| `findByActiveIsTrue()` +| `{"active" : true}` + +| `IsFalse`, `False` +| `findByActiveIsFalse()` +| `{"active" : false}` + +| `Exists` +| `findByLocationExists(boolean exists)` +| `{"location" : {"$exists" : exists }}` + +| `IgnoreCase` +| `findByUsernameIgnoreCase(String username)` +| `{"username" : {"$regex" : "^username$", "$options" : "i" }}` +|=== +==== + +NOTE: If the property criterion compares a document, the order of the fields and exact equality in the document matters. + +[[mongodb.repositories.queries.geo-spatial]] +== Geo-spatial Queries + +As you saw in the preceding table of keywords, a few keywords trigger geo-spatial operations within a MongoDB query. +The `Near` keyword allows some further modification, as the next few examples show. 
+ +The following example shows how to define a `near` query that finds all persons with a given distance of a given point: + +.Advanced `Near` queries +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public interface PersonRepository extends MongoRepository { + + // { 'location' : { '$near' : [point.x, point.y], '$maxDistance' : distance}} + List findByLocationNear(Point location, Distance distance); +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +interface PersonRepository extends ReactiveMongoRepository { + + // { 'location' : { '$near' : [point.x, point.y], '$maxDistance' : distance}} + Flux findByLocationNear(Point location, Distance distance); +} +---- +====== + +Adding a `Distance` parameter to the query method allows restricting results to those within the given distance. +If the `Distance` was set up containing a `Metric`, we transparently use `$nearSphere` instead of `$code`, as the following example shows: + +.Using `Distance` with `Metrics` +==== +[source,java] +---- +Point point = new Point(43.7, 48.8); +Distance distance = new Distance(200, Metrics.KILOMETERS); +… = repository.findByLocationNear(point, distance); +// {'location' : {'$nearSphere' : [43.7, 48.8], '$maxDistance' : 0.03135711885774796}} +---- +==== + +NOTE: Reactive Geo-spatial repository queries support the domain type and `GeoResult` results within a reactive wrapper type. `GeoPage` and `GeoResults` are not supported as they contradict the deferred result approach with pre-calculating the average distance. However, you can still pass in a `Pageable` argument to page results yourself. + +Using a `Distance` with a `Metric` causes a `$nearSphere` (instead of a plain `$near`) clause to be added. +Beyond that, the actual distance gets calculated according to the `Metrics` used. + +(Note that `Metric` does not refer to metric units of measure. +It could be miles rather than kilometers. 
+Rather, `metric` refers to the concept of a system of measurement, regardless of which system you use.) + +NOTE: Using `@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE)` on the target property forces usage of the `$nearSphere` operator. + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public interface PersonRepository extends MongoRepository { + + // {'geoNear' : 'location', 'near' : [x, y] } + GeoResults findByLocationNear(Point location); + + // No metric: {'geoNear' : 'person', 'near' : [x, y], maxDistance : distance } + // Metric: {'geoNear' : 'person', 'near' : [x, y], 'maxDistance' : distance, + // 'distanceMultiplier' : metric.multiplier, 'spherical' : true } + GeoResults findByLocationNear(Point location, Distance distance); + + // Metric: {'geoNear' : 'person', 'near' : [x, y], 'minDistance' : min, + // 'maxDistance' : max, 'distanceMultiplier' : metric.multiplier, + // 'spherical' : true } + GeoResults findByLocationNear(Point location, Distance min, Distance max); + + // {'geoNear' : 'location', 'near' : [x, y] } + GeoResults findByLocationNear(Point location); +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +interface PersonRepository extends ReactiveMongoRepository { + + // {'geoNear' : 'location', 'near' : [x, y] } + Flux> findByLocationNear(Point location); + + // No metric: {'geoNear' : 'person', 'near' : [x, y], maxDistance : distance } + // Metric: {'geoNear' : 'person', 'near' : [x, y], 'maxDistance' : distance, + // 'distanceMultiplier' : metric.multiplier, 'spherical' : true } + Flux> findByLocationNear(Point location, Distance distance); + + // Metric: {'geoNear' : 'person', 'near' : [x, y], 'minDistance' : min, + // 'maxDistance' : max, 'distanceMultiplier' : metric.multiplier, + // 'spherical' : true } + Flux> findByLocationNear(Point location, Distance min, Distance max); + + // {'geoNear' : 'location', 'near' : [x, y] } + 
Flux> findByLocationNear(Point location); +} +---- +====== + +[[mongodb.repositories.queries.json-based]] +== JSON-based Query Methods and Field Restriction + +By adding the `org.springframework.data.mongodb.repository.Query` annotation to your repository query methods, you can specify a MongoDB JSON query string to use instead of having the query be derived from the method name, as the following example shows: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public interface PersonRepository extends MongoRepository { + + @Query("{ 'firstname' : ?0 }") + List findByThePersonsFirstname(String firstname); + +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +public interface PersonRepository extends ReactiveMongoRepository { + + @Query("{ 'firstname' : ?0 }") + Flux findByThePersonsFirstname(String firstname); + +} +---- +====== + +The `?0` placeholder lets you substitute the value from the method arguments into the JSON query string. + +NOTE: `String` parameter values are escaped during the binding process, which means that it is not possible to add MongoDB specific operators through the argument. 
+ +You can also use the filter property to restrict the set of properties that is mapped into the Java object, as the following example shows: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public interface PersonRepository extends MongoRepository { + + @Query(value="{ 'firstname' : ?0 }", fields="{ 'firstname' : 1, 'lastname' : 1}") + List findByThePersonsFirstname(String firstname); + +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +public interface PersonRepository extends ReactiveMongoRepository { + + @Query(value="{ 'firstname' : ?0 }", fields="{ 'firstname' : 1, 'lastname' : 1}") + Flux findByThePersonsFirstname(String firstname); + +} +---- +====== + +The query in the preceding example returns only the `firstname`, `lastname` and `Id` properties of the `Person` objects. +The `age` property, a `java.lang.Integer`, is not set and its value is therefore null. + +[[mongodb.repositories.queries.json-spel]] +== JSON-based Queries with SpEL Expressions + +Query strings and field definitions can be used together with SpEL expressions to create dynamic queries at runtime. +SpEL expressions can provide predicate values and can be used to extend predicates with subdocuments. + +Expressions expose method arguments through an array that contains all the arguments. 
+The following query uses `[0]` +to declare the predicate value for `lastname` (which is equivalent to the `?0` parameter binding): + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public interface PersonRepository extends MongoRepository { + + @Query("{'lastname': ?#{[0]} }") + List findByQueryWithExpression(String param0); +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +public interface PersonRepository extends ReactiveMongoRepository { + + @Query("{'lastname': ?#{[0]} }") + Flux findByQueryWithExpression(String param0); +} +---- +====== + +Expressions can be used to invoke functions, evaluate conditionals, and construct values. +SpEL expressions used in conjunction with JSON reveal a side-effect, because Map-like declarations inside of SpEL read like JSON, as the following example shows: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public interface PersonRepository extends MongoRepository { + + @Query("{'id': ?#{ [0] ? {$exists :true} : [1] }}") + List findByQueryWithExpressionAndNestedObject(boolean param0, String param1); +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +public interface PersonRepository extends ReactiveMongoRepository { + + @Query("{'id': ?#{ [0] ? {$exists :true} : [1] }}") + Flux findByQueryWithExpressionAndNestedObject(boolean param0, String param1); +} +---- +====== + +WARNING: SpEL in query strings can be a powerful way to enhance queries. +However, they can also accept a broad range of unwanted arguments. +Make sure to sanitize strings before passing them to the query to avoid creation of vulnerabilities or unwanted changes to your query. 
Expression support is extensible through the Query SPI: `EvaluationContextExtension` & `ReactiveEvaluationContextExtension`.
+In addition to those explicit methods, it is also possible to add a `TextCriteria`-derived repository method. +The criteria are added as an additional `AND` criteria. +Once the entity contains a `@TextScore`-annotated property, the document's full-text score can be retrieved. +Furthermore, the `@TextScore` annotated also makes it possible to sort by the document's score, as the following example shows: + +[source,java] +---- +@Document +class FullTextDocument { + + @Id String id; + @TextIndexed String title; + @TextIndexed String content; + @TextScore Float score; +} + +interface FullTextRepository extends Repository { + + // Execute a full-text search and define sorting dynamically + List findAllBy(TextCriteria criteria, Sort sort); + + // Paginate over a full-text search result + Page findAllBy(TextCriteria criteria, Pageable pageable); + + // Combine a derived query with a full-text search + List findByTitleOrderByScoreDesc(String title, TextCriteria criteria); +} + + +Sort sort = Sort.by("score"); +TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingAny("spring", "data"); +List result = repository.findAllBy(criteria, sort); + +criteria = TextCriteria.forDefaultLanguage().matching("film"); +Page page = repository.findAllBy(criteria, PageRequest.of(1, 1, sort)); +List result = repository.findByTitleOrderByScoreDesc("mongodb", criteria); +---- + +[[mongodb.repositories.queries.aggregation]] +== Aggregation Methods + +The repository layer offers means to interact with xref:mongodb/aggregation-framework.adoc[the aggregation framework] via annotated repository query methods. +Similar to the xref:mongodb/repositories/repositories.adoc#mongodb.repositories.queries.json-based[JSON based queries], you can define a pipeline using the `org.springframework.data.mongodb.repository.Aggregation` annotation. +The definition may contain simple placeholders like `?0` as well as link:{springDocsUrl}/core.html#expressions[SpEL expressions] `?#{ … }`. 
+ +.Aggregating Repository Method +==== +[source,java] +---- +public interface PersonRepository extends CrudRepository { + + @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }") + List groupByLastnameAndFirstnames(); <1> + + @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }") + List groupByLastnameAndFirstnames(Sort sort); <2> + + @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : ?0 } } }") + List groupByLastnameAnd(String property); <3> + + @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : ?0 } } }") + Slice groupByLastnameAnd(String property, Pageable page); <4> + + @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }") + Stream groupByLastnameAndFirstnamesAsStream(); <5> + + @Aggregation(pipeline = { + "{ '$match' : { 'lastname' : '?0'} }", + "{ '$project': { _id : 0, firstname : 1, lastname : 1 } }" + }) + Stream groupByLastnameAndFirstnamesAsStream(); <6> + + @Aggregation("{ $group : { _id : null, total : { $sum : $age } } }") + SumValue sumAgeUsingValueWrapper(); <7> + + @Aggregation("{ $group : { _id : null, total : { $sum : $age } } }") + Long sumAge(); <8> + + @Aggregation("{ $group : { _id : null, total : { $sum : $age } } }") + AggregationResults sumAgeRaw(); <9> + + @Aggregation("{ '$project': { '_id' : '$lastname' } }") + List findAllLastnames(); <10> + + @Aggregation(pipeline = { + "{ $group : { _id : '$author', books: { $push: '$title' } } }", + "{ $out : 'authors' }" + }) + void groupAndOutSkippingOutput(); <11> +} +---- +[source,java] +---- +public class PersonAggregate { + + private @Id String lastname; <2> + private List names; + + public PersonAggregate(String lastname, List names) { + // ... + } + + // Getter / Setter omitted +} + +public class SumValue { + + private final Long total; <6> <8> + + public SumValue(Long total) { + // ... 
+ } + + // Getter omitted +} + +interface PersonProjection { + String getFirstname(); + String getLastname(); +} +---- +<1> Aggregation pipeline to group first names by `lastname` in the `Person` collection returning these as `PersonAggregate`. +<2> If `Sort` argument is present, `$sort` is appended after the declared pipeline stages so that it only affects the order of the final results after having passed all other aggregation stages. +Therefore, the `Sort` properties are mapped against the methods return type `PersonAggregate` which turns `Sort.by("lastname")` into `{ $sort : { '_id', 1 } }` because `PersonAggregate.lastname` is annotated with `@Id`. +<3> Replaces `?0` with the given value for `property` for a dynamic aggregation pipeline. +<4> `$skip`, `$limit` and `$sort` can be passed on via a `Pageable` argument. Same as in <2>, the operators are appended to the pipeline definition. Methods accepting `Pageable` can return `Slice` for easier pagination. +<5> Aggregation methods can return interface based projections wrapping the resulting `org.bson.Document` behind a proxy, exposing getters delegating to fields within the document. +<6> Aggregation methods can return `Stream` to consume results directly from an underlying cursor. Make sure to close the stream after consuming it to release the server-side cursor by either calling `close()` or through `try-with-resources`. +<7> Map the result of an aggregation returning a single `Document` to an instance of a desired `SumValue` target type. +<8> Aggregations resulting in single document holding just an accumulation result like e.g. `$sum` can be extracted directly from the result `Document`. +To gain more control, you might consider `AggregationResult` as method return type as shown in <7>. +<9> Obtain the raw `AggregationResults` mapped to the generic target wrapper type `SumValue` or `org.bson.Document`. +<10> Like in <6>, a single value can be directly obtained from multiple result ``Document``s. 
+<11> Skips the output of the `$out` stage when return type is `void`. +==== + +In some scenarios, aggregations might require additional options, such as a maximum run time, additional log comments, or the permission to temporarily write data to disk. +Use the `@Meta` annotation to set those options via `maxExecutionTimeMs`, `comment` or `allowDiskUse`. + +[source,java] +---- +interface PersonRepository extends CrudRepository { + + @Meta(allowDiskUse = true) + @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }") + List groupByLastnameAndFirstnames(); +} +---- + +Or use `@Meta` to create your own annotation as shown in the sample below. + +[source,java] +---- +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD }) +@Meta(allowDiskUse = true) +@interface AllowDiskUse { } + +interface PersonRepository extends CrudRepository { + + @AllowDiskUse + @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }") + List groupByLastnameAndFirstnames(); +} +---- + +[NOTE] +==== +Simple-type single-result inspects the returned `Document` and checks for the following: + +. Only one entry in the document, return it. +. Two entries, one is the `_id` value. Return the other. +. Return for the first value assignable to the return type. +. Throw an exception if none of the above is applicable. +==== + +WARNING: The `Page` return type is not supported for repository methods using `@Aggregation`. However, you can use a +`Pageable` argument to add `$skip`, `$limit` and `$sort` to the pipeline and let the method return `Slice`. + +[[mongodb.repositories.queries.by-example]] +include::../../repositories/query-by-example.adoc[leveloffset=+1] + +[[mongodb.repositories.queries.scroll]] +include::{commons}@data-commons::page$repositories/scrolling.adoc[leveloffset=+1] + +[[mongodb.repositories.queries.sort]] +== Sorting Results + +MongoDB repositories allow various approaches to define sorting order. 
+Let's take a look at the following example: + +.Sorting Query Results +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public interface PersonRepository extends MongoRepository { + + List findByFirstnameSortByAgeDesc(String firstname); <1> + + List findByFirstname(String firstname, Sort sort); <2> + + @Query(sort = "{ age : -1 }") + List findByFirstname(String firstname); <3> + + @Query(sort = "{ age : -1 }") + List findByLastname(String lastname, Sort sort); <4> +} +---- +<1> Static sorting derived from method name. `SortByAgeDesc` results in `{ age : -1 }` for the sort parameter. +<2> Dynamic sorting using a method argument. +`Sort.by(DESC, "age")` creates `{ age : -1 }` for the sort parameter. +<3> Static sorting via `Query` annotation. +Sort parameter applied as stated in the `sort` attribute. +<4> Default sorting via `Query` annotation combined with dynamic one via a method argument. `Sort.unsorted()` +results in `{ age : -1 }`. +Using `Sort.by(ASC, "age")` overrides the defaults and creates `{ age : 1 }`. +`Sort.by +(ASC, "firstname")` alters the default and results in `{ age : -1, firstname : 1 }`. + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +public interface PersonRepository extends ReactiveMongoRepository { + + Flux findByFirstnameSortByAgeDesc(String firstname); + + Flux findByFirstname(String firstname, Sort sort); + + @Query(sort = "{ age : -1 }") + Flux findByFirstname(String firstname); + + @Query(sort = "{ age : -1 }") + Flux findByLastname(String lastname, Sort sort); +} +---- +====== + +[[mongodb.repositories.index-hint]] +== Index Hints + +The `@Hint` annotation allows to override MongoDB's default index selection and forces the database to use the specified index instead. 
+ +.Example of index hints +==== +[source,java] +---- +@Hint("lastname-idx") <1> +List findByLastname(String lastname); + +@Query(value = "{ 'firstname' : ?0 }", hint = "firstname-idx") <2> +List findByFirstname(String firstname); +---- +<1> Use the index with name `lastname-idx`. +<2> The `@Query` annotation defines the `hint` alias which is equivalent to adding the `@Hint` annotation. +==== + +For more information about index creation please refer to the xref:mongodb/template-collection-management.adoc[Collection Management] section. + +[[mongo.repositories.collation]] +== Collation Support + +Next to the xref:mongodb/collation.adoc[general Collation Support] repositories allow you to define the collation for various operations. + +==== +[source,java] +---- +public interface PersonRepository extends MongoRepository { + + @Query(collation = "en_US") <1> + List findByFirstname(String firstname); + + @Query(collation = "{ 'locale' : 'en_US' }") <2> + List findPersonByFirstname(String firstname); + + @Query(collation = "?1") <3> + List findByFirstname(String firstname, Object collation); + + @Query(collation = "{ 'locale' : '?1' }") <4> + List findByFirstname(String firstname, String collation); + + List findByFirstname(String firstname, Collation collation); <5> + + @Query(collation = "{ 'locale' : 'en_US' }") + List findByFirstname(String firstname, @Nullable Collation collation); <6> +} +---- +<1> Static collation definition resulting in `{ 'locale' : 'en_US' }`. +<2> Static collation definition resulting in `{ 'locale' : 'en_US' }`. +<3> Dynamic collation depending on 2nd method argument. Allowed types include `String` (eg. 'en_US'), `Locale` (eg. Locale.US) +and `Document` (eg. new Document("locale", "en_US")) +<4> Dynamic collation depending on 2nd method argument. +<5> Apply the `Collation` method parameter to the query. +<6> The `Collation` method parameter overrides the default `collation` from `@Query` if not null. 
+ +NOTE: In case you enabled the automatic index creation for repository finder methods, a potential static collation definition, +as shown in (1) and (2), will be included when creating the index. + +TIP: The most specific `Collation` overrules potentially defined others. Which means method argument over query method annotation over domain type annotation. +==== + +To streamline usage of collation attributes throughout the codebase it is also possible to use the `@Collation` annotation, which serves as a meta annotation for the ones mentioned above. +The same rules and locations apply, plus, direct usage of `@Collation` supersedes any collation values defined on `@Query` and other annotations. +Which means, if a collation is declared via `@Query` and additionally via `@Collation`, then the one from `@Collation` is picked. + +.Using `@Collation` +==== +[source,java] +---- +@Collation("en_US") <1> +class Game { + // ... +} + +interface GameRepository extends Repository { + + @Collation("en_GB") <2> + List findByTitle(String title); + + @Collation("de_AT") <3> + @Query(collation="en_GB") + List findByDescriptionContaining(String keyword); +} +---- +<1> Instead of `@Document(collation=...)`. +<2> Instead of `@Query(collation=...)`. +<3> Favors `@Collation` over meta usage. +==== + +== Read Preferences + +The `@ReadPreference` annotation allows you to configure MongoDB's ReadPreferences. 
+ +.Example of read preferences +==== +[source,java] +---- + +@ReadPreference("primaryPreferred") <1> +public interface PersonRepository extends CrudRepository { + + @ReadPreference("secondaryPreferred") <2> + List findWithReadPreferenceAnnotationByLastname(String lastname); + + @Query(readPreference = "nearest") <3> + List findWithReadPreferenceAtTagByFirstname(String firstname); + + List findWithReadPreferenceAtTagByFirstname(String firstname); <4> +---- +<1> Configure read preference for all repository operations (including inherited, non custom implementation ones) that do not have a query-level definition. Therefore, in this case the read preference mode will be `primaryPreferred` +<2> Use the read preference mode defined in annotation `ReadPreference`, in this case secondaryPreferred +<3> The `@Query` annotation defines the `read preference mode` alias which is equivalent to adding the `@ReadPreference` annotation. +<4> This query will use the read preference mode defined in the repository. +==== + +[TIP] +==== +The `MongoOperations` and `Query` API offer more fine grained control for `ReadPreference`. +==== diff --git a/src/main/antora/modules/ROOT/pages/mongodb/repositories/repositories.adoc b/src/main/antora/modules/ROOT/pages/mongodb/repositories/repositories.adoc new file mode 100644 index 0000000000..0746a0909e --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/repositories/repositories.adoc @@ -0,0 +1,213 @@ +[[mongo.repositories]] += MongoDB Repositories + +[[mongo-repo-intro]] +This chapter points out the specialties for repository support for MongoDB. +This chapter builds on the core repository support explained in xref:repositories/core-concepts.adoc[core concepts]. +You should have a sound understanding of the basic concepts explained there. + +[[mongo-repo-usage]] +== Usage + +To access domain entities stored in a MongoDB, you can use our sophisticated repository support that eases implementation quite significantly. 
+ +To do so, create an interface for your repository, as the following example shows: + +.Sample Person entity +==== +[source,java] +---- +public class Person { + + @Id + private String id; + private String firstname; + private String lastname; + private Address address; + + // … getters and setters omitted +} +---- +==== + +Note that the domain type shown in the preceding example has a property named `id` of type `String`. The default serialization mechanism used in `MongoTemplate` (which backs the repository support) regards properties named `id` as the document ID. +Currently, we support `String`, `ObjectId`, and `BigInteger` as ID types. +Please see xref:mongodb/template-crud-operations.adoc#mongo-template.id-handling[ID mapping] for more information on how the `id` field is handled in the mapping layer. + +Now that we have a domain object, we can define an interface that uses it, as follows: + +.Basic repository interface to persist Person entities +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public interface PersonRepository extends PagingAndSortingRepository { + + // additional custom query methods go here +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +public interface PersonRepository extends ReactiveSortingRepository { + + // additional custom query methods go here +} +---- +====== + +To start using the repository, use the `@EnableMongoRepositories` annotation. +That annotation carries the same attributes as the namespace element. +If no base package is configured, the infrastructure scans the package of the annotated configuration class. 
+ +The following example shows how to configure your application to use MongoDB repositories: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +@EnableMongoRepositories("com.acme.*.repositories") +class ApplicationConfig extends AbstractMongoClientConfiguration { + + @Override + protected String getDatabaseName() { + return "e-store"; + } + + @Override + protected String getMappingBasePackage() { + return "com.acme.*.repositories"; + } +} +---- + +Reactive:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +@Configuration +@EnableReactiveMongoRepositories("com.acme.*.repositories") +class ApplicationConfig extends AbstractReactiveMongoConfiguration { + + @Override + protected String getDatabaseName() { + return "e-store"; + } + + @Override + protected String getMappingBasePackage() { + return "com.acme.*.repositories"; + } +} +---- + +NOTE: MongoDB uses two different drivers for imperative (synchronous/blocking) and reactive (non-blocking) data access. You must create a connection by using the Reactive Streams driver to provide the required infrastructure for Spring Data's Reactive MongoDB support. Consequently, you must provide a separate configuration for MongoDB's Reactive Streams driver. Note that your application operates on two different connections if you use reactive and blocking Spring Data MongoDB templates and repositories. +==== + +XML:: ++ +[source,xml,indent=0,subs="verbatim,quotes",role="third"] +---- + + + + + + + + + + + + + +---- +====== + +This namespace element causes the base packages to be scanned for interfaces that extend `MongoRepository` and create Spring beans for each one found. +By default, the repositories get a `MongoTemplate` Spring bean wired that is called `mongoTemplate`, so you only need to configure `mongo-template-ref` explicitly if you deviate from this convention. 
+ +Because our domain repository extends `PagingAndSortingRepository`, it provides you with methods for paginated and sorted access to the entities. +In the case of reactive repositories only `ReactiveSortingRepository` is available since the notion of a `Page` is not applicable. +However finder methods still accept a `Sort` and `Limit` parameter. + +[NOTE] +==== +The reactive space offers various reactive composition libraries. The most common libraries are https://github.com/ReactiveX/RxJava[RxJava] and https://projectreactor.io/[Project Reactor]. + +Spring Data MongoDB is built on top of the https://mongodb.github.io/mongo-java-driver-reactivestreams/[MongoDB Reactive Streams] driver, to provide maximal interoperability by relying on the https://www.reactive-streams.org/[Reactive Streams] initiative. Static APIs, such as `ReactiveMongoOperations`, are provided by using Project Reactor's `Flux` and `Mono` types. Project Reactor offers various adapters to convert reactive wrapper types (`Flux` to `Observable` and vice versa), but conversion can easily clutter your code. + +Spring Data's Reactive Repository abstraction is a dynamic API, mostly defined by you and your requirements as you declare query methods. Reactive MongoDB repositories can be implemented by using either RxJava or Project Reactor wrapper types by extending from one of the following library-specific repository interfaces: + +* `ReactiveCrudRepository` +* `ReactiveSortingRepository` +* `RxJava3CrudRepository` +* `RxJava3SortingRepository` + +Spring Data converts reactive wrapper types behind the scenes so that you can stick to your favorite composition library. +==== + +In case you want to obtain methods for basic CRUD operations also add the `CrudRepository` interface. +Working with the repository instance is just a matter of dependency injecting it into a client . 
+ +Consequently, accessing the second page of `Person` objects at a page size of 10 would resemble the following code: + +.Paging access to Person entities +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@ExtendWith(SpringExtension.class) +@ContextConfiguration +class PersonRepositoryTests { + + @Autowired PersonRepository repository; + + @Test + void readsFirstPageCorrectly() { + + Page persons = repository.findAll(PageRequest.of(0, 10)); + assertThat(persons.isFirstPage()).isTrue(); + } +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +@ExtendWith(SpringExtension.class) +@ContextConfiguration +class PersonRepositoryTests { + + @Autowired PersonRepository repository; + + @Test + void readsFirstPageCorrectly() { + + Flux persons = repository.findAll(Sort.unsorted(), Limit.of(10)); + + persons.as(StepVerifier::create) + .expectNextCount(10) + .verifyComplete(); + } +} +---- +====== + +The preceding example creates an application context with Spring's unit test support, which performs annotation-based dependency injection into test cases. +Inside the test method, we use the repository to query the datastore. +We hand the repository a `PageRequest` instance that requests the first page of `Person` objects at a page size of 10. diff --git a/src/main/antora/modules/ROOT/pages/mongodb/sharding.adoc b/src/main/antora/modules/ROOT/pages/mongodb/sharding.adoc new file mode 100644 index 0000000000..8678dc2178 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/sharding.adoc @@ -0,0 +1,76 @@ +[[sharding]] += Sharding + +MongoDB supports large data sets via sharding, a method for distributing data across multiple database servers. +Please refer to the https://docs.mongodb.com/manual/sharding/[MongoDB Documentation] to learn how to set up a sharded cluster, its requirements and limitations. 
+ +Spring Data MongoDB uses the `@Sharded` annotation to identify entities stored in sharded collections as shown below. + +==== +[source,java] +---- +@Document("users") +@Sharded(shardKey = { "country", "userId" }) <1> +public class User { + + @Id + Long id; + + @Field("userid") + String userId; + + String country; +} +---- +<1> The properties of the shard key get mapped to the actual field names. +==== + +[[sharding.sharded-collections]] +== Sharded Collections + +Spring Data MongoDB does not auto set up sharding for collections nor indexes required for it. +The snippet below shows how to do so using the MongoDB client API. + +==== +[source,java] +---- +MongoDatabase adminDB = template.getMongoDbFactory() + .getMongoDatabase("admin"); <1> + +adminDB.runCommand(new Document("enableSharding", "db")); <2> + +Document shardCmd = new Document("shardCollection", "db.users") <3> + .append("key", new Document("country", 1).append("userid", 1)); <4> + +adminDB.runCommand(shardCmd); +---- +<1> Sharding commands need to be run against the _admin_ database. +<2> Enable sharding for a specific database if necessary. +<3> Shard a collection within the database having sharding enabled. +<4> Specify the shard key. +This example uses range based sharding. +==== + +[[sharding.shard-key]] +== Shard Key Handling + +The shard key consists of a single or multiple properties that must exist in every document in the target collection. +It is used to distribute documents across shards. + +Adding the `@Sharded` annotation to an entity enables Spring Data MongoDB to apply best effort optimisations required for sharded scenarios. +This means essentially adding required shard key information, if not already present, to `replaceOne` filter queries when upserting entities. +This may require an additional server round trip to determine the actual value of the current shard key. 
+ +TIP: By setting `@Sharded(immutableKey = true)` Spring Data does not attempt to check if an entity shard key was changed. + +Please see the https://docs.mongodb.com/manual/reference/method/db.collection.replaceOne/#upsert[MongoDB Documentation] for further details. +The following list contains which operations are eligible for shard key auto-inclusion: + +* `(Reactive)CrudRepository.save(…)` +* `(Reactive)CrudRepository.saveAll(…)` +* `(Reactive)MongoTemplate.save(…)` + + + + + diff --git a/src/main/antora/modules/ROOT/pages/mongodb/tailable-cursors.adoc b/src/main/antora/modules/ROOT/pages/mongodb/tailable-cursors.adoc new file mode 100644 index 0000000000..97433e1416 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/tailable-cursors.adoc @@ -0,0 +1,101 @@ +// carry over the old bookmarks to prevent external links from failing +[[tailable-cursors]] += Tailable Cursors + +By default, MongoDB automatically closes a cursor when the client exhausts all results supplied by the cursor. +Closing a cursor on exhaustion turns a stream into a finite stream. For https://docs.mongodb.com/manual/core/capped-collections/[capped collections], +you can use a https://docs.mongodb.com/manual/core/tailable-cursors/[Tailable Cursor] that remains open after the client +consumed all initially returned data. + +TIP: Capped collections can be created with `MongoOperations.createCollection`. To do so, provide the required `CollectionOptions.empty().capped()...`. + +Tailable cursors can be consumed with both, the imperative and the reactive MongoDB API. It is highly recommended to use the +reactive variant, as it is less resource-intensive. However, if you cannot use the reactive API, you can still use a messaging +concept that is already prevalent in the Spring ecosystem. 
+ +[[tailable-cursors.sync]] +== Tailable Cursors with `MessageListener` + +Listening to a capped collection using a Sync Driver creates a long running, blocking task that needs to be delegated to +a separate component. In this case, we need to first create a `MessageListenerContainer`, which will be the main entry point +for running the specific `SubscriptionRequest`. Spring Data MongoDB already ships with a default implementation that +operates on `MongoTemplate` and is capable of creating and running `Task` instances for a `TailableCursorRequest`. + +The following example shows how to use tailable cursors with `MessageListener` instances: + +.Tailable Cursors with `MessageListener` instances +==== +[source,java] +---- +MessageListenerContainer container = new DefaultMessageListenerContainer(template); +container.start(); <1> + +MessageListener listener = System.out::println; <2> + +TailableCursorRequest request = TailableCursorRequest.builder() + .collection("orders") <3> + .filter(query(where("value").lt(100))) <4> + .publishTo(listener) <5> + .build(); + +container.register(request, User.class); <6> + +// ... + +container.stop(); <7> +---- +<1> Starting the container initializes the resources and starts `Task` instances for already registered `SubscriptionRequest` instances. Requests added after startup are run immediately. +<2> Define the listener called when a `Message` is received. The `Message#getBody()` is converted to the requested domain type. Use `Document` to receive raw results without conversion. +<3> Set the collection to listen to. +<4> Provide an optional filter for documents to receive. +<5> Set the message listener to publish incoming ``Message``s to. +<6> Register the request. The returned `Subscription` can be used to check the current `Task` state and cancel it to free resources. +<7> Do not forget to stop the container once you are sure you no longer need it. Doing so stops all running `Task` instances within the container. 
+ +==== + +[[tailable-cursors.reactive]] +== Reactive Tailable Cursors + +Using tailable cursors with reactive data types allows construction of infinite streams. A tailable cursor remains open until it is closed externally. It emits data as new documents arrive in a capped collection. + +Tailable cursors may become dead, or invalid, if either the query returns no match or the cursor returns the document at the "`end`" of the collection and the application then deletes that document. The following example shows how to create and use an infinite stream query: + +.Infinite Stream queries with ReactiveMongoOperations +==== +[source,java] +---- +Flux stream = template.tail(query(where("name").is("Joe")), Person.class); + +Disposable subscription = stream.doOnNext(person -> System.out.println(person)).subscribe(); + +// … + +// Later: Dispose the subscription to close the stream +subscription.dispose(); +---- +==== + +Spring Data MongoDB Reactive repositories support infinite streams by annotating a query method with `@Tailable`. 
This works for methods that return `Flux` and other reactive types capable of emitting multiple elements, as the following example shows: + +.Infinite Stream queries with ReactiveMongoRepository +==== +[source,java] +---- + +public interface PersonRepository extends ReactiveMongoRepository { + + @Tailable + Flux findByFirstname(String firstname); + +} + +Flux stream = repository.findByFirstname("Joe"); + +Disposable subscription = stream.doOnNext(System.out::println).subscribe(); + +// … + +// Later: Dispose the subscription to close the stream +subscription.dispose(); +---- +==== diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-api.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-api.adoc new file mode 100644 index 0000000000..f2a7a19bd6 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/template-api.adoc @@ -0,0 +1,181 @@ +[[mongo-template]] += Template API + +The javadoc:org.springframework.data.mongodb.core.MongoTemplate[] and its javadoc:org.springframework.data.mongodb.core.ReactiveMongoTemplate[reactive] counterpart class, located in the `org.springframework.data.mongodb.core` package, is the central class of Spring's MongoDB support and provides a rich feature set for interacting with the database. +The template offers convenience operations to create, update, delete, and query MongoDB documents and provides a mapping between your domain objects and MongoDB documents. + +NOTE: Once configured, `MongoTemplate` is thread-safe and can be reused across multiple instances. + +[[mongo-template.convenience-methods]] +== Convenience Methods + +The javadoc:org.springframework.data.mongodb.core.MongoTemplate[] class implements the interface javadoc:org.springframework.data.mongodb.core.MongoOperations[]. +In as much as possible, the methods on `MongoOperations` are named after methods available on the MongoDB driver `Collection` object, to make the API familiar to existing MongoDB developers who are used to the driver API. 
+For example, you can find methods such as `find`, `findAndModify`, `findAndReplace`, `findOne`, `insert`, `remove`, `save`, `update`, and `updateMulti`. +The design goal was to make it as easy as possible to transition between the use of the base MongoDB driver and `MongoOperations`. +A major difference between the two APIs is that `MongoOperations` can be passed domain objects instead of `Document`. +Also, `MongoOperations` has fluent APIs for `Query`, `Criteria`, and `Update` operations instead of populating a `Document` to specify the parameters for those operations. + +For more information please refer to the xref:mongodb/template-crud-operations.adoc[CRUD] and xref:mongodb/template-query-operations.adoc[Query] sections of the documentation. + +NOTE: The preferred way to reference the operations on `MongoTemplate` instance is through its interface, `MongoOperations`. + +[[mongo-template.execute-callbacks]] +== Execute Callbacks + +`MongoTemplate` offers many convenience methods to help you easily perform common tasks. +However, if you need to directly access the MongoDB driver API, you can use one of several `Execute` callback methods. +The `execute` callbacks gives you a reference to either a `MongoCollection` or a `MongoDatabase` object. + +* ` T` *execute* `(Class entityClass, CollectionCallback action)`: Runs the given `CollectionCallback` for the entity collection of the specified class. + +* ` T` *execute* `(String collectionName, CollectionCallback action)`: Runs the given `CollectionCallback` on the collection of the given name. + +* ` T` *execute* `(DbCallback action)`: Runs a DbCallback, translating any exceptions as necessary. +Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2. + +* ` T` *execute* `(String collectionName, DbCallback action)`: Runs a `DbCallback` on the collection of the given name translating any exceptions as necessary. 
+ +* ` T` *executeInSession* `(DbCallback action)`: Runs the given `DbCallback` within the same connection to the database so as to ensure consistency in a write-heavy environment where you may read the data that you wrote. + +The following example uses the javadoc:org.springframework.data.mongodb.core.CollectionCallback[] to return information about an index: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +boolean hasIndex = template.execute("geolocation", collection -> + Streamable.of(collection.listIndexes(org.bson.Document.class)) + .stream() + .map(document -> document.get("name")) + .anyMatch("location_2d"::equals) +); +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +Mono hasIndex = template.execute("geolocation", collection -> + Flux.from(collection.listIndexes(org.bson.Document.class)) + .map(document -> document.get("name")) + .filterWhen(name -> Mono.just("location_2d".equals(name))) + .map(it -> Boolean.TRUE) + .single(Boolean.FALSE) + ).next(); +---- +====== + +[[mongo-template.fluent-api]] +== Fluent API + +Being the central component when it comes to more low-level interaction with MongoDB `MongoTemplate` offers a wide range of methods covering needs from collection creation, index creation, and CRUD operations to more advanced functionality, such as Map-Reduce and aggregations. +You can find multiple overloads for each method. +Most of them cover optional or nullable parts of the API. + +`FluentMongoOperations` provides a more narrow interface for the common methods of `MongoOperations` and provides a more readable, fluent API. +The entry points (`insert(…)`, `find(…)`, `update(…)`, and others) follow a natural naming schema based on the operation to be run. 
+Moving on from the entry point, the API is designed to offer only context-dependent methods that lead to a terminating method that invokes the actual `MongoOperations` counterpart -- the `all` method in the case of the following example: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +List all = template.query(SWCharacter.class) <1> + .inCollection("star-wars") <2> + .as(Jedi.class) <3> + .matching(query(where("jedi").is(true))) <4> + .all(); +---- + +<1> The type used to map fields used in the query to. +<2> The collection name to use if not defined on the domain type. +<3> Result type if not using the original domain type. +<4> The lookup query. + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +Flux all = template.query(SWCharacter.class) + .inCollection("star-wars") + .as(Jedi.class) + .matching(query(where("jedi").is(true))) + .all(); +---- +====== + +NOTE: Using projections allows `MongoTemplate` to optimize result mapping by limiting the actual response to fields required by the projection target type. +This applies as long as the javadoc:org.springframework.data.mongodb.core.query.Query[] itself does not contain any field restriction and the target type is a closed interface or DTO projection. + +WARNING: Projections must not be applied to xref:mongodb/mapping/document-references.adoc[DBRefs]. + +You can switch between retrieving a single entity and retrieving multiple entities as a `List` or a `Stream` through the terminating methods: `first()`, `one()`, `all()`, or `stream()`. 
+ +When writing a geo-spatial query with `near(NearQuery)`, the number of terminating methods is altered to include only the methods that are valid for running a `geoNear` command in MongoDB (fetching entities as a `GeoResult` within `GeoResults`), as the following example shows: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +GeoResults results = template.query(SWCharacter.class) + .as(Jedi.class) + .near(alderaan) // NearQuery.near(-73.9667, 40.78).maxDis… + .all(); +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +Flux> results = template.query(SWCharacter.class) + .as(Jedi.class) + .near(alderaan) // NearQuery.near(-73.9667, 40.78).maxDis… + .all(); +---- +====== + +[[mongo-template.exception-translation]] +== Exception Translation + +The Spring framework provides exception translation for a wide variety of database and mapping technologies. +This has traditionally been for JDBC and JPA. +The Spring support for MongoDB extends this feature to the MongoDB Database by providing an implementation of the `org.springframework.dao.support.PersistenceExceptionTranslator` interface. + +The motivation behind mapping to Spring's link:{springDocsUrl}/data-access.html#dao-exceptions[consistent data access exception hierarchy] is that you are then able to write portable and descriptive exception handling code without resorting to coding against MongoDB error codes. +All of Spring's data access exceptions are inherited from the root `DataAccessException` class so that you can be sure to catch all database related exception within a single try-catch block. +Note that not all exceptions thrown by the MongoDB driver inherit from the `MongoException` class. +The inner exception and message are preserved so that no information is lost. 
+ +Some of the mappings performed by the javadoc:org.springframework.data.mongodb.core.MongoExceptionTranslator[] are `com.mongodb.Network` to `DataAccessResourceFailureException` and `MongoException` error codes 1003, 12001, 12010, 12011, and 12012 to `InvalidDataAccessApiUsageException`. +Look into the implementation for more details on the mapping. + +Exception Translation can be configured by setting a customized javadoc:org.springframework.data.mongodb.core.MongoExceptionTranslator[] on your `MongoDatabaseFactory` or its reactive variant. +You might also want to set the exception translator on the corresponding `MongoClientFactoryBean`. + +.Configuring `MongoExceptionTranslator` +==== +[source,java] +---- +ConnectionString uri = new ConnectionString("mongodb://username:password@localhost/database"); +SimpleMongoClientDatabaseFactory mongoDbFactory = new SimpleMongoClientDatabaseFactory(uri); +mongoDbFactory.setExceptionTranslator(myCustomExceptionTranslator); +---- +==== + +A motivation to customize exception translation can be MongoDB's behavior during transactions where some failures (such as write conflicts) can become transient and where a retry could lead to a successful operation. +In such a case, you could wrap exceptions with a specific MongoDB label and apply a different exception translation strategy. + +[[mongo-template.type-mapping]] +== Domain Type Mapping + +The mapping between MongoDB documents and domain classes is done by delegating to an implementation of the javadoc:org.springframework.data.mongodb.core.convert.MongoConverter[] interface. +Spring provides javadoc:org.springframework.data.mongodb.core.convert.MappingMongoConverter[], but you can also write your own converter. +While the `MappingMongoConverter` can use additional metadata to specify the mapping of objects to documents, it can also convert objects that contain no additional metadata by using some conventions for the mapping of IDs and collection names. 
+These conventions, as well as the use of mapping annotations, are explained in the xref:mongodb/mapping/mapping.adoc[Mapping] chapter.
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-collection-management.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-collection-management.adoc
new file mode 100644
index 0000000000..cdd20b335e
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/template-collection-management.adoc
@@ -0,0 +1,209 @@
+[[mongo-template.index-and-collections]]
+= Index and Collection Management
+
+`MongoTemplate` and `ReactiveMongoTemplate` provide methods for managing indexes and collections.
+These methods are collected into helper interfaces called `IndexOperations` and `ReactiveIndexOperations`, respectively.
+You can access these operations by calling the `indexOps` method and passing in either the collection name or the `java.lang.Class` of your entity (the collection name is derived from the `.class`, either by name or from annotation metadata).
+
+The following listing shows the `IndexOperations` interface:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+public interface IndexOperations {
+
+	String ensureIndex(IndexDefinition indexDefinition);
+
+	void alterIndex(String name, IndexOptions options);
+
+	void dropIndex(String name);
+
+	void dropAllIndexes();
+
+	List getIndexInfo();
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+public interface ReactiveIndexOperations {
+
+	Mono ensureIndex(IndexDefinition indexDefinition);
+
+	Mono alterIndex(String name, IndexOptions options);
+
+	Mono dropIndex(String name);
+
+	Mono dropAllIndexes();
+
+	Flux getIndexInfo();
+}
+----
+======
+
+[[mongo-template.index-and-collections.index]]
+== Methods for Creating an Index
+
+You can create an index on a collection to improve query performance by using the MongoTemplate class, as the following example shows:
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+template.indexOps(Person.class)
+    .ensureIndex(new Index().on("name",Order.ASCENDING));
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+Mono createIndex = template.indexOps(Person.class)
+    .ensureIndex(new Index().on("name",Order.ASCENDING));
+----
+======
+
+`ensureIndex` makes sure that an index for the provided IndexDefinition exists for the collection.
+
+You can create standard, geospatial, and text indexes by using the `IndexDefinition`, `GeoSpatialIndex` and `TextIndexDefinition` classes.
+For example, given the `Venue` class defined in a previous section, you could declare a geospatial query, as the following example shows: + +[source,java] +---- +template.indexOps(Venue.class) + .ensureIndex(new GeospatialIndex("location")); +---- + +NOTE: `Index` and `GeospatialIndex` support configuration of xref:mongodb/template-query-operations.adoc#mongo.query.collation[collations]. + +[[mongo-template.index-and-collections.access]] +== Accessing Index Information + +The `IndexOperations` interface has the `getIndexInfo` method that returns a list of `IndexInfo` objects. +This list contains all the indexes defined on the collection. The following example defines an index on the `Person` class that has an `age` property: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +template.indexOps(Person.class) + .ensureIndex(new Index().on("age", Order.DESCENDING).unique()); + +List indexInfoList = template.indexOps(Person.class) + .getIndexInfo(); +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +Mono ageIndex = template.indexOps(Person.class) + .ensureIndex(new Index().on("age", Order.DESCENDING).unique()); + +Flux indexInfo = ageIndex.then(template.indexOps(Person.class) + .getIndexInfo()); +---- +====== + +[[mongo-template.index-and-collections.collection]] +== Methods for Working with a Collection + +The following example shows how to create a collection: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +MongoCollection collection = null; +if (!template.getCollectionNames().contains("MyNewCollection")) { + collection = mongoTemplate.createCollection("MyNewCollection"); +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +MongoCollection collection = template.getCollectionNames().collectList() + .flatMap(collectionNames -> { + 
if(!collectionNames.contains("MyNewCollection")) { + return template.createCollection("MyNewCollection"); + } + return template.getMongoDatabase().map(db -> db.getCollection("MyNewCollection")); + }); +---- +====== + +NOTE: Collection creation allows customization with `CollectionOptions` and supports xref:mongodb/collation.adoc[collations]. + +.Methods to interact with MongoCollections +[%collapsible] +==== +* *getCollectionNames*: Returns a set of collection names. +* *collectionExists*: Checks to see if a collection with a given name exists. +* *createCollection*: Creates an uncapped collection. +* *dropCollection*: Drops the collection. +* *getCollection*: Gets a collection by name, creating it if it does not exist. +==== + +[[time-series]] +== Time Series + +MongoDB 5.0 introduced https://docs.mongodb.com/manual/core/timeseries-collections/[Time Series] collections that are optimized to efficiently store documents over time such as measurements or events. +Those collections need to be created as such before inserting any data. +Collections can be created by either running the `createCollection` command, defining time series collection options or extracting options from a `@TimeSeries` annotation as shown in the examples below. 
+ +.Create a Time Series Collection +==== +.Create a Time Series via the MongoDB Driver +[source,java] +---- +template.execute(db -> { + + com.mongodb.client.model.CreateCollectionOptions options = new CreateCollectionOptions(); + options.timeSeriesOptions(new TimeSeriesOptions("timestamp")); + + db.createCollection("weather", options); + return "OK"; +}); +---- + +.Create a Time Series Collection with `CollectionOptions` +[source,java] +---- +template.createCollection("weather", CollectionOptions.timeSeries("timestamp")); +---- + +.Create a Time Series Collection derived from an Annotation +[source,java] +---- +@TimeSeries(collection="weather", timeField = "timestamp") +public class Measurement { + + String id; + Instant timestamp; + // ... +} + +template.createCollection(Measurement.class); +---- +==== + +The snippets above can easily be transferred to the reactive API offering the very same methods. +Make sure to properly _subscribe_ to the returned publishers. + +[TIP] +==== +You can use the `@TimeSeries#expireAfter` option to have MongoDB automatically remove expired buckets. +The attribute allows different timeout formats like `10s`, `3h`,... as well as expression (`#{@mySpringBean.timeout}`) and property placeholder (`${my.property.timeout}`) syntax. 
+==== diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-config.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-config.adoc new file mode 100644 index 0000000000..4d920e1c52 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/template-config.adoc @@ -0,0 +1,167 @@ +[[mongo-template.instantiating]] += Configuration + +You can use the following configuration to create and register an instance of `MongoTemplate`, as the following example shows: + +.Registering a `MongoClient` object and enabling Spring's exception translation support +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +@Configuration +class ApplicationConfiguration { + + @Bean + MongoClient mongoClient() { + return MongoClients.create("mongodb://localhost:27017"); + } + + @Bean + MongoOperations mongoTemplate(MongoClient mongoClient) { + return new MongoTemplate(mongoClient, "geospatial"); + } +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +@Configuration +class ReactiveApplicationConfiguration { + + @Bean + MongoClient mongoClient() { + return MongoClients.create("mongodb://localhost:27017"); + } + + @Bean + ReactiveMongoOperations mongoTemplate(MongoClient mongoClient) { + return new ReactiveMongoTemplate(mongoClient, "geospatial"); + } +} +---- + +XML:: ++ +[source,xml,indent=0,subs="verbatim,quotes",role="third"] +---- + + + + + + +---- +====== + +There are several overloaded constructors of javadoc:org.springframework.data.mongodb.core.MongoTemplate[] and javadoc:org.springframework.data.mongodb.core.ReactiveMongoTemplate[]: + +* `MongoTemplate(MongoClient mongo, String databaseName)`: Takes the `MongoClient` object and the default database name to operate against. +* `MongoTemplate(MongoDatabaseFactory mongoDbFactory)`: Takes a MongoDbFactory object that encapsulated the `MongoClient` object, database name, and username and password. 
+* `MongoTemplate(MongoDatabaseFactory mongoDbFactory, MongoConverter mongoConverter)`: Adds a `MongoConverter` to use for mapping. + +Other optional properties that you might like to set when creating a `MongoTemplate` / `ReactiveMongoTemplate` are the default `WriteResultCheckingPolicy`, `WriteConcern`, `ReadPreference` and others listed below. + +[[mongo-template.read-preference]] +== Default Read Preference + +The default read preference applied to read operations if no other preference was defined via the xref:mongodb/template-query-operations.adoc#mongo.query.read-preference[Query]. + +[[mongo-template.writeresultchecking]] +== WriteResultChecking Policy + +When in development, it is handy to either log or throw an exception if the `com.mongodb.WriteResult` returned from any MongoDB operation contains an error. It is quite common to forget to do this during development and then end up with an application that looks like it runs successfully when, in fact, the database was not modified according to your expectations. You can set the `WriteResultChecking` property of `MongoTemplate` to one of the following values: `EXCEPTION` or `NONE`, to either throw an `Exception` or do nothing, respectively. The default is to use a `WriteResultChecking` value of `NONE`. + +[[mongo-template.writeconcern]] +== Default WriteConcern + +If it has not yet been specified through the driver at a higher level (such as `com.mongodb.client.MongoClient`), you can set the `com.mongodb.WriteConcern` property that the `MongoTemplate` uses for write operations. If the `WriteConcern` property is not set, it defaults to the one set in the MongoDB driver's DB or Collection setting. + +[[mongo-template.writeconcernresolver]] +== WriteConcernResolver + +For more advanced cases where you want to set different `WriteConcern` values on a per-operation basis (for remove, update, insert, and save operations), a strategy interface called `WriteConcernResolver` can be configured on `MongoTemplate`. 
Since `MongoTemplate` is used to persist POJOs, the `WriteConcernResolver` lets you create a policy that can map a specific POJO class to a `WriteConcern` value. The following listing shows the `WriteConcernResolver` interface: + +[source,java] +---- +public interface WriteConcernResolver { + WriteConcern resolve(MongoAction action); +} +---- + +You can use the `MongoAction` argument to determine the `WriteConcern` value or use the value of the Template itself as a default. +`MongoAction` contains the collection name being written to, the `java.lang.Class` of the POJO, the converted `Document`, the operation (`REMOVE`, `UPDATE`, `INSERT`, `INSERT_LIST`, or `SAVE`), and a few other pieces of contextual information. +The following example shows two sets of classes getting different `WriteConcern` settings: + +[source,java] +---- +public class MyAppWriteConcernResolver implements WriteConcernResolver { + + @Override + public WriteConcern resolve(MongoAction action) { + if (action.getEntityType().getSimpleName().contains("Audit")) { + return WriteConcern.ACKNOWLEDGED; + } else if (action.getEntityType().getSimpleName().contains("Metadata")) { + return WriteConcern.JOURNALED; + } + return action.getDefaultWriteConcern(); + } +} +---- + +[[mongo-template.entity-lifecycle-events]] +== Publish entity lifecycle events + +The template publishes xref:mongodb/lifecycle-events.adoc#mongodb.mapping-usage.events[lifecycle events]. +In case there are no listeners present, this feature can be disabled. + +[source,java] +---- +@Bean +MongoOperations mongoTemplate(MongoClient mongoClient) { + MongoTemplate template = new MongoTemplate(mongoClient, "geospatial"); + template.setEntityLifecycleEventsEnabled(false); + // ... 
+}
+----
+
+[[mongo-template.entity-callbacks-config]]
+== Configure EntityCallbacks
+
+Next to lifecycle events the template invokes xref:mongodb/lifecycle-events.adoc#mongo.entity-callbacks[EntityCallbacks] which can be (if not auto configured) set via the template API.
+
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+@Bean
+MongoOperations mongoTemplate(MongoClient mongoClient) {
+	MongoTemplate template = new MongoTemplate(mongoClient, "...");
+	template.setEntityCallbacks(EntityCallbacks.create(...));
+	// ...
+}
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+@Bean
+ReactiveMongoOperations mongoTemplate(MongoClient mongoClient) {
+	ReactiveMongoTemplate template = new ReactiveMongoTemplate(mongoClient, "...");
+	template.setEntityCallbacks(ReactiveEntityCallbacks.create(...));
+	// ...
+}
+----
+======
+
+[[mongo-template.count-documents-config]]
+== Document count configuration
+
+By setting `MongoTemplate#useEstimatedCount(...)` to `true` _MongoTemplate#count(...)_ operations, that use an empty filter query, will be delegated to `estimatedCount`, as long as there is no transaction active and the template is not bound to a xref:mongodb/client-session-transactions.adoc[session].
+Please refer to the xref:mongodb/template-document-count.adoc#mongo.query.count[Counting Documents] section for more information.
diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-crud-operations.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-crud-operations.adoc
new file mode 100644
index 0000000000..491bb4ab7d
--- /dev/null
+++ b/src/main/antora/modules/ROOT/pages/mongodb/template-crud-operations.adoc
@@ -0,0 +1,700 @@
+[[mongo-template.save-update-remove]]
+= Saving, Updating, and Removing Documents
+
+`MongoTemplate` / `ReactiveMongoTemplate` let you save, update, and delete your domain objects and map those objects to documents stored in MongoDB.
+The API signatures of the imperative and reactive API are mainly the same only differing in their return types. +While the synchronous API uses `void`, single `Object` and `List` the reactive counterpart consists of `Mono`, `Mono` and `Flux`. + +Consider the following class: + +[source,java] +---- +include::example$example/Person.java[tags=class] +---- + +Given the `Person` class in the preceding example, you can save, update and delete the object, as the following example shows: + +[tabs] +====== +Imperative:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public class MongoApplication { + + private static final Log log = LogFactory.getLog(MongoApplication.class); + + public static void main(String[] args) { + + MongoOperations template = new MongoTemplate(new SimpleMongoClientDbFactory(MongoClients.create(), "database")); + + Person p = new Person("Joe", 34); + + // Insert is used to initially store the object into the database. + template.insert(p); + log.info("Insert: " + p); + + // Find + p = template.findById(p.getId(), Person.class); + log.info("Found: " + p); + + // Update + template.updateFirst(query(where("name").is("Joe")), update("age", 35), Person.class); + p = template.findOne(query(where("name").is("Joe")), Person.class); + log.info("Updated: " + p); + + // Delete + template.remove(p); + + // Check that deletion worked + List people = template.findAll(Person.class); + log.info("Number of people = : " + people.size()); + + + template.dropCollection(Person.class); + } +} +---- + +The preceding example would produce the following log output (including debug messages from `MongoTemplate`): + +[source] +---- +DEBUG apping.MongoPersistentEntityIndexCreator: 80 - Analyzing class class org.spring.example.Person for index information. 
+DEBUG work.data.mongodb.core.MongoTemplate: 632 - insert Document containing fields: [_class, age, name] in collection: person +INFO org.spring.example.MongoApp: 30 - Insert: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=34] +DEBUG work.data.mongodb.core.MongoTemplate:1246 - findOne using query: { "_id" : { "$oid" : "4ddc6e784ce5b1eba3ceaf5c"}} in db.collection: database.person +INFO org.spring.example.MongoApp: 34 - Found: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=34] +DEBUG work.data.mongodb.core.MongoTemplate: 778 - calling update using query: { "name" : "Joe"} and update: { "$set" : { "age" : 35}} in collection: person +DEBUG work.data.mongodb.core.MongoTemplate:1246 - findOne using query: { "name" : "Joe"} in db.collection: database.person +INFO org.spring.example.MongoApp: 39 - Updated: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=35] +DEBUG work.data.mongodb.core.MongoTemplate: 823 - remove using query: { "id" : "4ddc6e784ce5b1eba3ceaf5c"} in collection: person +INFO org.spring.example.MongoApp: 46 - Number of people = : 0 +DEBUG work.data.mongodb.core.MongoTemplate: 376 - Dropped collection [database.person] +---- +==== + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +public class ReactiveMongoApplication { + + private static final Logger log = LoggerFactory.getLogger(ReactiveMongoApplication.class); + + public static void main(String[] args) throws Exception { + + CountDownLatch latch = new CountDownLatch(1); + + ReactiveMongoTemplate template = new ReactiveMongoTemplate(MongoClients.create(), "database"); + + template.insert(new Person("Joe", 34)).doOnNext(person -> log.info("Insert: " + person)) + .flatMap(person -> template.findById(person.getId(), Person.class)) + .doOnNext(person -> log.info("Found: " + person)) + .zipWith(person -> template.updateFirst(query(where("name").is("Joe")), update("age", 35), Person.class)) + .flatMap(tuple -> template.remove(tuple.getT1())).flatMap(deleteResult -> 
template.findAll(Person.class)) + .count().doOnSuccess(count -> { + log.info("Number of people: " + count); + latch.countDown(); + }) + + .subscribe(); + + latch.await(); + } +} +---- +====== + +`MongoConverter` caused implicit conversion between a `String` and an `ObjectId` stored in the database by recognizing (through convention) the `Id` property name. + +The preceding example is meant to show the use of save, update, and remove operations on `MongoTemplate` / `ReactiveMongoTemplate` and not to show complex mapping functionality. +The query syntax used in the preceding example is explained in more detail in the section "`xref:mongodb/template-query-operations.adoc[Querying Documents]`". + +IMPORTANT: MongoDB requires that you have an `_id` field for all documents. Please refer to the xref:mongodb/template-crud-operations.adoc[ID handling] section for details on the special treatment of this field. + +IMPORTANT: MongoDB collections can contain documents that represent instances of a variety of types. Please refer to the xref:mongodb/converters-type-mapping.adoc[type mapping] for details. + +[[mongo-template.save-insert]] +== Insert / Save + +There are several convenient methods on `MongoTemplate` for saving and inserting your objects. +To have more fine-grained control over the conversion process, you can register Spring converters with the `MappingMongoConverter` -- for example `Converter` and `Converter`. + +NOTE: The difference between insert and save operations is that a save operation performs an insert if the object is not already present. + +The simple case of using the save operation is to save a POJO. +In this case, the collection name is determined by name (not fully qualified) of the class. +You may also call the save operation with a specific collection name. You can use mapping metadata to override the collection in which to store the object. 
+
+When inserting or saving, if the `Id` property is not set, the assumption is that its value will be auto-generated by the database.
+Consequently, for auto-generation of an `ObjectId` to succeed, the type of the `Id` property or field in your class must be a `String`, an `ObjectId`, or a `BigInteger`.
+
+The following example shows how to save a document and retrieve its contents:
+
+.Inserting and retrieving documents using the MongoTemplate
+[tabs]
+======
+Imperative::
++
+[source,java,indent=0,subs="verbatim,quotes",role="primary"]
+----
+import static org.springframework.data.mongodb.core.query.Criteria.where;
+import static org.springframework.data.mongodb.core.query.Query.query;
+
+//...
+
+template.insert(new Person("Bob", 33));
+
+Person person = template.query(Person.class)
+    .matching(query(where("age").is(33)))
+    .oneValue();
+----
+
+Reactive::
++
+[source,java,indent=0,subs="verbatim,quotes",role="secondary"]
+----
+import static org.springframework.data.mongodb.core.query.Criteria.where;
+import static org.springframework.data.mongodb.core.query.Query.query;
+
+//...
+
+Mono person = mongoTemplate.insert(new Person("Bob", 33))
+    .then(mongoTemplate.query(Person.class)
+        .matching(query(where("age").is(33)))
+        .one());
+----
+======
+
+The following insert and save operations are available:
+
+* `void` *save* `(Object objectToSave)`: Save the object to the default collection.
+* `void` *save* `(Object objectToSave, String collectionName)`: Save the object to the specified collection.
+
+A similar set of insert operations is also available:
+
+* `void` *insert* `(Object objectToSave)`: Insert the object to the default collection.
+* `void` *insert* `(Object objectToSave, String collectionName)`: Insert the object to the specified collection.
+
+[[mongo-template.id-handling]]
+=== How the `_id` Field is Handled in the Mapping Layer
+
+MongoDB requires that you have an `_id` field for all documents.
+
+If you do not provide one, the driver assigns an `ObjectId` with a generated value without considering your domain model as the server isn't aware of your identifier type.
+When you use the `MappingMongoConverter`, certain rules govern how properties from the Java class are mapped to this `_id` field:
+
+. A property or field annotated with `@Id` (`org.springframework.data.annotation.Id`) maps to the `_id` field.
+. A property or field without an annotation but named `id` maps to the `_id` field.
+
+The following outlines what type conversion, if any, is done on the property mapped to the `_id` document field when using the `MappingMongoConverter` (the default for `MongoTemplate`).
+
+. If possible, an `id` property or field declared as a `String` in the Java class is converted to and stored as an `ObjectId` by using a Spring `Converter`. Valid conversion rules are delegated to the MongoDB Java driver. If it cannot be converted to an `ObjectId`, then the value is stored as a string in the database.
+. An `id` property or field declared as `BigInteger` in the Java class is converted to and stored as an `ObjectId` by using a Spring `Converter`.
+
+If no field or property specified in the previous sets of rules is present in the Java class, an implicit `_id` field is generated by the driver but not mapped to a property or field of the Java class.
+
+When querying and updating, `MongoTemplate` uses the converter that corresponds to the preceding rules for saving documents so that field names and types used in your queries can match what is in your domain classes.
+
+Some environments require a customized approach to map `Id` values such as data stored in MongoDB that did not run through the Spring Data mapping layer. Documents can contain `_id` values that can be represented either as `ObjectId` or as `String`.
+Reading documents from the store back to the domain type works just fine.
Querying for documents via their `id` can be cumbersome due to the implicit `ObjectId` conversion. Therefore documents cannot be retrieved that way. +For those cases `@MongoId` provides more control over the actual id mapping attempts. + +.`@MongoId` mapping +==== +[source,java] +---- +public class PlainStringId { + @MongoId String id; <1> +} + +public class PlainObjectId { + @MongoId ObjectId id; <2> +} + +public class StringToObjectId { + @MongoId(FieldType.OBJECT_ID) String id; <3> +} +---- +<1> The id is treated as `String` without further conversion. +<2> The id is treated as `ObjectId`. +<3> The id is treated as `ObjectId` if the given `String` is a valid `ObjectId` hex, otherwise as `String`. Corresponds to `@Id` usage. +==== + + +[[mongo-template.save-insert.collection]] +=== Into Which Collection Are My Documents Saved? + +There are two ways to manage the collection name that is used for the documents. +The default collection name that is used is the class name changed to start with a lower-case letter. +So a `com.test.Person` class is stored in the `person` collection. +You can customize this by providing a different collection name with the `@Document` annotation. +You can also override the collection name by providing your own collection name as the last parameter for the selected `MongoTemplate` method calls. + +[[mongo-template.save-insert.individual]] +=== Inserting or Saving Individual Objects + +The MongoDB driver supports inserting a collection of documents in a single operation. +The following methods in the `MongoOperations` interface support this functionality: + +* *insert*: Inserts an object. If there is an existing document with the same `id`, an error is generated. +* *insertAll*: Takes a `Collection` of objects as the first parameter. This method inspects each object and inserts it into the appropriate collection, based on the rules specified earlier. +* *save*: Saves the object, overwriting any object that might have the same `id`. 
+ +[[mongo-template.save-insert.batch]] +=== Inserting Several Objects in a Batch + +The MongoDB driver supports inserting a collection of documents in one operation. +The following methods in the `MongoOperations` interface support this functionality via `insert` or a dedicated `BulkOperations` interface. + +.Batch Insert +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +Collection inserted = template.insert(List.of(...), Person.class); +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +Flux inserted = template.insert(List.of(...), Person.class); +---- +====== + +.Bulk Insert +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +BulkWriteResult result = template.bulkOps(BulkMode.ORDERED, Person.class) + .insert(List.of(...)) + .execute(); +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +Mono result = template.bulkOps(BulkMode.ORDERED, Person.class) + .insert(List.of(...)) + .execute(); +---- +====== + +[NOTE] +==== +Server performance of batch and bulk is identical. +However bulk operations do not publish xref:mongodb/lifecycle-events.adoc[lifecycle events]. +==== + +[IMPORTANT] +==== +Any `@Version` property that has not been set prior to calling insert will be auto initialized with `1` (in case of a simple type like `int`) or `0` for wrapper types (eg. `Integer`). + +Read more in the see xref:mongodb/template-crud-operations.adoc#mongo-template.optimistic-locking[Optimistic Locking] section. +==== + +[[mongodb-template-update]] +== Update + +For updates, you can update the first document found by using `MongoOperation.updateFirst` or you can update all documents that were found to match the query by using the `MongoOperation.updateMulti` method or `all` on the fluent API. 
+The following example shows an update of all `SAVINGS` accounts where we are adding a one-time $50.00 bonus to the balance by using the `$inc` operator: + +.Updating documents by using the `MongoTemplate` / `ReactiveMongoTemplate` +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +import static org.springframework.data.mongodb.core.query.Criteria.where; +import org.springframework.data.mongodb.core.query.Update; + +// ... + +UpdateResult result = template.update(Account.class) + .matching(where("accounts.accountType").is(Type.SAVINGS)) + .apply(new Update().inc("accounts.$.balance", 50.00)) + .all(); +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +import static org.springframework.data.mongodb.core.query.Criteria.where; +import org.springframework.data.mongodb.core.query.Update; + +// ... + +Mono result = template.update(Account.class) + .matching(where("accounts.accountType").is(Type.SAVINGS)) + .apply(new Update().inc("accounts.$.balance", 50.00)) + .all(); +---- +====== + +In addition to the `Query` discussed earlier, we provide the update definition by using an `Update` object. +The `Update` class has methods that match the update modifiers available for MongoDB. +Most methods return the `Update` object to provide a fluent style for the API. + +[IMPORTANT] +==== +`@Version` properties if not included in the `Update` will be automatically incremented. +Read more in the see xref:mongodb/template-crud-operations.adoc#mongo-template.optimistic-locking[Optimistic Locking] section. +==== + +[[mongodb-template-update.methods]] +=== Methods for Running Updates for Documents + +* *updateFirst*: Updates the first document that matches the query document criteria with the updated document. +* *updateMulti*: Updates all objects that match the query document criteria with the updated document. + +WARNING: `updateFirst` does not support ordering for MongoDB Versions below 8.0. 
Running one of the older versions, please use xref:mongodb/template-crud-operations.adoc#mongo-template.find-and-upsert[findAndModify] to apply `Sort`. + +NOTE: Index hints for the update operation can be provided via `Query.withHint(...)`. + +[[mongodb-template-update.update]] +=== Methods in the `Update` Class + +You can use a little "'syntax sugar'" with the `Update` class, as its methods are meant to be chained together. +Also, you can kick-start the creation of a new `Update` instance by using `public static Update update(String key, Object value)` and using static imports. + +The `Update` class contains the following methods: + +* `Update` *addToSet* `(String key, Object value)` Update using the `$addToSet` update modifier +* `Update` *currentDate* `(String key)` Update using the `$currentDate` update modifier +* `Update` *currentTimestamp* `(String key)` Update using the `$currentDate` update modifier with `$type` `timestamp` +* `Update` *inc* `(String key, Number inc)` Update using the `$inc` update modifier +* `Update` *max* `(String key, Object max)` Update using the `$max` update modifier +* `Update` *min* `(String key, Object min)` Update using the `$min` update modifier +* `Update` *multiply* `(String key, Number multiplier)` Update using the `$mul` update modifier +* `Update` *pop* `(String key, Update.Position pos)` Update using the `$pop` update modifier +* `Update` *pull* `(String key, Object value)` Update using the `$pull` update modifier +* `Update` *pullAll* `(String key, Object[] values)` Update using the `$pullAll` update modifier +* `Update` *push* `(String key, Object value)` Update using the `$push` update modifier +* `Update` *pushAll* `(String key, Object[] values)` Update using the `$pushAll` update modifier +* `Update` *rename* `(String oldName, String newName)` Update using the `$rename` update modifier +* `Update` *set* `(String key, Object value)` Update using the `$set` update modifier +* `Update` *setOnInsert* `(String key, Object 
value)` Update using the `$setOnInsert` update modifier +* `Update` *unset* `(String key)` Update using the `$unset` update modifier + +Some update modifiers, such as `$push` and `$addToSet`, allow nesting of additional operators. + +[source,java] +---- +// { $push : { "category" : { "$each" : [ "spring" , "data" ] } } } +new Update().push("category").each("spring", "data") + +// { $push : { "key" : { "$position" : 0 , "$each" : [ "Arya" , "Arry" , "Weasel" ] } } } +new Update().push("key").atPosition(Position.FIRST).each(Arrays.asList("Arya", "Arry", "Weasel")); + +// { $push : { "key" : { "$slice" : 5 , "$each" : [ "Arya" , "Arry" , "Weasel" ] } } } +new Update().push("key").slice(5).each(Arrays.asList("Arya", "Arry", "Weasel")); + +// { $addToSet : { "values" : { "$each" : [ "spring" , "data" , "mongodb" ] } } } +new Update().addToSet("values").each("spring", "data", "mongodb"); +---- + +[[mongo-template.aggregation-update]] +=== Aggregation Pipeline Updates + +Update methods exposed by `MongoOperations` and `ReactiveMongoOperations` also accept an xref:mongodb/aggregation-framework.adoc[Aggregation Pipeline] via `AggregationUpdate`. +Using `AggregationUpdate` allows leveraging https://docs.mongodb.com/manual/reference/method/db.collection.update/#update-with-aggregation-pipeline[MongoDB 4.2 aggregations] in an update operation. +Using aggregations in an update allows updating one or more fields by expressing multiple stages and multiple conditions with a single operation. + +The update can consist of the following stages: + +* `AggregationUpdate.set(...).toValue(...)` -> `$set : { ... }` +* `AggregationUpdate.unset(...)` -> `$unset : [ ... ]` +* `AggregationUpdate.replaceWith(...)` -> `$replaceWith : { ... 
}` + +.Update Aggregation +==== +[source,java] +---- +AggregationUpdate update = Aggregation.newUpdate() + .set("average").toValue(ArithmeticOperators.valueOf("tests").avg()) <1> + .set("grade").toValue(ConditionalOperators.switchCases( <2> + when(valueOf("average").greaterThanEqualToValue(90)).then("A"), + when(valueOf("average").greaterThanEqualToValue(80)).then("B"), + when(valueOf("average").greaterThanEqualToValue(70)).then("C"), + when(valueOf("average").greaterThanEqualToValue(60)).then("D")) + .defaultTo("F") + ); + +template.update(Student.class) <3> + .apply(update) + .all(); <4> +---- +[source,javascript] +---- +db.students.update( <3> + { }, + [ + { $set: { average : { $avg: "$tests" } } }, <1> + { $set: { grade: { $switch: { <2> + branches: [ + { case: { $gte: [ "$average", 90 ] }, then: "A" }, + { case: { $gte: [ "$average", 80 ] }, then: "B" }, + { case: { $gte: [ "$average", 70 ] }, then: "C" }, + { case: { $gte: [ "$average", 60 ] }, then: "D" } + ], + default: "F" + } } } } + ], + { multi: true } <4> +) +---- +<1> The 1st `$set` stage calculates a new field _average_ based on the average of the _tests_ field. +<2> The 2nd `$set` stage calculates a new field _grade_ based on the _average_ field calculated by the first aggregation stage. +<3> The pipeline is run on the _students_ collection and uses `Student` for the aggregation field mapping. +<4> Apply the update to all matching documents in the collection. +==== + +[[mongo-template.upserts]] +== Upsert + +Related to performing an `updateFirst` operation, you can also perform an `upsert` operation, which will perform an insert if no document is found that matches the query. +The document that is inserted is a combination of the query document and the update document. 
+The following example shows how to use the `upsert` method: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +UpdateResult result = template.update(Person.class) + .matching(query(where("ssn").is(1111).and("firstName").is("Joe").and("Fraizer").is("Update"))) + .apply(update("address", addr)) + .upsert(); +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +Mono result = template.update(Person.class) + .matching(query(where("ssn").is(1111).and("firstName").is("Joe").and("Fraizer").is("Update"))) + .apply(update("address", addr)) + .upsert(); +---- +====== + +WARNING: `upsert` does not support ordering. Please use xref:mongodb/template-crud-operations.adoc#mongo-template.find-and-upsert[findAndModify] to apply `Sort`. + +[IMPORTANT] +==== +`@Version` properties, if not included in the `Update`, will be automatically initialized. +Read more in the xref:mongodb/template-crud-operations.adoc#mongo-template.optimistic-locking[Optimistic Locking] section. +==== + +[[mongo-template.replace]] +=== Replacing Documents in a Collection + +The various `replace` methods available via `MongoTemplate` allow you to override the first matching Document. +If no match is found, a new one can be upserted (as outlined in the previous section) by providing `ReplaceOptions` with the according configuration. + +==== +.Replace one +[source,java] +---- +Person tom = template.insert(new Person("Motte", 21)); <1> +Query query = Query.query(Criteria.where("firstName").is(tom.getFirstName())); <2> +tom.setFirstname("Tom"); <3> +template.replace(query, tom, ReplaceOptions.none()); <4> +---- +<1> Insert a new document. +<2> The query used to identify the single document to replace. +<3> Set up the replacement document, which must hold either the same `_id` as the existing document or no `_id` at all. +<4> Run the replace operation. 
+.Replace one with upsert +[source,java] +---- +Person tom = new Person("id-123", "Tom", 21) <1> +Query query = Query.query(Criteria.where("firstName").is(tom.getFirstName())); +template.replace(query, tom, ReplaceOptions.replaceOptions().upsert()); <2> +---- +<1> The `_id` value needs to be present for upsert, otherwise MongoDB will create a new potentially with the domain type incompatible `ObjectId`. +As MongoDB is not aware of your domain type, any `@Field(targetType)` hints are not considered and the resulting `ObjectId` might be not compatible with your domain model. +<2> Use `upsert` to insert a new document if no match is found +==== + +[WARNING] +==== +It is not possible to change the `_id` of existing documents with a replace operation. +On `upsert` MongoDB uses 2 ways of determining the new id for the entry: +* The `_id` is used within the query as in `{"_id" : 1234 }` +* The `_id` is present in the replacement document. +If no `_id` is provided in either way, MongoDB will create a new `ObjectId` for the document. +This may lead to mapping and data lookup malfunctions if the used domain types `id` property has a different type like e.g. `Long`. +==== + +[[mongo-template.find-and-upsert]] +== Find and Modify + +The `findAndModify(…)` method on `MongoCollection` can update a document and return either the old or newly updated document in a single operation. 
+`MongoTemplate` provides four `findAndModify` overloaded methods that take `Query` and `Update` classes and converts from `Document` to your POJOs: + +[source,java] +---- + T findAndModify(Query query, Update update, Class entityClass); + + T findAndModify(Query query, Update update, Class entityClass, String collectionName); + + T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass); + + T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, String collectionName); +---- + +The following example inserts a few `Person` objects into the container and performs a `findAndUpdate` operation: + +[source,java] +---- +template.insert(new Person("Tom", 21)); +template.insert(new Person("Dick", 22)); +template.insert(new Person("Harry", 23)); + +Query query = new Query(Criteria.where("firstName").is("Harry")); +Update update = new Update().inc("age", 1); + +Person oldValue = template.update(Person.class) + .matching(query) + .apply(update) + .findAndModifyValue(); // oldValue.age == 23 + +Person newValue = template.query(Person.class) + .matching(query) + .findOneValue(); // newValye.age == 24 + +Person newestValue = template.update(Person.class) + .matching(query) + .apply(update) + .withOptions(FindAndModifyOptions.options().returnNew(true)) // Now return the newly updated document when updating + .findAndModifyValue(); // newestValue.age == 25 +---- + +The `FindAndModifyOptions` method lets you set the options of `returnNew`, `upsert`, and `remove`. +An example extending from the previous code snippet follows: + +[source,java] +---- +Person upserted = template.update(Person.class) + .matching(new Query(Criteria.where("firstName").is("Mary"))) + .apply(update) + .withOptions(FindAndModifyOptions.options().upsert(true).returnNew(true)) + .findAndModifyValue() +---- + +[IMPORTANT] +==== +`@Version` properties if not included in the `Update` will be automatically incremented. 
+Read more in the xref:mongodb/template-crud-operations.adoc#mongo-template.optimistic-locking[Optimistic Locking] section. +==== + +[[mongo-template.find-and-replace]] +== Find and Replace + +The most straightforward method of replacing an entire `Document` is via its `id` using the `save` method. +However, this might not always be feasible. +`findAndReplace` offers an alternative that allows you to identify the document to replace via a simple query. + +.Find and Replace Documents +==== +[source,java] +---- +Optional result = template.update(Person.class) <1> + .matching(query(where("firstname").is("Tom"))) <2> + .replaceWith(new Person("Dick")) + .withOptions(FindAndReplaceOptions.options().upsert()) <3> + .as(User.class) <4> + .findAndReplace(); <5> +---- +<1> Use the fluent update API with the domain type given for mapping the query and deriving the collection name or just use `MongoOperations#findAndReplace`. +<2> The actual match query mapped against the given domain type. Provide `sort`, `fields` and `collation` settings via the query. +<3> Additional optional hook to provide options other than the defaults, like `upsert`. +<4> An optional projection type used for mapping the operation result. If none is given, the initial domain type is used. +<5> Trigger the actual processing. Use `findAndReplaceValue` to obtain the nullable result instead of an `Optional`. +==== + +IMPORTANT: Please note that the replacement must not hold an `id` itself as the `id` of the existing `Document` will be +carried over to the replacement by the store itself. Also keep in mind that `findAndReplace` will only replace the first +document matching the query criteria depending on a potentially given sort order. 
+ +[[mongo-template.delete]] +== Delete + +You can use one of five overloaded methods to remove an object from the database: + +==== +[source,java] +---- +template.remove(tywin, "GOT"); <1> + +template.remove(query(where("lastname").is("lannister")), "GOT"); <2> + +template.remove(new Query().limit(3), "GOT"); <3> + +template.findAllAndRemove(query(where("lastname").is("lannister"), "GOT"); <4> + +template.findAllAndRemove(new Query().limit(3), "GOT"); <5> +---- +<1> Remove a single entity specified by its `_id` from the associated collection. +<2> Remove all documents that match the criteria of the query from the `GOT` collection. +<3> Remove the first three documents in the `GOT` collection. Unlike <2>, the documents to remove are identified by their `_id`, running the given query, applying `sort`, `limit`, and `skip` options first, and then removing all at once in a separate step. +<4> Remove all documents matching the criteria of the query from the `GOT` collection. Unlike <3>, documents do not get deleted in a batch but one by one. +<5> Remove the first three documents in the `GOT` collection. Unlike <3>, documents do not get deleted in a batch but one by one. +==== + +[[mongo-template.optimistic-locking]] +== Optimistic Locking + +The `@Version` annotation provides syntax similar to that of JPA in the context of MongoDB and makes sure updates are only applied to documents with a matching version. +Therefore, the actual value of the version property is added to the update query in such a way that the update does not have any effect if another operation altered the document in the meantime. +In that case, an `OptimisticLockingFailureException` is thrown. 
+The following example shows these features: + +==== +[source,java] +---- +@Document +class Person { + + @Id String id; + String firstname; + String lastname; + @Version Long version; +} + +Person daenerys = template.insert(new Person("Daenerys")); <1> + +Person tmp = template.findOne(query(where("id").is(daenerys.getId())), Person.class); <2> + +daenerys.setLastname("Targaryen"); +template.save(daenerys); <3> + +template.save(tmp); // throws OptimisticLockingFailureException <4> +---- +<1> Initially insert the document. `version` is set to `0`. +<2> Load the just inserted document. `version` is still `0`. +<3> Update the document with `version = 0`. Set the `lastname` and bump `version` to `1`. +<4> Try to update the previously loaded document that still has `version = 0`. The operation fails with an `OptimisticLockingFailureException`, as the current `version` is `1`. +==== + +Only certain CRUD operations on `MongoTemplate` consider and alter version properties. Please consult the `MongoOperations` Javadoc for detailed information. + +IMPORTANT: Optimistic Locking requires setting the `WriteConcern` to `ACKNOWLEDGED`. Otherwise, an `OptimisticLockingFailureException` can be silently swallowed. + +NOTE: As of Version 2.2 `MongoOperations` also includes the `@Version` property when removing an entity from the database. +To remove a `Document` without a version check use `MongoOperations#remove(Query,...)` instead of `MongoOperations#remove(Object)`. + +NOTE: As of Version 2.2 repositories check for the outcome of acknowledged deletes when removing versioned entities. +An `OptimisticLockingFailureException` is raised if a versioned entity cannot be deleted through `CrudRepository.delete(Object)`. In such a case, the version was changed or the object was deleted in the meantime. Use `CrudRepository.deleteById(ID)` to bypass optimistic locking functionality and delete objects regardless of their version. 
+ diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-document-count.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-document-count.adoc new file mode 100644 index 0000000000..9372662352 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/template-document-count.adoc @@ -0,0 +1,48 @@ +[[mongo.query.count]] += Counting Documents + +The template API offers various methods to count the number of documents matching given criteria. +One of them is outlined below. + +==== +[source,java] +---- +template.query(Person.class) + .matching(query(where("firstname").is("luke"))) + .count(); +---- +==== + +In pre-3.x versions of Spring Data MongoDB the count operation used MongoDB's internal collection statistics. +With the introduction of xref:mongodb/client-session-transactions.adoc#mongo.transactions[MongoDB Transactions] this was no longer possible because statistics would not correctly reflect potential changes during a transaction, requiring an aggregation-based count approach. +So in version 2.x `MongoOperations.count()` would use the collection statistics if no transaction was in progress, and the aggregation variant if so. + +As of Spring Data MongoDB 3.x any `count` operation uses the aggregation-based count approach via MongoDB's `countDocuments`, regardless of the existence of filter criteria. +If the application is fine with the limitations of working upon collection statistics, `MongoOperations.estimatedCount()` offers an alternative. + +[TIP] +==== +By setting `MongoTemplate#useEstimatedCount(...)` to `true` _MongoTemplate#count(...)_ operations, that use an empty filter query, will be delegated to `estimatedCount`, as long as there is no transaction active and the template is not bound to a xref:mongodb/client-session-transactions.adoc[session]. +Delegating to `estimatedCount` may speed things up, and it is still possible to obtain exact numbers via `MongoTemplate#exactCount`. 
+==== + +[NOTE] +==== +MongoDB's native `countDocuments` method and the `$match` aggregation do not support `$near` and `$nearSphere` but require `$geoWithin` along with `$center` or `$centerSphere`, which does not support `$minDistance` (see https://jira.mongodb.org/browse/SERVER-37043). + +Therefore, a given `Query` will be rewritten for `count` operations using `Reactive`-/`MongoTemplate` to bypass the issue, as shown below. + +[source,javascript] +---- +{ location : { $near : [-73.99171, 40.738868], $maxDistance : 1.1 } } <1> +{ location : { $geoWithin : { $center: [ [-73.99171, 40.738868], 1.1] } } } <2> + +{ location : { $near : [-73.99171, 40.738868], $minDistance : 0.1, $maxDistance : 1.1 } } <3> +{$and :[ { $nor :[ { location :{ $geoWithin :{ $center :[ [-73.99171, 40.738868 ], 0.01] } } } ]}, { location :{ $geoWithin :{ $center :[ [-73.99171, 40.738868 ], 1.1] } } } ] } <4> +---- +<1> Count source query using `$near`. +<2> Rewritten query now using `$geoWithin` with `$center`. +<3> Count source query using `$near` with `$minDistance` and `$maxDistance`. +<4> Rewritten query, now a combination of `$nor` and `$geoWithin` criteria to work around the unsupported `$minDistance`. +==== + diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-gridfs.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-gridfs.adoc new file mode 100644 index 0000000000..c7305b0a71 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/template-gridfs.adoc @@ -0,0 +1,214 @@ +[[gridfs]] += GridFS Support + +MongoDB supports storing binary files inside its filesystem, GridFS. +Spring Data MongoDB provides a javadoc:org.springframework.data.mongodb.gridfs.GridFsOperations[] and javadoc:org.springframework.data.mongodb.gridfs.ReactiveGridFsOperations[] interface as well as the corresponding implementations, `GridFsTemplate` and `ReactiveGridFsTemplate`, to let you interact with the filesystem. 
+You can set up a template instance by handing it a `MongoDatabaseFactory`/`ReactiveMongoDatabaseFactory` as well as a `MongoConverter`, as the following example shows: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +class GridFsConfiguration extends AbstractMongoClientConfiguration { + + // … further configuration omitted + + @Bean + public GridFsTemplate gridFsTemplate() { + return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter()); + } +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +class ReactiveGridFsConfiguration extends AbstractReactiveMongoConfiguration { + + // … further configuration omitted + + @Bean + public ReactiveGridFsTemplate reactiveGridFsTemplate() { + return new ReactiveGridFsTemplate(reactiveMongoDbFactory(), mappingMongoConverter()); + } +} +---- + +XML:: ++ +[source,xml,indent=0,subs="verbatim,quotes",role="secondary"] +---- + + + + + + + + + + + + +---- +====== + +The template can now be injected and used to perform storage and retrieval operations, as the following example shows: + +.Using GridFS to store files +[tabs] +====== +Imperative:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +class GridFsClient { + + @Autowired + GridFsOperations operations; + + @Test + public void storeFileToGridFs() { + + FileMetadata metadata = new FileMetadata(); + // populate metadata + Resource file = … // lookup File or Resource + + operations.store(file.getInputStream(), "filename.txt", metadata); + } +} +---- +The `store(…)` operations take an `InputStream`, a filename, and (optionally) metadata information about the file to store. +The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `GridFsTemplate`. +Alternatively, you can also provide a `Document`. 
+==== + +Reactive:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +class ReactiveGridFsClient { + + @Autowired + ReactiveGridFsTemplate operations; + + @Test + public Mono storeFileToGridFs() { + + FileMetadata metadata = new FileMetadata(); + // populate metadata + Publisher file = … // lookup File or Resource + + return operations.store(file, "filename.txt", metadata); + } +} +---- +The `store(…)` operations take an `Publisher`, a filename, and (optionally) metadata information about the file to store. +The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `ReactiveGridFsTemplate`. +Alternatively, you can also provide a `Document`. + +The MongoDB's driver uses `AsyncInputStream` and `AsyncOutputStream` interfaces to exchange binary streams. +Spring Data MongoDB adapts these interfaces to `Publisher`. +Read more about `DataBuffer` in https://docs.spring.io/spring-framework/docs/{springVersion}/reference/html/core.html#databuffers[Spring's reference documentation]. +==== +====== + +You can read files from the filesystem through either the `find(…)` or the `getResources(…)` methods. +Let's have a look at the `find(…)` methods first. +You can either find a single file or multiple files that match a `Query`. +You can use the `GridFsCriteria` helper class to define queries. +It provides static factory methods to encapsulate default metadata fields (such as `whereFilename()` and `whereContentType()`) or a custom one through `whereMetaData()`. 
+The following example shows how to use the template to query for files: + +.Using GridFsTemplate to query for files +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +class GridFsClient { + + @Autowired + GridFsOperations operations; + + @Test + public void findFilesInGridFs() { + GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt"))); + } +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +class ReactiveGridFsClient { + + @Autowired + ReactiveGridFsTemplate operations; + + @Test + public Flux findFilesInGridFs() { + return operations.find(query(whereFilename().is("filename.txt"))) + } +} +---- +====== + +NOTE: Currently, MongoDB does not support defining sort criteria when retrieving files from GridFS. For this reason, any sort criteria defined on the `Query` instance handed into the `find(…)` method are disregarded. + +The other option to read files from the GridFs is to use the methods introduced by the `ResourcePatternResolver` interface. +They allow handing an Ant path into the method and can thus retrieve files matching the given pattern. 
+The following example shows how to use `GridFsTemplate` to read files: + +.Using GridFsTemplate to read files +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +class GridFsClient { + + @Autowired + GridFsOperations operations; + + public GridFsResources[] readFilesFromGridFs() { + return operations.getResources("*.txt"); + } +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +class ReactiveGridFsClient { + + @Autowired + ReactiveGridFsOperations operations; + + public Flux readFilesFromGridFs() { + return operations.getResources("*.txt"); + } +} +---- +====== + +`GridFsOperations` extends `ResourcePatternResolver` and lets the `GridFsTemplate` (for example) to be plugged into an `ApplicationContext` to read Spring Config files from MongoDB database. + +NOTE: By default, `GridFsTemplate` obtains `GridFSBucket` once upon the first GridFS interaction. +After that, the template instance reuses the cached bucket. +To use different buckets, from the same Template instance use the constructor accepting `Supplier`. diff --git a/src/main/antora/modules/ROOT/pages/mongodb/template-query-operations.adoc b/src/main/antora/modules/ROOT/pages/mongodb/template-query-operations.adoc new file mode 100644 index 0000000000..a424748205 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/template-query-operations.adoc @@ -0,0 +1,945 @@ +[[mongo.query]] += Querying Documents +:page-section-summary-toc: 1 + +You can use the `Query` and `Criteria` classes to express your queries. +They have method names that mirror the native MongoDB operator names, such as `lt`, `lte`, `is`, and others. +The `Query` and `Criteria` classes follow a fluent API style so that you can chain together multiple method criteria and queries while having easy-to-understand code. +To improve readability, static imports let you avoid using the 'new' keyword for creating `Query` and `Criteria` instances. 
+You can also use `BasicQuery` to create `Query` instances from plain JSON Strings, as shown in the following example: + +.Creating a Query instance from a plain JSON String +==== +[source,java] +---- +BasicQuery query = new BasicQuery("{ age : { $lt : 50 }, accounts.balance : { $gt : 1000.00 }}"); +List result = mongoTemplate.find(query, Person.class); +---- +==== + +[[mongodb-template-query]] +== Querying Documents in a Collection + +Earlier, we saw how to retrieve a single document by using the `findOne` and `findById` methods on `MongoTemplate`. +These methods return a single domain object right way or using a reactive API a `Mono` emitting a single element. +We can also query for a collection of documents to be returned as a list of domain objects. +Assuming that we have a number of `Person` objects with name and age stored as documents in a collection and that each person has an embedded account document with a balance, we can now run a query using the following code: + +.Querying for documents using the MongoTemplate +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +import static org.springframework.data.mongodb.core.query.Criteria.where; +import static org.springframework.data.mongodb.core.query.Query.query; + +// ... + +List result = template.query(Person.class) + .matching(query(where("age").lt(50).and("accounts.balance").gt(1000.00d))) + .all(); +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +import static org.springframework.data.mongodb.core.query.Criteria.where; +import static org.springframework.data.mongodb.core.query.Query.query; + +// ... + +Flux result = template.query(Person.class) + .matching(query(where("age").lt(50).and("accounts.balance").gt(1000.00d))) + .all(); +---- +====== + +All find methods take a `Query` object as a parameter. +This object defines the criteria and options used to perform the query. 
+The criteria are specified by using a `Criteria` object that has a static factory method named `where` to instantiate a new `Criteria` object. +We recommend using static imports for `org.springframework.data.mongodb.core.query.Criteria.where` and `Query.query` to make the query more readable. + +The query should return a `List` or `Flux` of `Person` objects that meet the specified criteria. +The rest of this section lists the methods of the `Criteria` and `Query` classes that correspond to the operators provided in MongoDB. +Most methods return the `Criteria` object, to provide a fluent style for the API. + +[[mongodb-template-query.criteria]] +.Methods of the Criteria Class +[%collapsible] +==== +The `Criteria` class provides the following methods, all of which correspond to operators in MongoDB: + +* `Criteria` *all* `(Object o)` Creates a criterion using the `$all` operator +* `Criteria` *and* `(String key)` Adds a chained `Criteria` with the specified `key` to the current `Criteria` and returns the newly created one +* `Criteria` *andOperator* `(Criteria... criteria)` Creates an and query using the `$and` operator for all of the provided criteria (requires MongoDB 2.0 or later) +* `Criteria` *andOperator* `(Collection criteria)` Creates an and query using the `$and` operator for all of the provided criteria (requires MongoDB 2.0 or later) +* `Criteria` *elemMatch* `(Criteria c)` Creates a criterion using the `$elemMatch` operator +* `Criteria` *exists* `(boolean b)` Creates a criterion using the `$exists` operator +* `Criteria` *gt* `(Object o)` Creates a criterion using the `$gt` operator +* `Criteria` *gte* `(Object o)` Creates a criterion using the `$gte` operator +* `Criteria` *in* `(Object... o)` Creates a criterion using the `$in` operator for a varargs argument. 
+* `Criteria` *in* `(Collection collection)` Creates a criterion using the `$in` operator using a collection +* `Criteria` *is* `(Object o)` Creates a criterion using field matching (`{ key:value }`). If the specified value is a document, the order of the fields and exact equality in the document matters. +* `Criteria` *lt* `(Object o)` Creates a criterion using the `$lt` operator +* `Criteria` *lte* `(Object o)` Creates a criterion using the `$lte` operator +* `Criteria` *mod* `(Number value, Number remainder)` Creates a criterion using the `$mod` operator +* `Criteria` *ne* `(Object o)` Creates a criterion using the `$ne` operator +* `Criteria` *nin* `(Object... o)` Creates a criterion using the `$nin` operator +* `Criteria` *norOperator* `(Criteria... criteria)` Creates an nor query using the `$nor` operator for all of the provided criteria +* `Criteria` *norOperator* `(Collection criteria)` Creates an nor query using the `$nor` operator for all of the provided criteria +* `Criteria` *not* `()` Creates a criterion using the `$not` meta operator which affects the clause directly following +* `Criteria` *orOperator* `(Criteria... criteria)` Creates an or query using the `$or` operator for all of the provided criteria +* `Criteria` *orOperator* `(Collection criteria)` Creates an or query using the `$or` operator for all of the provided criteria +* `Criteria` *regex* `(String re)` Creates a criterion using a `$regex` +* `Criteria` *sampleRate* `(double sampleRate)` Creates a criterion using the `$sampleRate` operator +* `Criteria` *size* `(int s)` Creates a criterion using the `$size` operator +* `Criteria` *type* `(int t)` Creates a criterion using the `$type` operator +* `Criteria` *matchingDocumentStructure* `(MongoJsonSchema schema)` Creates a criterion using the `$jsonSchema` operator for xref:mongodb/mapping/mapping-schema.adoc[JSON schema criteria]. `$jsonSchema` can only be applied on the top level of a query and not property specific. 
Use the `properties` attribute of the schema to match against nested fields. +* `Criteria` *bits()* is the gateway to https://docs.mongodb.com/manual/reference/operator/query-bitwise/[MongoDB bitwise query operators] like `$bitsAllClear`. + +The Criteria class also provides the following methods for geospatial queries. + +* `Criteria` *within* `(Circle circle)` Creates a geospatial criterion using `$geoWithin $center` operators. +* `Criteria` *within* `(Box box)` Creates a geospatial criterion using a `$geoWithin $box` operation. +* `Criteria` *withinSphere* `(Circle circle)` Creates a geospatial criterion using `$geoWithin $center` operators. +* `Criteria` *near* `(Point point)` Creates a geospatial criterion using a `$near` operation +* `Criteria` *nearSphere* `(Point point)` Creates a geospatial criterion using `$nearSphere$center` operations. This is only available for MongoDB 1.7 and higher. +* `Criteria` *minDistance* `(double minDistance)` Creates a geospatial criterion using the `$minDistance` operation, for use with $near. +* `Criteria` *maxDistance* `(double maxDistance)` Creates a geospatial criterion using the `$maxDistance` operation, for use with $near. +==== + +The `Query` class has some additional methods that allow to select certain fields as well as to limit and sort the result. 
+ +[[mongodb-template-query.query]] +.Methods of the Query class +[%collapsible] +==== +* `Query` *addCriteria* `(Criteria criteria)` used to add additional criteria to the query +* `Field` *fields* `()` used to define fields to be included in the query results +* `Query` *limit* `(int limit)` used to limit the size of the returned results to the provided limit (used for paging) +* `Query` *skip* `(int skip)` used to skip the provided number of documents in the results (used for paging) +* `Query` *with* `(Sort sort)` used to provide sort definition for the results +* `Query` *with* `(ScrollPosition position)` used to provide a scroll position (Offset- or Keyset-based pagination) to start or resume a `Scroll` +==== + +[[mongo-template.query.result-projection]] + +The template API allows direct usage of result projections that enable you to map queries against a given domain type while projecting the operation result onto another one as outlined below. + +[source,java] +---- +class + +template.query(SWCharacter.class) + .as(Jedi.class) +---- + +For more information on result projections please refer to the xref:repositories/projections.adoc[Projections] section of the documentation. + +[[mongo-template.querying.field-selection]] +== Selecting fields + +MongoDB supports https://docs.mongodb.com/manual/tutorial/project-fields-from-query-results/[projecting fields] returned by a query. +A projection can include and exclude fields (the `_id` field is always included unless explicitly excluded) based on their name. + +.Selecting result fields +==== +[source,java] +---- +public class Person { + + @Id String id; + String firstname; + + @Field("last_name") + String lastname; + + Address address; +} + +query.fields().include("lastname"); <1> + +query.fields().exclude("id").include("lastname") <2> + +query.fields().include("address") <3> + +query.fields().include("address.city") <4> +---- +<1> Result will contain both `_id` and `last_name` via `{ "last_name" : 1 }`. 
+<2> Result will only contain the `last_name` via `{ "_id" : 0, "last_name" : 1 }`. +<3> Result will contain the `_id` and entire `address` object via `{ "address" : 1 }`. +<4> Result will contain the `_id` and an `address` object that only contains the `city` field via `{ "address.city" : 1 }`. +==== + +Starting with MongoDB 4.4 you can use aggregation expressions for field projections as shown below: + +.Computing result fields using expressions +==== +[source,java] +---- +query.fields() + .project(MongoExpression.create("'$toUpper' : '$last_name'")) <1> + .as("last_name"); <2> + +query.fields() + .project(StringOperators.valueOf("lastname").toUpper()) <3> + .as("last_name"); + +query.fields() + .project(AggregationSpELExpression.expressionOf("toUpper(lastname)")) <4> + .as("last_name"); +---- +<1> Use a native expression. The used field name must refer to field names within the database document. +<2> Assign the field name to which the expression result is projected. The resulting field name is not mapped against the domain model. +<3> Use an `AggregationExpression`. Other than native `MongoExpression`, field names are mapped to the ones used in the domain model. +<4> Use SpEL along with an `AggregationExpression` to invoke expression functions. Field names are mapped to the ones used in the domain model. +==== + +`@Query(fields="…")` allows usage of expression field projections at `Repository` level as described in xref:mongodb/repositories/repositories.adoc#mongodb.repositories.queries.json-based[MongoDB JSON-based Query Methods and Field Restriction]. + +[[mongo.query.additional-query-options]] +== Additional Query Options + +MongoDB offers various ways of applying meta information, like a comment or a batch size, to a query. Using the `Query` API +directly there are several methods for those options. + +[[mongo.query.hints]] +=== Hints + +Index hints can be applied in two ways, using the index name or its field definition.
+ +==== +[source,java] +---- +template.query(Person.class) + .matching(query("...").withHint("index-to-use")); + +template.query(Person.class) + .matching(query("...").withHint("{ firstname : 1 }")); +---- +==== + +[[mongo.query.cursor-size]] +=== Cursor Batch Size + +The cursor batch size defines the number of documents to return in each response batch. +==== +[source,java] +---- +Query query = query(where("firstname").is("luke")) + .cursorBatchSize(100) +---- +==== + +[[mongo.query.collation]] +=== Collations + +Using collations with collection operations is a matter of specifying a `Collation` instance in your query or operation options, as the following two examples show: + +==== +[source,java] +---- +Collation collation = Collation.of("de"); + +Query query = new Query(Criteria.where("firstName").is("Amél")) + .collation(collation); + +List results = template.find(query, Person.class); +---- +==== + +[[mongo.query.read-preference]] +=== Read Preference + +The `ReadPreference` to use can be set directly on the `Query` object to be run as outlined below. + +==== +[source,java] +---- +template.find(Person.class) + .matching(query(where(...)).withReadPreference(ReadPreference.secondary())) + .all(); +---- +==== + +NOTE: The preference set on the `Query` instance will supersede the default `ReadPreference` of `MongoTemplate`. + +[[mongo.query.comment]] +=== Comments + +Queries can be equipped with comments which makes them easier to look up in server logs. + +==== +[source,java] +---- +template.find(Person.class) + .matching(query(where(...)).comment("Use the force luke!")) + .all(); +---- +==== + +[[mongo-template.query.distinct]] +== Query Distinct Values + +MongoDB provides an operation to obtain distinct values for a single field by using a query from the resulting documents. +Resulting values are not required to have the same data type, nor is the feature limited to simple types. 
+For retrieval, the actual result type does matter for the sake of conversion and typing. The following example shows how to query for distinct values: + +.Retrieving distinct values +==== +[source,java] +---- +template.query(Person.class) <1> + .distinct("lastname") <2> + .all(); <3> +---- +<1> Query the `Person` collection. +<2> Select distinct values of the `lastname` field. The field name is mapped according to the domain type's property declaration, taking potential `@Field` annotations into account. +<3> Retrieve all distinct values as a `List` of `Object` (due to no explicit result type being specified). +==== + +Retrieving distinct values into a `Collection` of `Object` is the most flexible way, as it tries to determine the property value of the domain type and convert results to the desired type or mapping `Document` structures. + +Sometimes, when all values of the desired field are fixed to a certain type, it is more convenient to directly obtain a correctly typed `Collection`, as shown in the following example: + +.Retrieving strongly typed distinct values +==== +[source,java] +---- +template.query(Person.class) <1> + .distinct("lastname") <2> + .as(String.class) <3> + .all(); <4> +---- +<1> Query the collection of `Person`. +<2> Select distinct values of the `lastname` field. The field name is mapped according to the domain type's property declaration, taking potential `@Field` annotations into account. +<3> Retrieved values are converted into the desired target type -- in this case, `String`. It is also possible to map the values to a more complex type if the stored field contains a document. +<4> Retrieve all distinct values as a `List` of `String`. If the type cannot be converted into the desired target type, this method throws a `DataAccessException`. +==== + +[[mongo.geospatial]] +== GeoSpatial Queries + +MongoDB supports GeoSpatial queries through the use of operators such as `$near`, `$within`, `$geoWithin`, and `$nearSphere`.
Methods specific to geospatial queries are available on the `Criteria` class. There are also a few shape classes (`Box`, `Circle`, and `Point`) that are used in conjunction with geospatial related `Criteria` methods. + +NOTE: Using GeoSpatial queries requires attention when used within MongoDB transactions, see xref:mongodb/client-session-transactions.adoc#mongo.transactions.behavior[Special behavior inside transactions]. + +To understand how to perform GeoSpatial queries, consider the following `Venue` class (taken from the integration tests and relying on the rich `MappingMongoConverter`): + +.Venue.java +[%collapsible] +==== +[source,java] +---- +@Document(collection="newyork") +public class Venue { + + @Id + private String id; + private String name; + private double[] location; + + @PersistenceConstructor + Venue(String name, double[] location) { + super(); + this.name = name; + this.location = location; + } + + public Venue(String name, double x, double y) { + super(); + this.name = name; + this.location = new double[] { x, y }; + } + + public String getName() { + return name; + } + + public double[] getLocation() { + return location; + } + + @Override + public String toString() { + return "Venue [id=" + id + ", name=" + name + ", location=" + + Arrays.toString(location) + "]"; + } +} +---- +==== + +To find locations within a `Circle`, you can use the following query: + +[source,java] +---- +Circle circle = new Circle(-73.99171, 40.738868, 0.01); +List venues = + template.find(new Query(Criteria.where("location").within(circle)), Venue.class); +---- + +To find venues within a `Circle` using spherical coordinates, you can use the following query: + +[source,java] +---- +Circle circle = new Circle(-73.99171, 40.738868, 0.003712240453784); +List venues = + template.find(new Query(Criteria.where("location").withinSphere(circle)), Venue.class); +---- + +To find venues within a `Box`, you can use the following query: + +[source,java] +---- +//lower-left then 
upper-right +Box box = new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404)); +List venues = + template.find(new Query(Criteria.where("location").within(box)), Venue.class); +---- + +To find venues near a `Point`, you can use the following queries: + +[source,java] +---- +Point point = new Point(-73.99171, 40.738868); +List venues = + template.find(new Query(Criteria.where("location").near(point).maxDistance(0.01)), Venue.class); +---- + +[source,java] +---- +Point point = new Point(-73.99171, 40.738868); +List venues = + template.find(new Query(Criteria.where("location").near(point).minDistance(0.01).maxDistance(100)), Venue.class); +---- + +To find venues near a `Point` using spherical coordinates, you can use the following query: + +[source,java] +---- +Point point = new Point(-73.99171, 40.738868); +List venues = + template.find(new Query( + Criteria.where("location").nearSphere(point).maxDistance(0.003712240453784)), + Venue.class); +---- + +[[mongo.geo-near]] +== Geo-near Queries + +[WARNING] +==== +*Changed in 2.2!* + +https://docs.mongodb.com/master/release-notes/4.2-compatibility/[MongoDB 4.2] removed support for the +`geoNear` command which had been previously used to run the `NearQuery`. + +Spring Data MongoDB 2.2 `MongoOperations#geoNear` uses the `$geoNear` https://docs.mongodb.com/manual/reference/operator/aggregation/geoNear/[aggregation] +instead of the `geoNear` command to run a `NearQuery`. + +The calculated distance (the `dis` when using a geoNear command) previously returned within a wrapper type now is embedded +into the resulting document. +If the given domain type already contains a property with that name, the calculated distance +is named `calculated-distance` with a potentially random postfix. + +Target types may contain a property named after the returned distance to (additionally) read it back directly into the domain type as shown below. 
+ +[source,java] +---- +GeoResults<VenueWithDistanceField> results = template.query(Venue.class) <1> + .as(VenueWithDistanceField.class) <2> + .near(NearQuery.near(new GeoJsonPoint(-73.99, 40.73), KILOMETERS)) + .all(); +---- +<1> Domain type used to identify the target collection and potential query mapping. +<2> Target type containing a `dis` field of type `Number`. +==== + +MongoDB supports querying the database for geo locations and calculating the distance from a given origin at the same time. With geo-near queries, you can express queries such as "find all restaurants in the surrounding 10 miles". To let you do so, `MongoOperations` provides `geoNear(…)` methods that take a `NearQuery` as an argument (as well as the already familiar entity type and collection), as shown in the following example: + +[source,java] +---- +Point location = new Point(-73.99171, 40.738868); +NearQuery query = NearQuery.near(location).maxDistance(new Distance(10, Metrics.MILES)); + +GeoResults<Restaurant> results = operations.geoNear(query, Restaurant.class); +---- + +We use the `NearQuery` builder API to set up a query to return all `Restaurant` instances surrounding the given `Point` out to 10 miles. +The `Metrics` enum used here actually implements an interface so that other metrics could be plugged into a distance as well. +A `Metric` is backed by a multiplier to transform the distance value of the given metric into native distances. +The sample shown here would consider the 10 to be miles. Using one of the built-in metrics (miles and kilometers) automatically triggers the spherical flag to be set on the query. +If you want to avoid that, pass plain `double` values into `maxDistance(…)`. +For more information, see the Javadoc of javadoc:org.springframework.data.mongodb.core.query.NearQuery[] and `Distance`. + +The geo-near operations return a `GeoResults` wrapper object that encapsulates `GeoResult` instances. +Wrapping `GeoResults` allows accessing the average distance of all results.
+A single `GeoResult` object carries the entity found plus its distance from the origin. + +[[mongo.geo-json]] +== GeoJSON Support + +MongoDB supports https://geojson.org/[GeoJSON] and simple (legacy) coordinate pairs for geospatial data. Those formats can both be used for storing as well as querying data. See the https://docs.mongodb.org/manual/core/2dsphere/#geospatial-indexes-store-geojson/[MongoDB manual on GeoJSON support] to learn about requirements and restrictions. + +[[mongo.geo-json.domain.classes]] +== GeoJSON Types in Domain Classes + +Usage of https://geojson.org/[GeoJSON] types in domain classes is straightforward. The `org.springframework.data.mongodb.core.geo` package contains types such as `GeoJsonPoint`, `GeoJsonPolygon`, and others. These types extend the existing `org.springframework.data.geo` types. The following example uses a javadoc:org.springframework.data.mongodb.core.geo.GeoJsonPoint[]: + +==== +[source,java] +---- +public class Store { + + String id; + + /** + * { "type" : "Point", "coordinates" : [ x, y ] } + */ + GeoJsonPoint location; +} +---- +==== + +[TIP] +==== +If the `coordinates` of a GeoJSON object represent _latitude_ and _longitude_ pairs, the _longitude_ goes first followed by _latitude_. + +`GeoJsonPoint` therefore treats `getX()` as _longitude_ and `getY()` as _latitude_.
+==== + +[[mongo.geo-json.query-methods]] +== GeoJSON Types in Repository Query Methods + +Using GeoJSON types as repository query parameters forces usage of the `$geometry` operator when creating the query, as the following example shows: + +==== +[source,java] +---- +public interface StoreRepository extends CrudRepository { + + List findByLocationWithin(Polygon polygon); <1> + +} + +/* + * { + * "location": { + * "$geoWithin": { + * "$geometry": { + * "type": "Polygon", + * "coordinates": [ + * [ + * [-73.992514,40.758934], + * [-73.961138,40.760348], + * [-73.991658,40.730006], + * [-73.992514,40.758934] + * ] + * ] + * } + * } + * } + * } + */ +repo.findByLocationWithin( <2> + new GeoJsonPolygon( + new Point(-73.992514, 40.758934), + new Point(-73.961138, 40.760348), + new Point(-73.991658, 40.730006), + new Point(-73.992514, 40.758934))); <3> + +/* + * { + * "location" : { + * "$geoWithin" : { + * "$polygon" : [ [-73.992514,40.758934] , [-73.961138,40.760348] , [-73.991658,40.730006] ] + * } + * } + * } + */ +repo.findByLocationWithin( <4> + new Polygon( + new Point(-73.992514, 40.758934), + new Point(-73.961138, 40.760348), + new Point(-73.991658, 40.730006))); +---- +<1> Repository method definition using the commons type allows calling it with both the GeoJSON and the legacy format. +<2> Use GeoJSON type to make use of `$geometry` operator. +<3> Note that GeoJSON polygons need to define a closed ring. +<4> Use the legacy format `$polygon` operator. +==== + +[[mongo.geo-json.metrics]] +== Metrics and Distance calculation + +Then MongoDB `$geoNear` operator allows usage of a GeoJSON Point or legacy coordinate pairs. + +==== +[source,java] +---- +NearQuery.near(new Point(-73.99171, 40.738868)) +---- +[source,json] +---- +{ + "$geoNear": { + //... + "near": [-73.99171, 40.738868] + } +} +---- +==== +==== +[source,java] +---- +NearQuery.near(new GeoJsonPoint(-73.99171, 40.738868)) +---- +[source,json] +---- +{ + "$geoNear": { + //... 
+ "near": { "type": "Point", "coordinates": [-73.99171, 40.738868] } + } +} + +---- +==== + +Though syntactically different the server is fine accepting both no matter what format the target Document within the collection +is using. + +WARNING: There is a huge difference in the distance calculation. Using the legacy format operates +upon _Radians_ on an Earth like sphere, whereas the GeoJSON format uses _Meters_. + +To avoid a serious headache make sure to set the `Metric` to the desired unit of measure which ensures the +distance to be calculated correctly. + +In other words: + +==== +Assume you've got 5 Documents like the ones below: +[source,json] +---- +{ + "_id" : ObjectId("5c10f3735d38908db52796a5"), + "name" : "Penn Station", + "location" : { "type" : "Point", "coordinates" : [ -73.99408, 40.75057 ] } +} +{ + "_id" : ObjectId("5c10f3735d38908db52796a6"), + "name" : "10gen Office", + "location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] } +} +{ + "_id" : ObjectId("5c10f3735d38908db52796a9"), + "name" : "City Bakery ", + "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] } +} +{ + "_id" : ObjectId("5c10f3735d38908db52796aa"), + "name" : "Splash Bar", + "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] } +} +{ + "_id" : ObjectId("5c10f3735d38908db52796ab"), + "name" : "Momofuku Milk Bar", + "location" : { "type" : "Point", "coordinates" : [ -73.985839, 40.731698 ] } +} +---- +==== + +Fetching all Documents within a 400 Meter radius from `[-73.99171, 40.738868]` would look like this using +GeoJSON: + +.GeoNear with GeoJSON +==== +[source,json] +---- +{ + "$geoNear": { + "maxDistance": 400, <1> + "num": 10, + "near": { type: "Point", coordinates: [-73.99171, 40.738868] }, + "spherical":true, <2> + "key": "location", + "distanceField": "distance" + } +} +---- +Returning the following 3 Documents: +[source,json] +---- +{ + "_id" : ObjectId("5c10f3735d38908db52796a6"), + "name" : "10gen Office", + 
"location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] } + "distance" : 0.0 <3> +} +{ + "_id" : ObjectId("5c10f3735d38908db52796a9"), + "name" : "City Bakery ", + "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] } + "distance" : 69.3582262492474 <3> +} +{ + "_id" : ObjectId("5c10f3735d38908db52796aa"), + "name" : "Splash Bar", + "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] } + "distance" : 69.3582262492474 <3> +} +---- +<1> Maximum distance from center point in _Meters_. +<2> GeoJSON always operates upon a sphere. +<3> Distance from center point in _Meters_. +==== + +Now, when using legacy coordinate pairs one operates upon _Radians_ as discussed before. So we use `Metrics#KILOMETERS +when constructing the `$geoNear` command. The `Metric` makes sure the distance multiplier is set correctly. + +.GeoNear with Legacy Coordinate Pairs +==== +[source,json] +---- +{ + "$geoNear": { + "maxDistance": 0.0000627142377, <1> + "distanceMultiplier": 6378.137, <2> + "num": 10, + "near": [-73.99171, 40.738868], + "spherical":true, <3> + "key": "location", + "distanceField": "distance" + } +} +---- +Returning the 3 Documents just like the GeoJSON variant: +[source,json] +---- +{ + "_id" : ObjectId("5c10f3735d38908db52796a6"), + "name" : "10gen Office", + "location" : { "type" : "Point", "coordinates" : [ -73.99171, 40.738868 ] } + "distance" : 0.0 <4> +} +{ + "_id" : ObjectId("5c10f3735d38908db52796a9"), + "name" : "City Bakery ", + "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] } + "distance" : 0.0693586286032982 <4> +} +{ + "_id" : ObjectId("5c10f3735d38908db52796aa"), + "name" : "Splash Bar", + "location" : { "type" : "Point", "coordinates" : [ -73.992491, 40.738673 ] } + "distance" : 0.0693586286032982 <4> +} +---- +<1> Maximum distance from center point in _Radians_. +<2> The distance multiplier so we get _Kilometers_ as resulting distance. 
+<3> Make sure we operate on a 2d_sphere index. +<4> Distance from center point in _Kilometers_ - take it times 1000 to match _Meters_ of the GeoJSON variant. +==== + +[[mongo.textsearch]] +== Full-text Search + +Since version 2.6 of MongoDB, you can run full-text queries by using the `$text` operator. Methods and operations specific to full-text queries are available in `TextQuery` and `TextCriteria`. When doing full text search, see the https://docs.mongodb.org/manual/reference/operator/query/text/#behavior[MongoDB reference] for its behavior and limitations. + +Before you can actually use full-text search, you must set up the search index correctly. +See xref:mongodb/mapping/mapping.adoc#mapping-usage-indexes.text-index[Text Index] for more detail on how to create index structures. +The following example shows how to set up a full-text search: + +[source,javascript] +---- +db.foo.createIndex( +{ + title : "text", + content : "text" +}, +{ + weights : { + title : 3 + } +} +) +---- + +A query searching for `coffee cake` can be defined and run as follows: + +.Full Text Query +==== +[source,java] +---- +Query query = TextQuery + .queryText(new TextCriteria().matchingAny("coffee", "cake")); + +List page = template.find(query, Document.class); +---- +==== + +To sort results by relevance according to the `weights` use `TextQuery.sortByScore`. + +.Full Text Query - Sort by Score +==== +[source,java] +---- +Query query = TextQuery + .queryText(new TextCriteria().matchingAny("coffee", "cake")) + .sortByScore() <1> + .includeScore(); <2> + +List page = template.find(query, Document.class); +---- +<1> Use the score property for sorting results by relevance which triggers `.sort({'score': {'$meta': 'textScore'}})`. +<2> Use `TextQuery.includeScore()` to include the calculated relevance in the resulting `Document`. 
+==== + +You can exclude search terms by prefixing the term with `-` or by using `notMatching`, as shown in the following example (note that the two lines have the same effect and are thus redundant): + +[source,java] +---- +// search for 'coffee' and not 'cake' +TextQuery.queryText(new TextCriteria().matching("coffee").matching("-cake")); +TextQuery.queryText(new TextCriteria().matching("coffee").notMatching("cake")); +---- + +`TextCriteria.matching` takes the provided term as is. +Therefore, you can define phrases by putting them between double quotation marks (for example, `\"coffee cake\")` or using by `TextCriteria.phrase.` +The following example shows both ways of defining a phrase: + +[source,java] +---- +// search for phrase 'coffee cake' +TextQuery.queryText(new TextCriteria().matching("\"coffee cake\"")); +TextQuery.queryText(new TextCriteria().phrase("coffee cake")); +---- + +You can set flags for `$caseSensitive` and `$diacriticSensitive` by using the corresponding methods on `TextCriteria`. +Note that these two optional flags have been introduced in MongoDB 3.2 and are not included in the query unless explicitly set. + +[[mongo.query-by-example]] +== Query by Example + +xref:mongodb/repositories/query-methods.adoc#query-by-example[Query by Example] can be used on the Template API level run example queries. + +The following snipped shows how to query by example: + +.Typed Example Query +[source,java] +---- +Person probe = new Person(); +probe.lastname = "stark"; + +Example example = Example.of(probe); + +Query query = new Query(new Criteria().alike(example)); +List result = template.find(query, Person.class); +---- + +By default `Example` is strictly typed. This means that the mapped query has an included type match, restricting it to probe assignable types. +For example, when sticking with the default type key (`_class`), the query has restrictions such as (`_class : { $in : [ com.acme.Person] }`). 
+ +By using the `UntypedExampleMatcher`, it is possible to bypass the default behavior and skip the type restriction. So, as long as field names match, nearly any domain type can be used as the probe for creating the reference, as the following example shows: + +.Untyped Example Query +==== +[source, java] +---- + +class JustAnArbitraryClassWithMatchingFieldName { + @Field("lastname") String value; +} + +JustAnArbitraryClassWithMatchingFieldName probe = new JustAnArbitraryClassWithMatchingFieldName(); +probe.value = "stark"; + +Example example = Example.of(probe, UntypedExampleMatcher.matching()); + +Query query = new Query(new Criteria().alike(example)); +List result = template.find(query, Person.class); +---- +==== + +[NOTE] +==== +When including `null` values in the `ExampleSpec`, Spring Data Mongo uses embedded document matching instead of dot notation property matching. +Doing so forces exact document matching for all property values and the property order in the embedded document. +==== + +[NOTE] +==== +`UntypedExampleMatcher` is likely the right choice for you if you are storing different entities within a single collection or opted out of writing type hints. + +Also, keep in mind that using `@TypeAlias` requires eager initialization of the `MappingContext`. To do so, configure `initialEntitySet` to ensure proper alias resolution for read operations.
+==== + +Spring Data MongoDB provides support for different matching options: + +.`StringMatcher` options +[%collapsible] +==== +[cols="1,2", options="header"] +|=== +| Matching +| Logical result + +| `DEFAULT` (case-sensitive) +| `{"firstname" : firstname}` + +| `DEFAULT` (case-insensitive) +| `{"firstname" : { $regex: firstname, $options: 'i'}}` + +| `EXACT` (case-sensitive) +| `{"firstname" : { $regex: /^firstname$/}}` + +| `EXACT` (case-insensitive) +| `{"firstname" : { $regex: /^firstname$/, $options: 'i'}}` + +| `STARTING` (case-sensitive) +| `{"firstname" : { $regex: /^firstname/}}` + +| `STARTING` (case-insensitive) +| `{"firstname" : { $regex: /^firstname/, $options: 'i'}}` + +| `ENDING` (case-sensitive) +| `{"firstname" : { $regex: /firstname$/}}` + +| `ENDING` (case-insensitive) +| `{"firstname" : { $regex: /firstname$/, $options: 'i'}}` + +| `CONTAINING` (case-sensitive) +| `{"firstname" : { $regex: /.\*firstname.*/}}` + +| `CONTAINING` (case-insensitive) +| `{"firstname" : { $regex: /.\*firstname.*/, $options: 'i'}}` + +| `REGEX` (case-sensitive) +| `{"firstname" : { $regex: /firstname/}}` + +| `REGEX` (case-insensitive) +| `{"firstname" : { $regex: /firstname/, $options: 'i'}}` + +|=== +==== + +[[mongo.jsonSchema.query]] +== Query a collection for matching JSON Schema + +You can use a schema to query any collection for documents that match a given structure defined by a JSON schema, as the following example shows: + +.Query for Documents matching a `$jsonSchema` +==== +[source,java] +---- +MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build(); + +template.find(query(matchingDocumentStructure(schema)), Person.class); +---- +==== + +Please refer to the xref:mongodb/mapping/mapping-schema.adoc[JSON Schema] section to learn more about the schema support in Spring Data MongoDB. 
+ + + diff --git a/src/main/antora/modules/ROOT/pages/mongodb/value-expressions.adoc b/src/main/antora/modules/ROOT/pages/mongodb/value-expressions.adoc new file mode 100644 index 0000000000..6356a46265 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/mongodb/value-expressions.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$value-expressions.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/observability/conventions.adoc b/src/main/antora/modules/ROOT/pages/observability/conventions.adoc new file mode 100644 index 0000000000..6c18493fd0 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/observability/conventions.adoc @@ -0,0 +1,11 @@ +[[observability-conventions]] += Conventions + +Below you can find a list of all `GlobalObservationConvention` and `ObservationConvention` declared by this project. + +.ObservationConvention implementations +|=== +|ObservationConvention Class Name | Applicable ObservationContext Class Name +|`org.springframework.data.mongodb.observability.DefaultMongoHandlerObservationConvention`|`MongoHandlerContext` +|`org.springframework.data.mongodb.observability.MongoHandlerObservationConvention`|`MongoHandlerContext` +|=== diff --git a/src/main/antora/modules/ROOT/pages/observability/metrics.adoc b/src/main/antora/modules/ROOT/pages/observability/metrics.adoc new file mode 100644 index 0000000000..8b5ce92a64 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/observability/metrics.adoc @@ -0,0 +1,39 @@ +[[observability-metrics]] += Metrics + +Below you can find a list of all metrics declared by this project. + +[[observability-metrics-mongodb-command-observation]] +== Mongodb Command Observation + +____ +Timer created around a MongoDB command execution. +____ + +**Metric name** `spring.data.mongodb.command`. **Type** `timer`. + +**Metric name** `spring.data.mongodb.command.active`. **Type** `long task timer`. 
+ +IMPORTANT: KeyValues that are added after starting the Observation might be missing from the *.active metrics. + +IMPORTANT: Micrometer internally uses `nanoseconds` for the baseunit. However, each backend determines the actual baseunit. (i.e. Prometheus uses seconds) + +Fully qualified name of the enclosing class `org.springframework.data.mongodb.observability.MongoObservation`. + +.Low cardinality Keys +[cols="a,a"] +|=== +|Name | Description +|`db.connection_string` _(required)_|MongoDB connection string. +|`db.mongodb.collection` _(required)_|MongoDB collection name. +|`db.name` _(required)_|MongoDB database name. +|`db.operation` _(required)_|MongoDB command value. +|`db.system` _(required)_|MongoDB database system. +|`db.user` _(required)_|MongoDB user. +|`net.peer.name` _(required)_|Name of the database host. +|`net.peer.port` _(required)_|Logical remote port number. +|`net.sock.peer.addr` _(required)_|Mongo peer address. +|`net.sock.peer.port` _(required)_|Mongo peer port. +|`net.transport` _(required)_|Network transport. +|`spring.data.mongodb.cluster_id` _(required)_|MongoDB cluster identifier. +|=== diff --git a/src/main/antora/modules/ROOT/pages/observability/observability.adoc b/src/main/antora/modules/ROOT/pages/observability/observability.adoc new file mode 100644 index 0000000000..8a9b0a1eeb --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/observability/observability.adoc @@ -0,0 +1,42 @@ +:root-target: ../../../../target/ + +[[mongodb.observability]] += Observability + +Spring Data MongoDB currently has the most up-to-date code to support Observability in your MongoDB application. +These changes, however, haven't been picked up by Spring Boot (yet). +Until those changes are applied, if you wish to use Spring Data MongoDB's flavor of Observability, you must carry out the following steps. + +. 
First of all, you must opt into Spring Data MongoDB's configuration settings by customizing `MongoClientSettings` through either your `@SpringBootApplication` class or one of your configuration classes. ++ +.Registering MongoDB Micrometer customizer setup +==== +[source,java] +---- +@Bean +MongoClientSettingsBuilderCustomizer mongoMetricsSynchronousContextProvider(ObservationRegistry registry) { + return (clientSettingsBuilder) -> { + clientSettingsBuilder.contextProvider(ContextProviderFactory.create(registry)) + .addCommandListener(new MongoObservationCommandListener(registry)); + }; +} +---- +==== ++ +. Your project must include *Spring Boot Actuator*. +. Disable Spring Boot's autoconfigured MongoDB command listener and enable tracing manually by adding the following properties to your `application.properties` ++ +.Custom settings to apply +==== +[source] +---- +# Disable Spring Boot's autoconfigured tracing +management.metrics.mongo.command.enabled=false +# Enable it manually +management.tracing.enabled=true +---- +Be sure to add any other relevant settings needed to configure the tracer you are using based upon Micrometer's reference documentation. +==== + +This should do it! You are now running with Spring Data MongoDB's usage of Spring Observability's `Observation` API. +See also https://opentelemetry.io/docs/reference/specification/trace/semantic_conventions/database/#mongodb[OpenTelemetry Semantic Conventions] for further reference. diff --git a/src/main/antora/modules/ROOT/pages/observability/spans.adoc b/src/main/antora/modules/ROOT/pages/observability/spans.adoc new file mode 100644 index 0000000000..8e79d33a86 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/observability/spans.adoc @@ -0,0 +1,30 @@ +[[observability-spans]] += Spans + +Below you can find a list of all spans declared by this project. + +[[observability-spans-mongodb-command-observation]] +== Mongodb Command Observation Span + +> Timer created around a MongoDB command execution. 
+ +**Span name** `spring.data.mongodb.command`. + +Fully qualified name of the enclosing class `org.springframework.data.mongodb.observability.MongoObservation`. + +.Tag Keys +|=== +|Name | Description +|`db.connection_string` _(required)_|MongoDB connection string. +|`db.mongodb.collection` _(required)_|MongoDB collection name. +|`db.name` _(required)_|MongoDB database name. +|`db.operation` _(required)_|MongoDB command value. +|`db.system` _(required)_|MongoDB database system. +|`db.user` _(required)_|MongoDB user. +|`net.peer.name` _(required)_|Name of the database host. +|`net.peer.port` _(required)_|Logical remote port number. +|`net.sock.peer.addr` _(required)_|Mongo peer address. +|`net.sock.peer.port` _(required)_|Mongo peer port. +|`net.transport` _(required)_|Network transport. +|`spring.data.mongodb.cluster_id` _(required)_|MongoDB cluster identifier. +|=== diff --git a/src/main/antora/modules/ROOT/pages/preface.adoc b/src/main/antora/modules/ROOT/pages/preface.adoc new file mode 100644 index 0000000000..d52509c81e --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/preface.adoc @@ -0,0 +1,105 @@ +[[requirements]] += Requirements + +The Spring Data MongoDB 4.x binaries require JDK level 17 and above and https://spring.io/docs[Spring Framework] {springVersion} and above. + +In terms of database and driver, you need at least version 4.x of https://www.mongodb.org/[MongoDB] and a compatible MongoDB Java Driver (5.2.x). + +[[compatibility.matrix]] +== Compatibility Matrix + +The following compatibility matrix summarizes Spring Data versions to MongoDB driver/database versions. +Database versions show server generations that pass the Spring Data test suite. +You can use newer server versions unless your application uses functionality that is affected by xref:preface.adoc#compatibility.changes[changes in the MongoDB server]. 
+See also the https://www.mongodb.com/docs/drivers/java/sync/current/compatibility/[official MongoDB driver compatibility matrix] for driver- and server version compatibility. + +==== +[cols="h,m,m,m", options="header"] +|=== + +|Spring Data Release Train +|Spring Data MongoDB +|Driver Version +|Database Versions + +|2025.0 +|4.5.x +|5.3.x +|6.x to 8.x + +|2024.1 +|4.4.x +|5.2.x +|4.4.x to 8.x + +|2024.0 +|4.3.x +|4.11.x & 5.x +|4.4.x to 7.x + +|2023.1 +|4.2.x +|4.9.x +|4.4.x to 7.x + +|2023.0 (*) +|4.1.x +|4.9.x +|4.4.x to 6.x + +|2022.0 (*) +|4.0.x +|4.7.x +|4.4.x to 6.x + +|2021.2 (*) +|3.4.x +|4.6.x +|4.4.x to 5.0.x + +|2021.1 (*) +|3.3.x +|4.4.x +|4.4.x to 5.0.x + +|2021.0 (*) +|3.2.x +|4.1.x +|4.4.x + +|2020.0 (*) +|3.1.x +|4.1.x +|4.4.x + +|Neumann (*) +|3.0.x +|4.0.x +|4.4.x + +|Moore (*) +|2.2.x +|3.11.x/Reactive Streams 1.12.x +|4.2.x + +|Lovelace (*) +|2.1.x +|3.8.x/Reactive Streams 1.9.x +|4.0.x + +|=== +(*) https://spring.io/projects/spring-data-mongodb#support[End of OSS Support] +==== + +[[compatibility.changes]] +[[compatibility.changes-4.4]] +=== Relevant Changes in MongoDB 4.4 + +* Fields list must not contain text search score property when no `$text` criteria present. See also https://docs.mongodb.com/manual/reference/operator/query/text/[`$text` operator] +* Sort must not be an empty document when running map reduce. + +[[compatibility.changes-4.2]] +=== Relevant Changes in MongoDB 4.2 + +* Removal of `geoNear` command. See also https://docs.mongodb.com/manual/release-notes/4.2-compatibility/#remove-support-for-the-geonear-command[Removal of `geoNear`] +* Removal of `eval` command. 
See also https://docs.mongodb.com/manual/release-notes/4.2-compatibility/#remove-support-for-the-eval-command[Removal of `eval`] diff --git a/src/main/antora/modules/ROOT/pages/repositories.adoc b/src/main/antora/modules/ROOT/pages/repositories.adoc new file mode 100644 index 0000000000..01b79f6c52 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories.adoc @@ -0,0 +1,8 @@ +[[mongodb.repositories]] += Repositories +:page-section-summary-toc: 1 + +This chapter explains the basic foundations of Spring Data repositories and MongoDB specifics. +Before continuing to the MongoDB specifics, make sure you have a sound understanding of the basic concepts. + +The goal of the Spring Data repository abstraction is to significantly reduce the amount of boilerplate code required to implement data access layers for various persistence stores. diff --git a/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc b/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc new file mode 100644 index 0000000000..1a4af7a60b --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc @@ -0,0 +1,12 @@ +include::{commons}@data-commons::page$repositories/core-concepts.adoc[] + +[[mongodb.entity-persistence.state-detection-strategies]] +include::{commons}@data-commons::page$is-new-state-detection.adoc[leveloffset=+1] + +[NOTE] +==== +MongoDB provides no means to generate identifiers upon inserting data. +As a consequence, entities must be associated with identifier values. +Spring Data defaults to identifier inspection to determine whether an entity is new. +If you want to use xref:mongodb/auditing.adoc[auditing] make sure to either use xref:mongodb/template-crud-operations.adoc#mongo-template.optimistic-locking[Optimistic Locking] or implement `Persistable` for proper entity state detection.
+==== diff --git a/src/main/antora/modules/ROOT/pages/repositories/core-domain-events.adoc b/src/main/antora/modules/ROOT/pages/repositories/core-domain-events.adoc new file mode 100644 index 0000000000..f84313e9da --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/core-domain-events.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/core-domain-events.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/core-extensions.adoc b/src/main/antora/modules/ROOT/pages/repositories/core-extensions.adoc new file mode 100644 index 0000000000..75dcea1e4f --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/core-extensions.adoc @@ -0,0 +1,242 @@ +[[core.extensions]] += Spring Data Extensions + +This section documents a set of Spring Data extensions that enable Spring Data usage in a variety of contexts. +Currently, most of the integration is targeted towards Spring MVC. + +include::{commons}@data-commons::page$repositories/core-extensions-querydsl.adoc[leveloffset=1] + +[[mongodb.repositories.queries.type-safe]] +=== Type-safe Query Methods with Querydsl + +The MongoDB repository and its reactive counterpart integrate with the http://www.querydsl.com/[Querydsl] project, which provides a way to perform type-safe queries. + +[quote,Querydsl Team] +Instead of writing queries as inline strings or externalizing them into XML files they are constructed via a fluent API. + +It provides the following features: + +* Code completion in the IDE (all properties, methods, and operations can be expanded in your favorite Java IDE). +* Almost no syntactically invalid queries allowed (type-safe on all levels). +* Domain types and properties can be referenced safely -- no strings involved! +* Adapts better to refactoring changes in domain types. +* Incremental query definition is easier.
+ +See the http://www.querydsl.com/static/querydsl/latest/reference/html/[Querydsl documentation] for how to bootstrap your environment for APT-based code generation using Maven or Ant. + +QueryDSL lets you write queries such as the following: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +QPerson person = QPerson.person; +List result = repository.findAll(person.address.zipCode.eq("C0123")); + +Page page = repository.findAll(person.lastname.contains("a"), + PageRequest.of(0, 2, Direction.ASC, "lastname")); +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +QPerson person = QPerson.person; + +Flux result = repository.findAll(person.address.zipCode.eq("C0123")); +---- +====== + +`QPerson` is a class that is generated by the Java annotation processor. +See xref:#mongodb.repositories.queries.type-safe.apt[Setting up Annotation Processing] for how to set up Annotation Processing with your Build System. +It is a `Predicate` that lets you write type-safe queries. +Notice that there are no strings in the query other than the `C0123` value. + +You can use the generated `Predicate` class by using the `QuerydslPredicateExecutor` / `ReactiveQuerydslPredicateExecutor` interface, which the following listing shows: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +public interface QuerydslPredicateExecutor { + + Optional findOne(Predicate predicate); + + List findAll(Predicate predicate); + + List findAll(Predicate predicate, Sort sort); + + List findAll(Predicate predicate, OrderSpecifier... orders); + + Page findAll(Predicate predicate, Pageable pageable); + + List findAll(OrderSpecifier... 
orders); + + long count(Predicate predicate); + + boolean exists(Predicate predicate); + + R findBy(Predicate predicate, Function, R> queryFunction); +} +---- + +Reactive:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +interface ReactiveQuerydslPredicateExecutor { + + Mono findOne(Predicate predicate); + + Flux findAll(Predicate predicate); + + Flux findAll(Predicate predicate, Sort sort); + + Flux findAll(Predicate predicate, OrderSpecifier... orders); + + Flux findAll(OrderSpecifier... orders); + + Mono count(Predicate predicate); + + Mono exists(Predicate predicate); + + > P findBy(Predicate predicate, + Function, P> queryFunction); +} +---- +====== + +To use this in your repository implementation, add it to the list of repository interfaces from which your interface inherits, as the following example shows: + +[tabs] +====== +Imperative:: ++ +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +interface PersonRepository extends MongoRepository, QuerydslPredicateExecutor { + + // additional query methods go here +} +---- + +Reactive:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- + +interface PersonRepository extends ReactiveMongoRepository, ReactiveQuerydslPredicateExecutor { + + // additional query methods go here +} +---- + +NOTE: Please note that joins (DBRef's) are not supported with Reactive MongoDB support. +==== +====== + +[[mongodb.repositories.queries.type-safe.apt]] +=== Setting up Annotation Processing + +To use Querydsl with Spring Data MongoDB, you need to set up annotation processing in your build system that generates the `Q` classes. +While you could write the `Q` classes by hand, it is recommended to use the Querydsl annotation processor to generate them for you to keep your `Q` classes in sync with your domain model. 
+ +Spring Data MongoDB ships with an annotation processor javadoc:org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor[] that isn't registered by default. +Typically, annotation processors are registered through Java's service loader via `META-INF/services/javax.annotation.processing.Processor` that also activates these once you have them on the class path. +Most Spring Data users do not use Querydsl, so it does not make sense to require additional mandatory dependencies for projects that would not benefit from Querydsl. +Hence, you need to activate annotation processing in your build system. + +The following example shows how to set up annotation processing by mentioning dependencies and compiler config changes in Maven and Gradle: + +[tabs] +====== +Maven:: ++ +[source,xml,indent=0,subs="verbatim,quotes",role="primary"] +---- + + + org.springframework.data + spring-data-mongodb + + + + com.querydsl + querydsl-mongodb + ${querydslVersion} + + + + + org.mongodb + mongo-java-driver + + + + + + com.querydsl + querydsl-apt + ${querydslVersion} + jakarta + provided + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + + + org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor + + + + + target/generated-test-sources + target/generated-sources + + + + +---- + +Gradle:: ++ +==== +[source,groovy,indent=0,subs="verbatim,quotes",role="secondary"] +---- +dependencies { + implementation 'com.querydsl:querydsl-mongodb:${querydslVersion}' + + annotationProcessor 'com.querydsl:querydsl-apt:${querydslVersion}:jakarta' + annotationProcessor 'org.springframework.data:spring-data-mongodb' + + testAnnotationProcessor 'com.querydsl:querydsl-apt:${querydslVersion}:jakarta' + testAnnotationProcessor 'org.springframework.data:spring-data-mongodb' +} + +tasks.withType(JavaCompile).configureEach { + options.compilerArgs += [ + "-processor", + "org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor"] +} +---- 
+==== +====== + +Note that the setup above shows the simplest usage omitting any other options or dependencies that your project might require. +This way of configuring annotation processing disables Java's annotation processor scanning because MongoDB requires specifying `-processor` by class name. +If you're using other annotation processors, you need to add them to the list of `-processor`/`annotationProcessors` as well. + +include::{commons}@data-commons::page$repositories/core-extensions-web.adoc[leveloffset=1] + +include::{commons}@data-commons::page$repositories/core-extensions-populators.adoc[leveloffset=1] diff --git a/src/main/antora/modules/ROOT/pages/repositories/create-instances.adoc b/src/main/antora/modules/ROOT/pages/repositories/create-instances.adoc new file mode 100644 index 0000000000..2ae01801b1 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/create-instances.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/create-instances.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/custom-implementations.adoc b/src/main/antora/modules/ROOT/pages/repositories/custom-implementations.adoc new file mode 100644 index 0000000000..c7615191a6 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/custom-implementations.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/custom-implementations.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/definition.adoc b/src/main/antora/modules/ROOT/pages/repositories/definition.adoc new file mode 100644 index 0000000000..bd65a8af83 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/definition.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/definition.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/null-handling.adoc b/src/main/antora/modules/ROOT/pages/repositories/null-handling.adoc new file mode 100644 index 0000000000..081bac9f61 --- /dev/null +++ 
b/src/main/antora/modules/ROOT/pages/repositories/null-handling.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/null-handling.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/projections.adoc b/src/main/antora/modules/ROOT/pages/repositories/projections.adoc new file mode 100644 index 0000000000..9302203c56 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/projections.adoc @@ -0,0 +1,2 @@ +[[mongodb.projections]] +include::{commons}@data-commons::page$repositories/projections.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-by-example.adoc b/src/main/antora/modules/ROOT/pages/repositories/query-by-example.adoc new file mode 100644 index 0000000000..6c07119c72 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/query-by-example.adoc @@ -0,0 +1,25 @@ +include::{commons}@data-commons::query-by-example.adoc[] + +[[query-by-example.running]] +== Running an Example + +The following example shows how to query by example when using a repository (of `Person` objects, in this case): + +.Query by Example using a repository +==== +[source, java] +---- +public interface PersonRepository extends QueryByExampleExecutor { + +} + +public class PersonService { + + @Autowired PersonRepository personRepository; + + public List findPeople(Person probe) { + return personRepository.findAll(Example.of(probe)); + } +} +---- +==== diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-keywords-reference.adoc b/src/main/antora/modules/ROOT/pages/repositories/query-keywords-reference.adoc new file mode 100644 index 0000000000..e495eddc6b --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/query-keywords-reference.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/query-keywords-reference.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-methods-details.adoc 
b/src/main/antora/modules/ROOT/pages/repositories/query-methods-details.adoc new file mode 100644 index 0000000000..614da0b059 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/query-methods-details.adoc @@ -0,0 +1,2 @@ +:feature-scroll: +include::{commons}@data-commons::page$repositories/query-methods-details.adoc[] diff --git a/src/main/antora/modules/ROOT/pages/repositories/query-return-types-reference.adoc b/src/main/antora/modules/ROOT/pages/repositories/query-return-types-reference.adoc new file mode 100644 index 0000000000..a73c3201d0 --- /dev/null +++ b/src/main/antora/modules/ROOT/pages/repositories/query-return-types-reference.adoc @@ -0,0 +1 @@ +include::{commons}@data-commons::page$repositories/query-return-types-reference.adoc[] diff --git a/src/main/antora/resources/antora-resources/antora.yml b/src/main/antora/resources/antora-resources/antora.yml new file mode 100644 index 0000000000..857dc03fbe --- /dev/null +++ b/src/main/antora/resources/antora-resources/antora.yml @@ -0,0 +1,23 @@ +version: ${antora-component.version} +prerelease: ${antora-component.prerelease} + +asciidoc: + attributes: + version: ${project.version} + copyright-year: ${current.year} + springversionshort: ${spring.short} + springversion: ${spring} + attribute-missing: 'warn' + commons: ${springdata.commons.docs} + include-xml-namespaces: false + spring-data-commons-docs-url: https://docs.spring.io/spring-data/commons/reference + spring-data-commons-javadoc-base: https://docs.spring.io/spring-data/commons/docs/${springdata.commons}/api/ + springdocsurl: https://docs.spring.io/spring-framework/reference/{springversionshort} + springjavadocurl: https://docs.spring.io/spring-framework/docs/${spring}/javadoc-api + spring-framework-docs: '{springdocsurl}' + spring-framework-javadoc: '{springjavadocurl}' + springhateoasversion: ${spring-hateoas} + releasetrainversion: ${releasetrain} + reactor: ${reactor} + mongoversion: ${mongo} + store: Mongo diff --git 
a/src/main/asciidoc/index.adoc b/src/main/asciidoc/index.adoc deleted file mode 100644 index 4166c6efff..0000000000 --- a/src/main/asciidoc/index.adoc +++ /dev/null @@ -1,49 +0,0 @@ -= Spring Data MongoDB - Reference Documentation -Mark Pollack; Thomas Risberg; Oliver Gierke; Costin Leau; Jon Brisbin; Thomas Darimont; Christoph Strobl; Mark Paluch -:revnumber: {version} -:revdate: {localdate} -:toc: -:toc-placement!: -:spring-data-commons-docs: ../../../../spring-data-commons/src/main/asciidoc - -(C) 2008-2018 The original authors. - -NOTE: _Copies of this document may be made for your own use and for distribution to others, provided that you do not charge any fee for such copies and further provided that each copy contains this Copyright Notice, whether distributed in print or electronically._ - -toc::[] - -include::preface.adoc[] - -:leveloffset: +1 -include::new-features.adoc[] -include::{spring-data-commons-docs}/dependencies.adoc[] -include::{spring-data-commons-docs}/repositories.adoc[] -:leveloffset: -1 - -[[reference]] -= Reference Documentation - -:leveloffset: +1 -include::reference/introduction.adoc[] -include::reference/mongodb.adoc[] -include::reference/reactive-mongodb.adoc[] -include::reference/mongo-repositories.adoc[] -include::reference/reactive-mongo-repositories.adoc[] -include::{spring-data-commons-docs}/auditing.adoc[] -include::reference/mongo-auditing.adoc[] -include::reference/mapping.adoc[] -include::reference/cross-store.adoc[] -include::reference/jmx.adoc[] -include::reference/mongo-3.adoc[] -:leveloffset: -1 - -[[appendix]] -= Appendix - -:numbered!: -:leveloffset: +1 -include::{spring-data-commons-docs}/repository-namespace-reference.adoc[] -include::{spring-data-commons-docs}/repository-populator-namespace-reference.adoc[] -include::{spring-data-commons-docs}/repository-query-keywords-reference.adoc[] -include::{spring-data-commons-docs}/repository-query-return-types-reference.adoc[] -:leveloffset: -1 diff --git 
a/src/main/asciidoc/new-features.adoc b/src/main/asciidoc/new-features.adoc deleted file mode 100644 index d7628f27a0..0000000000 --- a/src/main/asciidoc/new-features.adoc +++ /dev/null @@ -1,71 +0,0 @@ -[[new-features]] -= New & Noteworthy - -[[new-features.2-1-0]] -== What's new in Spring Data MongoDB 2.1 -* Cursor-based aggregation execution. -* <> for imperative and reactive Template API. -* <>. -* <> for queries and collection creation. -* <> for imperative and reactive drivers. -* Tailable cursors for imperative driver. - -[[new-features.2-0-0]] -== What's new in Spring Data MongoDB 2.0 -* Upgrade to Java 8. -* Usage of the `Document` API instead of `DBObject`. -* <>. -* <> queries. -* Support for aggregation result streaming via Java 8 `Stream`. -* <> for CRUD and aggregation operations. -* Kotlin extensions for Template and Collection API. -* Integration of collations for collection and index creation and query operations. -* Query-by-Example support without type matching. -* Add support for isolation ``Update``s. -* Tooling support for null-safety via Spring's `@NonNullApi` and `@Nullable` annotations. -* Deprecated cross-store support and removed Log4j appender. - -[[new-features.1-10-0]] -== What's new in Spring Data MongoDB 1.10 -* Compatible with MongoDB Server 3.4 and the MongoDB Java Driver 3.4. -* New annotations for `@CountQuery`, `@DeleteQuery` and `@ExistsQuery`. -* Extended support for MongoDB 3.2 and MongoDB 3.4 aggregation operators (see <>). -* Support partial filter expression when creating indexes. -* Publish lifecycle events when loading/converting ``DBRef``s. -* Added any-match mode for Query By Example. -* Support for `$caseSensitive` and `$diacriticSensitive` text search. -* Support for GeoJSON Polygon with hole. -* Performance improvements by bulk fetching ``DBRef``s. -* Multi-faceted aggregations using `$facet`, `$bucket` and `$bucketAuto` via `Aggregation`. 
- -[[new-features.1-9-0]] -== What's new in Spring Data MongoDB 1.9 -* The following annotations have been enabled to build own, composed annotations: `@Document`, `@Id`, `@Field`, `@Indexed`, `@CompoundIndex`, `@GeoSpatialIndexed`, `@TextIndexed`, `@Query`, `@Meta`. -* Support for <> in repository query methods. -* Support for <>. -* Out-of-the-box support for `java.util.Currency` in object mapping. -* Add support for the bulk operations introduced in MongoDB 2.6. -* Upgrade to Querydsl 4. -* Assert compatibility with MongoDB 3.0 and MongoDB Java Driver 3.2 (see: <>). - -[[new-features.1-8-0]] -== What's new in Spring Data MongoDB 1.8 - -* `Criteria` offers support for creating `$geoIntersects`. -* Support http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/core.html#expressions[SpEL expressions] in `@Query`. -* `MongoMappingEvents` expose the collection name they are issued for. -* Improved support for ``. -* Improved index creation failure error message. - -[[new-features.1-7-0]] -== What's new in Spring Data MongoDB 1.7 - -* Assert compatibility with MongoDB 3.0 and MongoDB Java Driver 3-beta3 (see: <>). -* Support JSR-310 and ThreeTen back-port date/time types. -* Allow `Stream` as query method return type (see: <>). -* Added http://geojson.org/[GeoJSON] support in both domain types and queries (see: <>). -* `QueryDslPredicateExcecutor` now supports `findAll(OrderSpecifier… orders)`. -* Support calling JavaScript functions via <>. -* Improve support for `CONTAINS` keyword on collection like properties. -* Support for `$bit`, `$mul` and `$position` operators to `Update`. - diff --git a/src/main/asciidoc/preface.adoc b/src/main/asciidoc/preface.adoc deleted file mode 100644 index fd7c8ef111..0000000000 --- a/src/main/asciidoc/preface.adoc +++ /dev/null @@ -1,59 +0,0 @@ -[[preface]] -= Preface - -The Spring Data MongoDB project applies core Spring concepts to the development of solutions using the MongoDB document style data store. 
We provide a "template" as a high-level abstraction for storing and querying documents. You will notice similarities to the JDBC support in the Spring Framework. - -This document is the reference guide for Spring Data - Document Support. It explains Document module concepts and semantics and the syntax for various store namespaces. - -This section provides some basic introduction to Spring and Document databases. The rest of the document refers only to Spring Data MongoDB features and assumes the user is familiar with MongoDB and Spring concepts. - -[[get-started:first-steps:spring]] -== Knowing Spring -Spring Data uses Spring framework's http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/core.html[core] functionality, such as the http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/core.html#beans[IoC] container, http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/core.html#validation[type conversion system], http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/core.html#expressions[expression language], http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/integration.html#jmx[JMX integration], and portable http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/data-access.html#dao-exceptions[DAO exception hierarchy]. While it is not important to know the Spring APIs, understanding the concepts behind them is. At a minimum, the idea behind IoC should be familiar for whatever IoC container you choose to use. - -The core functionality of the MongoDB support can be used directly, with no need to invoke the IoC services of the Spring Container. This is much like `JdbcTemplate` which can be used 'standalone' without any other services of the Spring container. To leverage all the features of Spring Data MongoDB, such as the repository support, you will need to configure some parts of the library using Spring. 
- -To learn more about Spring, you can refer to the comprehensive (and sometimes disarming) documentation that explains in detail the Spring Framework. There are a lot of articles, blog entries and books on the matter - take a look at the Spring framework http://spring.io/docs[home page] for more information. - -[[get-started:first-steps:nosql]] -== Knowing NoSQL and Document databases -NoSQL stores have taken the storage world by storm. It is a vast domain with a plethora of solutions, terms and patterns (to make things worse even the term itself has multiple http://www.google.com/search?q=nosoql+acronym[meanings]). While some of the principles are common, it is crucial that the user is familiar to some degree with MongoDB. The best way to get acquainted to this solutions is to read their documentation and follow their examples - it usually doesn't take more then 5-10 minutes to go through them and if you are coming from an RDMBS-only background many times these exercises can be an eye opener. - -The jumping off ground for learning about MongoDB is http://www.mongodb.org/[www.mongodb.org]. Here is a list of other useful resources: - -* The http://docs.mongodb.org/manual/[manual] introduces MongoDB and contains links to getting started guides, reference documentation and tutorials. -* The http://try.mongodb.org/[online shell] provides a convenient way to interact with a MongoDB instance in combination with the online http://docs.mongodb.org/manual/tutorial/getting-started/[tutorial.] -* MongoDB http://docs.mongodb.org/ecosystem/drivers/java/[Java Language Center] -* Several http://www.mongodb.org/books[books] available for purchase -* Karl Seguin's online book: http://openmymind.net/mongodb.pdf[The Little MongoDB Book] - -[[requirements]] -== Requirements - -Spring Data MongoDB 1.x binaries requires JDK level 6.0 and above, and http://spring.io/docs[Spring Framework] {springVersion} and above. 
- -In terms of document stores, http://www.mongodb.org/[MongoDB] at least 2.6. - -== Additional Help Resources - -Learning a new framework is not always straight forward. In this section, we try to provide what we think is an easy to follow guide for starting with Spring Data MongoDB module. However, if you encounter issues or you are just looking for an advice, feel free to use one of the links below: - -[[get-started:help]] -=== Support - -There are a few support options available: - -[[get-started:help:community]] -==== Community Forum - -Spring Data on Stackoverflow http://stackoverflow.com/questions/tagged/spring-data[Stackoverflow] is a tag for all Spring Data (not just Document) users to share information and help each other. Note that registration is needed *only* for posting. - -[[get-started:help:professional]] -==== Professional Support - -Professional, from-the-source support, with guaranteed response time, is available from http://pivotal.io/[Pivotal Sofware, Inc.], the company behind Spring Data and Spring. - -[[get-started:up-to-date]] -=== Following Development - -For information on the Spring Data Mongo source code repository, nightly builds and snapshot artifacts please see the http://projects.spring.io/spring-data-mongodb/[Spring Data Mongo homepage]. You can help make Spring Data best serve the needs of the Spring community by interacting with developers through the Community on http://stackoverflow.com/questions/tagged/spring-data[Stackoverflow]. To follow developer activity look for the mailing list information on the Spring Data Mongo homepage. If you encounter a bug or want to suggest an improvement, please create a ticket on the Spring Data issue https://jira.spring.io/browse/DATAMONGO[tracker]. To stay up to date with the latest news and announcements in the Spring eco system, subscribe to the Spring Community http://spring.io[Portal]. 
Lastly, you can follow the Spring http://spring.io/blog[blog ]or the project team on Twitter (http://twitter.com/SpringData[SpringData]). diff --git a/src/main/asciidoc/reference/cross-store.adoc b/src/main/asciidoc/reference/cross-store.adoc deleted file mode 100644 index 763837d627..0000000000 --- a/src/main/asciidoc/reference/cross-store.adoc +++ /dev/null @@ -1,252 +0,0 @@ -[[mongo.cross.store]] -= Cross Store support - -WARNING: Deprecated - will be removed without replacement. - -Sometimes you need to store data in multiple data stores and these data stores can be of different types. One might be relational while the other a document store. For this use case we have created a separate module in the MongoDB support that handles what we call cross-store support. The current implementation is based on JPA as the driver for the relational database and we allow select fields in the Entities to be stored in a Mongo database. In addition to allowing you to store your data in two stores we also coordinate persistence operations for the non-transactional MongoDB store with the transaction life-cycle for the relational database. - -[[mongodb_cross-store-configuration]] -== Cross Store Configuration - -Assuming that you have a working JPA application and would like to add some cross-store persistence for MongoDB. What do you have to add to your configuration? - -First of all you need to add a dependency on the module. Using Maven this is done by adding a dependency to your pom: - -.Example Maven pom.xml with spring-data-mongodb-cross-store dependency -==== -[source,xml] ----- - - 4.0.0 - - ... - - - - org.springframework.data - spring-data-mongodb-cross-store - ${spring.data.mongo.version} - - - ... - - ----- -==== - -Once this is done we need to enable AspectJ for the project. The cross-store support is implemented using AspectJ aspects so by enabling compile time AspectJ support the cross-store features will become available to your project. 
In Maven you would add an additional plugin to the section of the pom: - -.Example Maven pom.xml with AspectJ plugin enabled -==== -[source,xml] ----- - - 4.0.0 - - ... - - - - - … - - - org.codehaus.mojo - aspectj-maven-plugin - 1.0 - - - - org.aspectj - aspectjrt - ${aspectj.version} - - - org.aspectj - aspectjtools - ${aspectj.version} - - - - - - compile - test-compile - - - - - true - - - org.springframework - spring-aspects - - - org.springframework.data - spring-data-mongodb-cross-store - - - 1.6 - 1.6 - - - - ... - - - - -... - - ----- -==== - -Finally, you need to configure your project to use MongoDB and also configure the aspects that are used. The following XML snippet should be added to your application context: - -.Example application context with MongoDB and cross-store aspect support -==== -[source,xml] ----- - - - - ... - - - - - - - - - - - - - - - - - - - - - - ... - - ----- -==== - -[[mongodb_cross-store-application]] -== Writing the Cross Store Application - -We are assuming that you have a working JPA application so we will only cover the additional steps needed to persist part of your Entity in your Mongo database. First you need to identify the field you want persisted. It should be a domain class and follow the general rules for the Mongo mapping support covered in previous chapters. The field you want persisted in MongoDB should be annotated using the `@RelatedDocument` annotation. That is really all you need to do!. The cross-store aspects take care of the rest. This includes marking the field with `@Transient` so it won't be persisted using JPA, keeping track of any changes made to the field value and writing them to the database on successful transaction completion, loading the document from MongoDB the first time the value is used in your application. Here is an example of a simple Entity that has a field annotated with `@RelatedDocument`. 
- -.Example of Entity with @RelatedDocument -==== -[source,java] ----- -@Entity -public class Customer { - - @Id - @GeneratedValue(strategy = GenerationType.IDENTITY) - private Long id; - - private String firstName; - - private String lastName; - - @RelatedDocument - private SurveyInfo surveyInfo; - - // getters and setters omitted -} ----- -==== - -.Example of domain class to be stored as document -==== -[source,java] ----- -public class SurveyInfo { - - private Map questionsAndAnswers; - - public SurveyInfo() { - this.questionsAndAnswers = new HashMap(); - } - - public SurveyInfo(Map questionsAndAnswers) { - this.questionsAndAnswers = questionsAndAnswers; - } - - public Map getQuestionsAndAnswers() { - return questionsAndAnswers; - } - - public void setQuestionsAndAnswers(Map questionsAndAnswers) { - this.questionsAndAnswers = questionsAndAnswers; - } - - public SurveyInfo addQuestionAndAnswer(String question, String answer) { - this.questionsAndAnswers.put(question, answer); - return this; - } -} ----- -==== - -Once the SurveyInfo has been set on the Customer object above the MongoTemplate that was configured above is used to save the SurveyInfo along with some metadata about the JPA Entity is stored in a MongoDB collection named after the fully qualified name of the JPA Entity class. The following code: - -.Example of code using the JPA Entity configured for cross-store persistence -==== -[source,java] ----- -Customer customer = new Customer(); -customer.setFirstName("Sven"); -customer.setLastName("Olafsen"); -SurveyInfo surveyInfo = new SurveyInfo() - .addQuestionAndAnswer("age", "22") - .addQuestionAndAnswer("married", "Yes") - .addQuestionAndAnswer("citizenship", "Norwegian"); -customer.setSurveyInfo(surveyInfo); -customerRepository.save(customer); ----- -==== - -Executing the code above results in the following JSON document stored in MongoDB. 
- -.Example of JSON document stored in MongoDB -==== -[source,javascript] ----- -{ "_id" : ObjectId( "4d9e8b6e3c55287f87d4b79e" ), - "_entity_id" : 1, - "_entity_class" : "org.springframework.data.mongodb.examples.custsvc.domain.Customer", - "_entity_field_name" : "surveyInfo", - "questionsAndAnswers" : { "married" : "Yes", - "age" : "22", - "citizenship" : "Norwegian" }, - "_entity_field_class" : "org.springframework.data.mongodb.examples.custsvc.domain.SurveyInfo" } ----- -==== diff --git a/src/main/asciidoc/reference/introduction.adoc b/src/main/asciidoc/reference/introduction.adoc deleted file mode 100644 index ff14c7a69c..0000000000 --- a/src/main/asciidoc/reference/introduction.adoc +++ /dev/null @@ -1,11 +0,0 @@ -[[introduction]] -= Introduction - -== Document Structure - -This part of the reference documentation explains the core functionality offered by Spring Data MongoDB. - -<> introduces the MongoDB module feature set. - -<> introduces the repository support for MongoDB. - diff --git a/src/main/asciidoc/reference/jmx.adoc b/src/main/asciidoc/reference/jmx.adoc deleted file mode 100644 index 474319c6aa..0000000000 --- a/src/main/asciidoc/reference/jmx.adoc +++ /dev/null @@ -1,64 +0,0 @@ -[[mongo.jmx]] -= JMX support - -The JMX support for MongoDB exposes the results of executing the 'serverStatus' command on the admin database for a single MongoDB server instance. It also exposes an administrative MBean, MongoAdmin which will let you perform administrative operations such as drop or create a database. The JMX features build upon the JMX feature set available in the Spring Framework. See http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/integration.html#jmx[here ] for more details. 
- -[[mongodb:jmx-configuration]] -== MongoDB JMX Configuration - -Spring's Mongo namespace enables you to easily enable JMX functionality - -.XML schema to configure MongoDB -==== -[source,xml] ----- - - - - - - - - - - - - - - - - - - - - ----- -==== - -This will expose several MBeans - -* AssertMetrics -* BackgroundFlushingMetrics -* BtreeIndexCounters -* ConnectionMetrics -* GlobalLoclMetrics -* MemoryMetrics -* OperationCounters -* ServerInfo -* MongoAdmin - -This is shown below in a screenshot from JConsole - -image::jconsole.png[] \ No newline at end of file diff --git a/src/main/asciidoc/reference/mapping.adoc b/src/main/asciidoc/reference/mapping.adoc deleted file mode 100644 index f799f1e971..0000000000 --- a/src/main/asciidoc/reference/mapping.adoc +++ /dev/null @@ -1,638 +0,0 @@ -[[mapping-chapter]] -= Mapping - -Rich mapping support is provided by the `MappingMongoConverter`. `MappingMongoConverter` has a rich metadata model that provides a full feature set of functionality to map domain objects to MongoDB documents.The mapping metadata model is populated using annotations on your domain objects. However, the infrastructure is not limited to using annotations as the only source of metadata information. The `MappingMongoConverter` also allows you to map objects to documents without providing any additional metadata, by following a set of conventions. - -In this section we will describe the features of the `MappingMongoConverter`. How to use conventions for mapping objects to documents and how to override those conventions with annotation based mapping metadata. - -[[mapping-conventions]] -== Convention based Mapping - -`MappingMongoConverter` has a few conventions for mapping objects to documents when no additional mapping metadata is provided. The conventions are: - -* The short Java class name is mapped to the collection name in the following manner. The class `com.bigbank.SavingsAccount` maps to `savingsAccount` collection name. 
-* All nested objects are stored as nested objects in the document and *not* as DBRefs -* The converter will use any Spring Converters registered with it to override the default mapping of object properties to document field/values. -* The fields of an object are used to convert to and from fields in the document. Public JavaBean properties are not used. -* You can have a single non-zero argument constructor whose constructor argument names match top level field names of document, that constructor will be used. Otherwise the zero arg constructor will be used. if there is more than one non-zero argument constructor an exception will be thrown. - -[[mapping.conventions.id-field]] -=== How the `_id` field is handled in the mapping layer - -MongoDB requires that you have an `_id` field for all documents. If you don't provide one the driver will assign a ObjectId with a generated value. The "_id" field can be of any type the, other than arrays, so long as it is unique. The driver naturally supports all primitive types and Dates. When using the `MappingMongoConverter` there are certain rules that govern how properties from the Java class is mapped to this `_id` field. - -The following outlines what field will be mapped to the `_id` document field: - -* A field annotated with `@Id` (`org.springframework.data.annotation.Id`) will be mapped to the `_id` field. -* A field without an annotation but named `id` will be mapped to the `_id` field. -* The default field name for identifiers is `_id` and can be customized via the `@Field` annotation. 
- -[cols="1,2", options="header"] -.Examples for the translation of `_id` field definitions -|=== -| Field definition -| Resulting Id-Fieldname in MongoDB - -| `String` id -| `_id` - -| `@Field` `String` id -| `_id` - -| `@Field("x")` `String` id -| `x` - -| `@Id` `String` x -| `_id` - -| `@Field("x")` `@Id` `String` x -| `_id` -|=== - -The following outlines what type conversion, if any, will be done on the property mapped to the _id document field. - -* If a field named `id` is declared as a String or BigInteger in the Java class it will be converted to and stored as an ObjectId if possible. ObjectId as a field type is also valid. If you specify a value for `id` in your application, the conversion to an ObjectId is detected to the MongoDBdriver. If the specified `id` value cannot be converted to an ObjectId, then the value will be stored as is in the document's _id field. -* If a field named `id` id field is not declared as a String, BigInteger, or ObjectID in the Java class then you should assign it a value in your application so it can be stored 'as-is' in the document's _id field. -* If no field named `id` is present in the Java class then an implicit `_id` file will be generated by the driver but not mapped to a property or field of the Java class. - -When querying and updating `MongoTemplate` will use the converter to handle conversions of the `Query` and `Update` objects that correspond to the above rules for saving documents so field names and types used in your queries will be able to match what is in your domain classes. - -[[mapping-conversion]] -== Data mapping and type conversion - -This section explain how types are mapped to a MongoDB representation and vice versa. Spring Data MongoDB supports all types that can be represented as BSON, MongoDB's internal document format. -In addition to these types, Spring Data MongoDB provides a set of built-in converters to map additional types. 
You can provide your own converters to adjust type conversion, see <> for further details. - -[cols="3,1,6", options="header"] -.Type -|=== -| Type -| Type conversion -| Sample - -| `String` -| native -| `{"firstname" : "Dave"}` - -| `double`, `Double`, `float`, `Float` -| native -| `{"weight" : 42.5}` - -| `int`, `Integer`, `short`, `Short` -| native + -32-bit integer -| `{"height" : 42}` - -| `long`, `Long` -| native + -64-bit integer -| `{"height" : 42}` - -| `Date`, `Timestamp` -| native -| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}` - -| `byte[]` -| native -| `{"bin" : { "$binary" : "AQIDBA==", "$type" : "00" }}` - -| `java.util.UUID` (Legacy UUID) -| native -| `{"uuid" : { "$binary" : "MEaf1CFQ6lSphaa3b9AtlA==", "$type" : "03" }}` - -| `Date` -| native -| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}` - -| `ObjectId` -| native -| `{"_id" : ObjectId("5707a2690364aba3136ab870")}` - -| Array, `List`, `BasicDBList` -| native -| `{"cookies" : [ … ]}` - -| `boolean`, `Boolean` -| native -| `{"active" : true}` - -| `null` -| native -| `{"value" : null}` - -| `Document` -| native -| `{"value" : { … }}` - -| `Decimal128` -| native -| `{"value" : NumberDecimal(…)}` - -| `AtomicInteger` + -calling `get()` before the actual conversion -| converter + -32-bit integer -| `{"value" : "741" }` - -| `AtomicLong` + -calling `get()` before the actual conversion -| converter + -64-bit integer -| `{"value" : "741" }` - -| `BigInteger` -| converter + -`String` -| `{"value" : "741" }` - -| `BigDecimal` -| converter + -`String` -| `{"value" : "741.99" }` - -| `URL` -| converter -| `{"website" : "http://projects.spring.io/spring-data-mongodb/" }` - -| `Locale` -| converter -| `{"locale : "en_US" }` - -| `char`, `Character` -| converter -| `{"char" : "a" }` - -| `NamedMongoScript` -| converter + -`Code` -| `{"_id" : "script name", value: (some javascript code)`} - -| `java.util.Currency` -| converter -| `{"currencyCode" : "EUR"}` - -| `LocalDate` + -(Joda, Java 8, 
JSR310-BackPort) -| converter -| `{"date" : ISODate("2019-11-12T00:00:00.000Z")}` - -| `LocalDateTime`, `LocalTime`, `Instant` + -(Joda, Java 8, JSR310-BackPort) -| converter -| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}` - -| `DateTime` (Joda) -| converter -| `{"date" : ISODate("2019-11-12T23:00:00.809Z")}` - -| `ZoneId` (Java 8, JSR310-BackPort) -| converter -| `{"zoneId" : "ECT - Europe/Paris"}` - -| `Box` -| converter -| `{"box" : { "first" : { "x" : 1.0 , "y" : 2.0} , "second" : { "x" : 3.0 , "y" : 4.0}}` - -| `Polygon` -| converter -| `{"polygon" : { "points" : [ { "x" : 1.0 , "y" : 2.0} , { "x" : 3.0 , "y" : 4.0} , { "x" : 4.0 , "y" : 5.0}]}}` - -| `Circle` -| converter -| `{"circle" : { "center" : { "x" : 1.0 , "y" : 2.0} , "radius" : 3.0 , "metric" : "NEUTRAL"}}` - -| `Point` -| converter -| `{"point" : { "x" : 1.0 , "y" : 2.0}}` - -| `GeoJsonPoint` -| converter -| `{"point" : { "type" : "Point" , "coordinates" : [3.0 , 4.0] }}` - -| `GeoJsonMultiPoint` -| converter -| `{"geoJsonLineString" : {"type":"MultiPoint", "coordinates": [ [ 0 , 0 ], [ 0 , 1 ], [ 1 , 1 ] ] }}` - -| `Sphere` -| converter -| `{"sphere" : { "center" : { "x" : 1.0 , "y" : 2.0} , "radius" : 3.0 , "metric" : "NEUTRAL"}}` - -| `GeoJsonPolygon` -| converter -| `{"polygon" : { "type" : "Polygon", "coordinates" : [[ [ 0 , 0 ], [ 3 , 6 ], [ 6 , 1 ], [ 0 , 0 ] ]] }}` - -| `GeoJsonMultiPolygon` -| converter -| `{"geoJsonMultiPolygon" : { "type" : "MultiPolygon", "coordinates" : [ - [ [ [ -73.958 , 40.8003 ] , [ -73.9498 , 40.7968 ] ] ], - [ [ [ -73.973 , 40.7648 ] , [ -73.9588 , 40.8003 ] ] ] - ] }}` - -| `GeoJsonLineString` -| converter -| `{ "geoJsonLineString" : { "type" : "LineString", "coordinates" : [ [ 40 , 5 ], [ 41 , 6 ] ] }}` - -| `GeoJsonMultiLineString` -| converter -| `{"geoJsonLineString" : { "type" : "MultiLineString", coordinates: [ - [ [ -73.97162 , 40.78205 ], [ -73.96374 , 40.77715 ] ], - [ [ -73.97880 , 40.77247 ], [ -73.97036 , 40.76811 ] ] - ] }}` -|=== - - 
-[[mapping-configuration]] -== Mapping Configuration - -Unless explicitly configured, an instance of `MappingMongoConverter` is created by default when creating a `MongoTemplate`. You can create your own instance of the `MappingMongoConverter` so as to tell it where to scan the classpath at startup your domain classes in order to extract metadata and construct indexes. Also, by creating your own instance you can register Spring converters to use for mapping specific classes to and from the database. - -You can configure the `MappingMongoConverter` as well as `com.mongodb.MongoClient` and MongoTemplate either using Java or XML based metadata. Here is an example using Spring's Java based configuration - -.@Configuration class to configure MongoDB mapping support -==== -[source,java] ----- -@Configuration -public class GeoSpatialAppConfig extends AbstractMongoConfiguration { - - @Bean - public MongoClient mongoClient() { - return new MongoClient("localhost"); - } - - @Override - public String getDatabaseName() { - return "database"; - } - - @Override - public String getMappingBasePackage() { - return "com.bigbank.domain"; - } - - // the following are optional - - - @Bean - @Override - public CustomConversions customConversions() throws Exception { - List> converterList = new ArrayList>(); - converterList.add(new org.springframework.data.mongodb.test.PersonReadConverter()); - converterList.add(new org.springframework.data.mongodb.test.PersonWriteConverter()); - return new CustomConversions(converterList); - } - - @Bean - public LoggingEventListener mappingEventsListener() { - return new LoggingEventListener(); - } -} ----- -==== - -`AbstractMongoConfiguration` requires you to implement methods that define a `com.mongodb.MongoClient` as well as provide a database name. `AbstractMongoConfiguration` also has a method you can override named `getMappingBasePackage(…)` which tells the converter where to scan for classes annotated with the `@Document` annotation. 
- -You can add additional converters to the converter by overriding the method afterMappingMongoConverterCreation. Also shown in the above example is a `LoggingEventListener` which logs `MongoMappingEvent` s that are posted onto Spring's `ApplicationContextEvent` infrastructure. - -NOTE: AbstractMongoConfiguration will create a MongoTemplate instance and registered with the container under the name `mongoTemplate`. - -You can also override the method `UserCredentials getUserCredentials()` to provide the username and password information to connect to the database. - -Spring's MongoDB namespace enables you to easily enable mapping functionality in XML - -.XML schema to configure MongoDB mapping support -==== -[source,xml] ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ----- -==== - -The `base-package` property tells it where to scan for classes annotated with the `@org.springframework.data.mongodb.core.mapping.Document` annotation. - -[[mapping-usage]] -== Metadata based Mapping - -To take full advantage of the object mapping functionality inside the Spring Data/MongoDB support, you should annotate your mapped objects with the `@Document` annotation. Although it is not necessary for the mapping framework to have this annotation (your POJOs will be mapped correctly, even without any annotations), it allows the classpath scanner to find and pre-process your domain objects to extract the necessary metadata. If you don't use this annotation, your application will take a slight performance hit the first time you store a domain object because the mapping framework needs to build up its internal metadata model so it knows about the properties of your domain object and how to persist them. 
- -.Example domain object -==== -[source,java] ----- -package com.mycompany.domain; - -@Document -public class Person { - - @Id - private ObjectId id; - - @Indexed - private Integer ssn; - - private String firstName; - - @Indexed - private String lastName; -} ----- -==== - -IMPORTANT: The `@Id` annotation tells the mapper which property you want to use for the MongoDB `_id` property and the `@Indexed` annotation tells the mapping framework to call `createIndex(…)` on that property of your document, making searches faster. - -IMPORTANT: Automatic index creation is only done for types annotated with `@Document`. - -[[mapping-usage-annotations]] -=== Mapping annotation overview - -The MappingMongoConverter can use metadata to drive the mapping of objects to documents. An overview of the annotations is provided below - -* `@Id` - applied at the field level to mark the field used for identity purpose. -* `@Document` - applied at the class level to indicate this class is a candidate for mapping to the database. You can specify the name of the collection where the database will be stored. -* `@DBRef` - applied at the field to indicate it is to be stored using a com.mongodb.DBRef. -* `@Indexed` - applied at the field level to describe how to index the field. -* `@CompoundIndex` - applied at the type level to declare Compound Indexes -* `@GeoSpatialIndexed` - applied at the field level to describe how to geoindex the field. -* `@TextIndexed` - applied at the field level to mark the field to be included in the text index. -* `@Language` - applied at the field level to set the language override property for text index. -* `@Transient` - by default all private fields are mapped to the document, this annotation excludes the field where it is applied from being stored in the database -* `@PersistenceConstructor` - marks a given constructor - even a package protected one - to use when instantiating the object from the database. 
Constructor arguments are mapped by name to the key values in the retrieved Document. -* `@Value` - this annotation is part of the Spring Framework . Within the mapping framework it can be applied to constructor arguments. This lets you use a Spring Expression Language statement to transform a key's value retrieved in the database before it is used to construct a domain object. In order to reference a property of a given document one has to use expressions like: `@Value("#root.myProperty")` where `root` refers to the root of the given document. -* `@Field` - applied at the field level and described the name of the field as it will be represented in the MongoDB BSON document thus allowing the name to be different than the fieldname of the class. -* `@Version` - applied at field level is used for optimistic locking and checked for modification on save operations. The initial value is `zero` which is bumped automatically on every update. - -The mapping metadata infrastructure is defined in a separate spring-data-commons project that is technology agnostic. Specific subclasses are using in the MongoDB support to support annotation based metadata. Other strategies are also possible to put in place if there is demand. - -Here is an example of a more complex mapping. 
- -[source,java] ----- -@Document -@CompoundIndexes({ - @CompoundIndex(name = "age_idx", def = "{'lastName': 1, 'age': -1}") -}) -public class Person { - - @Id - private String id; - - @Indexed(unique = true) - private Integer ssn; - - @Field("fName") - private String firstName; - - @Indexed - private String lastName; - - private Integer age; - - @Transient - private Integer accountTotal; - - @DBRef - private List accounts; - - private T address; - - - public Person(Integer ssn) { - this.ssn = ssn; - } - - @PersistenceConstructor - public Person(Integer ssn, String firstName, String lastName, Integer age, T address) { - this.ssn = ssn; - this.firstName = firstName; - this.lastName = lastName; - this.age = age; - this.address = address; - } - - public String getId() { - return id; - } - - // no setter for Id. (getter is only exposed for some unit testing) - - public Integer getSsn() { - return ssn; - } - -// other getters/setters omitted ----- - -[[mapping-custom-object-construction]] -=== Customized Object Construction - -The mapping subsystem allows the customization of the object construction by annotating a constructor with the `@PersistenceConstructor` annotation. The values to be used for the constructor parameters are resolved in the following way: - -* If a parameter is annotated with the `@Value` annotation, the given expression is evaluated and the result is used as the parameter value. -* If the Java type has a property whose name matches the given field of the input document, then it's property information is used to select the appropriate constructor parameter to pass the input field value to. This works only if the parameter name information is present in the java `.class` files which can be achieved by compiling the source with debug information or using the new `-parameters` command-line switch for javac in Java 8. -* Otherwise a `MappingException` will be thrown indicating that the given constructor parameter could not be bound. 
- -[source,java] ----- -class OrderItem { - - private @Id String id; - private int quantity; - private double unitPrice; - - OrderItem(String id, @Value("#root.qty ?: 0") int quantity, double unitPrice) { - this.id = id; - this.quantity = quantity; - this.unitPrice = unitPrice; - } - - // getters/setters ommitted -} - -Document input = new Document("id", "4711"); -input.put("unitPrice", 2.5); -input.put("qty",5); -OrderItem item = converter.read(OrderItem.class, input); ----- - -NOTE: The SpEL expression in the `@Value` annotation of the `quantity` parameter falls back to the value `0` if the given property path cannot be resolved. - -Additional examples for using the `@PersistenceConstructor` annotation can be found in the https://github.com/spring-projects/spring-data-mongodb/blob/master/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java[MappingMongoConverterUnitTests] test suite. - -[[mapping-usage-indexes.compound-index]] -=== Compound Indexes - -Compound indexes are also supported. They are defined at the class level, rather than on individual properties. - -NOTE: Compound indexes are very important to improve the performance of queries that involve criteria on multiple fields - -Here's an example that creates a compound index of `lastName` in ascending order and `age` in descending order: - -.Example Compound Index Usage -==== -[source,java] ----- -package com.mycompany.domain; - -@Document -@CompoundIndexes({ - @CompoundIndex(name = "age_idx", def = "{'lastName': 1, 'age': -1}") -}) -public class Person { - - @Id - private ObjectId id; - private Integer age; - private String firstName; - private String lastName; - -} ----- -==== - -[[mapping-usage-indexes.text-index]] -=== Text Indexes - -NOTE: The text index feature is disabled by default for mongodb v.2.4. - -Creating a text index allows accumulating several fields into a searchable full text index. 
It is only possible to have one text index per collection so all fields marked with `@TextIndexed` are combined into this index. Properties can be weighted to influence document score for ranking results. The default language for the text index is english, to change the default language set `@Document(language="spanish")` to any language you want. Using a property called `language` or `@Language` allows to define a language override on a per document base. - -.Example Text Index Usage -==== -[source,java] ----- -@Document(language = "spanish") -class SomeEntity { - - @TextIndexed String foo; - - @Language String lang; - - Nested nested; -} - -class Nested { - - @TextIndexed(weight=5) String bar; - String roo; -} ----- -==== - -[[mapping-usage-references]] -=== Using DBRefs - -The mapping framework doesn't have to store child objects embedded within the document. You can also store them separately and use a DBRef to refer to that document. When the object is loaded from MongoDB, those references will be eagerly resolved and you will get back a mapped object that looks the same as if it had been stored embedded within your master document. - -Here's an example of using a DBRef to refer to a specific document that exists independently of the object in which it is referenced (both classes are shown in-line for brevity's sake): - -==== -[source,java] ----- -@Document -public class Account { - - @Id - private ObjectId id; - private Float total; -} - -@Document -public class Person { - - @Id - private ObjectId id; - @Indexed - private Integer ssn; - @DBRef - private List accounts; -} ----- -==== - -There's no need to use something like `@OneToMany` because the mapping framework sees that you want a one-to-many relationship because there is a List of objects. When the object is stored in MongoDB, there will be a list of DBRefs rather than the `Account` objects themselves. - -IMPORTANT: The mapping framework does not handle cascading saves. 
If you change an `Account` object that is referenced by a `Person` object, you must save the Account object separately. Calling `save` on the `Person` object will not automatically save the `Account` objects in the property `accounts`. - -[[mapping-usage-events]] -=== Mapping Framework Events - -Events are fired throughout the lifecycle of the mapping process. This is described in the <> section. - -Simply declaring these beans in your Spring ApplicationContext will cause them to be invoked whenever the event is dispatched. - -[[mapping-explicit-converters]] -=== Overriding Mapping with explicit Converters - -When storing and querying your objects it is convenient to have a `MongoConverter` instance handle the mapping of all Java types to Documents. However, sometimes you may want the `MongoConverter` s do most of the work but allow you to selectively handle the conversion for a particular type or to optimize performance. - -To selectively handle the conversion yourself, register one or more one or more `org.springframework.core.convert.converter.Converter` instances with the MongoConverter. - -NOTE: Spring 3.0 introduced a core.convert package that provides a general type conversion system. This is described in detail in the Spring reference documentation section entitled http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/core.html#validation[Spring Type Conversion]. - -The method `customConversions` in `AbstractMongoConfiguration` can be used to configure Converters. The examples <> at the beginning of this chapter show how to perform the configuration using Java and XML. - -Below is an example of a Spring Converter implementation that converts from a Document to a Person POJO. 
- -[source,java] ----- -@ReadingConverter - public class PersonReadConverter implements Converter { - - public Person convert(Document source) { - Person p = new Person((ObjectId) source.get("_id"), (String) source.get("name")); - p.setAge((Integer) source.get("age")); - return p; - } -} ----- - -Here is an example that converts from a Person to a Document. - -[source,java] ----- -@WritingConverter -public class PersonWriteConverter implements Converter { - - public Document convert(Person source) { - Document document = new Document(); - document.put("_id", source.getId()); - document.put("name", source.getFirstName()); - document.put("age", source.getAge()); - return document; - } -} ----- diff --git a/src/main/asciidoc/reference/mongo-3.adoc b/src/main/asciidoc/reference/mongo-3.adoc deleted file mode 100644 index 461f0365ba..0000000000 --- a/src/main/asciidoc/reference/mongo-3.adoc +++ /dev/null @@ -1,127 +0,0 @@ -[[mongo.mongo-3]] -= MongoDB 3.0 Support - -Spring Data MongoDB allows usage of both MongoDB Java driver generations 2 and 3 when connecting to a MongoDB 2.6/3.0 server running _MMap.v1_ or a MongoDB server 3.0 using _MMap.v1_ or the _WiredTiger_ storage engine. - -NOTE: Please refer to the driver and database specific documentation for major differences between those. - -NOTE: Operations that are no longer valid using a 3.x MongoDB Java driver have been deprecated within Spring Data and will be removed in a subsequent release. - -== Using Spring Data MongoDB with MongoDB 3.0 - -[[mongo.mongo-3.configuration]] -=== Configuration Options - -Some of the configuration options have been changed / removed for the _mongo-java-driver_. 
The following options will be ignored using the generation 3 driver: - - * autoConnectRetry - * maxAutoConnectRetryTime - * slaveOk - -Generally it is recommended to use the `` and `` elements instead of `` when doing XML based configuration, since those elements will only provide you with attributes valid for the 3 generation java driver. - -[source,xml] ----- - - - - - - - - ----- - -[[mongo.mongo-3.write-concern]] -=== WriteConcern and WriteConcernChecking - -The `WriteConcern.NONE`, which had been used as default by Spring Data MongoDB, was removed in 3.0. Therefore in a MongoDB 3 environment the `WriteConcern` will be defaulted to `WriteConcern.UNACKNOWLEGED`. In case `WriteResultChecking.EXCEPTION` is enabled the `WriteConcern` will be altered to `WriteConcern.ACKNOWLEDGED` for write operations, as otherwise errors during execution would not be throw correctly, since simply not raised by the driver. - -[[mongo.mongo-3.authentication]] -=== Authentication - -MongoDB Server generation 3 changed the authentication model when connecting to the DB. Therefore some of the configuration options available for authentication are no longer valid. Please use the `MongoClient` specific options for setting credentials via `MongoCredential` to provide authentication data. - -[source,java] ----- -@Configuration -public class ApplicationContextEventTestsAppConfig extends AbstractMongoConfiguration { - - @Override - public String getDatabaseName() { - return "database"; - } - - @Override - @Bean - public MongoClient mongoClient() { - return new MongoClient(singletonList(new ServerAddress("127.0.0.1", 27017)), - singletonList(MongoCredential.createCredential("name", "db", "pwd".toCharArray()))); - } -} ----- - -In order to use authentication with XML configuration use the `credentials` attribue on ``. 
- -[source,xml] ----- - - - - - - ----- - -[[mongo.mongo-3.validation]] -=== Server-side Validation - -MongoDB supports https://docs.mongodb.com/manual/core/schema-validation/[Schema Validation] as of version 3.2 with query operators -and as of version 3.6 JSON-schema based validation. - -This chapter will point out the specialties for validation in MongoDB and how to apply JSON schema validation. - -[[mongo.mongo-3.validation.json-schema]] -==== JSON Schema Validation - -MongoDB 3.6 allows validation and querying of documents using JSON schema draft 4 including core specification and validation specification, with some differences. `$jsonSchema` can be used in a document validator (when creating a collection), which enforces that inserted or updated documents are valid against the schema. It can also be used to query for documents with the `find` command or `$match` aggregation stage. - -Spring Data MongoDB supports MongoDB's specific JSON schema implementation to define and use schemas. See <> for further details. - -[[mongo.mongo-3.validation.query-expression]] -==== Query Expression Validation - -Next to the <>, MongoDB supports as of version 3.2 validating documents against a given structure described by a query. The structure can be built using `Criteria` objects just the same way as they are used for defining queries. - -[source,java] ----- -Criteria queryExpression = Criteria.where("lastname").ne(null).type(2) - .and("age").ne(null).type(16).gt(0).lte(150); - -Validator validator = Validator.criteria(queryExpression); - -template.createCollection(Person.class, CollectionOptions.empty().validator(validator)); ----- - -NOTE: Field names used within the query expression are mapped to the domain types property names taking potential `@Field` annotations into account. - -[[mongo.mongo-3.misc]] -=== Other things to be aware of - -This section covers additional things to keep in mind when using the 3.0 driver. 
- -* `IndexOperations.resetIndexCache()` is no longer supported. -* Any `MapReduceOptions.extraOption` is silently ignored. -* `WriteResult` no longer holds error information but throws an Exception. -* `MongoOperations.executeInSession(…)` no longer calls `requestStart` / `requestDone`. -* Index name generation has become a driver-internal operation; still we use the 2.x schema to generate names. -* Some Exception messages differ between the generation 2 and 3 servers as well as between _MMap.v1_ and _WiredTiger_ storage engine. - diff --git a/src/main/asciidoc/reference/mongo-auditing.adoc b/src/main/asciidoc/reference/mongo-auditing.adoc deleted file mode 100644 index 25cce58b3f..0000000000 --- a/src/main/asciidoc/reference/mongo-auditing.adoc +++ /dev/null @@ -1,33 +0,0 @@ -[[mongo.auditing]] -== General auditing configuration - -Activating auditing functionality is just a matter of adding the Spring Data Mongo `auditing` namespace element to your configuration: - -.Activating auditing using XML configuration -==== -[source,xml] ----- - ----- -==== - -Since Spring Data MongoDB 1.4 auditing can be enabled by annotating a configuration class with the `@EnableMongoAuditing` annotation. - -.Activating auditing using JavaConfig -==== -[source,java] ----- -@Configuration -@EnableMongoAuditing -class Config { - - @Bean - public AuditorAware myAuditorProvider() { - return new AuditorAwareImpl(); - } -} ----- -==== - -If you expose a bean of type `AuditorAware` to the `ApplicationContext`, the auditing infrastructure will pick it up automatically and use it to determine the current user to be set on domain types. If you have multiple implementations registered in the `ApplicationContext`, you can select the one to be used by explicitly setting the `auditorAwareRef` attribute of `@EnableMongoAuditing`. 
- diff --git a/src/main/asciidoc/reference/mongo-repositories.adoc b/src/main/asciidoc/reference/mongo-repositories.adoc deleted file mode 100644 index 567e087900..0000000000 --- a/src/main/asciidoc/reference/mongo-repositories.adoc +++ /dev/null @@ -1,578 +0,0 @@ -[[mongo.repositories]] -= MongoDB repositories - -[[mongo-repo-intro]] -== Introduction - -This chapter will point out the specialties for repository support for MongoDB. This builds on the core repository support explained in <>. So make sure you've got a sound understanding of the basic concepts explained there. - -[[mongo-repo-usage]] -== Usage - -To access domain entities stored in a MongoDB you can leverage our sophisticated repository support that eases implementing those quite significantly. To do so, simply create an interface for your repository: - -.Sample Person entity -==== -[source,java] ----- -public class Person { - - @Id - private String id; - private String firstname; - private String lastname; - private Address address; - - // … getters and setters omitted -} ----- -==== - -We have a quite simple domain object here. Note that it has a property named `id` of type `ObjectId`. The default serialization mechanism used in `MongoTemplate` (which is backing the repository support) regards properties named id as document id. Currently we support `String`, `ObjectId` and `BigInteger` as id-types. - -.Basic repository interface to persist Person entities -==== -[source] ----- -public interface PersonRepository extends PagingAndSortingRepository { - - // additional custom finder methods go here -} ----- -==== - -Right now this interface simply serves typing purposes but we will add additional methods to it later. 
In your Spring configuration simply add - -.General MongoDB repository Spring configuration -==== -[source,xml] ----- - - - - - - - - - - - - - ----- -==== - -This namespace element will cause the base packages to be scanned for interfaces extending `MongoRepository` and create Spring beans for each of them found. By default the repositories will get a `MongoTemplate` Spring bean wired that is called `mongoTemplate`, so you only need to configure `mongo-template-ref` explicitly if you deviate from this convention. - -If you'd rather like to go with JavaConfig use the `@EnableMongoRepositories` annotation. The annotation carries the very same attributes like the namespace element. If no base package is configured the infrastructure will scan the package of the annotated configuration class. - -.JavaConfig for repositories -==== -[source,java] ----- -@Configuration -@EnableMongoRepositories -class ApplicationConfig extends AbstractMongoConfiguration { - - @Override - protected String getDatabaseName() { - return "e-store"; - } - - @Override - public MongoClient mongoClient() { - return new MongoClient(); - } - - @Override - protected String getMappingBasePackage() { - return "com.oreilly.springdata.mongodb" - } -} ----- -==== - -As our domain repository extends `PagingAndSortingRepository` it provides you with CRUD operations as well as methods for paginated and sorted access to the entities. Working with the repository instance is just a matter of dependency injecting it into a client. 
So accessing the second page of `Person` s at a page size of 10 would simply look something like this: - -.Paging access to Person entities -==== -[source,java] ----- -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration -public class PersonRepositoryTests { - - @Autowired PersonRepository repository; - - @Test - public void readsFirstPageCorrectly() { - - Page persons = repository.findAll(PageRequest.of(0, 10)); - assertThat(persons.isFirstPage(), is(true)); - } -} ----- -==== - -The sample creates an application context with Spring's unit test support which will perform annotation based dependency injection into test cases. Inside the test method we simply use the repository to query the datastore. We hand the repository a `PageRequest` instance that requests the first page of persons at a page size of 10. - -[[mongodb.repositories.queries]] -== Query methods - -Most of the data access operations you usually trigger on a repository result in a query being executed against the MongoDB database. Defining such a query is just a matter of declaring a method on the repository interface - -.PersonRepository with query methods -==== -[source,java] ----- -public interface PersonRepository extends PagingAndSortingRepository { - - List findByLastname(String lastname); <1> - - Page findByFirstname(String firstname, Pageable pageable); <2> - - Person findByShippingAddresses(Address address); <3> - - Person findFirstByLastname(String lastname); <4> - - Stream findAllBy(); <5> -} ----- -<1> The method shows a query for all people with the given lastname. The query will be derived parsing the method name for constraints which can be concatenated with `And` and `Or`. Thus the method name will result in a query expression of `{"lastname" : lastname}`. -<2> Applies pagination to a query. Just equip your method signature with a `Pageable` parameter and let the method return a `Page` instance and we will automatically page the query accordingly. 
-<3> Shows that you can query based on properties which are not a primitive type. Throws `IncorrectResultSizeDataAccessException` if more than one match found. -<4> Uses the `First` keyword to restrict the query to the very first result. Unlike <3> this method does not throw an exception if more than one match was found. -<5> Uses a Java 8 `Stream` which reads and converts individual elements while iterating the stream. -==== - -NOTE: Note that for version 1.0 we currently don't support referring to parameters that are mapped as `DBRef` in the domain class. - -[cols="1,2,3", options="header"] -.Supported keywords for query methods -|=== -| Keyword -| Sample -| Logical result - -| `After` -| `findByBirthdateAfter(Date date)` -| `{"birthdate" : {"$gt" : date}}` - -| `GreaterThan` -| `findByAgeGreaterThan(int age)` -| `{"age" : {"$gt" : age}}` - -| `GreaterThanEqual` -| `findByAgeGreaterThanEqual(int age)` -| `{"age" : {"$gte" : age}}` - -| `Before` -| `findByBirthdateBefore(Date date)` -| `{"birthdate" : {"$lt" : date}}` - -| `LessThan` -| `findByAgeLessThan(int age)` -| `{"age" : {"$lt" : age}}` - -| `LessThanEqual` -| `findByAgeLessThanEqual(int age)` -| `{"age" : {"$lte" : age}}` - -| `Between` -| `findByAgeBetween(int from, int to)` -| `{"age" : {"$gt" : from, "$lt" : to}}` - -| `In` -| `findByAgeIn(Collection ages)` -| `{"age" : {"$in" : [ages...]}}` - -| `NotIn` -| `findByAgeNotIn(Collection ages)` -| `{"age" : {"$nin" : [ages...]}}` - -| `IsNotNull`, `NotNull` -| `findByFirstnameNotNull()` -| `{"firstname" : {"$ne" : null}}` - -| `IsNull`, `Null` -| `findByFirstnameNull()` -| `{"firstname" : null}` - -| `Like`, `StartingWith`, `EndingWith` -| `findByFirstnameLike(String name)` -| `{"firstname" : name} (name as regex)` - -| `NotLike`, `IsNotLike` -| `findByFirstnameNotLike(String name)` -| `{"firstname" : { "$not" : name }} (name as regex)` - -| `Containing` on String -| `findByFirstnameContaining(String name)` -| `{"firstname" : name} (name as regex)` - -| 
`NotContaining` on String -| `findByFirstnameNotContaining(String name)` -| `{"firstname" : { "$not" : name}} (name as regex)` - -| `Containing` on Collection -| `findByAddressesContaining(Address address)` -| `{"addresses" : { "$in" : address}}` - -| `NotContaining` on Collection -| `findByAddressesNotContaining(Address address)` -| `{"addresses" : { "$not" : { "$in" : address}}}` - -| `Regex` -| `findByFirstnameRegex(String firstname)` -| `{"firstname" : {"$regex" : firstname }}` - -| `(No keyword)` -| `findByFirstname(String name)` -| `{"firstname" : name}` - -| `Not` -| `findByFirstnameNot(String name)` -| `{"firstname" : {"$ne" : name}}` - -| `Near` -| `findByLocationNear(Point point)` -| `{"location" : {"$near" : [x,y]}}` - -| `Near` -| `findByLocationNear(Point point, Distance max)` -| `{"location" : {"$near" : [x,y], "$maxDistance" : max}}` - -| `Near` -| `findByLocationNear(Point point, Distance min, Distance max)` -| `{"location" : {"$near" : [x,y], "$minDistance" : min, "$maxDistance" : max}}` - -| `Within` -| `findByLocationWithin(Circle circle)` -| `{"location" : {"$geoWithin" : {"$center" : [ [x, y], distance]}}}` - -| `Within` -| `findByLocationWithin(Box box)` -| `{"location" : {"$geoWithin" : {"$box" : [ [x1, y1], x2, y2]}}}` - -| `IsTrue`, `True` -| `findByActiveIsTrue()` -| `{"active" : true}` - -| `IsFalse`, `False` -| `findByActiveIsFalse()` -| `{"active" : false}` - -| `Exists` -| `findByLocationExists(boolean exists)` -| `{"location" : {"$exists" : exists }}` -|=== - -NOTE: If the property criterion compares a document, the order of the fields and exact equality in the document matters. - -[[mongodb.repositories.queries.delete]] -=== Repository delete queries - -The above keywords can be used in conjunction with `delete…By` or `remove…By` to create queries deleting matching documents. 
- -.`Delete…By` Query -==== -[source,java] ----- -public interface PersonRepository extends MongoRepository { - - List deleteByLastname(String lastname); - - Long deletePersonByLastname(String lastname); -} ----- -==== - -Using return type `List` will retrieve and return all matching documents before actually deleting them. A numeric return type directly removes the matching documents returning the total number of documents removed. - -[[mongodb.repositories.queries.geo-spatial]] -=== Geo-spatial repository queries - -As you've just seen there are a few keywords triggering geo-spatial operations within a MongoDB query. The `Near` keyword allows some further modification. Let's have a look at some examples: - -.Advanced `Near` queries -==== -[source,java] ----- -public interface PersonRepository extends MongoRepository - - // { 'location' : { '$near' : [point.x, point.y], '$maxDistance' : distance}} - List findByLocationNear(Point location, Distance distance); -} ----- -==== - -Adding a `Distance` parameter to the query method allows restricting results to those within the given distance. If the `Distance` was set up containing a `Metric` we will transparently use `$nearSphere` instead of $code. - -.Using `Distance` with `Metrics` -==== -[source,java] ----- -Point point = new Point(43.7, 48.8); -Distance distance = new Distance(200, Metrics.KILOMETERS); -… = repository.findByLocationNear(point, distance); -// {'location' : {'$nearSphere' : [43.7, 48.8], '$maxDistance' : 0.03135711885774796}} ----- -==== - -As you can see using a `Distance` equipped with a `Metric` causes `$nearSphere` clause to be added instead of a plain `$near`. Beyond that the actual distance gets calculated according to the `Metrics` used. - -NOTE: Using `@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE)` on the target property forces usage of `$nearSphere` operator. 
- -==== Geo-near queries - -[source,java] ----- -public interface PersonRepository extends MongoRepository - - // {'geoNear' : 'location', 'near' : [x, y] } - GeoResults findByLocationNear(Point location); - - // No metric: {'geoNear' : 'person', 'near' : [x, y], maxDistance : distance } - // Metric: {'geoNear' : 'person', 'near' : [x, y], 'maxDistance' : distance, - // 'distanceMultiplier' : metric.multiplier, 'spherical' : true } - GeoResults findByLocationNear(Point location, Distance distance); - - // Metric: {'geoNear' : 'person', 'near' : [x, y], 'minDistance' : min, - // 'maxDistance' : max, 'distanceMultiplier' : metric.multiplier, - // 'spherical' : true } - GeoResults findByLocationNear(Point location, Distance min, Distance max); - - // {'geoNear' : 'location', 'near' : [x, y] } - GeoResults findByLocationNear(Point location); -} ----- - -[[mongodb.repositories.queries.json-based]] -=== MongoDB JSON based query methods and field restriction - -By adding the annotation `org.springframework.data.mongodb.repository.Query` repository finder methods you can specify a MongoDB JSON query string to use instead of having the query derived from the method name. For example - -[source,java] ----- -public interface PersonRepository extends MongoRepository - - @Query("{ 'firstname' : ?0 }") - List findByThePersonsFirstname(String firstname); - -} ----- - -The placeholder `?0` lets you substitute the value from the method arguments into the JSON query string. - -NOTE: `String` parameter values are escaped during the binding process, which means that it is not possible to add MongoDB specific operators via the argument. - -You can also use the filter property to restrict the set of properties that will be mapped into the Java object. 
For example, - -[source,java] ----- -public interface PersonRepository extends MongoRepository - - @Query(value="{ 'firstname' : ?0 }", fields="{ 'firstname' : 1, 'lastname' : 1}") - List findByThePersonsFirstname(String firstname); - -} ----- - -This will return only the firstname, lastname and Id properties of the Person objects. The age property, a java.lang.Integer, will not be set and its value will therefore be null. - -[[mongodb.repositories.queries.json-spel]] -=== JSON based queries with SpEL expressions - -Query strings and field definitions can be used together with SpEL expressions to create dynamic queries at runtime. -SpEL expressions can provide predicate values and can be used to extend predicates with subdocuments. - -Expressions expose method arguments through an array that contains all arguments. The following query uses `[0]` -to declare the predicate value for `lastname` that is equivalent to the `?0` parameter binding. - -[source,java] ----- -public interface PersonRepository extends MongoRepository - - @Query("{'lastname': ?#{[0]} }") - List findByQueryWithExpression(String param0); -} ----- - -Expressions can be used to invoke functions, evaluate conditionals and construct values. SpEL expressions -reveal in conjunction with JSON a side-effect as Map-like declarations inside of SpEL read like JSON. - -[source,java] ----- -public interface PersonRepository extends MongoRepository - - @Query("{'id': ?#{ [0] ? {$exists :true} : [1] }}") - List findByQueryWithExpressionAndNestedObject(boolean param0, String param1); -} ----- - -SpEL in query strings can be a powerful way to enhance queries and can accept a broad range of unwanted arguments. -You should make sure to sanitize strings before passing these to the query to avoid unwanted changes to your query. 
- -Expression support is extensible through the Query SPI `org.springframework.data.repository.query.spi.EvaluationContextExtension` -that can contribute properties, functions and customize the root object. Extensions are retrieved from the application context -at the time of SpEL evaluation when the query is built. - -[source,java] ----- -public class SampleEvaluationContextExtension extends EvaluationContextExtensionSupport { - - @Override - public String getExtensionId() { - return "security"; - } - - @Override - public Map getProperties() { - return Collections.singletonMap("principal", SecurityContextHolder.getCurrent().getPrincipal()); - } -} ----- - -NOTE: Bootstrapping `MongoRepositoryFactory` yourself is not application context-aware and requires further configuration -to pick up Query SPI extensions. - -[[mongodb.repositories.queries.type-safe]] -=== Type-safe Query methods - -MongoDB repository support integrates with the http://www.querydsl.com/[QueryDSL] project which provides a means to perform type-safe queries in Java. To quote from the project description, "Instead of writing queries as inline strings or externalizing them into XML files they are constructed via a fluent API." It provides the following features - -* Code completion in IDE (all properties, methods and operations can be expanded in your favorite Java IDE) -* Almost no syntactically invalid queries allowed (type-safe on all levels) -* Domain types and properties can be referenced safely (no Strings involved!) -* Adapts better to refactoring changes in domain types -* Incremental query definition is easier - -Please refer to the http://www.querydsl.com/static/querydsl/latest/reference/html/[QueryDSL documentation] which describes how to bootstrap your environment for APT based code generation using Maven or Ant. 
- -Using QueryDSL you will be able to write queries as shown below - -[source,java] ----- -QPerson person = new QPerson("person"); -List result = repository.findAll(person.address.zipCode.eq("C0123")); - -Page page = repository.findAll(person.lastname.contains("a"), - PageRequest.of(0, 2, Direction.ASC, "lastname")); ----- - -`QPerson` is a class that is generated (via the Java annotation post processing tool) which is a `Predicate` that allows you to write type safe queries. Notice that there are no strings in the query other than the value "C0123". - -You can use the generated `Predicate` class via the interface `QueryDslPredicateExecutor` which is shown below - -[source,java] ----- -public interface QueryDslPredicateExecutor { - - T findOne(Predicate predicate); - - List findAll(Predicate predicate); - - List findAll(Predicate predicate, OrderSpecifier... orders); - - Page findAll(Predicate predicate, Pageable pageable); - - Long count(Predicate predicate); -} ----- - -To use this in your repository implementation, simply inherit from it in addition to other repository interfaces. This is shown below - -[source,java] ----- -public interface PersonRepository extends MongoRepository, QueryDslPredicateExecutor { - - // additional finder methods go here -} ----- - -We think you will find this an extremely powerful tool for writing MongoDB queries. - -[[mongodb.repositories.queries.full-text]] -=== Full-text search queries -MongoDBs full text search feature is very store specific and therefore can rather be found on `MongoRepository` than on the more general `CrudRepository`. What we need is a document with a full-text index defined for (Please see section <> for creating). - -Additional methods on `MongoRepository` take `TextCriteria` as input parameter. In addition to those explicit methods, it is also possible to add a `TextCriteria` derived repository method. The criteria will be added as an additional `AND` criteria. 
Once the entity contains a `@TextScore` annotated property the documents full-text score will be retrieved. Furthermore the `@TextScore` annotated property will also make it possible to sort by the documents score. - -[source, java] ----- -@Document -class FullTextDocument { - - @Id String id; - @TextIndexed String title; - @TextIndexed String content; - @TextScore Float score; -} - -interface FullTextRepository extends Repository { - - // Execute a full-text search and define sorting dynamically - List findAllBy(TextCriteria criteria, Sort sort); - - // Paginate over a full-text search result - Page findAllBy(TextCriteria criteria, Pageable pageable); - - // Combine a derived query with a full-text search - List findByTitleOrderByScoreDesc(String title, TextCriteria criteria); -} - - -Sort sort = Sort.by("score"); -TextCriteria criteria = TextCriteria.forDefaultLanguage().matchingAny("spring", "data"); -List result = repository.findAllBy(criteria, sort); - -criteria = TextCriteria.forDefaultLanguage().matching("film"); -Page page = repository.findAllBy(criteria, PageRequest.of(1, 1, sort)); -List result = repository.findByTitleOrderByScoreDesc("mongodb", criteria); ----- - -include::../{spring-data-commons-docs}/repository-projections.adoc[leveloffset=+2] - -[[mongodb.repositories.misc]] -== Miscellaneous - -[[mongodb.repositories.misc.cdi-integration]] -=== CDI Integration - -Instances of the repository interfaces are usually created by a container, which Spring is the most natural choice when working with Spring Data. As of version 1.3.0 Spring Data MongoDB ships with a custom CDI extension that allows using the repository abstraction in CDI environments. The extension is part of the JAR so all you need to do to activate it is dropping the Spring Data MongoDB JAR into your classpath. 
You can now set up the infrastructure by implementing a CDI Producer for the `MongoTemplate`: - -[source,java] ----- -class MongoTemplateProducer { - - @Produces - @ApplicationScoped - public MongoOperations createMongoTemplate() { - - MongoDbFactory factory = new SimpleMongoDbFactory(new MongoClient(), "database"); - return new MongoTemplate(factory); - } -} ----- - -The Spring Data MongoDB CDI extension will pick up the `MongoTemplate` available as CDI bean and create a proxy for a Spring Data repository whenever a bean of a repository type is requested by the container. Thus obtaining an instance of a Spring Data repository is a matter of declaring an `@Inject`-ed property: - -[source,java] ----- -class RepositoryClient { - - @Inject - PersonRepository repository; - - public void businessMethod() { - List people = repository.findAll(); - } -} ----- diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc deleted file mode 100644 index 685ce9ba12..0000000000 --- a/src/main/asciidoc/reference/mongodb.adoc +++ /dev/null @@ -1,3011 +0,0 @@ -[[mongo.core]] -= MongoDB support - -The MongoDB support contains a wide range of features which are summarized below. - -* Spring configuration support using Java based @Configuration classes or an XML namespace for a Mongo driver instance and replica sets -* MongoTemplate helper class that increases productivity performing common Mongo operations. Includes integrated object mapping between documents and POJOs. -* Exception translation into Spring's portable Data Access Exception hierarchy -* Feature Rich Object Mapping integrated with Spring's Conversion Service -* Annotation based mapping metadata but extensible to support other metadata formats -* Persistence and mapping lifecycle events -* Java based Query, Criteria, and Update DSLs -* Automatic implementation of Repository interfaces including support for custom finder methods. -* QueryDSL integration to support type-safe queries. 
-* Cross-store persistence - support for JPA Entities with fields transparently persisted/retrieved using MongoDB (deprecated - will be removed without replacement) -* GeoSpatial integration - -For most tasks you will find yourself using `MongoTemplate` or the Repository support that both leverage the rich mapping functionality. `MongoTemplate` is the place to look for accessing functionality such as incrementing counters or ad-hoc CRUD operations. `MongoTemplate` also provides callback methods so that it is easy for you to get a hold of the low level API artifacts such as `com.mongo.DB` to communicate directly with MongoDB. The goal with naming conventions on various API artifacts is to copy those in the base MongoDB Java driver so you can easily map your existing knowledge onto the Spring APIs. - -[[mongodb-getting-started]] -== Getting Started - -Spring MongoDB support requires MongoDB 2.6 or higher and Java SE 8 or higher. An easy way to bootstrap setting up a working environment is to create a Spring based project in http://spring.io/tools/sts[STS]. - -First you need to set up a running Mongodb server. Refer to the http://docs.mongodb.org/manual/core/introduction/[Mongodb Quick Start guide] for an explanation on how to startup a MongoDB instance. Once installed starting MongoDB is typically a matter of executing the following command: `MONGO_HOME/bin/mongod` - -To create a Spring project in STS go to File -> New -> Spring Template Project -> Simple Spring Utility Project -> press Yes when prompted. Then enter a project and a package name such as org.spring.mongodb.example. - -Then add the following to pom.xml dependencies section. 
- -[source,xml] ----- - - - - - - org.springframework.data - spring-data-mongodb - {version} - - - ----- - -Also change the version of Spring in the pom.xml to be - -[source,xml] ----- -{springVersion} ----- - -You will also need to add the location of the Spring Milestone repository for maven to your `pom.xml` which is at the same level of your `` element - -[source,xml] ----- - - - spring-milestone - Spring Maven MILESTONE Repository - http://repo.spring.io/libs-milestone - - ----- - -The repository is also http://repo.spring.io/milestone/org/springframework/data/[browseable here]. - -You may also want to set the logging level to `DEBUG` to see some additional information, edit the `log4j.properties` file to have - -[source] ----- -log4j.category.org.springframework.data.mongodb=DEBUG -log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %40.40c:%4L - %m%n ----- - -Create a simple Person class to persist: - -[source,java] ----- -package org.spring.mongodb.example; - -public class Person { - - private String id; - private String name; - private int age; - - public Person(String name, int age) { - this.name = name; - this.age = age; - } - - public String getId() { - return id; - } - public String getName() { - return name; - } - public int getAge() { - return age; - } - - @Override - public String toString() { - return "Person [id=" + id + ", name=" + name + ", age=" + age + "]"; - } -} ----- - -And a main application to run - -[source,java] ----- -package org.spring.mongodb.example; - -import static org.springframework.data.mongodb.core.query.Criteria.where; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.query.Query; - -import com.mongodb.MongoClient; - -public class MongoApp { - - private static final Log log = 
LogFactory.getLog(MongoApp.class); - - public static void main(String[] args) throws Exception { - - MongoOperations mongoOps = new MongoTemplate(new MongoClient(), "database"); - mongoOps.insert(new Person("Joe", 34)); - - log.info(mongoOps.findOne(new Query(where("name").is("Joe")), Person.class)); - - mongoOps.dropCollection("person"); - } -} ----- - -This will produce the following output - -[source] ----- -10:01:32,062 DEBUG apping.MongoPersistentEntityIndexCreator: 80 - Analyzing class class org.spring.example.Person for index information. -10:01:32,265 DEBUG ramework.data.mongodb.core.MongoTemplate: 631 - insert Document containing fields: [_class, age, name] in collection: Person -10:01:32,765 DEBUG ramework.data.mongodb.core.MongoTemplate:1243 - findOne using query: { "name" : "Joe"} in db.collection: database.Person -10:01:32,953 INFO org.spring.mongodb.example.MongoApp: 25 - Person [id=4ddbba3c0be56b7e1b210166, name=Joe, age=34] -10:01:32,984 DEBUG ramework.data.mongodb.core.MongoTemplate: 375 - Dropped collection [database.person] ----- - -Even in this simple example, there are few things to take notice of - -* You can instantiate the central helper class of Spring Mongo, <>, using the standard `com.mongodb.MongoClient` object and the name of the database to use. -* The mapper works against standard POJO objects without the need for any additional metadata (though you can optionally provide that information. See <>.). -* Conventions are used for handling the id field, converting it to be a `ObjectId` when stored in the database. -* Mapping conventions can use field access. Notice the Person class has only getters. 
-* If the constructor argument names match the field names of the stored document, they will be used to instantiate the object - -[[mongo.examples-repo]] -== Examples Repository - -There is an https://github.com/spring-projects/spring-data-examples[github repository with several examples] that you can download and play around with to get a feel for how the library works. - -[[mongodb-connectors]] -== Connecting to MongoDB with Spring - -One of the first tasks when using MongoDB and Spring is to create a `com.mongodb.MongoClient` object using the IoC container. There are two main ways to do this, either using Java based bean metadata or XML based bean metadata. These are discussed in the following sections. - -NOTE: For those not familiar with how to configure the Spring container using Java based bean metadata instead of XML based metadata see the high level introduction in the reference docs http://docs.spring.io/spring/docs/3.2.x/spring-framework-reference/html/new-in-3.0.html#new-java-configuration[here ] as well as the detailed documentation http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/core.html#beans-java-instantiating-container[ here]. - -[[mongo.mongo-java-config]] -=== Registering a Mongo instance using Java based metadata - -An example of using Java based bean metadata to register an instance of a `com.mongodb.MongoClient` is shown below - -.Registering a com.mongodb.MongoClient object using Java based bean metadata -==== -[source,java] ----- -@Configuration -public class AppConfig { - - /* - * Use the standard Mongo driver API to create a com.mongodb.MongoClient instance. - */ - public @Bean MongoClient mongoClient() { - return new MongoClient("localhost"); - } -} ----- -==== - -This approach allows you to use the standard `com.mongodb.MongoClient` instance with the container using Spring's `MongoClientFactoryBean`. 
As compared to instantiating a `com.mongodb.MongoClient` instance directly, the FactoryBean has the added advantage of also providing the container with an ExceptionTranslator implementation that translates MongoDB exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation. This hierarchy and use of `@Repository` is described in http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/data-access.html[Spring's DAO support features]. - -An example of a Java based bean metadata that supports exception translation on `@Repository` annotated classes is shown below: - -.Registering a com.mongodb.MongoClient object using Spring's MongoClientFactoryBean and enabling Spring's exception translation support -==== -[source,java] ----- -@Configuration -public class AppConfig { - - /* - * Factory bean that creates the com.mongodb.MongoClient instance - */ - public @Bean MongoClientFactoryBean mongo() { - MongoClientFactoryBean mongo = new MongoClientFactoryBean(); - mongo.setHost("localhost"); - return mongo; - } -} ----- -==== - -To access the `com.mongodb.MongoClient` object created by the `MongoClientFactoryBean` in other `@Configuration` or your own classes, use a "`private @Autowired Mongo mongo;`" field. - -[[mongo.mongo-xml-config]] -=== Registering a Mongo instance using XML based metadata - -While you can use Spring's traditional `` XML namespace to register an instance of `com.mongodb.MongoClient` with the container, the XML can be quite verbose as it is general purpose. XML namespaces are a better alternative to configuring commonly used objects such as the Mongo instance. The mongo namespace allows you to create a Mongo instance server location, replica-sets, and options. 
- -To use the Mongo namespace elements you will need to reference the Mongo schema: - -.XML schema to configure MongoDB -==== -[source,xml] ----- - - - - - ** - - ----- -==== - -A more advanced configuration with `MongoClientOptions` is shown below (note these are not recommended values) - -.XML schema to configure a com.mongodb.MongoClient object with MongoClientOptions -==== -[source,xml] ----- - - - - - - - ----- -==== - -A configuration using replica sets is shown below. - -.XML schema to configure com.mongodb.MongoClient object with Replica Sets -==== -[source,xml] ----- - ----- -==== - -[[mongo.mongo-db-factory]] -=== The MongoDbFactory interface - -While `com.mongodb.MongoClient` is the entry point to the MongoDB driver API, connecting to a specific MongoDB database instance requires additional information such as the database name and an optional username and password. With that information you can obtain a com.mongodb.DB object and access all the functionality of a specific MongoDB database instance. Spring provides the `org.springframework.data.mongodb.core.MongoDbFactory` interface shown below to bootstrap connectivity to the database. - -[source,java] ----- -public interface MongoDbFactory { - - MongoDatabase getDb() throws DataAccessException; - - MongoDatabase getDb(String dbName) throws DataAccessException; -} ----- - -The following sections show how you can use the container with either Java or the XML based metadata to configure an instance of the `MongoDbFactory` interface. In turn, you can use the `MongoDbFactory` instance to configure `MongoTemplate`. - -Instead of using the IoC container to create an instance of MongoTemplate, you can just use them in standard Java code as shown below. 
- -[source,java] ----- -public class MongoApp { - - private static final Log log = LogFactory.getLog(MongoApp.class); - - public static void main(String[] args) throws Exception { - - MongoOperations mongoOps = new MongoTemplate(*new SimpleMongoDbFactory(new MongoClient(), "database")*); - - mongoOps.insert(new Person("Joe", 34)); - - log.info(mongoOps.findOne(new Query(where("name").is("Joe")), Person.class)); - - mongoOps.dropCollection("person"); - } -} ----- - -The code in bold highlights the use of SimpleMongoDbFactory and is the only difference between the listing shown in the <>. - -[[mongo.mongo-db-factory-java]] -=== Registering a MongoDbFactory instance using Java based metadata - -To register a MongoDbFactory instance with the container, you write code much like what was highlighted in the previous code listing. A simple example is shown below - -[source,java] ----- -@Configuration -public class MongoConfiguration { - - public @Bean MongoDbFactory mongoDbFactory() { - return new SimpleMongoDbFactory(new MongoClient(), "database"); - } -} ----- - -MongoDB Server generation 3 changed the authentication model when connecting to the DB. Therefore some of the configuration options available for authentication are no longer valid. Please use the `MongoClient` specific options for setting credentials via `MongoCredential` to provide authentication data. - -[source,java] ----- -@Configuration -public class ApplicationContextEventTestsAppConfig extends AbstractMongoConfiguration { - - @Override - public String getDatabaseName() { - return "database"; - } - - @Override - @Bean - public MongoClient mongoClient() { - return new MongoClient(singletonList(new ServerAddress("127.0.0.1", 27017)), - singletonList(MongoCredential.createCredential("name", "db", "pwd".toCharArray()))); - } -} ----- - -In order to use authentication with XML configuration use the `credentials` attribue on ``. 
- -NOTE: Username/password credentials used in XML configuration must be URL encoded when these contain reserved characters such as `:`, `%`, `@`, `,`. -Example: `m0ng0@dmin:mo_res:bw6},Qsdxx@admin@database` -> `m0ng0%40dmin:mo_res%3Abw6%7D%2CQsdxx%40admin@database` -See https://tools.ietf.org/html/rfc3986#section-2.2[section 2.2 of RFC 3986] for further details. - - -[[mongo.mongo-db-factory-xml]] -=== Registering a MongoDbFactory instance using XML based metadata - -The mongo namespace provides a convenient way to create a `SimpleMongoDbFactory` as compared to using the `` namespace. Simple usage is shown below - -[source,xml] ----- - ----- - -If you need to configure additional options on the `com.mongodb.MongoClient` instance that is used to create a `SimpleMongoDbFactory` you can refer to an existing bean using the `mongo-ref` attribute as shown below. To show another common usage pattern, this listing shows the use of a property placeholder to parametrise the configuration and creating `MongoTemplate`. - -[source,xml] ----- - - - - - - - - - - - ----- - -[[mongo-template]] -== Introduction to MongoTemplate - -The class `MongoTemplate`, located in the package `org.springframework.data.mongodb.core`, is the central class of the Spring's MongoDB support providing a rich feature set to interact with the database. The template offers convenience operations to create, update, delete and query for MongoDB documents and provides a mapping between your domain objects and MongoDB documents. - -NOTE: Once configured, `MongoTemplate` is thread-safe and can be reused across multiple instances. - -The mapping between MongoDB documents and domain classes is done by delegating to an implementation of the interface `MongoConverter`. Spring provides the `MappingMongoConverter`, but you can also write your own converter. Please refer to the section on MongoConverters for more detailed information. - -The `MongoTemplate` class implements the interface `MongoOperations`. 
In as much as possible, the methods on `MongoOperations` are named after methods available on the MongoDB driver `Collection` object to make the API familiar to existing MongoDB developers who are used to the driver API. For example, you will find methods such as "find", "findAndModify", "findOne", "insert", "remove", "save", "update" and "updateMulti". The design goal was to make it as easy as possible to transition between the use of the base MongoDB driver and `MongoOperations`. A major difference in between the two APIs is that MongoOperations can be passed domain objects instead of `Document` and there are fluent APIs for `Query`, `Criteria`, and `Update` operations instead of populating a `Document` to specify the parameters for those operations. - -NOTE: The preferred way to reference the operations on `MongoTemplate` instance is via its interface `MongoOperations`. - -The default converter implementation used by `MongoTemplate` is MappingMongoConverter. While the `MappingMongoConverter` can make use of additional metadata to specify the mapping of objects to documents it is also capable of converting objects that contain no additional metadata by using some conventions for the mapping of IDs and collection names. These conventions as well as the use of mapping annotations is explained in the <>. - -Another central feature of MongoTemplate is exception translation of exceptions thrown in the MongoDB Java driver into Spring's portable Data Access Exception hierarchy. Refer to the section on <> for more information. - -While there are many convenience methods on `MongoTemplate` to help you easily perform common tasks if you should need to access the MongoDB driver API directly to access functionality not explicitly exposed by the MongoTemplate you can use one of several Execute callback methods to access underlying driver APIs. The execute callbacks will give you a reference to either a `com.mongodb.Collection` or a `com.mongodb.DB` object. 
Please see the section mongo.executioncallback[Execution Callbacks] for more information. - -Now let's look at an example of how to work with the `MongoTemplate` in the context of the Spring container. - -[[mongo-template.instantiating]] -=== Instantiating MongoTemplate - -You can use Java to create and register an instance of `MongoTemplate` as shown below. - -.Registering a com.mongodb.MongoClient object and enabling Spring's exception translation support -==== -[source,java] ----- -@Configuration -public class AppConfig { - - public @Bean MongoClient mongoClient() { - return new MongoClient("localhost"); - } - - public @Bean MongoTemplate mongoTemplate() { - return new MongoTemplate(mongoClient(), "mydatabase"); - } -} ----- -==== - -There are several overloaded constructors of MongoTemplate. These are - -* `MongoTemplate(MongoClient mongo, String databaseName)` - takes the `com.mongodb.MongoClient` object and the default database name to operate against. -* `MongoTemplate(MongoDbFactory mongoDbFactory)` - takes a MongoDbFactory object that encapsulated the `com.mongodb.MongoClient` object, database name, and username and password. -* `MongoTemplate(MongoDbFactory mongoDbFactory, MongoConverter mongoConverter)` - adds a MongoConverter to use for mapping. - -You can also configure a MongoTemplate using Spring's XML schema. - -[source,java] ----- - - - - - - ----- - -Other optional properties that you might like to set when creating a `MongoTemplate` are the default `WriteResultCheckingPolicy`, `WriteConcern`, and `ReadPreference`. - -NOTE: The preferred way to reference the operations on `MongoTemplate` instance is via its interface `MongoOperations`. - -[[mongo-template.writeresultchecking]] -=== WriteResultChecking Policy - -When in development it is very handy to either log or throw an exception if the `com.mongodb.WriteResult` returned from any MongoDB operation contains an error. 
It is quite common to forget to do this during development and then end up with an application that looks like it runs successfully but in fact the database was not modified according to your expectations. Set MongoTemplate's property to an enum with the following values, `EXCEPTION`, or `NONE` to either throw an Exception or do nothing. The default is to use a `WriteResultChecking` value of `NONE`. - -[[mongo-template.writeconcern]] -=== WriteConcern - -You can set the `com.mongodb.WriteConcern` property that the `MongoTemplate` will use for write operations if it has not yet been specified via the driver at a higher level such as `com.mongodb.MongoClient`. If MongoTemplate's `WriteConcern` property is not set it will default to the one set in the MongoDB driver's DB or Collection setting. - -[[mongo-template.writeconcernresolver]] -=== WriteConcernResolver - -For more advanced cases where you want to set different `WriteConcern` values on a per-operation basis (for remove, update, insert and save operations), a strategy interface called `WriteConcernResolver` can be configured on `MongoTemplate`. Since `MongoTemplate` is used to persist POJOs, the `WriteConcernResolver` lets you create a policy that can map a specific POJO class to a `WriteConcern` value. The `WriteConcernResolver` interface is shown below. - -[source,java] ----- -public interface WriteConcernResolver { - WriteConcern resolve(MongoAction action); -} ----- - -The passed in argument, MongoAction, is what you use to determine the `WriteConcern` value to be used or to use the value of the Template itself as a default. `MongoAction` contains the collection name being written to, the `java.lang.Class` of the POJO, the converted `Document`, as well as the operation as an enumeration (`MongoActionOperation`: REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE) and a few other pieces of contextual information. 
For example, - -[source] ----- -private class MyAppWriteConcernResolver implements WriteConcernResolver { - - public WriteConcern resolve(MongoAction action) { - if (action.getEntityClass().getSimpleName().contains("Audit")) { - return WriteConcern.NONE; - } else if (action.getEntityClass().getSimpleName().contains("Metadata")) { - return WriteConcern.JOURNAL_SAFE; - } - return action.getDefaultWriteConcern(); - } -} ----- - -[[mongo-template.save-update-remove]] -== Saving, Updating, and Removing Documents - -`MongoTemplate` provides a simple way for you to save, update, and delete your domain objects and map those objects to documents stored in MongoDB. - -Given a simple class such as Person - -[source,java] ----- -public class Person { - - private String id; - private String name; - private int age; - - public Person(String name, int age) { - this.name = name; - this.age = age; - } - - public String getId() { - return id; - } - public String getName() { - return name; - } - public int getAge() { - return age; - } - - @Override - public String toString() { - return "Person [id=" + id + ", name=" + name + ", age=" + age + "]"; - } - -} ----- - -You can save, update and delete the object as shown below. - -NOTE: `MongoOperations` is the interface that `MongoTemplate` implements. 
- -[source,java] ----- -package org.spring.example; - -import static org.springframework.data.mongodb.core.query.Criteria.where; -import static org.springframework.data.mongodb.core.query.Update.update; -import static org.springframework.data.mongodb.core.query.Query.query; - -import java.util.List; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; - -import com.mongodb.MongoClient; - -public class MongoApp { - - private static final Log log = LogFactory.getLog(MongoApp.class); - - public static void main(String[] args) { - - MongoOperations mongoOps = new MongoTemplate(new SimpleMongoDbFactory(new MongoClient(), "database")); - - Person p = new Person("Joe", 34); - - // Insert is used to initially store the object into the database. - mongoOps.insert(p); - log.info("Insert: " + p); - - // Find - p = mongoOps.findById(p.getId(), Person.class); - log.info("Found: " + p); - - // Update - mongoOps.updateFirst(query(where("name").is("Joe")), update("age", 35), Person.class); - p = mongoOps.findOne(query(where("name").is("Joe")), Person.class); - log.info("Updated: " + p); - - // Delete - mongoOps.remove(p); - - // Check that deletion worked - List people = mongoOps.findAll(Person.class); - log.info("Number of people = : " + people.size()); - - - mongoOps.dropCollection(Person.class); - } -} ----- - -This would produce the following log output (including debug messages from `MongoTemplate` itself) - -[source] ----- -DEBUG apping.MongoPersistentEntityIndexCreator: 80 - Analyzing class class org.spring.example.Person for index information. 
-DEBUG work.data.mongodb.core.MongoTemplate: 632 - insert Document containing fields: [_class, age, name] in collection: person -INFO org.spring.example.MongoApp: 30 - Insert: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=34] -DEBUG work.data.mongodb.core.MongoTemplate:1246 - findOne using query: { "_id" : { "$oid" : "4ddc6e784ce5b1eba3ceaf5c"}} in db.collection: database.person -INFO org.spring.example.MongoApp: 34 - Found: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=34] -DEBUG work.data.mongodb.core.MongoTemplate: 778 - calling update using query: { "name" : "Joe"} and update: { "$set" : { "age" : 35}} in collection: person -DEBUG work.data.mongodb.core.MongoTemplate:1246 - findOne using query: { "name" : "Joe"} in db.collection: database.person -INFO org.spring.example.MongoApp: 39 - Updated: Person [id=4ddc6e784ce5b1eba3ceaf5c, name=Joe, age=35] -DEBUG work.data.mongodb.core.MongoTemplate: 823 - remove using query: { "id" : "4ddc6e784ce5b1eba3ceaf5c"} in collection: person -INFO org.spring.example.MongoApp: 46 - Number of people = : 0 -DEBUG work.data.mongodb.core.MongoTemplate: 376 - Dropped collection [database.person] ----- - -There was implicit conversion using the `MongoConverter` between a `String` and `ObjectId` as stored in the database and recognizing a convention of the property "Id" name. - -NOTE: This example is meant to show the use of save, update and remove operations on MongoTemplate and not to show complex mapping functionality - -The query syntax used in the example is explained in more detail in the section <>. - -[[mongo-template.id-handling]] -=== How the `_id` field is handled in the mapping layer - -MongoDB requires that you have an `_id` field for all documents. If you don't provide one the driver will assign a `ObjectId` with a generated value. When using the `MappingMongoConverter` there are certain rules that govern how properties from the Java class is mapped to this `_id` field. 
- -The following outlines what property will be mapped to the `_id` document field: - -* A property or field annotated with `@Id` (`org.springframework.data.annotation.Id`) will be mapped to the `_id` field. -* A property or field without an annotation but named `id` will be mapped to the `_id` field. - -The following outlines what type conversion, if any, will be done on the property mapped to the _id document field when using the `MappingMongoConverter`, the default for `MongoTemplate`. - -* An id property or field declared as a String in the Java class will be converted to and stored as an `ObjectId` if possible using a Spring `Converter`. Valid conversion rules are delegated to the MongoDB Java driver. If it cannot be converted to an ObjectId, then the value will be stored as a string in the database. -* An id property or field declared as `BigInteger` in the Java class will be converted to and stored as an `ObjectId` using a Spring `Converter`. - -If no field or property specified above is present in the Java class then an implicit `_id` file will be generated by the driver but not mapped to a property or field of the Java class. - -When querying and updating `MongoTemplate` will use the converter to handle conversions of the `Query` and `Update` objects that correspond to the above rules for saving documents so field names and types used in your queries will be able to match what is in your domain classes. - -[[mongo-template.type-mapping]] -=== Type mapping - -As MongoDB collections can contain documents that represent instances of a variety of types. A great example here is if you store a hierarchy of classes or simply have a class with a property of type `Object`. In the latter case the values held inside that property have to be read in correctly when retrieving the object. Thus we need a mechanism to store type information alongside the actual document. 
- -To achieve that the `MappingMongoConverter` uses a `MongoTypeMapper` abstraction with `DefaultMongoTypeMapper` as it's main implementation. Its default behavior is storing the fully qualified classname under `_class` inside the document. Type hints are written for top-level documents as well as for every value if it's a complex type and a subtype of the property type declared. - - -.Type mapping -==== -[source,java] ----- -public class Sample { - Contact value; -} - -public abstract class Contact { … } - -public class Person extends Contact { … } - -Sample sample = new Sample(); -sample.value = new Person(); - -mongoTemplate.save(sample); - -{ - "value" : { "_class" : "com.acme.Person" }, - "_class" : "com.acme.Sample" -} ----- -==== - -As you can see we store the type information as last field for the actual root class as well as for the nested type as it is complex and a subtype of `Contact`. So if you're now using `mongoTemplate.findAll(Object.class, "sample")` we are able to find out that the document stored shall be a `Sample` instance. We are also able to find out that the value property shall be a `Person` actually. - -==== Customizing type mapping - -In case you want to avoid writing the entire Java class name as type information but rather like to use some key you can use the `@TypeAlias` annotation at the entity class being persisted. If you need to customize the mapping even more have a look at the `TypeInformationMapper` interface. An instance of that interface can be configured at the `DefaultMongoTypeMapper` which can be configured in turn on `MappingMongoConverter`. - -.Defining a TypeAlias for an Entity -==== -[source,java] ----- -@TypeAlias("pers") -class Person { - -} ----- -==== - -Note that the resulting document will contain `"pers"` as the value in the `_class` Field. - -==== Configuring custom type mapping - -The following example demonstrates how to configure a custom `MongoTypeMapper` in `MappingMongoConverter`. 
- -.Configuring a custom MongoTypeMapper via Spring Java Config -==== -[source,java] ----- -class CustomMongoTypeMapper extends DefaultMongoTypeMapper { - //implement custom type mapping here -} ----- -==== - -[source,java] ----- -@Configuration -class SampleMongoConfiguration extends AbstractMongoConfiguration { - - @Override - protected String getDatabaseName() { - return "database"; - } - - @Override - public MongoClient mongoClient() { - return new MongoClient(); - } - - @Bean - @Override - public MappingMongoConverter mappingMongoConverter() throws Exception { - MappingMongoConverter mmc = super.mappingMongoConverter(); - mmc.setTypeMapper(customTypeMapper()); - return mmc; - } - - @Bean - public MongoTypeMapper customTypeMapper() { - return new CustomMongoTypeMapper(); - } -} ----- - -Note that we are extending the `AbstractMongoConfiguration` class and override the bean definition of the `MappingMongoConverter` where we configure our custom `MongoTypeMapper`. - -.Configuring a custom MongoTypeMapper via XML -==== -[source,xml] ----- - - - ----- -==== - -[[mongo-template.save-insert]] -=== Methods for saving and inserting documents - -There are several convenient methods on `MongoTemplate` for saving and inserting your objects. To have more fine-grained control over the conversion process you can register Spring converters with the `MappingMongoConverter`, for example `Converter` and `Converter`. - -NOTE: The difference between insert and save operations is that a save operation will perform an insert if the object is not already present. - -The simple case of using the save operation is to save a POJO. In this case the collection name will be determined by name (not fully qualified) of the class. You may also call the save operation with a specific collection name. The collection to store the object can be overridden using mapping metadata. 
- -When inserting or saving, if the Id property is not set, the assumption is that its value will be auto-generated by the database. As such, for auto-generation of an ObjectId to succeed the type of the Id property/field in your class must be either a `String`, `ObjectId`, or `BigInteger`. - -Here is a basic example of using the save operation and retrieving its contents. - -.Inserting and retrieving documents using the MongoTemplate -==== -[source,java] ----- -import static org.springframework.data.mongodb.core.query.Criteria.where; -import static org.springframework.data.mongodb.core.query.Criteria.query; -… - -Person p = new Person("Bob", 33); -mongoTemplate.insert(p); - -Person qp = mongoTemplate.findOne(query(where("age").is(33)), Person.class); ----- -==== - -The insert/save operations available to you are listed below. - -* `void` *save* `(Object objectToSave)` Save the object to the default collection. -* `void` *save* `(Object objectToSave, String collectionName)` Save the object to the specified collection. - -A similar set of insert operations is listed below - -* `void` *insert* `(Object objectToSave)` Insert the object to the default collection. -* `void` *insert* `(Object objectToSave, String collectionName)` Insert the object to the specified collection. - -[[mongo-template.save-insert.collection]] -==== Which collection will my documents be saved into? - -There are two ways to manage the collection name that is used for operating on the documents. The default collection name that is used is the class name changed to start with a lower-case letter. So a `com.test.Person` class would be stored in the "person" collection. You can customize this by providing a different collection name using the @Document annotation. You can also override the collection name by providing your own collection name as the last parameter for the selected MongoTemplate method calls. 
- -[[mongo-template.save-insert.individual]] -==== Inserting or saving individual objects - -The MongoDB driver supports inserting a collection of documents in one operation. The methods in the MongoOperations interface that support this functionality are listed below - -* *insert* inserts an object. If there is an existing document with the same id then an error is generated. -* *insertAll* takes a `Collection` of objects as the first parameter. This method inspects each object and inserts it to the appropriate collection based on the rules specified above. -* *save* saves the object overwriting any object that might exist with the same id. - -[[mongo-template.save-insert.batch]] -==== Inserting several objects in a batch - -The MongoDB driver supports inserting a collection of documents in one operation. The methods in the MongoOperations interface that support this functionality are listed below - -* *insert* methods that take a `Collection` as the first argument. This inserts a list of objects in a single batch write to the database. - -[[mongodb-template-update]] -=== Updating documents in a collection - -For updates we can elect to update the first document found using `MongoOperation` 's method `updateFirst` or we can update all documents that were found to match the query using the method `updateMulti`. Here is an example of an update of all SAVINGS accounts where we are adding a one-time $50.00 bonus to the balance using the `$inc` operator. - -.Updating documents using the MongoTemplate -==== -[source,java] ----- -import static org.springframework.data.mongodb.core.query.Criteria.where; -import static org.springframework.data.mongodb.core.query.Query; -import static org.springframework.data.mongodb.core.query.Update; - -... 
- -WriteResult wr = mongoTemplate.updateMulti(new Query(where("accounts.accountType").is(Account.Type.SAVINGS)), - new Update().inc("accounts.$.balance", 50.00), Account.class); ----- -==== - -In addition to the `Query` discussed above we provide the update definition using an `Update` object. The `Update` class has methods that match the update modifiers available for MongoDB. - -As you can see most methods return the `Update` object to provide a fluent style for the API. - -[[mongodb-template-update.methods]] -==== Methods for executing updates for documents - -* *updateFirst* Updates the first document that matches the query document criteria with the provided updated document. -* *updateMulti* Updates all objects that match the query document criteria with the provided updated document. - -[[mongodb-template-update.update]] -==== Methods for the Update class - -The Update class can be used with a little 'syntax sugar' as its methods are meant to be chained together and you can kick-start the creation of a new Update instance via the static method `public static Update update(String key, Object value)` and using static imports. 
- -Here is a listing of methods on the Update class - -* `Update` *addToSet* `(String key, Object value)` Update using the `$addToSet` update modifier -* `Update` *currentDate* `(String key)` Update using the `$currentDate` update modifier -* `Update` *currentTimestamp* `(String key)` Update using the `$currentDate` update modifier with `$type` `timestamp` -* `Update` *inc* `(String key, Number inc)` Update using the `$inc` update modifier -* `Update` *max* `(String key, Object max)` Update using the `$max` update modifier -* `Update` *min* `(String key, Object min)` Update using the `$min` update modifier -* `Update` *multiply* `(String key, Number multiplier)` Update using the `$mul` update modifier -* `Update` *pop* `(String key, Update.Position pos)` Update using the `$pop` update modifier -* `Update` *pull* `(String key, Object value)` Update using the `$pull` update modifier -* `Update` *pullAll* `(String key, Object[] values)` Update using the `$pullAll` update modifier -* `Update` *push* `(String key, Object value)` Update using the `$push` update modifier -* `Update` *pushAll* `(String key, Object[] values)` Update using the `$pushAll` update modifier -* `Update` *rename* `(String oldName, String newName)` Update using the `$rename` update modifier -* `Update` *set* `(String key, Object value)` Update using the `$set` update modifier -* `Update` *setOnInsert* `(String key, Object value)` Update using the `$setOnInsert` update modifier -* `Update` *unset* `(String key)` Update using the `$unset` update modifier - -Some update modifiers like `$push` and `$addToSet` allow nesting of additional operators. 
- -[source] ----- -// { $push : { "category" : { "$each" : [ "spring" , "data" ] } } } -new Update().push("category").each("spring", "data") - -// { $push : { "key" : { "$position" : 0 , "$each" : [ "Arya" , "Arry" , "Weasel" ] } } } -new Update().push("key").atPosition(Position.FIRST).each(Arrays.asList("Arya", "Arry", "Weasel")); - -// { $push : { "key" : { "$slice" : 5 , "$each" : [ "Arya" , "Arry" , "Weasel" ] } } } -new Update().push("key").slice(5).each(Arrays.asList("Arya", "Arry", "Weasel")); ----- - -[source] ----- -// { $addToSet : { "values" : { "$each" : [ "spring" , "data" , "mongodb" ] } } } -new Update().addToSet("values").each("spring", "data", "mongodb"); ----- - -[[mongo-template.upserts]] -=== Upserting documents in a collection - -Related to performing an `updateFirst` operations, you can also perform an upsert operation which will perform an insert if no document is found that matches the query. The document that is inserted is a combination of the query document and the update document. Here is an example - -[source] ----- -template.upsert(query(where("ssn").is(1111).and("firstName").is("Joe").and("Fraizer").is("Update")), update("address", addr), Person.class); ----- - -[[mongo-template.find-and-upsert]] -=== Finding and Upserting documents in a collection - -The `findAndModify(…)` method on DBCollection can update a document and return either the old or newly updated document in a single operation. `MongoTemplate` provides a findAndModify method that takes `Query` and `Update` classes and converts from `Document` to your POJOs. 
Here are the methods - -[source,java] ----- - T findAndModify(Query query, Update update, Class entityClass); - - T findAndModify(Query query, Update update, Class entityClass, String collectionName); - - T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass); - - T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, String collectionName); ----- - -As an example usage, we will insert of few `Person` objects into the container and perform a simple findAndUpdate operation - -[source,java] ----- -mongoTemplate.insert(new Person("Tom", 21)); -mongoTemplate.insert(new Person("Dick", 22)); -mongoTemplate.insert(new Person("Harry", 23)); - -Query query = new Query(Criteria.where("firstName").is("Harry")); -Update update = new Update().inc("age", 1); -Person p = mongoTemplate.findAndModify(query, update, Person.class); // return's old person object - -assertThat(p.getFirstName(), is("Harry")); -assertThat(p.getAge(), is(23)); -p = mongoTemplate.findOne(query, Person.class); -assertThat(p.getAge(), is(24)); - -// Now return the newly updated document when updating -p = template.findAndModify(query, update, new FindAndModifyOptions().returnNew(true), Person.class); -assertThat(p.getAge(), is(25)); ----- - -The `FindAndModifyOptions` lets you set the options of returnNew, upsert, and remove. An example extending off the previous code snippet is shown below - -[source,java] ----- -Query query2 = new Query(Criteria.where("firstName").is("Mary")); -p = mongoTemplate.findAndModify(query2, update, new FindAndModifyOptions().returnNew(true).upsert(true), Person.class); -assertThat(p.getFirstName(), is("Mary")); -assertThat(p.getAge(), is(1)); ----- - -[[mongo-template.delete]] -=== Methods for removing documents - -You can use several overloaded methods to remove an object from the database. 
- -==== -[source,java] ----- -template.remove(tywin, "GOT"); <1> - -template.remove(query(where("lastname").is("lannister")), "GOT"); <2> - -template.remove(new Query().limit(3), "GOT"); <3> - -template.findAllAndRemove(query(where("lastname").is("lannister")), "GOT"); <4> - -template.findAllAndRemove(new Query().limit(3), "GOT"); <5> ----- -<1> Remove a single entity via its `_id` from the associated collection. -<2> Remove all documents matching the criteria of the query from the `GOT` collection. -<3> Remove the first 3 documents in the `GOT` collection. Unlike <2>, the documents to remove are identified via their `_id` executing the given query applying `sort`, `limit` and `skip` options first and then remove all at once in a separate step. -<4> Remove all documents matching the criteria of the query from the `GOT` collection. Unlike <3>, documents do not get deleted in a batch but one by one. -<5> Remove the first 3 documents in the `GOT` collection. Unlike <3>, documents do not get deleted in a batch but one by one. -==== - -[[mongo-template.optimistic-locking]] -=== Optimistic locking - -The `@Version` annotation provides a JPA similar semantic in the context of MongoDB and makes sure updates are only applied to documents with matching version. Therefore the actual value of the version property is added to the update query in a way that the update won't have any effect if another operation altered the document in between. In that case an `OptimisticLockingFailureException` is thrown. - -==== -[source,java] ----- -@Document -class Person { - - @Id String id; - String firstname; - String lastname; - @Version Long version; -} - -Person daenerys = template.insert(new Person("Daenerys")); <1> - -Person tmp = template.findOne(query(where("id").is(daenerys.getId())), Person.class); <2> - -daenerys.setLastname("Targaryen"); -template.save(daenerys); <3> - -template.save(tmp); // throws OptimisticLockingFailureException <4> ----- -<1> Initially insert document.
`version` is set to `0`. -<2> Load the just inserted document; `version` is still `0`. -<3> Update document with `version = 0`. Set the `lastname` and bump `version` to `1`. -<4> Try to update previously loaded document still having `version = 0` fails with `OptimisticLockingFailureException` as the current `version` is `1`. -==== - -IMPORTANT: Using MongoDB driver version 3 requires setting the `WriteConcern` to `ACKNOWLEDGED`. Otherwise `OptimisticLockingFailureException` can be silently swallowed. - -[[mongo.query]] -== Querying Documents - -You can express your queries using the `Query` and `Criteria` classes which have method names that mirror the native MongoDB operator names such as `lt`, `lte`, `is`, and others. The `Query` and `Criteria` classes follow a fluent API style so that you can easily chain together multiple method criteria and queries while keeping the code easy to understand. Static imports in Java are used to help remove the need to see the 'new' keyword for creating `Query` and `Criteria` instances so as to improve readability. If you like to create `Query` instances from a plain JSON String use `BasicQuery`. - -.Creating a Query instance from a plain JSON String -==== -[source,java] ----- -BasicQuery query = new BasicQuery("{ age : { $lt : 50 }, accounts.balance : { $gt : 1000.00 }}"); -List result = mongoTemplate.find(query, Person.class); ----- -==== - -GeoSpatial queries are also supported and are described more in the section <>. - -Map-Reduce operations are also supported and are described more in the section <>. - -[[mongodb-template-query]] -=== Querying documents in a collection - -We saw how to retrieve a single document using the findOne and findById methods on MongoTemplate in previous sections which return a single domain object. We can also query for a collection of documents to be returned as a list of domain objects.
Assuming that we have a number of Person objects with name and age stored as documents in a collection and that each person has an embedded account document with a balance. We can now run a query using the following code. - -.Querying for documents using the MongoTemplate -==== -[source,java] ----- -import static org.springframework.data.mongodb.core.query.Criteria.where; -import static org.springframework.data.mongodb.core.query.Query.query; - -… - -List result = mongoTemplate.find(query(where("age").lt(50) - .and("accounts.balance").gt(1000.00d)), Person.class); ----- -==== - -All find methods take a `Query` object as a parameter. This object defines the criteria and options used to perform the query. The criteria is specified using a `Criteria` object that has a static factory method named `where` used to instantiate a new `Criteria` object. We recommend using a static import for `org.springframework.data.mongodb.core.query.Criteria.where` and `Query.query` to make the query more readable. - -This query should return a list of `Person` objects that meet the specified criteria. The `Criteria` class has the following methods that correspond to the operators provided in MongoDB. - -As you can see most methods return the `Criteria` object to provide a fluent style for the API. - -[[mongodb-template-query.criteria]] -==== Methods for the Criteria class - -* `Criteria` *all* `(Object o)` Creates a criterion using the `$all` operator -* `Criteria` *and* `(String key)` Adds a chained `Criteria` with the specified `key` to the current `Criteria` and returns the newly created one -* `Criteria` *andOperator* `(Criteria... 
criteria)` Creates an and query using the `$and` operator for all of the provided criteria (requires MongoDB 2.0 or later) -* `Criteria` *elemMatch* `(Criteria c)` Creates a criterion using the `$elemMatch` operator -* `Criteria` *exists* `(boolean b)` Creates a criterion using the `$exists` operator -* `Criteria` *gt* `(Object o)` Creates a criterion using the `$gt` operator -* `Criteria` *gte* `(Object o)` Creates a criterion using the `$gte` operator -* `Criteria` *in* `(Object... o)` Creates a criterion using the `$in` operator for a varargs argument. -* `Criteria` *in* `(Collection collection)` Creates a criterion using the `$in` operator using a collection -* `Criteria` *is* `(Object o)` Creates a criterion using field matching (`{ key:value }`). If the specified value is a document, the order of the fields and exact equality in the document matters. -* `Criteria` *lt* `(Object o)` Creates a criterion using the `$lt` operator -* `Criteria` *lte* `(Object o)` Creates a criterion using the `$lte` operator -* `Criteria` *mod* `(Number value, Number remainder)` Creates a criterion using the `$mod` operator -* `Criteria` *ne* `(Object o)` Creates a criterion using the `$ne` operator -* `Criteria` *nin* `(Object... o)` Creates a criterion using the `$nin` operator -* `Criteria` *norOperator* `(Criteria... criteria)` Creates an nor query using the `$nor` operator for all of the provided criteria -* `Criteria` *not* `()` Creates a criterion using the `$not` meta operator which affects the clause directly following -* `Criteria` *orOperator* `(Criteria... 
criteria)` Creates an or query using the `$or` operator for all of the provided criteria -* `Criteria` *regex* `(String re)` Creates a criterion using a `$regex` -* `Criteria` *size* `(int s)` Creates a criterion using the `$size` operator -* `Criteria` *type* `(int t)` Creates a criterion using the `$type` operator -* `Criteria` *matchingDocumentStructure* `(MongoJsonSchema schema)` Creates a criterion using the `$jsonSchema` operator for <>. `$jsonSchema` can only be applied on the top level of a query and not property specific. Use the `properties` attribute of the schema to match against nested fields. - - -There are also methods on the Criteria class for geospatial queries. Here is a listing but look at the section on <> to see them in action. - -* `Criteria` *within* `(Circle circle)` Creates a geospatial criterion using `$geoWithin $center` operators. -* `Criteria` *within* `(Box box)` Creates a geospatial criterion using a `$geoWithin $box` operation. -* `Criteria` *withinSphere* `(Circle circle)` Creates a geospatial criterion using `$geoWithin $center` operators. -* `Criteria` *near* `(Point point)` Creates a geospatial criterion using a `$near` operation -* `Criteria` *nearSphere* `(Point point)` Creates a geospatial criterion using `$nearSphere$center` operations. This is only available for MongoDB 1.7 and higher. -* `Criteria` *minDistance* `(double minDistance)` Creates a geospatial criterion using the `$minDistance` operation, for use with $near. -* `Criteria` *maxDistance* `(double maxDistance)` Creates a geospatial criterion using the `$maxDistance` operation, for use with $near. - -The `Query` class has some additional methods used to provide options for the query. 
- -[[mongodb-template-query.query]] -==== Methods for the Query class - -* `Query` *addCriteria* `(Criteria criteria)` used to add additional criteria to the query -* `Field` *fields* `()` used to define fields to be included in the query results -* `Query` *limit* `(int limit)` used to limit the size of the returned results to the provided limit (used for paging) -* `Query` *skip* `(int skip)` used to skip the provided number of documents in the results (used for paging) -* `Query` *with* `(Sort sort)` used to provide sort definition for the results - -[[mongo-template.querying]] -=== Methods for querying for documents - -The query methods need to specify the target type T that will be returned and they are also overloaded with an explicit collection name for queries that should operate on a collection other than the one indicated by the return type. - -* *findAll* Query for a list of objects of type T from the collection. -* *findOne* Map the results of an ad-hoc query on the collection to a single instance of an object of the specified type. -* *findById* Return an object of the given id and target class. -* *find* Map the results of an ad-hoc query on the collection to a List of the specified type. -* *findAndRemove* Map the results of an ad-hoc query on the collection to a single instance of an object of the specified type. The first document that matches the query is returned and also removed from the collection in the database. - -[[mongo-template.query.distinct]] -=== Query distinct values - -MongoDB provides an operation to obtain distinct values for a single field using a query from the resulting documents. -Resulting values are not required to have the same data type, nor is the feature limited to simple types. -For retrieval the actual result type does matter for the sake of conversion and typing.
- -.Retrieving distinct values -==== -[source,java] ----- -template.query(Person.class) <1> - .distinct("lastname") <2> - .all(); <3> ----- -<1> Query the collection of `Person`. -<2> Select _distinct_ values of the `lastname` field. The fieldname will be mapped according to the domain types property declaration, taking potential `@Field` annotations into account. -<3> Retrieve all distinct values as `List` of `Object` due to no explicit result type specification. -==== - -Retrieving distinct values into a `Collection` of `Object` is the most flexible way as it will try to determine the property value of the domain type converting results to the desired type or mapping `Document` structures. - -Sometimes, when all values of the desired field are fixed to a certain type, it is more convenient to directly obtain a correctly typed `Collection` - -.Retrieving strongly typed distinct values -==== -[source,java] ----- -template.query(Person.class) <1> - .distinct("lastname") <2> - .as(String.class) <3> - .all(); <4> ----- -<1> Query the collection of `Person`. -<2> Select _distinct_ values of the `lastname` field. The fieldname will be mapped according to the domain types property declaration, taking potential `@Field` annotations into account. -<3> Retrieved values will be converted into the desired target type. In this case `String`. It would also be possible to map the values to a more complex type if the stored field contains a document. -<4> Retrieve all distinct values as a `List` of `String`. Throws a `DataAccessException` if the type cannot be converted into the desired target type. -==== - -[[mongo.geospatial]] -=== GeoSpatial Queries - -MongoDB supports GeoSpatial queries through the use of operators such as `$near`, `$within`, `geoWithin` and `$nearSphere`. Methods specific to geospatial queries are available on the `Criteria` class. 
There are also a few shape classes, `Box`, `Circle`, and `Point` that are used in conjunction with geospatial related `Criteria` methods. - -To understand how to perform GeoSpatial queries we will use the following Venue class taken from the integration tests which relies on using the rich `MappingMongoConverter`. - -[source,java] ----- -@Document(collection="newyork") -public class Venue { - - @Id - private String id; - private String name; - private double[] location; - - @PersistenceConstructor - Venue(String name, double[] location) { - super(); - this.name = name; - this.location = location; - } - - public Venue(String name, double x, double y) { - super(); - this.name = name; - this.location = new double[] { x, y }; - } - - public String getName() { - return name; - } - - public double[] getLocation() { - return location; - } - - @Override - public String toString() { - return "Venue [id=" + id + ", name=" + name + ", location=" - + Arrays.toString(location) + "]"; - } -} ----- - -To find locations within a `Circle`, the following query can be used. 
- -[source,java] ----- -Circle circle = new Circle(-73.99171, 40.738868, 0.01); -List venues = - template.find(new Query(Criteria.where("location").within(circle)), Venue.class); ----- - -To find venues within a `Circle` using spherical coordinates the following query can be used - -[source,java] ----- -Circle circle = new Circle(-73.99171, 40.738868, 0.003712240453784); -List venues = - template.find(new Query(Criteria.where("location").withinSphere(circle)), Venue.class); ----- - -To find venues within a `Box` the following query can be used - -[source,java] ----- -//lower-left then upper-right -Box box = new Box(new Point(-73.99756, 40.73083), new Point(-73.988135, 40.741404)); -List venues = - template.find(new Query(Criteria.where("location").within(box)), Venue.class); ----- - -To find venues near a `Point`, the following queries can be used - -[source,java] ----- -Point point = new Point(-73.99171, 40.738868); -List venues = - template.find(new Query(Criteria.where("location").near(point).maxDistance(0.01)), Venue.class); ----- - -[source,java] ----- -Point point = new Point(-73.99171, 40.738868); -List venues = - template.find(new Query(Criteria.where("location").near(point).minDistance(0.01).maxDistance(100)), Venue.class); ----- - -To find venues near a `Point` using spherical coordinates the following query can be used - -[source,java] ----- -Point point = new Point(-73.99171, 40.738868); -List venues = - template.find(new Query( - Criteria.where("location").nearSphere(point).maxDistance(0.003712240453784)), - Venue.class); ----- - -[[mongo.geo-near]] -==== Geo near queries - -MongoDB supports querying the database for geo locations and calculation the distance from a given origin at the very same time. With geo-near queries it's possible to express queries like: "find all restaurants in the surrounding 10 miles". 
To do so `MongoOperations` provides `geoNear(…)` methods taking a `NearQuery` as argument as well as the already familiar entity type and collection - -[source,java] ----- -Point location = new Point(-73.99171, 40.738868); -NearQuery query = NearQuery.near(location).maxDistance(new Distance(10, Metrics.MILES)); - -GeoResults = operations.geoNear(query, Restaurant.class); ----- - -As you can see we use the `NearQuery` builder API to set up a query to return all `Restaurant` instances surrounding the given `Point` by 10 miles maximum. The `Metrics` enum used here actually implements an interface so that other metrics could be plugged into a distance as well. A `Metric` is backed by a multiplier to transform the distance value of the given metric into native distances. The sample shown here would consider the 10 to be miles. Using one of the pre-built in metrics (miles and kilometers) will automatically trigger the spherical flag to be set on the query. If you want to avoid that, simply hand in plain `double` values into `maxDistance(…)`. For more information see the JavaDoc of `NearQuery` and `Distance`. - -The geo near operations return a `GeoResults` wrapper object that encapsulates `GeoResult` instances. The wrapping `GeoResults` allows accessing the average distance of all results. A single `GeoResult` object simply carries the entity found plus its distance from the origin. - -[[mongo.geo-json]] -=== GeoJSON Support - -MongoDB supports http://geojson.org/[GeoJSON] and simple (legacy) coordinate pairs for geospatial data. Those formats can both be used for storing as well as querying data. - -NOTE: Please refer to the http://docs.mongodb.org/manual/core/2dsphere/#geospatial-indexes-store-geojson/[MongoDB manual on GeoJSON support] to learn about requirements and restrictions. - -==== GeoJSON types in domain classes - -Usage of http://geojson.org/[GeoJSON] types in domain classes is straight forward. 
The `org.springframework.data.mongodb.core.geo` package contains types like `GeoJsonPoint`, `GeoJsonPolygon` and others. Those are extensions to the existing `org.springframework.data.geo` types. - -==== -[source,java] ----- -public class Store { - - String id; - - /** - * location is stored in GeoJSON format. - * { - * "type" : "Point", - * "coordinates" : [ x, y ] - * } - */ - GeoJsonPoint location; -} ----- -==== - -==== GeoJSON types in repository query methods - -Using GeoJSON types as repository query parameters forces usage of the `$geometry` operator when creating the query. - -==== -[source,java] ----- -public interface StoreRepository extends CrudRepository { - - List findByLocationWithin(Polygon polygon); <1> - -} - -/* - * { - * "location": { - * "$geoWithin": { - * "$geometry": { - * "type": "Polygon", - * "coordinates": [ - * [ - * [-73.992514,40.758934], - * [-73.961138,40.760348], - * [-73.991658,40.730006], - * [-73.992514,40.758934] - * ] - * ] - * } - * } - * } - * } - */ -repo.findByLocationWithin( <2> - new GeoJsonPolygon( - new Point(-73.992514, 40.758934), - new Point(-73.961138, 40.760348), - new Point(-73.991658, 40.730006), - new Point(-73.992514, 40.758934))); <3> - -/* - * { - * "location" : { - * "$geoWithin" : { - * "$polygon" : [ [-73.992514,40.758934] , [-73.961138,40.760348] , [-73.991658,40.730006] ] - * } - * } - * } - */ -repo.findByLocationWithin( <4> - new Polygon( - new Point(-73.992514, 40.758934), - new Point(-73.961138, 40.760348), - new Point(-73.991658, 40.730006))); ----- -<1> Repository method definition using the commons type allows calling it with both GeoJSON and legacy format. -<2> Use the GeoJSON type to make use of the `$geometry` operator. -<3> Please note that GeoJSON polygons need to define a closed ring. -<4> Use legacy format `$polygon` operator. -==== - -[[mongo.textsearch]] -=== Full Text Queries - -Since MongoDB 2.6 full text queries can be executed using the `$text` operator.
Methods and operations specific for full text queries are available in `TextQuery` and `TextCriteria`. When doing full text search please refer to the http://docs.mongodb.org/manual/reference/operator/query/text/#behavior[MongoDB reference] for its behavior and limitations. - -==== Full Text Search - -Before we are actually able to use full text search we have to ensure to set up the search index correctly. Please refer to section <> for creating index structures. - -[source,javascript] ----- -db.foo.createIndex( -{ - title : "text", - content : "text" -}, -{ - weights : { - title : 3 - } -} -) ----- - -A query searching for `coffee cake`, sorted by relevance according to the `weights` can be defined and executed as: - -[source,java] ----- -Query query = TextQuery.searching(new TextCriteria().matchingAny("coffee", "cake")).sortByScore(); -List page = template.find(query, Document.class); ----- - -Exclusion of search terms can directly be done by prefixing the term with `-` or using `notMatching` - -[source,java] ----- -// search for 'coffee' and not 'cake' -TextQuery.searching(new TextCriteria().matching("coffee").matching("-cake")); -TextQuery.searching(new TextCriteria().matching("coffee").notMatching("cake")); ----- - -As `TextCriteria.matching` takes the provided term as is. Therefore phrases can be defined by putting them between double quotes (eg. `\"coffee cake\")` or using `TextCriteria.phrase.` - -[source,java] ----- -// search for phrase 'coffee cake' -TextQuery.searching(new TextCriteria().matching("\"coffee cake\"")); -TextQuery.searching(new TextCriteria().phrase("coffee cake")); ----- - -The flags for `$caseSensitive` and `$diacriticSensitive` can be set via the according methods on `TextCriteria`. Please note that these two optional flags have been introduced in MongoDB 3.2 and will not be included in the query unless explicitly set. 
- -[[mongo.collation]] -=== Collations - -MongoDB supports since 3.4 collations for collection and index creation and various query operations. Collations define string comparison rules based on the http://userguide.icu-project.org/collation/concepts[ICU collations]. A collation document consists of various properties that are encapsulated in `Collation`: - -==== -[source,java] ----- -Collation collation = Collation.of("fr") <1> - - .strength(ComparisonLevel.secondary() <2> - .includeCase()) - - .numericOrderingEnabled() <3> - - .alternate(Alternate.shifted().punct()) <4> - - .forwardDiacriticSort() <5> - - .normalizationEnabled(); <6> ----- -<1> `Collation` requires a locale for creation. This can be either a string representation of the locale, a `Locale` (considering language, country and variant) or a `CollationLocale`. The locale is mandatory for creation. -<2> Collation strength defines comparison levels denoting differences between characters. You can configure various options (case-sensitivity, case-ordering) depending on the selected strength. -<3> Specify whether to compare numeric strings as numbers or as strings. -<4> Specify whether the collation should consider whitespace and punctuation as base characters for purposes of comparison. -<5> Specify whether strings with diacritics sort from back of the string, such as with some French dictionary ordering. -<6> Specify whether to check if text requires normalization and to perform normalization. -==== - -Collations can be used to create collections and indexes. If you create a collection specifying a collation, the collation is applied to index creation and queries unless you specify a different collation. A collation is valid for a whole operation and cannot be specified on a per-field basis. 
- -[source,java] ----- -Collation french = Collation.of("fr"); -Collation german = Collation.of("de"); - -template.createCollection(Person.class, CollectionOptions.just(french)); - -template.indexOps(Person.class).ensureIndex(new Index("name", Direction.ASC).collation(german)); ----- - -NOTE: MongoDB uses simple binary comparison if no collation is specified (`Collation.simple()`). - -Using collations with collection operations is a matter of specifying a `Collation` instance in your query or operation options. - -.Using collation with `find` -==== -[source,java] ----- -Collation collation = Collation.of("de"); - -Query query = new Query(Criteria.where("firstName").is("Amél")).collation(collation); - -List results = template.find(query, Person.class); ----- -==== - -.Using collation with `aggregate` -==== -[source,java] ----- -Collation collation = Collation.of("de"); - -AggregationOptions options = AggregationOptions.builder().collation(collation).build(); - -Aggregation aggregation = newAggregation( - project("tags"), - unwind("tags"), - group("tags") - .count().as("count") -).withOptions(options); - -AggregationResults results = template.aggregate(aggregation, "tags", TagCount.class); ----- -==== - -WARNING: Indexes are only used if the collation used for the operation and the index collation matches. - -[[mongo.jsonSchema]] -=== JSON Schema - -As of version 3.6 MongoDB supports collections that validate ``Document``s against a provided JSON Schema. -The schema itself and both validation action and level can be defined when creating the collection.
- -.Sample JSON schema -==== -[source,json] ----- -{ - "type": "object", <1> - - "required": [ "firstname", "lastname" ], <2> - - "properties": { <3> - - "firstname": { <4> - "type": "string", - "enum": [ "luke", "han" ] - }, - "address": { <5> - "type": "object", - "properties": { - "postCode": { "type": "string", "minLength": 4, "maxLength": 5 } - } - } - } -} ----- -<1> JSON schema documents always describe a whole document from its root. A schema is a schema object itself that can contain -embedded schema objects describing properties and subdocuments. -<2> `required` is a property describing which properties are required in a document. It can be specified optionally along with other -schema constraints. See MongoDB's documentation on https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#available-keywords[available keywords]. -<3> `properties` is related to a schema object describing an `object` type. It contains property-specific schema constraints. -<4> `firstname` specifies constraints for the `firstname` field inside the document. Here it's a string-based property declaring - possible field values. -<5> `address` is a subdocument defining a schema for values in its `postCode` field. -==== - -You can provide a schema either by specifying a schema document (i.e. using the `Document` API by parsing or building a document object) or by building it with Spring Data's JSON schema utilities in `org.springframework.data.mongodb.core.schema`. `MongoJsonSchema` is the entry point for all JSON schema-related operations. - -.Creating a JSON schema -==== -[source,java] ----- -MongoJsonSchema.builder() <1> - .required("firstname", "lastname") <2> - - .properties( - string("firstname").possibleValues("luke", "han"), <3> - - object("address") - .properties(string("postCode").minLength(4).maxLength(5))) - - .build(); <4> ----- -<1> Obtain a schema builder to configure the schema with a fluent API. -<2> Configure required properties.
-<3> Configure the String-typed `firstname` field allowing only `luke` and `han` values. Properties can be typed or untyped. Use a static import of `JsonSchemaProperty` to make the syntax slightly more compact and to get entrypoints like `string(…)`. -<4> Build the schema object. Use the schema to either create a collection or <>. -==== - -`CollectionOptions` provides the entry point to schema support for collections. - -.Create collection with `$jsonSchema` -==== -[source,java] ----- -MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build(); - -template.createCollection(Person.class, CollectionOptions.empty().schema(schema)); ----- -==== - -You can use a schema to query any collection for documents that match a given structure defined by a JSON schema. - -.Query for Documents matching a `$jsonSchema` -==== -[source,java] ----- -MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build(); - -template.find(query(matchingDocumentStructure(schema)), Person.class); ----- -==== - -[cols="3,1,6", options="header"] -.Supported JSON schema types -|=== -| Schema Type -| Java Type -| Schema Properties - -| `untyped` -| - -| `description`, generated `description`, `enum`, `allOf`, `anyOf`, `oneOf`, `not` - -| `object` -| `Object` -| `required`, `additionalProperties`, `properties`, `minProperties`, `maxProperties`, `patternProperties` - -| `array` -| any array except `byte[]` -| `uniqueItems`, `additionalItems`, `items`, `minItems`, `maxItems` - -| `string` -| `String` -| `minLength`, `maxLentgth`, `pattern` - -| `int` -| `int`, `Integer` -| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` - -| `long` -| `long`, `Long` -| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` - -| `double` -| `float`, `Float`, `double`, `Double` -| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` - -| `decimal` -| `BigDecimal` -| `multipleOf`, `minimum`, 
`exclusiveMinimum`, `maximum`, `exclusiveMaximum` - -| `number` -| `Number` -| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` - -| `binData` -| `byte[]` -| - -| `boolean` -| `boolean`, `Boolean` -| - -| `null` -| `null` -| - -| `objectId` -| `ObjectId` -| - -| `date` -| `java.util.Date` -| - -| `timestamp` -| `BsonTimestamp` -| - -| `regex` -| `java.util.regex.Pattern` -| - -|=== - -NOTE: `untyped` is a generic type that is inherited by all typed schema types providing all `untyped` schema properties to typed schema types. - -For more information, see https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#op._S_jsonSchema[$jsonSchema]. - -[[mongo.query.fluent-template-api]] -=== Fluent Template API - -The `MongoOperations` interface is one of the central components when it comes to more low level interaction with MongoDB. It offers a wide range of methods covering needs from collection / index creation and CRUD operations to more advanced functionality like map-reduce and aggregations. -One can find multiple overloads for each and every method. Most of them just cover optional / nullable parts of the API. - -`FluentMongoOperations` provide a more narrow interface for common methods of `MongoOperations` providing a more readable, fluent API. -The entry points `insert(…)`, `find(…)`, `update(…)`, etc. follow a natural naming schema based on the operation to execute. Moving on from the entry point the API is designed to only offer context dependent methods guiding towards a terminating method that invokes the actual `MongoOperations` counterpart. - -==== -[source,java] ----- -List all = ops.find(SWCharacter.class) - .inCollection("star-wars") <1> - .all(); ----- -<1> Skip this step if `SWCharacter` defines the collection via `@Document` or if using the class name as the collection name is just fine. -==== - -Sometimes a collection in MongoDB holds entities of different types. 
Like a `Jedi` within a collection of `SWCharacters`. -To use different types for `Query` and return value mapping one can use `as(Class targetType)` map results differently. - -==== -[source,java] ----- -List all = ops.find(SWCharacter.class) <1> - .as(Jedi.class) <2> - .matching(query(where("jedi").is(true))) - .all(); ----- -<1> The query fields are mapped against the `SWCharacter` type. -<2> Resulting documents are mapped into `Jedi`. -==== - -TIP: It is possible to directly apply <> to resulting documents by providing just the `interface` type via `as(Class)`. - -Switching between retrieving a single entity, multiple ones as `List` or `Stream` like is done via the terminating methods `first()`, `one()`, `all()` or `stream()`. - -When writing a geo-spatial query via `near(NearQuery)` the number of terminating methods is altered to just the ones valid for executing a `geoNear` command in MongoDB fetching entities as `GeoResult` within `GeoResults`. - -==== -[source,java] ----- -GeoResults results = mongoOps.query(SWCharacter.class) - .as(Jedi.class) - .near(alderaan) // NearQuery.near(-73.9667, 40.78).maxDis… - .all(); ----- -==== - -include::../{spring-data-commons-docs}/query-by-example.adoc[leveloffset=+1] -include::query-by-example.adoc[leveloffset=+1] - -[[mongo.mapreduce]] -== Map-Reduce Operations - -You can query MongoDB using Map-Reduce which is useful for batch processing, data aggregation, and for when the query language doesn't fulfill your needs. - -Spring provides integration with MongoDB's map reduce by providing methods on MongoOperations to simplify the creation and execution of Map-Reduce operations. It can convert the results of a Map-Reduce operation to a POJO also integrates with Spring's http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/core.html#resources[Resource abstraction] abstraction. 
This will let you place your JavaScript files on the file system, classpath, http server or any other Spring Resource implementation and then reference the JavaScript resources via an easy URI style syntax, e.g. 'classpath:reduce.js'. Externalizing JavaScript code in files is often preferable to embedding them as Java strings in your code. Note that you can still pass JavaScript code as Java strings if you prefer. - -[[mongo.mapreduce.example]] -=== Example Usage - -To understand how to perform Map-Reduce operations an example from the book 'MongoDB - The definitive guide' is used. In this example we will create three documents that have the values [a,b], [b,c], and [c,d] respectively. The values in each document are associated with the key 'x' as shown below. For this example assume these documents are in the collection named "jmr1". - -[source] ----- -{ "_id" : ObjectId("4e5ff893c0277826074ec533"), "x" : [ "a", "b" ] } -{ "_id" : ObjectId("4e5ff893c0277826074ec534"), "x" : [ "b", "c" ] } -{ "_id" : ObjectId("4e5ff893c0277826074ec535"), "x" : [ "c", "d" ] } ----- - -A map function that will count the occurrence of each letter in the array for each document is shown below - -[source,java] ----- -function () { - for (var i = 0; i < this.x.length; i++) { - emit(this.x[i], 1); - } -} ----- - -The reduce function that will sum up the occurrence of each letter across all the documents is shown below - -[source,java] ----- -function (key, values) { - var sum = 0; - for (var i = 0; i < values.length; i++) - sum += values[i]; - return sum; -} ----- - -Executing this will result in a collection as shown below. 
- -[source] ----- -{ "_id" : "a", "value" : 1 } -{ "_id" : "b", "value" : 2 } -{ "_id" : "c", "value" : 2 } -{ "_id" : "d", "value" : 1 } ----- - -Assuming that the map and reduce functions are located in `map.js` and `reduce.js` and bundled in your jar so they are available on the classpath, you can execute a map-reduce operation and obtain the results as shown below - -[source,java] ----- -MapReduceResults results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js", ValueObject.class); -for (ValueObject valueObject : results) { - System.out.println(valueObject); -} ----- - -The output of the above code is - -[source] ----- -ValueObject [id=a, value=1.0] -ValueObject [id=b, value=2.0] -ValueObject [id=c, value=2.0] -ValueObject [id=d, value=1.0] ----- - -The MapReduceResults class implements `Iterable` and provides access to the raw output, as well as timing and count statistics. The `ValueObject` class is simply - -[source,java] ----- -public class ValueObject { - - private String id; - private float value; - - public String getId() { - return id; - } - - public float getValue() { - return value; - } - - public void setValue(float value) { - this.value = value; - } - - @Override - public String toString() { - return "ValueObject [id=" + id + ", value=" + value + "]"; - } -} ----- - -By default the output type of INLINE is used so you don't have to specify an output collection. To specify additional map-reduce options use an overloaded method that takes an additional `MapReduceOptions` argument. The class `MapReduceOptions` has a fluent API so adding additional options can be done in a very compact syntax. Here an example that sets the output collection to "jmr1_out". Note that setting only the output collection assumes a default output type of REPLACE. 
- -[source,java] ----- -MapReduceResults results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js", - new MapReduceOptions().outputCollection("jmr1_out"), ValueObject.class); ----- - -There is also a static import `import static org.springframework.data.mongodb.core.mapreduce.MapReduceOptions.options;` that can be used to make the syntax slightly more compact - -[source,java] ----- -MapReduceResults results = mongoOperations.mapReduce("jmr1", "classpath:map.js", "classpath:reduce.js", - options().outputCollection("jmr1_out"), ValueObject.class); ----- - -You can also specify a query to reduce the set of data that will be used to feed into the map-reduce operation. This will remove the document that contains [a,b] from consideration for map-reduce operations. - -[source,java] ----- -Query query = new Query(where("x").ne(new String[] { "a", "b" })); -MapReduceResults results = mongoOperations.mapReduce(query, "jmr1", "classpath:map.js", "classpath:reduce.js", - options().outputCollection("jmr1_out"), ValueObject.class); ----- - -Note that you can specify additional limit and sort values as well on the query but not skip values. - -[[mongo.server-side-scripts]] -== Script Operations - -MongoDB allows executing JavaScript functions on the server by either directly sending the script or calling a stored one. `ScriptOperations` can be accessed via `MongoTemplate` and provides basic abstraction for `JavaScript` usage. - -=== Example Usage - -==== -[source,java] ----- -ScriptOperations scriptOps = template.scriptOps(); - -ExecutableMongoScript echoScript = new ExecutableMongoScript("function(x) { return x; }"); -scriptOps.execute(echoScript, "directly execute script"); <1> - -scriptOps.register(new NamedMongoScript("echo", echoScript)); <2> -scriptOps.call("echo", "execute script via name"); <3> ----- -<1> Execute the script directly without storing the function on server side. -<2> Store the script using 'echo' as its name. 
The given name identifies the script and allows calling it later. -<3> Execute the script with name 'echo' using the provided parameters. -==== - -[[mongo.group]] -== Group Operations - -As an alternative to using Map-Reduce to perform data aggregation, you can use the http://www.mongodb.org/display/DOCS/Aggregation#Aggregation-Group[`group` operation] which feels similar to using SQL's group by query style, so it may feel more approachable vs. using Map-Reduce. Using the group operations does have some limitations, for example it is not supported in a sharded environment and it returns the full result set in a single BSON object, so the result should be small, less than 10,000 keys. - -Spring provides integration with MongoDB's group operation by providing methods on MongoOperations to simplify the creation and execution of group operations. It can convert the results of the group operation to a POJO and also integrates with Spring's http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/core.html#resources[Resource abstraction]. This will let you place your JavaScript files on the file system, classpath, http server or any other Spring Resource implementation and then reference the JavaScript resources via an easy URI style syntax, e.g. 'classpath:reduce.js'. Externalizing JavaScript code in files is often preferable to embedding them as Java strings in your code. Note that you can still pass JavaScript code as Java strings if you prefer. - -[[mongo.group.example]] -=== Example Usage - -In order to understand how group operations work the following example is used, which is somewhat artificial. For a more realistic example consult the book 'MongoDB - The definitive guide'. A collection named `group_test_collection` was created with the following rows. 
- -[source] ----- -{ "_id" : ObjectId("4ec1d25d41421e2015da64f1"), "x" : 1 } -{ "_id" : ObjectId("4ec1d25d41421e2015da64f2"), "x" : 1 } -{ "_id" : ObjectId("4ec1d25d41421e2015da64f3"), "x" : 2 } -{ "_id" : ObjectId("4ec1d25d41421e2015da64f4"), "x" : 3 } -{ "_id" : ObjectId("4ec1d25d41421e2015da64f5"), "x" : 3 } -{ "_id" : ObjectId("4ec1d25d41421e2015da64f6"), "x" : 3 } ----- - -We would like to group by the only field in each row, the `x` field and aggregate the number of times each specific value of `x` occurs. To do this we need to create an initial document that contains our count variable and also a reduce function which will increment it each time it is encountered. The Java code to execute the group operation is shown below - -[source,java] ----- -GroupByResults results = mongoTemplate.group("group_test_collection", - GroupBy.key("x").initialDocument("{ count: 0 }").reduceFunction("function(doc, prev) { prev.count += 1 }"), - XObject.class); ----- - -The first argument is the name of the collection to run the group operation over, the second is a fluent API that specifies properties of the group operation via a `GroupBy` class. In this example we are using just the `initialDocument` and `reduceFunction` methods. You can also specify a key-function, as well as a finalizer as part of the fluent API. If you have multiple keys to group by, you can pass in a comma separated list of keys. - -The raw results of the group operation is a JSON document that looks like this - -[source] ----- -{ - "retval" : [ { "x" : 1.0 , "count" : 2.0} , - { "x" : 2.0 , "count" : 1.0} , - { "x" : 3.0 , "count" : 3.0} ] , - "count" : 6.0 , - "keys" : 3 , - "ok" : 1.0 -} ----- - -The document under the "retval" field is mapped onto the third argument in the group method, in this case XObject which is shown below. 
- -[source,java] ----- -public class XObject { - - private float x; - - private float count; - - - public float getX() { - return x; - } - - public void setX(float x) { - this.x = x; - } - - public float getCount() { - return count; - } - - public void setCount(float count) { - this.count = count; - } - - @Override - public String toString() { - return "XObject [x=" + x + " count = " + count + "]"; - } -} ----- - -You can also obtain the raw result as a `Document` by calling the method `getRawResults` on the `GroupByResults` class. - -There is an additional method overload of the group method on `MongoOperations` which lets you specify a `Criteria` object for selecting a subset of the rows. An example which uses a `Criteria` object, with some syntax sugar using static imports, as well as referencing a key-function and reduce function javascript files via a Spring Resource string is shown below. - -[source] ----- -import static org.springframework.data.mongodb.core.mapreduce.GroupBy.keyFunction; -import static org.springframework.data.mongodb.core.query.Criteria.where; - -GroupByResults results = mongoTemplate.group(where("x").gt(0), - "group_test_collection", - keyFunction("classpath:keyFunction.js").initialDocument("{ count: 0 }").reduceFunction("classpath:groupReduce.js"), XObject.class); ----- - -[[mongo.aggregation]] -== Aggregation Framework Support - -Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2. - -The MongoDB Documentation describes the http://docs.mongodb.org/manual/core/aggregation/[Aggregation Framework] as follows: - -For further information see the full http://docs.mongodb.org/manual/aggregation/[reference documentation] of the aggregation framework and other data aggregation tools for MongoDB. 
- -[[mongo.aggregation.basic-concepts]] -=== Basic Concepts - -The Aggregation Framework support in Spring Data MongoDB is based on the following key abstractions `Aggregation`, `AggregationOperation` and `AggregationResults`. - -* `Aggregation` -+ -An Aggregation represents a MongoDB `aggregate` operation and holds the description of the aggregation pipeline instructions. Aggregations are created by invoking the appropriate `newAggregation(…)` static factory method of the `Aggregation` class which takes the list of `AggregationOperation` as a parameter next to the optional input class. -+ -The actual aggregate operation is executed by the `aggregate` method of the `MongoTemplate` which also takes the desired output class as parameter. -+ -* `AggregationOperation` -+ -An `AggregationOperation` represents a MongoDB aggregation pipeline operation and describes the processing that should be performed in this aggregation step. Although one could manually create an `AggregationOperation` the recommended way to construct an `AggregationOperation` is to use the static factory methods provided by the `Aggregation` class. -+ -* `AggregationResults` -+ -`AggregationResults` is the container for the result of an aggregate operation. It provides access to the raw aggregation result in the form of a `Document`, to the mapped objects and other information about the aggregation. 
-+ -The canonical example for using the Spring Data MongoDB support for the MongoDB Aggregation Framework looks as follows: - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -Aggregation agg = newAggregation( - pipelineOP1(), - pipelineOP2(), - pipelineOPn() -); - -AggregationResults results = mongoTemplate.aggregate(agg, "INPUT_COLLECTION_NAME", OutputType.class); -List mappedResult = results.getMappedResults(); ----- - -Note that if you provide an input class as the first parameter to the `newAggregation` method the `MongoTemplate` will derive the name of the input collection from this class. Otherwise, if you do not specify an input class you must provide the name of the input collection explicitly. If an input-class and an input-collection are provided the latter takes precedence. - -[[mongo.aggregation.supported-aggregation-operations]] -=== Supported Aggregation Operations - -The MongoDB Aggregation Framework provides the following types of Aggregation Operations: - -* Pipeline Aggregation Operators -* Group Aggregation Operators -* Boolean Aggregation Operators -* Comparison Aggregation Operators -* Arithmetic Aggregation Operators -* String Aggregation Operators -* Date Aggregation Operators -* Array Aggregation Operators -* Conditional Aggregation Operators -* Lookup Aggregation Operators - -At the time of this writing we provide support for the following Aggregation Operations in Spring Data MongoDB. 
- -.Aggregation Operations currently supported by Spring Data MongoDB -[cols="2*"] -|=== -| Pipeline Aggregation Operators -| bucket, bucketAuto, count, facet, geoNear, graphLookup, group, limit, lookup, match, project, replaceRoot, skip, sort, unwind - -| Set Aggregation Operators -| setEquals, setIntersection, setUnion, setDifference, setIsSubset, anyElementTrue, allElementsTrue - -| Group Aggregation Operators -| addToSet, first, last, max, min, avg, push, sum, (*count), stdDevPop, stdDevSamp - -| Arithmetic Aggregation Operators -| abs, add (*via plus), ceil, divide, exp, floor, ln, log, log10, mod, multiply, pow, sqrt, subtract (*via minus), trunc - -| String Aggregation Operators -| concat, substr, toLower, toUpper, strcasecmp, indexOfBytes, indexOfCP, split, strLenBytes, strLenCP, substrCP - -| Comparison Aggregation Operators -| eq (*via: is), gt, gte, lt, lte, ne - -| Array Aggregation Operators -| arrayElementAt, concatArrays, filter, in, indexOfArray, isArray, range, reverseArray, reduce, size, slice, zip - -| Literal Operators -| literal - -| Date Aggregation Operators -| dayOfYear, dayOfMonth, dayOfWeek, year, month, week, hour, minute, second, millisecond, dateToString, isoDayOfWeek, isoWeek, isoWeekYear - -| Variable Operators -| map - -| Conditional Aggregation Operators -| cond, ifNull, switch - -| Type Aggregation Operators -| type - -|=== - -Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions. - -*) The operation is mapped or added by Spring Data MongoDB. - -[[mongo.aggregation.projection]] -=== Projection Expressions - -Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined via the `project` method of the `Aggregation` class either by passing a list of ``String``'s or an aggregation framework `Fields` object. 
The projection can be extended with additional fields through a fluent API via the `and(String)` method and aliased via the `as(String)` method. -Note that one can also define fields with aliases via the static factory method `Fields.field` of the aggregation framework that can then be used to construct a new `Fields` instance. References to projected fields in later aggregation stages are only valid by using the field name of included fields or their alias of aliased or newly defined fields. Fields not included in the projection cannot be referenced in later aggregation stages. - -.Projection expression examples -==== -[source,java] ----- -// will generate {$project: {name: 1, netPrice: 1}} -project("name", "netPrice") - -// will generate {$project: {bar: $foo}} -project().and("foo").as("bar") - -// will generate {$project: {a: 1, b: 1, bar: $foo}} -project("a","b").and("foo").as("bar") ----- -==== - -.Multi-Stage Aggregation using Projection and Sorting -==== -[source,java] ----- -// will generate {$project: {name: 1, netPrice: 1}}, {$sort: {name: 1}} -project("name", "netPrice"), sort(ASC, "name") - -// will generate {$project: {bar: $foo}}, {$sort: {bar: 1}} -project().and("foo").as("bar"), sort(ASC, "bar") - -// this will not work -project().and("foo").as("bar"), sort(ASC, "foo") ----- -==== - -More examples for project operations can be found in the `AggregationTests` class. Note that further details regarding the projection expressions can be found in the http://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation. - -[[mongo.aggregation.facet]] -=== Faceted classification - -MongoDB supports as of Version 3.4 faceted classification using the Aggregation Framework. A faceted classification uses semantic categories, either general or subject-specific, that are combined to create the full classification entry. 
Documents flowing through the aggregation pipeline are classified into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times. - -==== Buckets - -Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries. Bucket operations require a grouping field or grouping expression. They can be defined via the `bucket()`/`bucketAuto()` methods of the `Aggregation` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. The bucket operation can be extended with additional parameters through a fluent API via the `with…()` methods, the `andOutput(String)` method and aliased via the `as(String)` method. Each bucket is represented as a document in the output. - -`BucketOperation` takes a defined set of boundaries to group incoming documents into these categories. Boundaries are required to be sorted. - -.Bucket operation examples -==== -[source,java] ----- -// will generate {$bucket: {groupBy: $price, boundaries: [0, 100, 400]}} -bucket("price").withBoundaries(0, 100, 400); - -// will generate {$bucket: {groupBy: $price, default: "Other", boundaries: [0, 100]}} -bucket("price").withBoundaries(0, 100).withDefault("Other"); - -// will generate {$bucket: {groupBy: $price, boundaries: [0, 100], output: { count: { $sum: 1}}}} -bucket("price").withBoundaries(0, 100).andOutputCount().as("count"); - -// will generate {$bucket: {groupBy: $price, boundaries: [0, 100], output: { titles: { $push: "$title"}}} -bucket("price").withBoundaries(0, 100).andOutput("title").push().as("titles"); ----- -==== - -`BucketAutoOperation` determines boundaries itself in an attempt to evenly distribute documents into a specified number of buckets. 
`BucketAutoOperation` optionally takes a granularity value that specifies the https://en.wikipedia.org/wiki/Preferred_number[preferred number] series to use to ensure that the calculated boundary edges end on preferred round numbers or their powers of 10. - -.Bucket operation examples -==== -[source,java] ----- -// will generate {$bucketAuto: {groupBy: $price, buckets: 5}} -bucketAuto("price", 5) - -// will generate {$bucketAuto: {groupBy: $price, buckets: 5, granularity: "E24"}} -bucketAuto("price", 5).withGranularity(Granularities.E24).withDefault("Other"); - -// will generate {$bucketAuto: {groupBy: $price, buckets: 5, output: { titles: { $push: "$title"}}} -bucketAuto("price", 5).andOutput("title").push().as("titles"); ----- -==== - -Bucket operations can use `AggregationExpression` via `andOutput()` and <<mongo.aggregation.projection.expressions,SpEL expressions>> via `andOutputExpression()` to create output fields in buckets. - -Note that further details regarding bucket expressions can be found in the http://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and -http://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation. - -==== Multi-faceted aggregation - -Multiple aggregation pipelines can be used to create multi-faceted aggregations which characterize data across multiple dimensions, or facets, within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is how many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, etc. - -A `FacetOperation` can be defined via the `facet()` method of the `Aggregation` class. It can be customized with multiple aggregation pipelines via the `and()` method. Each sub-pipeline has its own field in the output document where its results are stored as an array of documents. 
- -Sub-pipelines can project and filter input documents prior to grouping. Common cases are extraction of date parts or calculations before categorization. - -.Facet operation examples -==== -[source,java] ----- -// will generate {$facet: {categorizedByPrice: [ { $match: { price: {$exists : true}}}, { $bucketAuto: {groupBy: $price, buckets: 5}}]}} -facet(match(Criteria.where("price").exists(true)), bucketAuto("price", 5)).as("categorizedByPrice") - -// will generate {$facet: {categorizedByCountry: [ { $match: { country: {$exists : true}}}, { $sortByCount: "$country"}]}} -facet(match(Criteria.where("country").exists(true)), sortByCount("country")).as("categorizedByCountry") - -// will generate {$facet: {categorizedByYear: [ -// { $project: { title: 1, publicationYear: { $year: "publicationDate"}}}, -// { $bucketAuto: {groupBy: $price, buckets: 5, output: { titles: {$push:"$title"}}} -// ]}} -facet(project("title").and("publicationDate").extractYear().as("publicationYear"), - bucketAuto("publicationYear", 5).andOutput("title").push().as("titles")) - .as("categorizedByYear") ----- -==== - -Note that further details regarding facet operation can be found in the http://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation. - -[[mongo.aggregation.sort-by-count]] -==== SortByCount - -Sort by count operations group incoming documents based on the value of a specified expression, then compute the count of documents in each distinct group and sort the results by count. It's a handy shortcut to apply sorting for when using <<mongo.aggregation.facet,faceted classification>>. Sort by count operations require a grouping field or grouping expression. 
- -.Sort by count example -==== -[source,java] ----- -// will generate { $sortByCount: "$country" } -sortByCount("country"); ----- -==== - -A sort by count operation is equivalent to the following BSON: - ----- -{ $group: { _id: , count: { $sum: 1 } } }, -{ $sort: { count: -1 } } ----- - -[[mongo.aggregation.projection.expressions]] -==== Spring Expression Support in Projection Expressions - -We support the use of SpEL expression in projection expressions via the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This allows you to define the desired expression as a SpEL expression which is translated into a corresponding MongoDB projection expression part on query execution. This makes it much easier to express complex calculations. - -===== Complex calculations with SpEL expressions - -The following SpEL expression: - -[source,java] ----- -1 + (q + 1) / (q - 1) ----- - -will be translated into the following projection expression part: - -[source,javascript] ----- -{ "$add" : [ 1, { - "$divide" : [ { - "$add":["$q", 1]}, { - "$subtract":[ "$q", 1]} - ] -}]} ----- - -Have a look at an example in more context in <> and <>. You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`. - -.Supported SpEL transformations -[cols="2"] -|=== -| a == b -| { $eq : [$a, $b] } -| a != b -| { $ne : [$a , $b] } -| a > b -| { $gt : [$a, $b] } -| a >= b -| { $gte : [$a, $b] } -| a < b -| { $lt : [$a, $b] } -| a <= b -| { $lte : [$a, $b] } -| a + b -| { $add : [$a, $b] } -| a - b -| { $subtract : [$a, $b] } -| a * b -| { $multiply : [$a, $b] } -| a / b -| { $divide : [$a, $b] } -| a^b -| { $pow : [$a, $b] } -| a % b -| { $mod : [$a, $b] } -| a && b -| { $and : [$a, $b] } -| a \|\| b -| { $or : [$a, $b] } -| !a -| { $not : [$a] } -|=== - -Next to the transformations shown in Supported SpEL transformations it is possible to use standard SpEL operations like `new` to eg. 
create arrays and reference expressions via their name followed by the arguments to use in brackets. - -[source,java] ----- -// { $setEquals : [$a, [5, 8, 13] ] } -.andExpression("setEquals(a, new int[]{5, 8, 13})"); ----- - -[[mongo.aggregation.examples]] -==== Aggregation Framework Examples - -The following examples demonstrate the usage patterns for the MongoDB Aggregation Framework with Spring Data MongoDB. - -[[mongo.aggregation.examples.example1]] -.Aggregation Framework Example 1 - -In this introductory example we want to aggregate a list of tags to get the occurrence count of a particular tag from a MongoDB collection called `"tags"` sorted by the occurrence count in descending order. This example demonstrates the usage of grouping, sorting, projections (selection) and unwinding (result splitting). - -[source,java] ----- -class TagCount { - String tag; - int n; -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -Aggregation agg = newAggregation( - project("tags"), - unwind("tags"), - group("tags").count().as("n"), - project("n").and("tag").previousOperation(), - sort(DESC, "n") -); - -AggregationResults results = mongoTemplate.aggregate(agg, "tags", TagCount.class); -List tagCount = results.getMappedResults(); ----- - -* In order to do this we first create a new aggregation via the `newAggregation` static factory method to which we pass a list of aggregation operations. These aggregate operations define the aggregation pipeline of our `Aggregation`. -* As a second step we select the `"tags"` field (which is an array of strings) from the input collection with the `project` operation. -* In a third step we use the `unwind` operation to generate a new document for each tag within the `"tags"` array. 
-* In the fourth step we use the `group` operation to define a group for each `"tags"`-value for which we aggregate the occurrence count via the `count` aggregation operator and collect the result in a new field called `"n"`. -* As a fifth step we select the field `"n"` and create an alias for the id-field generated from the previous group operation (hence the call to `previousOperation()`) with the name `"tag"`. -* As the sixth step we sort the resulting list of tags by their occurrence count in descending order via the `sort` operation. -* Finally we call the `aggregate` method on the MongoTemplate in order to let MongoDB perform the actual aggregation operation with the created `Aggregation` as an argument. - -Note that the input collection is explicitly specified as the `"tags"` parameter to the `aggregate` method. If the name of the input collection is not specified explicitly, it is derived from the input-class passed as first parameter to the `newAggregation` method. - -[[mongo.aggregation.examples.example2]] -.Aggregation Framework Example 2 - -This example is based on the http://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state[Largest and Smallest Cities by State] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return the smallest and largest cities by population for each state, using the aggregation framework. This example demonstrates the usage of grouping, sorting and projections (selection). 
- -[source,java] ----- -class ZipInfo { - String id; - String city; - String state; - @Field("pop") int population; - @Field("loc") double[] location; -} - -class City { - String name; - int population; -} - -class ZipInfoStats { - String id; - String state; - City biggestCity; - City smallestCity; -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation aggregation = newAggregation(ZipInfo.class, - group("state", "city") - .sum("population").as("pop"), - sort(ASC, "pop", "state", "city"), - group("state") - .last("city").as("biggestCity") - .last("pop").as("biggestPop") - .first("city").as("smallestCity") - .first("pop").as("smallestPop"), - project() - .and("state").previousOperation() - .and("biggestCity") - .nested(bind("name", "biggestCity").and("population", "biggestPop")) - .and("smallestCity") - .nested(bind("name", "smallestCity").and("population", "smallestPop")), - sort(ASC, "state") -); - -AggregationResults result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class); -ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0); ----- - -* The class `ZipInfo` maps the structure of the given input-collection. The class `ZipInfoStats` defines the structure in the desired output format. -* As a first step we use the `group` operation to define a group from the input-collection. The grouping criteria is the combination of the fields `"state"` and `"city"` which forms the id structure of the group. We aggregate the value of the `"population"` property from the grouped elements with by using the `sum` operator saving the result in the field `"pop"`. -* In a second step we use the `sort` operation to sort the intermediate-result by the fields `"pop"`, `"state"` and `"city"` in ascending order, such that the smallest city is at the top and the biggest city is at the bottom of the result. 
Note that the sorting on `"state"` and `"city"` is implicitly performed against the group id fields which Spring Data MongoDB took care of. -* In the third step we use a `group` operation again to group the intermediate result by `"state"`. Note that `"state"` again implicitly references a group-id field. We select the name and the population count of the biggest and smallest city with calls to the `last(…)` and `first(…)` operator respectively via the `project` operation. -* As the fourth step we select the `"state"` field from the previous `group` operation. Note that `"state"` again implicitly references a group-id field. As we do not want an implicitly generated id to appear, we exclude the id from the previous operation via `and(previousOperation()).exclude()`. As we want to populate the nested `City` structures in our output-class accordingly we have to emit appropriate sub-documents with the nested method. -* Finally as the fifth step we sort the resulting list of `StateStats` by their state name in ascending order via the `sort` operation. - -Note that we derive the name of the input-collection from the `ZipInfo`-class passed as first parameter to the `newAggregation` method. - -[[mongo.aggregation.examples.example3]] -.Aggregation Framework Example 3 - -This example is based on the http://docs.mongodb.org/manual/tutorial/aggregation-examples/#states-with-populations-over-10-million[States with Populations Over 10 Million] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return all states with a population greater than 10 million, using the aggregation framework. This example demonstrates the usage of grouping, sorting and matching (filtering). 
- -[source,java] ----- -class StateStats { - @Id String id; - String state; - @Field("totalPop") int totalPopulation; -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation agg = newAggregation(ZipInfo.class, - group("state").sum("population").as("totalPop"), - sort(ASC, previousOperation(), "totalPop"), - match(where("totalPop").gte(10 * 1000 * 1000)) -); - -AggregationResults result = mongoTemplate.aggregate(agg, StateStats.class); -List stateStatsList = result.getMappedResults(); ----- - -* As a first step we group the input collection by the `"state"` field and calculate the sum of the `"population"` field and store the result in the new field `"totalPop"`. -* In the second step we sort the intermediate result by the id-reference of the previous group operation in addition to the `"totalPop"` field in ascending order. -* Finally in the third step we filter the intermediate result by using a `match` operation which accepts a `Criteria` query as an argument. - -Note that we derive the name of the input-collection from the `ZipInfo`-class passed as first parameter to the `newAggregation`-Method. - -[[mongo.aggregation.examples.example4]] -.Aggregation Framework Example 4 - -This example demonstrates the use of simple arithmetic operations in the projection operation. 
- -[source,java] ----- -class Product { - String id; - String name; - double netPrice; - int spaceUnits; -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation agg = newAggregation(Product.class, - project("name", "netPrice") - .and("netPrice").plus(1).as("netPricePlus1") - .and("netPrice").minus(1).as("netPriceMinus1") - .and("netPrice").multiply(1.19).as("grossPrice") - .and("netPrice").divide(2).as("netPriceDiv2") - .and("spaceUnits").mod(2).as("spaceUnitsMod2") -); - -AggregationResults result = mongoTemplate.aggregate(agg, Document.class); -List resultList = result.getMappedResults(); ----- - -Note that we derive the name of the input-collection from the `Product`-class passed as first parameter to the `newAggregation`-Method. - -[[mongo.aggregation.examples.example5]] -.Aggregation Framework Example 5 - -This example demonstrates the use of simple arithmetic operations derived from SpEL Expressions in the projection operation. 
- -[source,java] ----- -class Product { - String id; - String name; - double netPrice; - int spaceUnits; -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation agg = newAggregation(Product.class, - project("name", "netPrice") - .andExpression("netPrice + 1").as("netPricePlus1") - .andExpression("netPrice - 1").as("netPriceMinus1") - .andExpression("netPrice / 2").as("netPriceDiv2") - .andExpression("netPrice * 1.19").as("grossPrice") - .andExpression("spaceUnits % 2").as("spaceUnitsMod2") - .andExpression("(netPrice * 0.8 + 1.2) * 1.19").as("grossPriceIncludingDiscountAndCharge") - -); - -AggregationResults result = mongoTemplate.aggregate(agg, Document.class); -List resultList = result.getMappedResults(); ----- - -[[mongo.aggregation.examples.example6]] -.Aggregation Framework Example 6 - -This example demonstrates the use of complex arithmetic operations derived from SpEL Expressions in the projection operation. - -Note: The additional parameters passed to the `addExpression` Method can be referenced via indexer expressions according to their position. In this example we reference the parameter which is the first parameter of the parameters array via `[0]`. External parameter expressions are replaced with their respective values when the SpEL expression is transformed into a MongoDB aggregation framework expression. 
- -[source,java] ----- -class Product { - String id; - String name; - double netPrice; - int spaceUnits; -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -double shippingCosts = 1.2; - -TypedAggregation agg = newAggregation(Product.class, - project("name", "netPrice") - .andExpression("(netPrice * (1-discountRate) + [0]) * (1+taxRate)", shippingCosts).as("salesPrice") -); - -AggregationResults result = mongoTemplate.aggregate(agg, Document.class); -List resultList = result.getMappedResults(); ----- - -Note that we can also refer to other fields of the document within the SpEL expression. - -[[mongo.aggregation.examples.example7]] -.Aggregation Framework Example 7 - -This example uses conditional projection. It's derived from the https://docs.mongodb.com/manual/reference/operator/aggregation/cond/[$cond reference documentation]. - -[source,java] ----- -public class InventoryItem { - - @Id int id; - String item; - String description; - int qty; -} - -public class InventoryItemProjection { - - @Id int id; - String item; - String description; - int qty; - int discount -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation agg = newAggregation(InventoryItem.class, - project("item").and("discount") - .applyCondition(ConditionalOperator.newBuilder().when(Criteria.where("qty").gte(250)) - .then(30) - .otherwise(20)) - .and(ifNull("description", "Unspecified")).as("description") -); - -AggregationResults result = mongoTemplate.aggregate(agg, "inventory", InventoryItemProjection.class); -List stateStatsList = result.getMappedResults(); ----- - -* This one-step aggregation uses a projection operation with the `inventory` collection. We project the `discount` field using a conditional operation for all inventory items that have a `qty` greater or equal to `250`. A second conditional projection is performed for the `description` field. 
We apply the description `Unspecified` to all items that either do not have a `description` field of items that have a `null` description. - - -[[mongo.custom-converters]] -== Overriding default mapping with custom converters - -In order to have more fine-grained control over the mapping process you can register Spring converters with the `MongoConverter` implementations such as the `MappingMongoConverter`. - -The `MappingMongoConverter` checks to see if there are any Spring converters that can handle a specific class before attempting to map the object itself. To 'hijack' the normal mapping strategies of the `MappingMongoConverter`, perhaps for increased performance or other custom mapping needs, you first need to create an implementation of the Spring `Converter` interface and then register it with the MappingConverter. - -NOTE: For more information on the Spring type conversion service see the reference docs http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/core.html#validation[here]. - -[[mongo.custom-converters.writer]] -=== Saving using a registered Spring Converter - -An example implementation of the `Converter` that converts from a Person object to a `org.bson.Document` is shown below - -[source,java] ----- -import org.springframework.core.convert.converter.Converter; - -import org.bson.Document; - -public class PersonWriteConverter implements Converter { - - public Document convert(Person source) { - Document document = new Document(); - document.put("_id", source.getId()); - document.put("name", source.getFirstName()); - document.put("age", source.getAge()); - return document; - } -} ----- - -[[mongo.custom-converters.reader]] -=== Reading using a Spring Converter - -An example implementation of a Converter that converts from a Document to a Person object is shown below. 
- -[source,java] ----- -public class PersonReadConverter implements Converter { - - public Person convert(Document source) { - Person p = new Person((ObjectId) source.get("_id"), (String) source.get("name")); - p.setAge((Integer) source.get("age")); - return p; - } -} ----- - -[[mongo.custom-converters.xml]] -=== Registering Spring Converters with the MongoConverter - -The Mongo Spring namespace provides a convenience way to register Spring `Converter` s with the `MappingMongoConverter`. The configuration snippet below shows how to manually register converter beans as well as configuring the wrapping `MappingMongoConverter` into a `MongoTemplate`. - -[source,xml] ----- - - - - - - - - - - - - - - - - - ----- - -You can also use the base-package attribute of the custom-converters element to enable classpath scanning for all `Converter` and `GenericConverter` implementations below the given package. - -[source,xml] ----- - - - ----- - -[[mongo.converter-disambiguation]] -=== Converter disambiguation - -Generally we inspect the `Converter` implementations for the source and target types they convert from and to. Depending on whether one of those is a type MongoDB can handle natively we will register the converter instance as reading or writing one. Have a look at the following samples: - -[source,java] ----- -// Write converter as only the target type is one Mongo can handle natively -class MyConverter implements Converter { … } - -// Read converter as only the source type is one Mongo can handle natively -class MyConverter implements Converter { … } ----- - -In case you write a `Converter` whose source and target type are native Mongo types there's no way for us to determine whether we should consider it as reading or writing converter. Registering the converter instance as both might lead to unwanted results then. E.g. a `Converter` is ambiguous although it probably does not make sense to try to convert all `String` instances into `Long` instances when writing. 
To be generally able to force the infrastructure to register a converter for one way only we provide `@ReadingConverter` as well as `@WritingConverter` to be used in the converter implementation. - -[[mongo-template.index-and-collections]] -== Index and Collection management - -`MongoTemplate` provides a few methods for managing indexes and collections. These are collected into a helper interface called `IndexOperations`. You access these operations by calling the method `indexOps` and pass in either the collection name or the `java.lang.Class` of your entity (the collection name will be derived from the .class either by name or via annotation metadata). - -The `IndexOperations` interface is shown below - -[source,java] ----- -public interface IndexOperations { - - void ensureIndex(IndexDefinition indexDefinition); - - void dropIndex(String name); - - void dropAllIndexes(); - - void resetIndexCache(); - - List getIndexInfo(); -} ----- - -[[mongo-template.index-and-collections.index]] -=== Methods for creating an Index - -We can create an index on a collection to improve query performance. - -==== Creating an index using the MongoTemplate - -[source,java] ----- -mongoTemplate.indexOps(Person.class).ensureIndex(new Index().on("name",Order.ASCENDING)); ----- - -* *ensureIndex* Ensure that an index for the provided IndexDefinition exists for the collection. - -You can create standard, geospatial and text indexes using the classes `IndexDefinition`, `GeoSpatialIndex` and `TextIndexDefinition`. For example, given the Venue class defined in a previous section, you would declare a geospatial query as shown below. - -[source,java] ----- -mongoTemplate.indexOps(Venue.class).ensureIndex(new GeospatialIndex("location")); ----- - -NOTE: `Index` and `GeospatialIndex` support configuration of <>. - -[[mongo-template.index-and-collections.access]] -=== Accessing index information - -The IndexOperations interface has the method getIndexInfo that returns a list of IndexInfo objects. 
This contains all the indexes defined on the collection. Here is an example that defines an index on the Person class that has age property. - -[source,java] ----- -template.indexOps(Person.class).ensureIndex(new Index().on("age", Order.DESCENDING).unique(Duplicates.DROP)); - -List indexInfoList = template.indexOps(Person.class).getIndexInfo(); - -// Contains -// [IndexInfo [fieldSpec={_id=ASCENDING}, name=_id_, unique=false, dropDuplicates=false, sparse=false], -// IndexInfo [fieldSpec={age=DESCENDING}, name=age_-1, unique=true, dropDuplicates=true, sparse=false]] ----- - -[[mongo-template.index-and-collections.collection]] -=== Methods for working with a Collection - -It's time to look at some code examples showing how to use the `MongoTemplate`. First we look at creating our first collection. - -.Working with collections using the MongoTemplate -==== -[source,java] ----- -DBCollection collection = null; -if (!mongoTemplate.getCollectionNames().contains("MyNewCollection")) { - collection = mongoTemplate.createCollection("MyNewCollection"); -} - -mongoTemplate.dropCollection("MyNewCollection"); ----- -==== - -* *getCollectionNames* Returns a set of collection names. -* *collectionExists* Check to see if a collection with a given name exists. -* *createCollection* Create an uncapped collection -* *dropCollection* Drop the collection -* *getCollection* Get a collection by name, creating it if it doesn't exist. - -NOTE: Collection creation allows customization via `CollectionOptions` and supports <>. - -[[mongo-template.commands]] -== Executing Commands - -You can also get at the MongoDB driver's `MongoDatabase.runCommand( )` method using the `executeCommand(…)` methods on `MongoTemplate`. These will also perform exception translation into Spring's `DataAccessException` hierarchy. - -[[mongo-template.commands.execution]] -=== Methods for executing commands - -* `Document` *executeCommand* `(Document command)` Execute a MongoDB command. 
-* `Document` *executeCommand* `(Document command, ReadPreference readPreference)` Execute a MongoDB command using the given nullable MongoDB `ReadPreference`. -* `Document` *executeCommand* `(String jsonCommand)` Execute the a MongoDB command expressed as a JSON string. - -[[mongodb.mapping-usage.events]] -== Lifecycle Events - -Built into the MongoDB mapping framework are several `org.springframework.context.ApplicationEvent` events that your application can respond to by registering special beans in the `ApplicationContext`. By being based off Spring's ApplicationContext event infrastructure this enables other products, such as Spring Integration, to easily receive these events as they are a well known eventing mechanism in Spring based applications. - -To intercept an object before it goes through the conversion process (which turns your domain object into a `org.bson.Document`), you'd register a subclass of `AbstractMongoEventListener` that overrides the `onBeforeConvert` method. When the event is dispatched, your listener will be called and passed the domain object before it goes into the converter. - -==== -[source,java] ----- -public class BeforeConvertListener extends AbstractMongoEventListener { - @Override - public void onBeforeConvert(BeforeConvertEvent event) { - ... does some auditing manipulation, set timestamps, whatever ... - } -} ----- -==== - -To intercept an object before it goes into the database, you'd register a subclass of `org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener` that overrides the `onBeforeSave` method. When the event is dispatched, your listener will be called and passed the domain object and the converted `com.mongodb.Document`. 
- -==== -[source,java] ----- -public class BeforeSaveListener extends AbstractMongoEventListener { - @Override - public void onBeforeSave(BeforeSaveEvent event) { - … change values, delete them, whatever … - } -} ----- -==== - -Simply declaring these beans in your Spring ApplicationContext will cause them to be invoked whenever the event is dispatched. - -The list of callback methods that are present in AbstractMappingEventListener are - -* `onBeforeConvert` - called in MongoTemplate insert, insertList and save operations before the object is converted to a Document using a MongoConveter. -* `onBeforeSave` - called in MongoTemplate insert, insertList and save operations *before* inserting/saving the Document in the database. -* `onAfterSave` - called in MongoTemplate insert, insertList and save operations *after* inserting/saving the Document in the database. -* `onAfterLoad` - called in MongoTemplate find, findAndRemove, findOne and getCollection methods after the Document is retrieved from the database. -* `onAfterConvert` - called in MongoTemplate find, findAndRemove, findOne and getCollection methods after the Document retrieved from the database was converted to a POJO. - -NOTE: Lifecycle events are only emitted for root level types. Complex types used as properties within a document root are not subject of event publication unless they are document references annotated with `@DBRef`. - -[[mongo.exception]] -== Exception Translation - -The Spring framework provides exception translation for a wide variety of database and mapping technologies. This has traditionally been for JDBC and JPA. The Spring support for MongoDB extends this feature to the MongoDB Database by providing an implementation of the `org.springframework.dao.support.PersistenceExceptionTranslator` interface. 
 - -The motivation behind mapping to Spring's http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/data-access.html#dao-exceptions[consistent data access exception hierarchy] is that you are then able to write portable and descriptive exception handling code without resorting to coding against MongoDB error codes. All of Spring's data access exceptions are inherited from the root `DataAccessException` class so you can be sure that you will be able to catch all database related exceptions within a single try-catch block. Note that not all exceptions thrown by the MongoDB driver inherit from the MongoException class. The inner exception and message are preserved so no information is lost. - -Some of the mappings performed by the `MongoExceptionTranslator` are: com.mongodb.Network to DataAccessResourceFailureException and `MongoException` error codes 1003, 12001, 12010, 12011, 12012 to `InvalidDataAccessApiUsageException`. Look into the implementation for more details on the mapping. - -[[mongo.executioncallback]] -== Execution callbacks - -One common design feature of all Spring template classes is that all functionality is routed into one of the template's execute callback methods. This helps ensure that exceptions and any resource management that may be required are performed consistently. While this was of much greater need in the case of JDBC and JMS than with MongoDB, it still offers a single spot for exception translation and logging to occur. As such, using these execute callbacks is the preferred way to access the MongoDB driver's `DB` and `DBCollection` objects to perform uncommon operations that were not exposed as methods on `MongoTemplate`. - -Here is a list of execute callback methods. - -* ` T` *execute* `(Class entityClass, CollectionCallback action)` Executes the given CollectionCallback for the entity collection of the specified class. 
- -* ` T` *execute* `(String collectionName, CollectionCallback action)` Executes the given CollectionCallback on the collection of the given name. - -* ` T` *execute* `(DbCallback action) Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2.` Executes a DbCallback translating any exceptions as necessary. - -* ` T` *execute* `(String collectionName, DbCallback action)` Executes a DbCallback on the collection of the given name translating any exceptions as necessary. - -* ` T` *executeInSession* `(DbCallback action)` Executes the given DbCallback within the same connection to the database so as to ensure consistency in a write heavy environment where you may read the data that you wrote. - -Here is an example that uses the `CollectionCallback` to return information about an index - -[source,java] ----- -boolean hasIndex = template.execute("geolocation", new CollectionCallbackBoolean>() { - public Boolean doInCollection(Venue.class, DBCollection collection) throws MongoException, DataAccessException { - List indexes = collection.getIndexInfo(); - for (Document document : indexes) { - if ("location_2d".equals(document.get("name"))) { - return true; - } - } - return false; - } -}); ----- - -[[gridfs]] -== GridFS support - -MongoDB supports storing binary files inside it's filesystem GridFS. Spring Data MongoDB provides a `GridFsOperations` interface as well as the according implementation `GridFsTemplate` to easily interact with the filesystem. 
You can setup a `GridFsTemplate` instance by handing it a `MongoDbFactory` as well as a `MongoConverter`: - -.JavaConfig setup for a GridFsTemplate -==== -[source,java] ----- -class GridFsConfiguration extends AbstractMongoConfiguration { - - // … further configuration omitted - - @Bean - public GridFsTemplate gridFsTemplate() { - return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter()); - } -} ----- -==== - -An according XML configuration looks like this: - -.XML configuration for a GridFsTemplate -==== -[source,xml] ----- - - - - - - - - - - - - ----- -==== - -The template can now be injected and used to perform storage and retrieval operations. - -.Using GridFsTemplate to store files -==== -[source,java] ----- -class GridFsClient { - - @Autowired - GridFsOperations operations; - - @Test - public void storeFileToGridFs() { - - FileMetadata metadata = new FileMetadata(); - // populate metadata - Resource file = … // lookup File or Resource - - operations.store(file.getInputStream(), "filename.txt", metadata); - } -} ----- -==== - -The `store(…)` operations take an `InputStream`, a filename and optionally metadata information about the file to store. The metadata can be an arbitrary object which will be marshaled by the `MongoConverter` configured with the `GridFsTemplate`. Alternatively you can also provide a `Document` as well. - -Reading files from the filesystem can either be achieved through the `find(…)` or `getResources(…)` methods. Let's have a look at the `find(…)` methods first. You can either find a single file matching a `Query` or multiple ones. To easily define file queries we provide the `GridFsCriteria` helper class. It provides static factory methods to encapsulate default metadata fields (e.g. `whereFilename()`, `whereContentType()`) or the custom one through `whereMetaData()`. 
 - -.Using GridFsTemplate to query for files -==== -[source,java] ----- -class GridFsClient { - - @Autowired - GridFsOperations operations; - - @Test - public void findFilesInGridFs() { - GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt"))) - } -} ----- -==== - -NOTE: Currently MongoDB does not support defining sort criteria when retrieving files from GridFS. Thus any sort criteria defined on the `Query` instance handed into the `find(…)` method will be disregarded. - -The other option to read files from the GridFs is using the methods introduced by the `ResourcePatternResolver` interface. They allow handing an Ant path into the method and thus retrieve files matching the given pattern. - -.Using GridFsTemplate to read files -==== -[source,java] ----- -class GridFsClient { - - @Autowired - GridFsOperations operations; - - @Test - public void readFilesFromGridFs() { - GridFsResources[] txtFiles = operations.getResources("*.txt"); - } -} ----- -==== - -`GridFsOperations` extending `ResourcePatternResolver` allows the `GridFsTemplate` e.g. to be plugged into an `ApplicationContext` to read Spring Config files from a MongoDB. - -[[change-streams]] -== Change Streams - -As of MongoDB 3.6, https://docs.mongodb.com/manual/changeStreams/[Change Streams] allow applications to get notified about changes without having to tail the oplog. - -NOTE: Change Stream support is only possible for replica sets or a sharded cluster. - -Change Streams can be subscribed to with both the imperative and the reactive MongoDB Java driver. It is highly recommended to use the reactive variant as it is less resource intensive. However if you do not feel comfortable using the reactive API for whatever reason, you can still obtain the change events via a Messaging concept already common in the Spring ecosystem. 
 - -=== Change Streams using MessageListener - -Listening to a https://docs.mongodb.com/manual/tutorial/change-streams-example/[Change Stream using a Sync Driver] is a long running, blocking task that needs to be delegated to a separate component. -In this case we need to create a `MessageListenerContainer` first which will be the main entry point for running the specific ``SubscriptionRequest``s. -Spring Data MongoDB already ships with a default implementation that operates upon `MongoTemplate` and is capable of creating and executing ``Task``s for a `ChangeStreamRequest`. - -.Change Streams with `MessageListeners` -==== -[source,java] ----- -MessageListenerContainer container = new DefaultMessageListenerContainer(template); -container.start(); <1> - -MessageListener, User> listener = System.out::println; <2> -ChangeStreamRequestOptions options = new ChangeStreamRequestOptions("user", ChangeStreamOptions.empty()); <3> - -Subscription subscription = container.register(new ChangeStreamRequest<>(listener, options), User.class); <4> - -// ... - -container.stop(); <5> ----- -<1> Starting the container initializes the resources and starts the ``Task``s for already registered ``SubscriptionRequest``s. Requests added after the startup are run immediately. -<2> Define the listener called when a `Message` is received. The `Message#getBody()` is converted to the requested domain type. Use `Document` to receive raw results without conversion. -<3> Set the collection to listen to and provide additional options via `ChangeStreamOptions`. -<4> Register the request. The returned `Subscription` can be used to check the current `Task` state and cancel the execution to free resources. -<5> Do not forget to stop the container once you're sure you won't need it any more. This will stop all running ``Task``s within the container. -==== - -=== Change Streams - Reactive - -Subscribing to Change Stream via the reactive API is clearly more straightforward. 
Still the building blocks like `ChangeStreamOptions` remain the same. - -.Change Streams with `MessageListeners` -==== -[source,java] ----- -Aggregation filter = newAggregation(User.class, match(where("age").gte(38)); <1> -Flux> flux = reactiveTemplate.changeStream(filter), User.class, ChangeStreamOptions.empty()); <2> ----- -<1> Use an aggregation pipeline to filter events. -<2> Obtain a `Flux` of change stream events. The `ChangeStreamEvent#getBody()` is converted to requested domain type. Use `Document` to receive raw results without conversion. -==== diff --git a/src/main/asciidoc/reference/query-by-example.adoc b/src/main/asciidoc/reference/query-by-example.adoc deleted file mode 100644 index 283bd8592a..0000000000 --- a/src/main/asciidoc/reference/query-by-example.adoc +++ /dev/null @@ -1,97 +0,0 @@ -[[query-by-example.execution]] -== Executing an example - -.Query by Example using a Repository -==== -[source, java] ----- -public interface PersonRepository extends QueryByExampleExecutor { - -} - -public class PersonService { - - @Autowired PersonRepository personRepository; - - public List findPeople(Person probe) { - return personRepository.findAll(Example.of(probe)); - } -} ----- -==== - -An `Example` containing an untyped `ExampleSpec` uses the Repository type and its collection name. Typed `ExampleSpec` use their type as result type and the collection name from the Repository. - -NOTE: When including `null` values in the `ExampleSpec` Spring Data Mongo uses embedded document matching instead of dot notation property matching. This forces exact document matching for all property values and the property order in the embedded document. 
- -Spring Data MongoDB provides support for the following matching options: - -[cols="1,2", options="header"] -.`StringMatcher` options -|=== -| Matching -| Logical result - -| `DEFAULT` (case-sensitive) -| `{"firstname" : firstname}` - -| `DEFAULT` (case-insensitive) -| `{"firstname" : { $regex: firstname, $options: 'i'}}` - -| `EXACT` (case-sensitive) -| `{"firstname" : { $regex: /^firstname$/}}` - -| `EXACT` (case-insensitive) -| `{"firstname" : { $regex: /^firstname$/, $options: 'i'}}` - -| `STARTING` (case-sensitive) -| `{"firstname" : { $regex: /^firstname/}}` - -| `STARTING` (case-insensitive) -| `{"firstname" : { $regex: /^firstname/, $options: 'i'}}` - -| `ENDING` (case-sensitive) -| `{"firstname" : { $regex: /firstname$/}}` - -| `ENDING` (case-insensitive) -| `{"firstname" : { $regex: /firstname$/, $options: 'i'}}` - -| `CONTAINING` (case-sensitive) -| `{"firstname" : { $regex: /.\*firstname.*/}}` - -| `CONTAINING` (case-insensitive) -| `{"firstname" : { $regex: /.\*firstname.*/, $options: 'i'}}` - -| `REGEX` (case-sensitive) -| `{"firstname" : { $regex: /firstname/}}` - -| `REGEX` (case-insensitive) -| `{"firstname" : { $regex: /firstname/, $options: 'i'}}` - -|=== - -[[query-by-example.untyped]] -== Untyped Example - -By default `Example` is strictly typed. This means the mapped query will have a type match included restricting it to probe assignable types. Eg. when sticking with the default type key `_class` the query has restrictions like `_class : { $in : [ com.acme.Person] }`. - -By using the `UntypedExampleMatcher` it is possible bypasses the default behavior and skip the type restriction. So as long as field names match nearly any domain type can be used as the probe for creating the reference. 
- -.Untyped Example Query -==== -[source, java] ----- - -class JustAnArbitraryClassWithMatchingFieldName { - @Field("lastname") String value; -} - -JustAnArbitraryClassWithMatchingFieldNames probe = new JustAnArbitraryClassWithMatchingFieldNames(); -probe.value = "stark"; - -Example example = Example.of(probe, UntypedExampleMatcher.matching()); - -Query query = new Query(new Criteria().alike(example)); -List result = template.find(query, Person.class); ----- -==== \ No newline at end of file diff --git a/src/main/asciidoc/reference/reactive-mongo-repositories.adoc b/src/main/asciidoc/reference/reactive-mongo-repositories.adoc deleted file mode 100644 index 489191d230..0000000000 --- a/src/main/asciidoc/reference/reactive-mongo-repositories.adoc +++ /dev/null @@ -1,245 +0,0 @@ -[[mongo.reactive.repositories]] -= Reactive MongoDB repositories - -[[mongo.reactive.repositories.intro]] -== Introduction - -This chapter will point out the specialties for reactive repository support for MongoDB. This builds on the core repository support explained in <>. So make sure you've got a sound understanding of the basic concepts explained there. - -[[mongo.reactive.repositories.libraries]] -== Reactive Composition Libraries - -The reactive space offers various reactive composition libraries. The most common libraries are https://github.com/ReactiveX/RxJava[RxJava] and https://projectreactor.io/[Project Reactor]. - -Spring Data MongoDB is built on top of the https://mongodb.github.io/mongo-java-driver-reactivestreams/[MongoDB Reactive Streams] driver to provide maximal interoperability relying on the http://www.reactive-streams.org/[Reactive Streams] initiative. Static APIs such as `ReactiveMongoOperations` are provided by using Project Reactor's `Flux` and `Mono` types. Project Reactor offers various adapters to convert reactive wrapper types (`Flux` to `Observable` and vice versa) but conversion can easily clutter your code. 
- -Spring Data's Repository abstraction is a dynamic API, mostly defined by you and your requirements, as you're declaring query methods. Reactive MongoDB repositories can be either implemented using RxJava or Project Reactor wrapper types by simply extending from one of the library-specific repository interfaces: - -* `ReactiveCrudRepository` -* `ReactiveSortingRepository` -* `RxJava2CrudRepository` -* `RxJava2SortingRepository` - -Spring Data converts reactive wrapper types behind the scenes so that you can stick to your favorite composition library. - -[[mongo.reactive.repositories.usage]] -== Usage - -To access domain entities stored in a MongoDB you can leverage our sophisticated repository support that eases implementing those quite significantly. To do so, simply create an interface for your repository: - -.Sample Person entity -==== -[source,java] ----- -public class Person { - - @Id - private String id; - private String firstname; - private String lastname; - private Address address; - - // … getters and setters omitted -} ----- -==== - -We have a quite simple domain object here. Note that it has a property named `id` of type `ObjectId`. The default serialization mechanism used in `MongoTemplate` (which is backing the repository support) regards properties named id as document id. Currently we support `String`, `ObjectId` and `BigInteger` as id-types. - -.Basic repository interface to persist Person entities -==== -[source] ----- -public interface ReactivePersonRepository extends ReactiveSortingRepository { - - Flux findByFirstname(String firstname); <1> - - Flux findByFirstname(Publisher firstname); <2> - - Flux findByFirstnameOrderByLastname(String firstname, Pageable pageable); <3> - - Mono findByFirstnameAndLastname(String firstname, String lastname); <4> - - Mono findFirstByLastname(String lastname); <5> -} ----- -<1> The method shows a query for all people with the given lastname. 
The query will be derived parsing the method name for constraints which can be concatenated with `And` and `Or`. Thus the method name will result in a query expression of `{"lastname" : lastname}`. -<2> The method shows a query for all people with the given firstname once the firstname is emitted via the given `Publisher`. -<3> Use `Pageable` to pass on offset and sorting parameters to the database. -<4> Find a single entity for given criteria. Completes with `IncorrectResultSizeDataAccessException` on non unique results. -<5> Unlike <4>, the first entity is always emitted even if the query yields more result documents. -==== - -For JavaConfig use the `@EnableReactiveMongoRepositories` annotation. The annotation carries the very same attributes as the namespace element. If no base package is configured the infrastructure will scan the package of the annotated configuration class. - -NOTE: MongoDB uses two different drivers for blocking and reactive (non-blocking) data access. It's required to create a connection using the Reactive Streams driver to provide the required infrastructure for Spring Data's Reactive MongoDB support hence you're required to provide a separate Configuration for MongoDB's Reactive Streams driver. Please also note that your application will operate on two different connections if using Reactive and Blocking Spring Data MongoDB Templates and Repositories. 
- -.JavaConfig for repositories -==== -[source,java] ----- -@Configuration -@EnableReactiveMongoRepositories -class ApplicationConfig extends AbstractReactiveMongoConfiguration { - - @Override - protected String getDatabaseName() { - return "e-store"; - } - - @Override - public MongoClient reactiveMongoClient() { - return MongoClients.create(); - } - - @Override - protected String getMappingBasePackage() { - return "com.oreilly.springdata.mongodb" - } -} ----- -==== - -As our domain repository extends `ReactiveSortingRepository` it provides you with CRUD operations as well as methods for sorted access to the entities. Working with the repository instance is just a matter of dependency injecting it into a client. - -.Sorted access to Person entities -==== -[source,java] ----- -public class PersonRepositoryTests { - - @Autowired ReactivePersonRepository repository; - - @Test - public void sortsElementsCorrectly() { - Flux persons = repository.findAll(Sort.by(new Order(ASC, "lastname"))); - } -} ----- -==== - -[[mongo.reactive.repositories.features]] -== Features - -Spring Data's Reactive MongoDB support comes with a reduced feature set compared to the blocking <>. - -Following features are supported: - -* Query Methods using <> -* <> -* <> -* <> -* <> -* <> - -WARNING: Reactive Repositories do not support Type-safe Query methods using Querydsl. - -[[mongodb.reactive.repositories.queries.geo-spatial]] -=== Geo-spatial repository queries - -As you've just seen there are a few keywords triggering geo-spatial operations within a MongoDB query. The `Near` keyword allows some further modification. 
Let's have a look at some examples: - -.Advanced `Near` queries -==== -[source,java] ----- -public interface PersonRepository extends ReactiveMongoRepository - - // { 'location' : { '$near' : [point.x, point.y], '$maxDistance' : distance}} - Flux findByLocationNear(Point location, Distance distance); -} ----- -==== - -Adding a `Distance` parameter to the query method allows restricting results to those within the given distance. If the `Distance` was set up containing a `Metric` we will transparently use `$nearSphere` instead of `$near`. - -NOTE: Reactive Geo-spatial repository queries support the domain type and `GeoResult` results within a reactive wrapper type. `GeoPage` and `GeoResults` are not supported as they contradict the deferred result approach with pre-calculating the average distance. However, you can still pass in a `Pageable` argument to page results yourself. - -.Using `Distance` with `Metrics` -==== -[source,java] ----- -Point point = new Point(43.7, 48.8); -Distance distance = new Distance(200, Metrics.KILOMETERS); -… = repository.findByLocationNear(point, distance); -// {'location' : {'$nearSphere' : [43.7, 48.8], '$maxDistance' : 0.03135711885774796}} ----- -==== - -As you can see using a `Distance` equipped with a `Metric` causes `$nearSphere` clause to be added instead of a plain `$near`. Beyond that the actual distance gets calculated according to the `Metrics` used. - -NOTE: Using `@GeoSpatialIndexed(type = GeoSpatialIndexType.GEO_2DSPHERE)` on the target property forces usage of `$nearSphere` operator. 
- -==== Geo-near queries - -[source,java] ----- -public interface PersonRepository extends ReactiveMongoRepository - - // {'geoNear' : 'location', 'near' : [x, y] } - Flux> findByLocationNear(Point location); - - // No metric: {'geoNear' : 'person', 'near' : [x, y], maxDistance : distance } - // Metric: {'geoNear' : 'person', 'near' : [x, y], 'maxDistance' : distance, - // 'distanceMultiplier' : metric.multiplier, 'spherical' : true } - Flux> findByLocationNear(Point location, Distance distance); - - // Metric: {'geoNear' : 'person', 'near' : [x, y], 'minDistance' : min, - // 'maxDistance' : max, 'distanceMultiplier' : metric.multiplier, - // 'spherical' : true } - Flux> findByLocationNear(Point location, Distance min, Distance max); - - // {'geoNear' : 'location', 'near' : [x, y] } - Flux> findByLocationNear(Point location); -} ----- - -[[mongo.reactive.repositories.infinite-streams]] -== Infinite Streams with Tailable Cursors - -By default, MongoDB will automatically close a cursor when the client exhausts all results supplied by the cursor. Closing a cursor on exhaustion turns a stream into a finite stream. For https://docs.mongodb.com/manual/core/capped-collections/[capped collections] you may use a https://docs.mongodb.com/manual/core/tailable-cursors/[Tailable Cursor] that remains open after the client consumes all initially returned data. Using tailable cursors with a reactive data types allows construction of infinite streams. A tailable cursor remains open until it is closed externally. It emits data as new documents arrive in a capped collection. - -Tailable cursors may become dead, or invalid, if either the query returns no match or the cursor returns the document at the "end" of the collection and then the application deletes that document. 
- - -.Infinite Stream queries with ReactiveMongoOperations -==== -[source,java] ----- -Flux stream = template.tail(query(where("name").is("Joe")), Person.class); - -Disposable subscription = stream.doOnNext(person -> System.out.println(person)).subscribe(); - -// … - -// Later: Dispose the subscription to close the stream -subscription.dispose(); ----- -==== - -Spring Data MongoDB Reactive repositories support infinite streams by annotating a query method with `@Tailable`. This works for methods returning `Flux` and other reactive types capable of emitting multiple elements. - -.Infinite Stream queries with ReactiveMongoRepository -==== -[source,java] ----- - -public interface PersonRepository extends ReactiveMongoRepository { - - @Tailable - Flux findByFirstname(String firstname); - -} - -Flux stream = repository.findByFirstname("Joe"); - -Disposable subscription = stream.doOnNext(System.out::println).subscribe(); - -// … - -// Later: Dispose the subscription to close the stream -subscription.dispose(); ----- -==== - -TIP: Capped collections can be created via `MongoOperations.createCollection`. Just provide the required `CollectionOptions.empty().capped()...` diff --git a/src/main/asciidoc/reference/reactive-mongodb.adoc b/src/main/asciidoc/reference/reactive-mongodb.adoc deleted file mode 100644 index cd59683db0..0000000000 --- a/src/main/asciidoc/reference/reactive-mongodb.adoc +++ /dev/null @@ -1,482 +0,0 @@ -[[mongo.reactive]] -= Reactive MongoDB support - -The reactive MongoDB support contains a basic set of features which are summarized below. - -* Spring configuration support using Java based `@Configuration` classes a `MongoClient` instance and replica sets. -* `ReactiveMongoTemplate` helper class that increases productivity using `MongoOperations in a reactive manner. Includes integrated object mapping between `Documents and POJOs. -* Exception translation into Spring's portable Data Access Exception hierarchy. 
-* Feature Rich Object Mapping integrated with Spring's `ConversionService`. -* Annotation based mapping metadata but extensible to support other metadata formats. -* Persistence and mapping lifecycle events. -* Java based `Query`, `Criteria`, and `Update` DSLs. -* Automatic implementation of reactive Repository interfaces including support for custom finder methods. - -For most tasks you will find yourself using `ReactiveMongoTemplate` or the Repository support that both leverage the rich mapping functionality. `ReactiveMongoTemplate` is the place to look for accessing functionality such as incrementing counters or ad-hoc CRUD operations. `ReactiveMongoTemplate` also provides callback methods so that it is easy for you to get a hold of the low level API artifacts such as `MongoDatabase` to communicate directly with MongoDB. The goal with naming conventions on various API artifacts is to copy those in the base MongoDB Java driver so you can easily map your existing knowledge onto the Spring APIs. - -[[mongodb-reactive-getting-started]] -== Getting Started - -Spring MongoDB support requires MongoDB 2.6 or higher and Java SE 8 or higher. - -First you need to set up a running Mongodb server. Refer to the http://docs.mongodb.org/manual/core/introduction/[Mongodb Quick Start guide] for an explanation on how to startup a MongoDB instance. Once installed starting MongoDB is typically a matter of executing the following command: `MONGO_HOME/bin/mongod` - -To create a Spring project in STS go to File -> New -> Spring Template Project -> Simple Spring Utility Project -> press Yes when prompted. Then enter a project and a package name such as org.spring.mongodb.example. - -Then add the following to pom.xml dependencies section. 
- -[source,xml] ----- - - - - - - org.springframework.data - spring-data-mongodb - {version} - - - - org.mongodb - mongodb-driver-reactivestreams - {mongo.reactivestreams} - - - - io.projectreactor - reactor-core - {reactor} - - - ----- - -NOTE: MongoDB uses two different drivers for blocking and reactive (non-blocking) data access. While blocking operations are provided by default, you're have to opt-in for reactive usage. - -Create a simple `Person` class to persist: - -[source,java] ----- -@Document -public class Person { - - private String id; - private String name; - private int age; - - public Person(String name, int age) { - this.name = name; - this.age = age; - } - - public String getId() { - return id; - } - public String getName() { - return name; - } - public int getAge() { - return age; - } - - @Override - public String toString() { - return "Person [id=" + id + ", name=" + name + ", age=" + age + "]"; - } -} ----- - -And a main application to run - -[source,java] ----- -public class ReactiveMongoApp { - - private static final Logger log = LoggerFactory.getLogger(ReactiveMongoApp.class); - - public static void main(String[] args) throws Exception { - - CountDownLatch latch = new CountDownLatch(1); - - ReactiveMongoTemplate mongoOps = new ReactiveMongoTemplate(MongoClients.create(), "database"); - - mongoOps.insert(new Person("Joe", 34)) - .flatMap(p -> mongoOps.findOne(new Query(where("name").is("Joe")), Person.class)) - .doOnNext(person -> log.info(person.toString())) - .flatMap(person -> mongoOps.dropCollection("person")) - .doOnComplete(latch::countDown) - .subscribe(); - - latch.await(); - } -} ----- - -This will produce the following output - -[source] ----- -2016-09-20 14:56:57,373 DEBUG .index.MongoPersistentEntityIndexCreator: 124 - Analyzing class class example.ReactiveMongoApp$Person for index information. 
-2016-09-20 14:56:57,452 DEBUG .data.mongodb.core.ReactiveMongoTemplate: 975 - Inserting Document containing fields: [_class, name, age] in collection: person -2016-09-20 14:56:57,541 DEBUG .data.mongodb.core.ReactiveMongoTemplate:1503 - findOne using query: { "name" : "Joe"} fields: null for class: class example.ReactiveMongoApp$Person in collection: person -2016-09-20 14:56:57,545 DEBUG .data.mongodb.core.ReactiveMongoTemplate:1979 - findOne using query: { "name" : "Joe"} in db.collection: database.person -2016-09-20 14:56:57,567 INFO example.ReactiveMongoApp: 43 - Person [id=57e1321977ac501c68d73104, name=Joe, age=34] -2016-09-20 14:56:57,573 DEBUG .data.mongodb.core.ReactiveMongoTemplate: 528 - Dropped collection [person] ----- - -Even in this simple example, there are few things to take notice of - -* You can instantiate the central helper class of Spring Mongo, <>, using the standard `com.mongodb.reactivestreams.client.MongoClient` object and the name of the database to use. -* The mapper works against standard POJO objects without the need for any additional metadata (though you can optionally provide that information. See <>.). -* Conventions are used for handling the id field, converting it to be a ObjectId when stored in the database. -* Mapping conventions can use field access. Notice the Person class has only getters. -* If the constructor argument names match the field names of the stored document, they will be used to instantiate the object - -There is an https://github.com/spring-projects/spring-data-examples[github repository with several examples] that you can download and play around with to get a feel for how the library works. - -[[mongo.reactive.driver]] -== Connecting to MongoDB with Spring and the Reactive Streams Driver - -One of the first tasks when using MongoDB and Spring is to create a `com.mongodb.reactivestreams.client.MongoClient` object using the IoC container. 
- -[[mongo.reactive.mongo-java-config]] -=== Registering a MongoClient instance using Java based metadata - -An example of using Java based bean metadata to register an instance of a `com.mongodb.reactivestreams.client.MongoClient` is shown below - -.Registering a com.mongodb.MongoClient object using Java based bean metadata -==== -[source,java] ----- -@Configuration -public class AppConfig { - - /* - * Use the Reactive Streams Mongo Client API to create a com.mongodb.reactivestreams.client.MongoClient instance. - */ - public @Bean MongoClient reactiveMongoClient() { - return MongoClients.create("mongodb://localhost"); - } -} ----- -==== - -This approach allows you to use the standard `com.mongodb.reactivestreams.client.MongoClient` API that you may already be used to using. - -An alternative is to register an instance of `com.mongodb.reactivestreams.client.MongoClient` instance with the container using Spring's `ReactiveMongoClientFactoryBean`. As compared to instantiating a `com.mongodb.reactivestreams.client.MongoClient` instance directly, the FactoryBean approach has the added advantage of also providing the container with an `ExceptionTranslator` implementation that translates MongoDB exceptions to exceptions in Spring's portable `DataAccessException` hierarchy for data access classes annotated with the `@Repository` annotation. This hierarchy and use of `@Repository` is described in http://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/data-access.html[Spring's DAO support features]. 
- -An example of a Java based bean metadata that supports exception translation on `@Repository` annotated classes is shown below: - -.Registering a com.mongodb.MongoClient object using Spring's MongoClientFactoryBean and enabling Spring's exception translation support -==== -[source,java] ----- -@Configuration -public class AppConfig { - - /* - * Factory bean that creates the com.mongodb.reactivestreams.client.MongoClient instance - */ - public @Bean ReactiveMongoClientFactoryBean mongoClient() { - - ReactiveMongoClientFactoryBean clientFactory = new ReactiveMongoClientFactoryBean(); - clientFactory.setHost("localhost"); - - return clientFactory; - } -} ----- -==== - -To access the `com.mongodb.reactivestreams.client.MongoClient` object created by the `ReactiveMongoClientFactoryBean` in other `@Configuration` or your own classes, just obtain the `MongoClient` from the context. - - -[[mongo.reactive.mongo-db-factory]] -=== The ReactiveMongoDatabaseFactory interface - -While `com.mongodb.reactivestreams.client.MongoClient` is the entry point to the reactive MongoDB driver API, connecting to a specific MongoDB database instance requires additional information such as the database name. With that information you can obtain a `com.mongodb.reactivestreams.client.MongoDatabase` object and access all the functionality of a specific MongoDB database instance. Spring provides the `org.springframework.data.mongodb.core.ReactiveMongoDatabaseFactory` interface shown below to bootstrap connectivity to the database. - -[source,java] ----- -public interface ReactiveMongoDatabaseFactory { - - /** - * Creates a default {@link MongoDatabase} instance. - * - * @return - * @throws DataAccessException - */ - MongoDatabase getMongoDatabase() throws DataAccessException; - - /** - * Creates a {@link MongoDatabase} instance to access the database with the given name. - * - * @param dbName must not be {@literal null} or empty. 
- * @return - * @throws DataAccessException - */ - MongoDatabase getMongoDatabase(String dbName) throws DataAccessException; - - /** - * Exposes a shared {@link MongoExceptionTranslator}. - * - * @return will never be {@literal null}. - */ - PersistenceExceptionTranslator getExceptionTranslator(); -} ----- - -The class `org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory` provides implements the ReactiveMongoDatabaseFactory interface and is created with a standard `com.mongodb.reactivestreams.client.MongoClient` instance and the database name. - -Instead of using the IoC container to create an instance of `ReactiveMongoTemplate`, you can just use them in standard Java code as shown below. - -[source,java] ----- -public class MongoApp { - - private static final Log log = LogFactory.getLog(MongoApp.class); - - public static void main(String[] args) throws Exception { - - ReactiveMongoOperations mongoOps = new ReactiveMongoOperations(new SimpleReactiveMongoDatabaseFactory(MongoClient.create(), "database")); - - mongoOps.insert(new Person("Joe", 34)) - .flatMap(p -> mongoOps.findOne(new Query(where("name").is("Joe")), Person.class)) - .doOnNext(person -> log.info(person.toString())) - .flatMap(person -> mongoOps.dropCollection("person")) - .subscribe(); - } -} ----- - -The use of `SimpleMongoDbFactory` is the only difference between the listing shown in the <>. - -[[mongo.reactive.mongo-db-factory-java]] -=== Registering a ReactiveMongoDatabaseFactory instance using Java based metadata - -To register a `ReactiveMongoDatabaseFactory` instance with the container, you write code much like what was highlighted in the previous code listing. 
A simple example is shown below - -[source,java] ----- -@Configuration -public class MongoConfiguration { - - public @Bean ReactiveMongoDatabaseFactory reactiveMongoDatabaseFactory() { - return new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "database"); - } -} ----- - -To define the username and password create MongoDB connection string and pass it into the factory method as shown below. This listing also shows using `ReactiveMongoDatabaseFactory` register an instance of `ReactiveMongoTemplate` with the container. - -[source,java] ----- -@Configuration -public class MongoConfiguration { - - public @Bean ReactiveMongoDatabaseFactory reactiveMongoDatabaseFactory() { - return new SimpleReactiveMongoDatabaseFactory(MongoClients.create("mongodb://joe:secret@localhost"), "database"); - } - - public @Bean ReactiveMongoTemplate reactiveMongoTemplate() { - return new ReactiveMongoTemplate(reactiveMongoDatabaseFactory()); - } -} ----- - -[[mongo.reactive.template]] -== Introduction to ReactiveMongoTemplate - -The class `ReactiveMongoTemplate`, located in the package `org.springframework.data.mongodb`, is the central class of the Spring's Reactive MongoDB support providing a rich feature set to interact with the database. The template offers convenience operations to create, update, delete and query for MongoDB documents and provides a mapping between your domain objects and MongoDB documents. - -NOTE: Once configured, `ReactiveMongoTemplate` is thread-safe and can be reused across multiple instances. - -The mapping between MongoDB documents and domain classes is done by delegating to an implementation of the interface `MongoConverter`. Spring provides a default implementation with `MongoMappingConverter`, but you can also write your own converter. Please refer to the section on MongoConverters for more detailed information. - -The `ReactiveMongoTemplate` class implements the interface `ReactiveMongoOperations`. 
In as much as possible, the methods on `ReactiveMongoOperations` are named after methods available on the MongoDB driver `Collection` object as as to make the API familiar to existing MongoDB developers who are used to the driver API. For example, you will find methods such as "find", "findAndModify", "findOne", "insert", "remove", "save", "update" and "updateMulti". The design goal was to make it as easy as possible to transition between the use of the base MongoDB driver and `ReactiveMongoOperations`. A major difference in between the two APIs is that `ReactiveMongoOperations` can be passed domain objects instead of `Document` and there are fluent APIs for `Query`, `Criteria`, and `Update` operations instead of populating a `Document` to specify the parameters for those operations. - -NOTE: The preferred way to reference the operations on `ReactiveMongoTemplate` instance is via its interface `ReactiveMongoOperations`. - -The default converter implementation used by `ReactiveMongoTemplate` is `MappingMongoConverter`. While the `MappingMongoConverter` can make use of additional metadata to specify the mapping of objects to documents it is also capable of converting objects that contain no additional metadata by using some conventions for the mapping of IDs and collection names. These conventions as well as the use of mapping annotations is explained in the <>. - -Another central feature of `ReactiveMongoTemplate` is exception translation of exceptions thrown in the MongoDB Java driver into Spring's portable Data Access Exception hierarchy. Refer to the section on <> for more information. - -While there are many convenience methods on `ReactiveMongoTemplate` to help you easily perform common tasks if you should need to access the MongoDB driver API directly to access functionality not explicitly exposed by the MongoTemplate you can use one of several Execute callback methods to access underlying driver APIs. 
The execute callbacks will give you a reference to either a `com.mongodb.reactivestreams.client.MongoCollection` or a `com.mongodb.reactivestreams.client.MongoDatabase` object. Please see the section <> for more information. - -Now let's look at a examples of how to work with the `ReactiveMongoTemplate` in the context of the Spring container. - -[[mongo.reactive.template.instantiating]] -=== Instantiating ReactiveMongoTemplate - -You can use Java to create and register an instance of `ReactiveMongoTemplate` as shown below. - -.Registering a `com.mongodb.reactivestreams.client.MongoClient` object and enabling Spring's exception translation support -==== -[source,java] ----- -@Configuration -public class AppConfig { - - public @Bean MongoClient reactiveMongoClient() { - return MongoClients.create("mongodb://localhost"); - } - - public @Bean ReactiveMongoTemplate reactiveMongoTemplate() { - return new ReactiveMongoTemplate(reactiveMongoClient(), "mydatabase"); - } -} ----- -==== - -There are several overloaded constructors of `ReactiveMongoTemplate`. These are - -* `ReactiveMongoTemplate(MongoClient mongo, String databaseName)` - takes the `com.mongodb.MongoClient` object and the default database name to operate against. -* `ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory)` - takes a ReactiveMongoDatabaseFactory object that encapsulated the `com.mongodb.reactivestreams.client.MongoClient` object and database name. -* `ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, MongoConverter mongoConverter)` - adds a `MongoConverter` to use for mapping. - -Other optional properties that you might like to set when creating a `ReactiveMongoTemplate` are the default `WriteResultCheckingPolicy`, `WriteConcern`, and `ReadPreference`. - -NOTE: The preferred way to reference the operations on `ReactiveMongoTemplate` instance is via its interface `ReactiveMongoOperations`. 
- - -[[mongo.reactive.template.writeresultchecking]] -=== WriteResultChecking Policy - -When in development it is very handy to either log or throw an `Exception` if the `com.mongodb.WriteResult` returned from any MongoDB operation contains an error. It is quite common to forget to do this during development and then end up with an application that looks like it runs successfully but in fact the database was not modified according to your expectations. Set MongoTemplate's property to an enum with the following values, `LOG`, `EXCEPTION`, or `NONE` to either log the error, throw an exception or do nothing. The default is to use a `WriteResultChecking` value of `NONE`. - - -[[mongo.reactive.template.writeconcern]] -=== WriteConcern - -You can set the `com.mongodb.WriteConcern` property that the `ReactiveMongoTemplate` will use for write operations if it has not yet been specified via the driver at a higher level such as `MongoDatabase`. If ReactiveMongoTemplate's `WriteConcern` property is not set it will default to the one set in the MongoDB driver's `MongoDatabase` or `MongoCollection` setting. - - -[[mongo.reactive.template.writeconcernresolver]] -=== WriteConcernResolver - -For more advanced cases where you want to set different `WriteConcern` values on a per-operation basis (for remove, update, insert and save operations), a strategy interface called `WriteConcernResolver` can be configured on `ReactiveMongoTemplate`. Since `ReactiveMongoTemplate` is used to persist POJOs, the `WriteConcernResolver` lets you create a policy that can map a specific POJO class to a `WriteConcern` value. The `WriteConcernResolver` interface is shown below. - -[source,java] ----- -public interface WriteConcernResolver { - WriteConcern resolve(MongoAction action); -} ----- - -The passed in argument, `MongoAction`, is what you use to determine the `WriteConcern` value to be used or to use the value of the Template itself as a default. 
`MongoAction` contains the collection name being written to, the `java.lang.Class` of the POJO, the converted `DBObject`, as well as the operation as an enumeration (`MongoActionOperation`: REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE) and a few other pieces of contextual information. For example, - -[source] ----- -private class MyAppWriteConcernResolver implements WriteConcernResolver { - - public WriteConcern resolve(MongoAction action) { - if (action.getEntityClass().getSimpleName().contains("Audit")) { - return WriteConcern.NONE; - } else if (action.getEntityClass().getSimpleName().contains("Metadata")) { - return WriteConcern.JOURNAL_SAFE; - } - return action.getDefaultWriteConcern(); - } -} ----- - - -[[mongo.reactive.template.save-update-remove]] -== Saving, Updating, and Removing Documents - -`ReactiveMongoTemplate` provides a simple way for you to save, update, and delete your domain objects and map those objects to documents stored in MongoDB. - -Given a simple class such as Person - -[source,java] ----- -public class Person { - - private String id; - private String name; - private int age; - - public Person(String name, int age) { - this.name = name; - this.age = age; - } - - public String getId() { - return id; - } - public String getName() { - return name; - } - public int getAge() { - return age; - } - - @Override - public String toString() { - return "Person [id=" + id + ", name=" + name + ", age=" + age + "]"; - } - -} ----- - -You can save, update and delete the object as shown below. 
- -[source,java] ----- -public class ReactiveMongoApp { - - private static final Logger log = LoggerFactory.getLogger(ReactiveMongoApp.class); - - public static void main(String[] args) throws Exception { - - CountDownLatch latch = new CountDownLatch(1); - - ReactiveMongoTemplate mongoOps = new ReactiveMongoTemplate(MongoClients.create(), "database"); - - mongoOps.insert(new Person("Joe", 34)).doOnNext(person -> log.info("Insert: " + person)) - .flatMap(person -> mongoOps.findById(person.getId(), Person.class)) - .doOnNext(person -> log.info("Found: " + person)) - .zipWith(person -> mongoOps.updateFirst(query(where("name").is("Joe")), update("age", 35), Person.class)) - .flatMap(tuple -> mongoOps.remove(tuple.getT1())).flatMap(deleteResult -> mongoOps.findAll(Person.class)) - .count().doOnSuccess(count -> { - log.info("Number of people: " + count); - latch.countDown(); - }) - - .subscribe(); - - latch.await(); - } -} ----- - -There was implicit conversion using the `MongoConverter` between a `String` and `ObjectId` as stored in the database and recognizing a convention of the property "Id" name. - -NOTE: This example is meant to show the use of save, update and remove operations on `ReactiveMongoTemplate` and not to show complex mapping or functional chaining functionality - -The query syntax used in the example is explained in more detail in the section <>. Additional documentation can be found in <> section. - -[[mongo.reactive.executioncallback]] -== Execution callbacks - -One common design feature of all Spring template classes is that all functionality is routed into one of the templates execute callback methods. This helps ensure that exceptions and any resource management that maybe required are performed consistency. While this was of much greater need in the case of JDBC and JMS than with MongoDB, it still offers a single spot for exception translation and logging to occur. 
As such, using the execute callback is the preferred way to access the MongoDB driver's `MongoDatabase` and `MongoCollection` objects to perform uncommon operations that were not exposed as methods on `ReactiveMongoTemplate`. - -Here is a list of execute callback methods. - -* ` Flux` *execute* `(Class entityClass, ReactiveCollectionCallback action)` Executes the given ReactiveCollectionCallback for the entity collection of the specified class. - -* ` Flux` *execute* `(String collectionName, ReactiveCollectionCallback action)` Executes the given ReactiveCollectionCallback on the collection of the given name. - -* ` Flux` *execute* `(ReactiveDatabaseCallback action)` Executes a ReactiveDatabaseCallback translating any exceptions as necessary. - -Here is an example that uses the `ReactiveCollectionCallback` to return information about an index - -[source,java] ----- -Flux hasIndex = operations.execute("geolocation", - collection -> Flux.from(collection.listIndexes(Document.class)) - .filter(document -> document.get("name").equals("fancy-index-name")) - .flatMap(document -> Mono.just(true)) - .defaultIfEmpty(false)); ----- diff --git a/src/main/resources/changelog.txt b/src/main/resources/changelog.txt deleted file mode 100644 index 9bef94a744..0000000000 --- a/src/main/resources/changelog.txt +++ /dev/null @@ -1,2162 +0,0 @@ -Spring Data MongoDB Changelog -============================= - -Changes in version 2.0.5.RELEASE (2018-02-28) ---------------------------------------------- -* DATAMONGO-1882 - Release 2.0.5 (Kay SR5). - - -Changes in version 2.0.4.RELEASE (2018-02-19) ---------------------------------------------- -* DATAMONGO-1872 - SpEL Expressions in @Document annotations are not re-evaluated for repository query executions. -* DATAMONGO-1871 - AggregationExpression rendering does not consider nested property aliasing. -* DATAMONGO-1870 - Skip parameter not working in MongoTemplate#remove(Query, Class). 
-* DATAMONGO-1865 - findFirst query method throws IncorrectResultSizeDataAccessException on non-unique result. -* DATAMONGO-1860 - Mongo count operation called twice in QuerydslMongoPredicateExecutor.findAll(Predicate, Pageable). -* DATAMONGO-1859 - Release 2.0.4 (Kay SR4). - - -Changes in version 2.1.0.M1 (2018-02-06) ----------------------------------------- -* DATAMONGO-1864 - Upgrade to MongoDB Java Driver 3.6.2. -* DATAMONGO-1858 - Fix line endings. -* DATAMONGO-1850 - GridFsResource.getContentType() throws NullPointerException on absent metadata. -* DATAMONGO-1846 - Upgrade to MongoDB Java Driver 3.6. -* DATAMONGO-1844 - Update copyright years to 2018. -* DATAMONGO-1843 - Aggregation operator $reduce with ArrayOperators.Reduce produce a wrong Document. -* DATAMONGO-1835 - Add support for $jsonSchema to Criteria API. -* DATAMONGO-1831 - Failure to read Scala collection types in MappingMongoConverter. -* DATAMONGO-1824 - Assert compatibility with MongoDB Server 3.6. -* DATAMONGO-1823 - AfterConvertEvent is not published when using custom methods in repository interface. -* DATAMONGO-1822 - Adapt repository readme to changed configuration support. -* DATAMONGO-1821 - Fix method ambiguity in tests when compiling against MongoDB 3.6. -* DATAMONGO-1820 - Investigate failing TravisCI build. -* DATAMONGO-1818 - Reference documentation mentions @TailableCursor instead of @Tailable. -* DATAMONGO-1817 - Kotlin extensions should return nullable types. -* DATAMONGO-1815 - Adapt API changes in Property in test cases. -* DATAMONGO-1814 - Missing documentation on Faceted classification. -* DATAMONGO-1812 - Temporarily add milestone repository to plugin repositories. -* DATAMONGO-1811 - Reference Documentation doesn't match with API Documentation 2.X vesrion. -* DATAMONGO-1809 - Type hint usage broken when using positional parameters with more than one digit. -* DATAMONGO-1806 - GridFsResource wrong type in javaDoc. 
-* DATAMONGO-1805 - Documentation for operations.find uses wrong result type. -* DATAMONGO-1803 - Add support for MongoDB 3.6 change streams. -* DATAMONGO-1802 - No converter found capable of converting from type org.bson.types.Binary to type byte[]. -* DATAMONGO-1795 - Remove obsolete Kotlin build configuration. -* DATAMONGO-1794 - Release 2.1 M1 (Lovelace). -* DATAMONGO-1761 - Add distinct operation to MongoTemplate. -* DATAMONGO-1696 - Reference documentation uses JPA Annotations. -* DATAMONGO-1553 - Add $sortByCount aggregation stage. -* DATAMONGO-1322 - Add support for validator when creating collection. - - -Changes in version 2.0.3.RELEASE (2018-01-24) ---------------------------------------------- -* DATAMONGO-1858 - Fix line endings. -* DATAMONGO-1850 - GridFsResource.getContentType() throws NullPointerException on absent metadata. -* DATAMONGO-1843 - Aggregation operator $reduce with ArrayOperators.Reduce produce a wrong Document. -* DATAMONGO-1831 - Failure to read Scala collection types in MappingMongoConverter. -* DATAMONGO-1830 - Release 2.0.3 (Kay SR3). -* DATAMONGO-1824 - Assert compatibility with MongoDB Server 3.6. - - -Changes in version 1.10.10.RELEASE (2018-01-24) ------------------------------------------------ -* DATAMONGO-1843 - Aggregation operator $reduce with ArrayOperators.Reduce produce a wrong Document. -* DATAMONGO-1831 - Failure to read Scala collection types in MappingMongoConverter. -* DATAMONGO-1829 - Release 1.10.10 (Ingalls SR10). -* DATAMONGO-1824 - Assert compatibility with MongoDB Server 3.6. - - -Changes in version 2.0.2.RELEASE (2017-11-27) ---------------------------------------------- -* DATAMONGO-1823 - AfterConvertEvent is not published when using custom methods in repository interface. -* DATAMONGO-1821 - Fix method ambiguity in tests when compiling against MongoDB 3.6. -* DATAMONGO-1820 - Investigate failing TravisCI build. -* DATAMONGO-1818 - Reference documentation mentions @TailableCursor instead of @Tailable. 
-* DATAMONGO-1817 - Kotlin extensions should return nullable types. -* DATAMONGO-1816 - Release 2.0.2 (Kay SR2). - - -Changes in version 1.10.9.RELEASE (2017-11-27) ----------------------------------------------- -* DATAMONGO-1809 - Type hint usage broken when using positional parameters with more than one digit. -* DATAMONGO-1799 - Release 1.10.9 (Ingalls SR9). -* DATAMONGO-1696 - Reference documentation uses JPA Annotations. - - -Changes in version 2.0.1.RELEASE (2017-10-27) ---------------------------------------------- -* DATAMONGO-1815 - Adapt API changes in Property in test cases. -* DATAMONGO-1814 - Missing documentation on Faceted classification. -* DATAMONGO-1811 - Reference Documentation doesn't match with API Documentation 2.X vesrion. -* DATAMONGO-1809 - Type hint usage broken when using positional parameters with more than one digit. -* DATAMONGO-1806 - GridFsResource wrong type in javaDoc. -* DATAMONGO-1805 - Documentation for operations.find uses wrong result type. -* DATAMONGO-1802 - No converter found capable of converting from type org.bson.types.Binary to type byte[]. -* DATAMONGO-1795 - Remove obsolete Kotlin build configuration. -* DATAMONGO-1793 - Release 2.0.1 (Kay SR1). -* DATAMONGO-1696 - Reference documentation uses JPA Annotations. - - -Changes in version 1.10.8.RELEASE (2017-10-11) ----------------------------------------------- -* DATAMONGO-1784 - Add support for AggregationExpression in GroupOperation.sum. -* DATAMONGO-1782 - CyclicPropertyReferenceException on index resolution. -* DATAMONGO-1775 - Release 1.10.8 (Ingalls SR8). - - -Changes in version 2.0.0.RELEASE (2017-10-02) ---------------------------------------------- -* DATAMONGO-1791 - Adapt to changed Spring Framework 5 documentation structure. -* DATAMONGO-1787 - Add explicit automatic module name for Java 9. -* DATAMONGO-1786 - Adapt tests to nullability enforcement in repository query methods. -* DATAMONGO-1785 - Upgrade to OpenWebBeans 2.0.1. 
-* DATAMONGO-1784 - Add support for AggregationExpression in GroupOperation.sum. -* DATAMONGO-1782 - CyclicPropertyReferenceException on index resolution. -* DATAMONGO-1781 - Update what's new in reference documentation. -* DATAMONGO-1779 - Query.limit(N) on empty query is not applied. -* DATAMONGO-1778 - Update.equals(…) fails to recognize an equal Update object. -* DATAMONGO-1777 - Update.toString(…) does not pretty-print modifiers. -* DATAMONGO-1776 - Release 2.0 GA (Kay). - - -Changes in version 2.0.0.RC3 (2017-09-11) ------------------------------------------ -* DATAMONGO-1774 - ReactiveMongoOperations#remove(Mono, java.lang.String) ends up in an infinite loop. -* DATAMONGO-1772 - Type hint not added when updating nested list elements with inheritance. -* DATAMONGO-1771 - Fix MongoDB setup for travis-ci. -* DATAMONGO-1770 - Upgrade to mongo-java-driver 3.5.0 and mongodb-driver-reactivestreams 1.6.0. -* DATAMONGO-1768 - QueryByExample FindOne : probe type. -* DATAMONGO-1765 - Duplicate elements in DBRefs list not correctly mapped. -* DATAMONGO-1762 - Introduce nullable annotations for API validation. -* DATAMONGO-1758 - Remove non existing spring-data-mongodb-log4j from benchmarks profile. -* DATAMONGO-1757 - Improve exception message if type to read doesn't match declared type on Map value. -* DATAMONGO-1756 - Aggregation project and arithmetic operation not working with nested fields. -* DATAMONGO-1754 - Release 2.0 RC3 (Kay). -* DATAMONGO-1706 - Adapt to deprecated RxJava 1 CRUD repositories. -* DATAMONGO-1631 - AbstractReactiveMongoConfiguration.mongoDbFactory conflicts with blocking MongoDbFactory. - - -Changes in version 1.10.7.RELEASE (2017-09-11) ----------------------------------------------- -* DATAMONGO-1772 - Type hint not added when updating nested list elements with inheritance. -* DATAMONGO-1768 - QueryByExample FindOne : probe type. -* DATAMONGO-1765 - Duplicate elements in DBRefs list not correctly mapped. 
-* DATAMONGO-1756 - Aggregation project and arithmetic operation not working with nested fields. -* DATAMONGO-1755 - Release 1.10.7 (Ingalls SR7). - - -Changes in version 1.10.6.RELEASE (2017-07-26) ----------------------------------------------- -* DATAMONGO-1750 - Release 1.10.6 (Ingalls SR6). - - -Changes in version 2.0.0.RC2 (2017-07-25) ------------------------------------------ -* DATAMONGO-1753 - IndexEnsuringQueryCreationListener should skip queries without criteria. -* DATAMONGO-1752 - Executing repository methods with closed projection fails. -* DATAMONGO-1751 - Release 2.0 RC2 (Kay). - - -Changes in version 2.0.0.RC1 (2017-07-25) ------------------------------------------ -* DATAMONGO-1748 - Add Kotlin extensions for Criteria API. -* DATAMONGO-1746 - Inherit Project Reactor version from dependency management. -* DATAMONGO-1744 - Improve default setup for MappingMongoConverter. -* DATAMONGO-1739 - Change TerminatingFindOperation.stream() to return a Stream directly. -* DATAMONGO-1738 - Move to fluent API for repository query execution. -* DATAMONGO-1735 - Sort and fields objects in Query should not be null. -* DATAMONGO-1734 - Add count() & exists to fluent API. -* DATAMONGO-1733 - Allow usage of projection interfaces in FluentMongoOperations. -* DATAMONGO-1730 - Adapt to API changes in mapping subsystem. -* DATAMONGO-1729 - Open projection does not fetch all properties. -* DATAMONGO-1728 - ExecutableFindOperation.find(…).first() fails with NPE. -* DATAMONGO-1726 - Add terminating findOne/findFirst methods to FluentMongoOperations returning null value instead of Optional. -* DATAMONGO-1725 - Potential NullPointerException in CloseableIterableCursorAdapter. -* DATAMONGO-1723 - Fix unit tests after API changes in Spring Data Commons. -* DATAMONGO-1721 - Fix dependency cycles. -* DATAMONGO-1720 - Add JMH benchmark module. -* DATAMONGO-1719 - Add fluent alternative for ReactiveMongoOperations. 
-* DATAMONGO-1718 - MongoTemplate.findAndRemoveAll(Query, String) delegates to wrong overload. -* DATAMONGO-1717 - Release 2.0 RC1 (Kay). -* DATAMONGO-1715 - Remove spring-data-mongodb-log4j module. -* DATAMONGO-1713 - MongoCredentialPropertyEditor improperly resolves the credential string. -* DATAMONGO-1705 - Deprecate cross-store support. -* DATAMONGO-1703 - Allow referencing views in object graphs containing circular dependencies. -* DATAMONGO-1702 - Switch repository implementation to use fragments. -* DATAMONGO-1697 - @Version used by @EnableMongoAuditing does not increase when using collection name in MongoTemplate's updateFirst. Unexpected, not described by reference documentation. -* DATAMONGO-1682 - Add partial index support to ReactiveIndexOperations. -* DATAMONGO-1678 - DefaultBulkOperations do not map Query and Update objects properly. -* DATAMONGO-1646 - Support reactive aggregation streaming. -* DATAMONGO-1519 - Change MongoTemplate.insertDBObjectList(…) to return List instead of List. - - -Changes in version 1.10.5.RELEASE (2017-07-24) ----------------------------------------------- -* DATAMONGO-1744 - Improve default setup for MappingMongoConverter. -* DATAMONGO-1729 - Open projection does not fetch all properties. -* DATAMONGO-1725 - Potential NullPointerException in CloseableIterableCursorAdapter. -* DATAMONGO-1723 - Fix unit tests after API changes in Spring Data Commons. -* DATAMONGO-1720 - Add JMH benchmark module. -* DATAMONGO-1718 - MongoTemplate.findAndRemoveAll(Query, String) delegates to wrong overload. -* DATAMONGO-1711 - Release 1.10.5 (Ingalls SR5). -* DATAMONGO-1703 - Allow referencing views in object graphs containing circular dependencies. -* DATAMONGO-1697 - @Version used by @EnableMongoAuditing does not increase when using collection name in MongoTemplate's updateFirst. Unexpected, not described by reference documentation. -* DATAMONGO-1678 - DefaultBulkOperations do not map Query and Update objects properly. 
- - -Changes in version 2.0.0.M4 (2017-06-14) ----------------------------------------- -* DATAMONGO-1716 - Upgrade to Reactive Streams driver 1.5.0. -* DATAMONGO-1714 - Deprecate MongoLog4jAppender. -* DATAMONGO-1712 - Adopt to ReactiveCrudRepository.findById(Publisher) and existsById(Publisher). -* DATAMONGO-1710 - Adopt to changed AnnotationUtils.getValue(…) and OperatorNode.getRightOperand() behavior. -* DATAMONGO-1707 - Upgrade to Reactor 3.1 M2. -* DATAMONGO-1699 - Upgrade travis.yml to use MongoDB 3.4. -* DATAMONGO-1695 - Make sure GridFsResource.getContentType() reads type from new location within file metadata. -* DATAMONGO-1693 - Support collation in ReactiveMongoTemplate.createCollection. -* DATAMONGO-1690 - Adapt to QuerydslPerdicateExecutor API changes. -* DATAMONGO-1689 - Provide Kotlin extensions for Class based methods in MongoOperations / ReactiveMongoOperations. -* DATAMONGO-1688 - Release 2.0 M4 (Kay). -* DATAMONGO-1687 - Creating capped collection with CollectionOptions.empty().capped(…) causes NPE. -* DATAMONGO-1686 - Upgarde to MongoDB reactive streams driver 1.4. -* DATAMONGO-1685 - Adapt QueryByExampleExecutor API changes. -* DATAMONGO-1619 - Use ReactiveQueryByExampleExecutor in ReactiveMongoRepository. -* DATAMONGO-1563 - Add TemplateWrapper to reduce method overloads on MongoTemplate. - - -Changes in version 1.10.4.RELEASE (2017-06-08) ----------------------------------------------- -* DATAMONGO-1699 - Upgrade travis.yml to use MongoDB 3.4. -* DATAMONGO-1672 - Release 1.10.4 (Ingalls SR4). -* DATAMONGO-1205 - CyclicPropertyReferenceException logged with stack trace. - - -Changes in version 1.9.11.RELEASE (2017-06-07) ----------------------------------------------- -* DATAMONGO-1671 - Release 1.9.11 (Hopper SR11). -* DATAMONGO-1205 - CyclicPropertyReferenceException logged with stack trace. 
- - -Changes in version 2.0.0.M3 (2017-05-09) ----------------------------------------- -* DATAMONGO-1684 - Adopt documentation to removed JodaTime DateMidnight support. -* DATAMONGO-1679 - Adapt to API changes in CrudRepository. -* DATAMONGO-1674 - Adapt to Range API changes. -* DATAMONGO-1668 - Adopt changed Mono and Flux error handling API. -* DATAMONGO-1667 - Rename @InfiniteStream to @Tailable. -* DATAMONGO-1666 - Constructor creation with bulk fetching of DBRefs uses List instead of collection type. -* DATAMONGO-1665 - Adapt to API changes in Reactor 3.1. -* DATAMONGO-1664 - Release 2.0 M3 (Kay). -* DATAMONGO-1662 - Section "Projection Expressions" contains error "Aggregate". -* DATAMONGO-1660 - Extract CustomConversions into Spring Data Commons. -* DATAMONGO-1518 - Add support for Collations. -* DATAMONGO-1325 - Add support for $sample to aggregation. -* DATAMONGO-1205 - CyclicPropertyReferenceException logged with stack trace. - - -Changes in version 1.9.10.RELEASE (2017-04-19) ----------------------------------------------- -* DATAMONGO-1670 - Release 1.9.10 (Hopper SR10). - - -Changes in version 1.10.3.RELEASE (2017-04-19) ----------------------------------------------- -* DATAMONGO-1669 - Release 1.10.3 (Ingalls SR3). - - -Changes in version 1.9.9.RELEASE (2017-04-19) ---------------------------------------------- -* DATAMONGO-1662 - Section "Projection Expressions" contains error "Aggregate". -* DATAMONGO-1645 - RuntimeException when logging BeforeDeleteEvent and AfterDeleteEvent. -* DATAMONGO-1639 - BeforeConvertEvent now sees old version values for update of entities. -* DATAMONGO-1634 - Release 1.9.9 (Hopper SR9). -* DATAMONGO-1421 - Repository with Enum argument: json can't serialize type. - - -Changes in version 1.10.2.RELEASE (2017-04-19) ----------------------------------------------- -* DATAMONGO-1666 - Constructor creation with bulk fetching of DBRefs uses List instead of collection type. 
-* DATAMONGO-1662 - Section "Projection Expressions" contains error "Aggregate". -* DATAMONGO-1645 - RuntimeException when logging BeforeDeleteEvent and AfterDeleteEvent. -* DATAMONGO-1639 - BeforeConvertEvent now sees old version values for update of entities. -* DATAMONGO-1633 - Release 1.10.2 (Ingalls SR2). -* DATAMONGO-1620 - Not able to set serverSelectionTimeout on MongoClientOptions using MongoClientOptionsFactoryBean. -* DATAMONGO-1421 - Repository with Enum argument: json can't serialize type. - - -Changes in version 2.0.0.M2 (2017-04-04) ----------------------------------------- -* DATAMONGO-1656 - Upgrade to MongoDB Driver 3.4.2 and Reactive Streams Driver 1.3.0. -* DATAMONGO-1655 - Remove obsolete build profiles. -* DATAMONGO-1648 - Rename getRepositoryFactoryClassName to getRepositoryFactoryBeanClassName. -* DATAMONGO-1647 - Use IdentifierAccessor.getRequiredIdentifier() in MongoTemplate.doSaveVersioned(…). -* DATAMONGO-1645 - RuntimeException when logging BeforeDeleteEvent and AfterDeleteEvent. -* DATAMONGO-1643 - Add namespace xsd for 2.x. -* DATAMONGO-1641 - Remove formatting config from the repository. -* DATAMONGO-1639 - BeforeConvertEvent now sees old version values for update of entities. -* DATAMONGO-1637 - Add support for aggregation result streaming. -* DATAMONGO-1620 - Not able to set serverSelectionTimeout on MongoClientOptions using MongoClientOptionsFactoryBean. -* DATAMONGO-1617 - IDs with non-autogeneratable type cannot be assigned automatically with custom event listeners. -* DATAMONGO-1610 - Support RxJava 2 repositories. -* DATAMONGO-1608 - NullPointerException with null argument when using IgnoreCase. -* DATAMONGO-1607 - Class Cast Exception when retrieve data Point with value equal integer. -* DATAMONGO-1605 - All SPEL values are converted to String. -* DATAMONGO-1603 - @Query Annotation Placeholder Issue. -* DATAMONGO-1602 - Remove references to single-argument assertion methods of Spring. 
-* DATAMONGO-1600 - GraphLookupOperationBuilder is not visible. -* DATAMONGO-1596 - Reference to wrong annotation in cross-store reference documentation. -* DATAMONGO-1594 - Update "what’s new" section in reference documentation. -* DATAMONGO-1590 - Entity new detection doesn't consider Persistable.isNew(). -* DATAMONGO-1589 - Update project documentation with the CLA tool integration. -* DATAMONGO-1588 - Repository will not accept Point subclass in spatial query. -* DATAMONGO-1587 - Migrate ticket references in test code to Spring Framework style. -* DATAMONGO-1586 - TypeBasedAggregationOperationContext.getReferenceFor(…) exposes fields with their leaf property name. -* DATAMONGO-1585 - Aggregation sort references target field of projected and aliased fields. -* DATAMONGO-1581 - ReactiveMongoRepositoryConfigurationExtension should be public. -* DATAMONGO-1578 - Add missing @Test annotation to ProjectionOperationUnitTests. -* DATAMONGO-1577 - Fix Reference and JavaDoc spelling issues. -* DATAMONGO-1576 - AbstractMongoEventListener methods not called when working with member fields. -* DATAMONGO-1575 - Treat String replacement values in StringBased queries as such unless they are SpEL expressions. -* DATAMONGO-1567 - Upgrade to a newer JDK version on TravisCI. -* DATAMONGO-1566 - Adapt API in RepositoryFactoryBeanSupport implementation. -* DATAMONGO-1565 - Placeholders in manually defined queries not escaped properly. -* DATAMONGO-1564 - Split up AggregationExpressions. -* DATAMONGO-1559 - Migrate reactive tests from TestSubscriber to StepVerifier. -* DATAMONGO-1558 - Upgrade travis-ci profile to MongoDB 3.4. -* DATAMONGO-1552 - Add $facet, $bucket and $bucketAuto aggregation stages. -* DATAMONGO-1551 - Add $graphLookup aggregation stage. -* DATAMONGO-1550 - Add $replaceRoot aggregation stage. -* DATAMONGO-1549 - Add $count aggregation stage. -* DATAMONGO-1548 - Add new MongoDB 3.4 aggregation operators. 
-* DATAMONGO-1547 - Register repository factory in spring.factories for multi-store support. -* DATAMONGO-1546 - Switch to new way of registering custom Jackson modules. -* DATAMONGO-1542 - Refactor CondOperator and IfNullOperator to children of AggregationExpressions. -* DATAMONGO-1540 - Add support for $map to aggregation. -* DATAMONGO-1539 - Add dedicated annotations for manually declared count and delete queries. -* DATAMONGO-1538 - Add support for $let to aggregation. -* DATAMONGO-1536 - Add missing aggregation operators. -* DATAMONGO-1535 - Release 2.0 M2 (Kay). - - -Changes in version 1.9.8.RELEASE (2017-03-02) ---------------------------------------------- -* DATAMONGO-1608 - NullPointerException with null argument when using IgnoreCase. -* DATAMONGO-1607 - Class Cast Exception when retrieve data Point with value equal integer. -* DATAMONGO-1605 - All SPEL values are converted to String. -* DATAMONGO-1603 - @Query Annotation Placeholder Issue. -* DATAMONGO-1602 - Remove references to single-argument assertion methods of Spring. -* DATAMONGO-1597 - Release 1.9.8 (Hopper SR8). - - -Changes in version 1.10.1.RELEASE (2017-03-02) ----------------------------------------------- -* DATAMONGO-1608 - NullPointerException with null argument when using IgnoreCase. -* DATAMONGO-1607 - Class Cast Exception when retrieve data Point with value equal integer. -* DATAMONGO-1605 - All SPEL values are converted to String. -* DATAMONGO-1603 - @Query Annotation Placeholder Issue. -* DATAMONGO-1602 - Remove references to single-argument assertion methods of Spring. -* DATAMONGO-1600 - GraphLookupOperationBuilder is not visible. -* DATAMONGO-1598 - Release 1.10.1 (Ingalls SR1). - - -Changes in version 1.9.7.RELEASE (2017-01-26) ---------------------------------------------- -* DATAMONGO-1596 - Reference to wrong annotation in cross-store reference documentation. -* DATAMONGO-1590 - Entity new detection doesn't consider Persistable.isNew(). 
-* DATAMONGO-1588 - Repository will not accept Point subclass in spatial query. -* DATAMONGO-1586 - TypeBasedAggregationOperationContext.getReferenceFor(…) exposes fields with their leaf property name. -* DATAMONGO-1585 - Aggregation sort references target field of projected and aliased fields. -* DATAMONGO-1578 - Add missing @Test annotation to ProjectionOperationUnitTests. -* DATAMONGO-1577 - Fix Reference and JavaDoc spelling issues. -* DATAMONGO-1576 - AbstractMongoEventListener methods not called when working with member fields. -* DATAMONGO-1573 - Release 1.9.7 (Hopper SR7). -* DATAMONGO-1508 - Documentation lacking for db-factory authentication-dbname. - - -Changes in version 1.10.0.RELEASE (2017-01-26) ----------------------------------------------- -* DATAMONGO-1596 - Reference to wrong annotation in cross-store reference documentation. -* DATAMONGO-1594 - Update "what’s new" section in reference documentation. -* DATAMONGO-1590 - Entity new detection doesn't consider Persistable.isNew(). -* DATAMONGO-1589 - Update project documentation with the CLA tool integration. -* DATAMONGO-1588 - Repository will not accept Point subclass in spatial query. -* DATAMONGO-1587 - Migrate ticket references in test code to Spring Framework style. -* DATAMONGO-1586 - TypeBasedAggregationOperationContext.getReferenceFor(…) exposes fields with their leaf property name. -* DATAMONGO-1585 - Aggregation sort references target field of projected and aliased fields. -* DATAMONGO-1578 - Add missing @Test annotation to ProjectionOperationUnitTests. -* DATAMONGO-1577 - Fix Reference and JavaDoc spelling issues. -* DATAMONGO-1576 - AbstractMongoEventListener methods not called when working with member fields. -* DATAMONGO-1575 - Treat String replacement values in StringBased queries as such unless they are SpEL expressions. -* DATAMONGO-1574 - Release 1.10 GA (Ingalls). -* DATAMONGO-1508 - Documentation lacking for db-factory authentication-dbname. 
- - -Changes in version 1.9.6.RELEASE (2016-12-21) ---------------------------------------------- -* DATAMONGO-1565 - Placeholders in manually defined queries not escaped properly. -* DATAMONGO-1534 - Type hint is missing when using BulkOperations.insert. -* DATAMONGO-1525 - Reading empty EnumSet fails. -* DATAMONGO-1522 - Release 1.9.6 (Hopper SR6). - - -Changes in version 1.10.0.RC1 (2016-12-21) ------------------------------------------- -* DATAMONGO-1567 - Upgrade to a newer JDK version on TravisCI. -* DATAMONGO-1566 - Adapt API in RepositoryFactoryBeanSupport implementation. -* DATAMONGO-1565 - Placeholders in manually defined queries not escaped properly. -* DATAMONGO-1564 - Split up AggregationExpressions. -* DATAMONGO-1558 - Upgrade travis-ci profile to MongoDB 3.4. -* DATAMONGO-1552 - Add $facet, $bucket and $bucketAuto aggregation stages. -* DATAMONGO-1551 - Add $graphLookup aggregation stage. -* DATAMONGO-1550 - Add $replaceRoot aggregation stage. -* DATAMONGO-1549 - Add $count aggregation stage. -* DATAMONGO-1548 - Add new MongoDB 3.4 aggregation operators. -* DATAMONGO-1547 - Register repository factory in spring.factories for multi-store support. -* DATAMONGO-1546 - Switch to new way of registering custom Jackson modules. -* DATAMONGO-1542 - Refactor CondOperator and IfNullOperator to children of AggregationExpressions. -* DATAMONGO-1540 - Add support for $map to aggregation. -* DATAMONGO-1539 - Add dedicated annotations for manually declared count and delete queries. -* DATAMONGO-1538 - Add support for $let to aggregation. -* DATAMONGO-1536 - Add missing aggregation operators. -* DATAMONGO-1534 - Type hint is missing when using BulkOperations.insert. -* DATAMONGO-1533 - Add support for SpEL in GroupOperations (aggregation). -* DATAMONGO-1530 - Support missing aggregation pipeline operators in expression support. -* DATAMONGO-1525 - Reading empty EnumSet fails. -* DATAMONGO-1521 - Aggregation.skip(...) expects int but new SkipOperation(...) 
supports long. -* DATAMONGO-1520 - Aggregation.match should accept CriteriaDefinition. -* DATAMONGO-1514 - SpringDataMongodbQuery should be public. -* DATAMONGO-1513 - Non-ObjectId identifiers generated by event listeners are not populated if documents are inserted as batch. -* DATAMONGO-1504 - Assert compatibility with MongoDB 3.4 server and driver. -* DATAMONGO-1500 - RuntimeException for query methods with fields declaration and Pageable parameters. -* DATAMONGO-1498 - MongoMappingContext doesn't know about types usually auto-detected (JodaTime, JDK 8 date time types). -* DATAMONGO-1493 - Typos in reference documentation. -* DATAMONGO-1492 - Interface AggregationExpression in package org.springframework.data.mongodb.core.aggregation should be public. -* DATAMONGO-1491 - Add support for $filter to aggregation. -* DATAMONGO-1490 - Change the XML data type of boolean flags to String. -* DATAMONGO-1486 - Changes to MappingMongoConverter Result in Class Cast Exception. -* DATAMONGO-1485 - Querydsl MongodbSerializer does not take registered converters for Enums into account. -* DATAMONGO-1480 - Add support for noCursorTimeout in Query. -* DATAMONGO-1479 - MappingMongoConverter.convertToMongoType causes StackOverflowError for parameterized map value types. -* DATAMONGO-1476 - New stream method only partially makes use of collection name. -* DATAMONGO-1471 - MappingMongoConverter attempts to set null value on potentially primitive identifier. -* DATAMONGO-1470 - AbstractMongoConfiguraton should allow multiple base package for @Document scanning. -* DATAMONGO-1469 - Release 1.10 RC1 (Ingalls). -* DATAMONGO-1467 - Support partial filter expressions for indexing introduced in MongoDB 3.2. -* DATAMONGO-1465 - String arguments passed to DefaultScriptOperations.execute() appear quoted in script. -* DATAMONGO-1454 - Add support for exists projection in repository query derivation. 
-* DATAMONGO-1406 - Query mapper does not use @Field field name when querying nested fields in combination with nested keywords. -* DATAMONGO-1328 - Add support for mongodb 3.2 specific arithmetic operators to aggregation. -* DATAMONGO-1327 - Add support for $stdDevSamp and $stdDevPop to aggregation ($group stage). -* DATAMONGO-1299 - Add support for date aggregations. -* DATAMONGO-1141 - Add support for $push $sort in Update. -* DATAMONGO-861 - Add support for $cond and $ifNull operators in aggregation operation. -* DATAMONGO-784 - Add support for $cmp in group or project aggregation. - - -Changes in version 2.0.0.M1 (2016-11-23) ----------------------------------------- -* DATAMONGO-1527 - Release 2.0 M1 (Kay). -* DATAMONGO-1509 - Inconsistent type alias placement in list of classes. -* DATAMONGO-1461 - Upgrade Hibernate/JPA dependencies to match Spring 5 baseline. -* DATAMONGO-1448 - Set up 2.0 development. -* DATAMONGO-1444 - Reactive support in Spring Data MongoDB. -* DATAMONGO-1176 - Use org.bson types instead of com.mongodb. -* DATAMONGO-563 - Upgrade to MongoDB driver 2.9.2 as it fixes a serious regression introduced in 2.9.0. -* DATAMONGO-562 - Cannot create entity with OptimisticLocking (@Version) and initial id. - - -Changes in version 1.9.5.RELEASE (2016-11-03) ---------------------------------------------- -* DATAMONGO-1521 - Aggregation.skip(...) expects int but new SkipOperation(...) supports long. -* DATAMONGO-1514 - SpringDataMongodbQuery should be public. -* DATAMONGO-1513 - Non-ObjectId identifiers generated by event listeners are not populated if documents are inserted as batch. -* DATAMONGO-1504 - Assert compatibility with MongoDB 3.4 server and driver. -* DATAMONGO-1502 - Release 1.9.5 (Hopper SR5). -* DATAMONGO-1500 - RuntimeException for query methods with fields declaration and Pageable parameters. 
- - -Changes in version 1.9.4.RELEASE (2016-09-29) ---------------------------------------------- -* DATAMONGO-1498 - MongoMappingContext doesn't know about types usually auto-detected (JodaTime, JDK 8 date time types). -* DATAMONGO-1497 - MappingMongoConverter's check for whether a value is available for a property should use DbObjectAccessor. -* DATAMONGO-1495 - Release 1.9.4 (Hopper SR4). - - -Changes in version 1.8.6.RELEASE (2016-09-29) ---------------------------------------------- -* DATAMONGO-1499 - Release 1.8.6 (Gosling SR6). -* DATAMONGO-1497 - MappingMongoConverter's check for whether a value is available for a property should use DbObjectAccessor. - - -Changes in version 1.8.5.RELEASE (2016-09-20) ---------------------------------------------- -* DATAMONGO-1494 - Release 1.8.5 (Gosling SR5). -* DATAMONGO-1492 - Interface AggregationExpression in package org.springframework.data.mongodb.core.aggregation should be public. -* DATAMONGO-1485 - Querydsl MongodbSerializer does not take registered converters for Enums into account. -* DATAMONGO-1479 - MappingMongoConverter.convertToMongoType causes StackOverflowError for parameterized map value types. -* DATAMONGO-1471 - MappingMongoConverter attempts to set null value on potentially primitive identifier. -* DATAMONGO-1465 - String arguments passed to DefaultScriptOperations.execute() appear quoted in script. -* DATAMONGO-1453 - Parse error into GeoJsonPoint if coordinates are "integers". -* DATAMONGO-1449 - Replace legacy for loop with foreach in MappingMongoConverter. -* DATAMONGO-1445 - @Id annotated attribute of type BigInteger does not work with query methods. -* DATAMONGO-1437 - DefaultDbRefResolver swallows cause of non DataAccessException translatable Exception. -* DATAMONGO-1425 - NOT_CONTAINS keyword issues CONTAINS query. -* DATAMONGO-1423 - Nested document update doesn't apply converters on embedded maps. -* DATAMONGO-1412 - Document mapping rules for Java types to MongoDB representation. 
-* DATAMONGO-1406 - Query mapper does not use @Field field name when querying nested fields in combination with nested keywords. -* DATAMONGO-1401 - GeoJsonPoint error on update. - - -Changes in version 1.9.3.RELEASE (2016-09-20) ---------------------------------------------- -* DATAMONGO-1493 - Typos in reference documentation. -* DATAMONGO-1492 - Interface AggregationExpression in package org.springframework.data.mongodb.core.aggregation should be public. -* DATAMONGO-1486 - Changes to MappingMongoConverter Result in Class Cast Exception. -* DATAMONGO-1485 - Querydsl MongodbSerializer does not take registered converters for Enums into account. -* DATAMONGO-1479 - MappingMongoConverter.convertToMongoType causes StackOverflowError for parameterized map value types. -* DATAMONGO-1471 - MappingMongoConverter attempts to set null value on potentially primitive identifier. -* DATAMONGO-1465 - String arguments passed to DefaultScriptOperations.execute() appear quoted in script. -* DATAMONGO-1463 - Upgrade to MongoDB Java driver 2.14.3. -* DATAMONGO-1453 - Parse error into GeoJsonPoint if coordinates are "integers". -* DATAMONGO-1450 - Release 1.9.3 (Hopper SR3). -* DATAMONGO-1406 - Query mapper does not use @Field field name when querying nested fields in combination with nested keywords. - - -Changes in version 1.10.0.M1 (2016-07-27) ------------------------------------------ -* DATAMONGO-1464 - Pagination - Optimize out the count query for paging. -* DATAMONGO-1463 - Upgrade to MongoDB Java driver 2.14.3. -* DATAMONGO-1462 - Integrate version badge from spring.io. -* DATAMONGO-1460 - User placeholder property for JSR-303 API. -* DATAMONGO-1459 - Add support for any-match mode in query-by-example. -* DATAMONGO-1457 - Add support for $slice in projection stage of aggregation. -* DATAMONGO-1456 - Add support for $diacriticInsensitivity to text search. -* DATAMONGO-1455 - Add support for $caseSensitive to text search. 
-* DATAMONGO-1453 - Parse error into GeoJsonPoint if coordinates are "integers". -* DATAMONGO-1449 - Replace legacy for loop with foreach in MappingMongoConverter. -* DATAMONGO-1437 - DefaultDbRefResolver swallows cause of non DataAccessException translatable Exception. -* DATAMONGO-1431 - Add overload of MongoOperations.stream(…) to take an explicit collection name. -* DATAMONGO-1425 - NOT_CONTAINS keyword issues CONTAINS query. -* DATAMONGO-1424 - Add support for "notLike" keyword in derived queries. -* DATAMONGO-1423 - Nested document update doesn't apply converters on embedded maps. -* DATAMONGO-1420 - Update Spring Data MongoDB version in Github readme. -* DATAMONGO-1419 - Remove deprecations in AbstractMongoEventListener. -* DATAMONGO-1418 - Add support for $out operand for Aggregation. -* DATAMONGO-1416 - Standard bootstrap issues warning in converter registration. -* DATAMONGO-1412 - Document mapping rules for Java types to MongoDB representation. -* DATAMONGO-1411 - Enable MongoDB build on TravisCI. -* DATAMONGO-1409 - Release 1.10 M1 (Ingalls). -* DATAMONGO-1404 - Add support of $max and $min update operations. -* DATAMONGO-1403 - Add maxExecutionTimeMs alias for @Meta(maxExcecutionTime). -* DATAMONGO-1399 - Allow adding hole to GeoJson Polygon. -* DATAMONGO-1394 - References not handled correctly when using QueryDSL. -* DATAMONGO-1391 - Support Mongo 3.2 syntax for $unwind in aggregation. -* DATAMONGO-1271 - Provide read lifecycle events when loading DBRefs. -* DATAMONGO-1194 - Improve DBRef resolution for collections. -* DATAMONGO-832 - Add support for $slice in Update.push. - - -Changes in version 1.9.2.RELEASE (2016-06-15) ---------------------------------------------- -* DATAMONGO-1449 - Replace legacy for loop with foreach in MappingMongoConverter. -* DATAMONGO-1437 - DefaultDbRefResolver swallows cause of non DataAccessException translatable Exception. -* DATAMONGO-1425 - NOT_CONTAINS keyword issues CONTAINS query. 
-* DATAMONGO-1423 - Nested document update doesn't apply converters on embedded maps. -* DATAMONGO-1416 - Standard bootstrap issues warning in converter registration. -* DATAMONGO-1412 - Document mapping rules for Java types to MongoDB representation. -* DATAMONGO-1411 - Enable MongoDB build on TravisCI. -* DATAMONGO-1410 - Release 1.9.2 (Hopper SR2). - - -Changes in version 1.9.1.RELEASE (2016-04-06) ---------------------------------------------- -* DATAMONGO-1408 - Release 1.9.1 (Hopper SR1). - - -Changes in version 1.9.0.RELEASE (2016-04-06) ---------------------------------------------- -* DATAMONGO-1407 - Add pull request template. -* DATAMONGO-1405 - Release 1.9 GA (Hopper). -* DATAMONGO-1401 - GeoJsonPoint error on update. -* DATAMONGO-1398 - Update documentation for Spring Data MongoDB 1.9. -* DATAMONGO-1396 - Exception when creating geo within Criteria using Aggregation. - - -Changes in version 1.9.0.RC1 (2016-03-18) ------------------------------------------ -* DATAMONGO-1400 - Adapt to rename of Spring Data Commons' Tuple to Pair. -* DATAMONGO-1397 - MongoTemplate.geoNear() do not log the Query. -* DATAMONGO-1392 - Release 1.9 RC1 (Hopper). -* DATAMONGO-1389 - Adapt test case to changes made for improved type prediction infrastructure. -* DATAMONGO-1387 - BasicQuery.fields().include() doesn't stick, even though Query.fields().include() does. -* DATAMONGO-1373 - Problem with custom annotations with AliasFor annotated attributes. -* DATAMONGO-1326 - Add support for $lookup to aggregation. -* DATAMONGO-1245 - Add support for Query-By-Example. - - -Changes in version 1.8.4.RELEASE (2016-02-23) ---------------------------------------------- -* DATAMONGO-1381 - Release 1.8.4 (Gosling SR4). -* DATAMONGO-1380 - Improve logging in MongoChangeSetPersister. -* DATAMONGO-1378 - Update reference documentation: Change Query.sort() to Query.with(Sort sort). -* DATAMONGO-1377 - Update JavaDoc: Use @EnableMongoRepositories instead of @EnableJpaRepositories. 
-* DATAMONGO-1376 - Move away from SimpleTypeInformationMapper.INSTANCE. -* DATAMONGO-1375 - Fix typo in MongoOperations JavaDoc. -* DATAMONGO-1361 - geoNear() queries fail when the accompanying query returns no results. -* DATAMONGO-1360 - Cannot query with JSR310. -* DATAMONGO-1270 - Update documentation to reflect deprecation of MongoFactoryBean. - - -Changes in version 1.9.0.M1 (2016-02-12) ----------------------------------------- -* DATAMONGO-1380 - Improve logging in MongoChangeSetPersister. -* DATAMONGO-1378 - Update reference documentation: Change Query.sort() to Query.with(Sort sort). -* DATAMONGO-1377 - Update JavaDoc: Use @EnableMongoRepositories instead of @EnableJpaRepositories. -* DATAMONGO-1376 - Move away from SimpleTypeInformationMapper.INSTANCE. -* DATAMONGO-1375 - Fix typo in MongoOperations JavaDoc. -* DATAMONGO-1372 - Add converter for Currency. -* DATAMONGO-1371 - Add code of conduct. -* DATAMONGO-1366 - Release 1.9 M1 (Hopper). -* DATAMONGO-1361 - geoNear() queries fail when the accompanying query returns no results. -* DATAMONGO-1360 - Cannot query with JSR310. -* DATAMONGO-1349 - Upgrade to mongo-java-driver 2.14.0. -* DATAMONGO-1346 - Cannot add two pullAll to an Update. -* DATAMONGO-1345 - Add support for projections on repository query methods. -* DATAMONGO-1342 - Potential NullPointerException in MongoQueryCreator.nextAsArray(…). -* DATAMONGO-1341 - Remove package cycle between core and core.index. -* DATAMONGO-1337 - General code quality improvements. -* DATAMONGO-1335 - DBObjectAccessor doesn't write properties correctly if multiple ones are nested. -* DATAMONGO-1334 - MapResultOptions limit not implemented. -* DATAMONGO-1324 - StringToObjectIdConverter not properly registered causing drop in performance on identifier conversion. -* DATAMONGO-1317 - Assert compatibility with MongoDB Java driver 3.2. -* DATAMONGO-1314 - Fix typo in Exception message. -* DATAMONGO-1312 - Cannot convert generic sub-document fields. 
-* DATAMONGO-1303 - Add build profile for MongoDB 3.1 driver. -* DATAMONGO-1302 - CustomConversions should allow registration of ConverterFactory. -* DATAMONGO-1297 - Unique Index on DBRef. -* DATAMONGO-1293 - MongoDbFactoryParser should allow id attribute in addition to client-uri. -* DATAMONGO-1291 - Allow @Document to be used as meta-annotation. -* DATAMONGO-1290 - @Query annotation with byte[] parameter does not work. -* DATAMONGO-1289 - NullPointerException when saving an object with no "id" field or @Id annotation. -* DATAMONGO-1288 - Update.inc(String, Number) method fails to work with AtomicInteger. -* DATAMONGO-1287 - MappingMongoConverter eagerly fetches and converts lazy DbRef to change them afterwards by proxies. -* DATAMONGO-1276 - MongoTemplate.CloseableIterableCursorAdapter does not null check return values from PersistenceExceptionTranslator. -* DATAMONGO-1270 - Update documentation to reflect deprecation of MongoFactoryBean. -* DATAMONGO-1238 - Support for Querydsl 4. -* DATAMONGO-1204 - ObjectPath equality check breaks due to changes MongoDB V3. -* DATAMONGO-1163 - Allow @Indexed to be used as meta-annotation. -* DATAMONGO-934 - Add support for the bulk operations introduced in MongoDB 2.6. - - -Changes in version 1.8.2.RELEASE (2015-12-18) ---------------------------------------------- -* DATAMONGO-1355 - Release 1.8.2 (Gosling). -* DATAMONGO-1346 - Cannot add two pullAll to an Update. -* DATAMONGO-1342 - Potential NullPointerException in MongoQueryCreator.nextAsArray(…). -* DATAMONGO-1337 - General code quality improvements. -* DATAMONGO-1335 - DBObjectAccessor doesn't write properties correctly if multiple ones are nested. -* DATAMONGO-1334 - MapResultOptions limit not implemented. -* DATAMONGO-1324 - StringToObjectIdConverter not properly registered causing drop in performance on identifier conversion. -* DATAMONGO-1317 - Assert compatibility with MongoDB Java driver 3.2. 
-* DATAMONGO-1290 - @Query annotation with byte[] parameter does not work.
-* DATAMONGO-1289 - NullPointerException when saving an object with no "id" field or @Id annotation.
-* DATAMONGO-1287 - MappingMongoConverter eagerly fetches and converts lazy DbRef to change them afterwards by proxies.
-* DATAMONGO-1204 - ObjectPath equality check breaks due to changes MongoDB V3.
-
-
-Changes in version 1.8.1.RELEASE (2015-11-15)
----------------------------------------------
-* DATAMONGO-1316 - Release 1.8.1 (Gosling).
-* DATAMONGO-1312 - Cannot convert generic sub-document fields.
-* DATAMONGO-1302 - CustomConversions should allow registration of ConverterFactory.
-* DATAMONGO-1297 - Unique Index on DBRef.
-* DATAMONGO-1293 - MongoDbFactoryParser should allow id attribute in addition to client-uri.
-* DATAMONGO-1276 - MongoTemplate.CloseableIterableCursorAdapter does not null check return values from PersistenceExceptionTranslator.
-
-
-Changes in version 1.6.4.RELEASE (2015-10-14)
----------------------------------------------
-* DATAMONGO-1304 - Release 1.6.4 (Evans).
-
-
-Changes in version 1.8.0.RELEASE (2015-09-01)
----------------------------------------------
-* DATAMONGO-1282 - Release 1.8 GA (Gosling).
-* DATAMONGO-1280 - Add what's new section to reference documentation.
-* DATAMONGO-1275 - Reference documentation should mention support for optimistic locking.
-* DATAMONGO-1269 - QueryMapper drops numeric keys in Maps.
-* DATAMONGO-1256 - Provide a collectionName in MongoMappingEvents.
-
-
-Changes in version 1.8.0.RC1 (2015-08-04)
------------------------------------------
-* DATAMONGO-1268 - Release 1.8 RC1 (Gosling).
-* DATAMONGO-1266 - Repository query methods returning a primitive do not detect domain type correctly.
-* DATAMONGO-1260 - Prevent accidental authentication misconfiguration on SimpleMongoDbFactory.
-* DATAMONGO-1257 - mongo:mongo-client namespace does not support usernames with a comma.
-* DATAMONGO-1254 - Group after Project in aggregation uses incorrect field name. -* DATAMONGO-1251 - update / findAndModify throws NullPointerException. -* DATAMONGO-1250 - Custom converter implementation not used in updates. -* DATAMONGO-1244 - StringBasedMongoQuery handles complex expression parameters incorrectly. -* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile. -* DATAMONGO-1236 - MongoOperations findAndModify and updateFirst do not include the _class in Map values. -* DATAMONGO-1234 - Fix typos in JavaDoc. -* DATAMONGO-1232 - IgnoreCase should escape queries. -* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path. -* DATAMONGO-1166 - ReadPreference not used for Aggregations. -* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types. -* DATAMONGO-1125 - Specify collection that triggers CommandFailureException. - - -Changes in version 1.7.2.RELEASE (2015-07-28) ---------------------------------------------- -* DATAMONGO-1261 - Release 1.7.2 (Fowler). -* DATAMONGO-1260 - Prevent accidental authentication misconfiguration on SimpleMongoDbFactory. -* DATAMONGO-1257 - mongo:mongo-client namespace does not support usernames with a comma. -* DATAMONGO-1254 - Group after Project in aggregation uses incorrect field name. -* DATAMONGO-1251 - update / findAndModify throws NullPointerException. -* DATAMONGO-1250 - Custom converter implementation not used in updates. - - -Changes in version 1.5.6.RELEASE (2015-07-01) ---------------------------------------------- -* DATAMONGO-1246 - Release 1.5.6 (Dijkstra). -* DATAMONGO-1234 - Fix typos in JavaDoc. -* DATAMONGO-1232 - IgnoreCase should escape queries. -* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path. -* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility. -* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up. 
-* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item. -* DATAMONGO-1180 - Incorrect exception message creation in PartTreeMongoQuery. -* DATAMONGO-1166 - ReadPreference not used for Aggregations. -* DATAMONGO-1155 - Upgrade mongo-next build profiles to Java driver version 2.13.0. - - -Changes in version 1.6.3.RELEASE (2015-07-01) ---------------------------------------------- -* DATAMONGO-1247 - Release 1.6.3 (Evans). -* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile. -* DATAMONGO-1234 - Fix typos in JavaDoc. -* DATAMONGO-1232 - IgnoreCase should escape queries. -* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path. -* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility. -* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up. -* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation. -* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality. -* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item. -* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release. -* DATAMONGO-1180 - Incorrect exception message creation in PartTreeMongoQuery. -* DATAMONGO-1166 - ReadPreference not used for Aggregations. -* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types. -* DATAMONGO-1155 - Upgrade mongo-next build profiles to Java driver version 2.13.0. -* DATAMONGO-1153 - Fix documentation build. -* DATAMONGO-1133 - Field aliasing is not honored in Aggregation operations. -* DATAMONGO-1124 - Switch log level for cyclic reference index warnings from WARN to INFO. -* DATAMONGO-1081 - Improve documentation on field mapping semantics. 
- - -Changes in version 1.7.1.RELEASE (2015-06-30) ---------------------------------------------- -* DATAMONGO-1248 - Release 1.7.1 (Fowler). -* DATAMONGO-1242 - Update mongo-java-driver to 3.0.2 in mongo3 profile. -* DATAMONGO-1234 - Fix typos in JavaDoc. -* DATAMONGO-1232 - IgnoreCase should escape queries. -* DATAMONGO-1229 - MongoQueryCreator incorrectly rejects ignoreCase on nested String path. -* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility. -* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up. -* DATAMONGO-1216 - Authentication mechanism PLAIN changes to SCRAM-SHA-1. -* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation. -* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality. -* DATAMONGO-1208 - MongoTemplate.stream(…) does not consider limit, order, sort etc. -* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item. -* DATAMONGO-1202 - Indexed annotation problems under generics. -* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release. -* DATAMONGO-1193 - Prevent unnecessary database lookups when resolving DBRefs on 2.x driver. -* DATAMONGO-1166 - ReadPreference not used for Aggregations. -* DATAMONGO-1157 - Throw meaningful exception when @DbRef is used with unsupported types. - - -Changes in version 1.8.0.M1 (2015-06-02) ----------------------------------------- -* DATAMONGO-1228 - Release 1.8 M1 (Gosling). -* DATAMONGO-1224 - Assert Spring Framework 4.2 compatibility. -* DATAMONGO-1221 - Remove relative reference to parent POM to make sure the right Spring version is picked up. -* DATAMONGO-1218 - Deprecate non-MongoClient related configuration options in XML namespace. -* DATAMONGO-1216 - Authentication mechanism PLAIN changes to SCRAM-SHA-1. 
-* DATAMONGO-1213 - Include new section on Spring Data and Spring Framework dependencies in reference documentation. -* DATAMONGO-1211 - Adapt API changes in Spring Data Commons to simplify custom repository base class registration. -* DATAMONGO-1210 - Inconsistent property order of _class type hint breaks document equality. -* DATAMONGO-1208 - MongoTemplate.stream(…) does not consider limit, order, sort etc. -* DATAMONGO-1207 - MongoTemplate#doInsertAll throws NullPointerException when passed Collection contains a null item. -* DATAMONGO-1202 - Indexed annotation problems under generics. -* DATAMONGO-1196 - Upgrade build profiles after MongoDB 3.0 Java driver release. -* DATAMONGO-1193 - Prevent unnecessary database lookups when resolving DBRefs on 2.x driver. -* DATAMONGO-1192 - Switch back to Spring 4.1's CollectionFactory. -* DATAMONGO-1134 - Add support for $geoIntersects. -* DATAMONGO-990 - Add support for SpEL expressions in @Query. - - -Changes in version 1.7.0.RELEASE (2015-03-23) ---------------------------------------------- -* DATAMONGO-1189 - Release 1.7 GA. -* DATAMONGO-1181 - Add Jackson Module for GeoJSON types. -* DATAMONGO-1180 - Incorrect exception message creation in PartTreeMongoQuery. -* DATAMONGO-1179 - Update reference documentation. -* DATAMONGO-1124 - Switch log level for cyclic reference index warnings from WARN to INFO. -* DATAMONGO-979 - Add support for $size expression in project and group aggregation pipeline. - - -Changes in version 1.7.0.RC1 (2015-03-05) ------------------------------------------ -* DATAMONGO-1173 - Release 1.7 RC1. -* DATAMONGO-1167 - Add 'findAll' method to QueryDslMongoRepository which accepts a querydsl Predicate and a Sort. -* DATAMONGO-1165 - Add support for Java 8 Stream as return type in repositories. -* DATAMONGO-1162 - Adapt test cases to semantic changes in Spring Data Commons AuditingHandler API. -* DATAMONGO-1158 - Assert compatibility with MongoDB 3.0. 
-* DATAMONGO-1154 - Upgrade to MongoDB Java driver 2.13.0. -* DATAMONGO-1153 - Fix documentation build. -* DATAMONGO-1148 - Use EclipseLink provided JPA API JAR. -* DATAMONGO-1147 - Remove manual array copy. -* DATAMONGO-1146 - Add 'exists' method to QueryDslMongoRepository which accepts a querydsl Predicate. -* DATAMONGO-1145 - Upgrade MongoDB Java driver to 2.12.5. -* DATAMONGO-1139 - MongoQueryCreator must not create $nearSphere query for neutral Distance. -* DATAMONGO-1136 - Use $geoWithin instead of $within for geo queries. -* DATAMONGO-1135 - Add support for $geometry to support GeoJSON queries. -* DATAMONGO-1132 - The sample does not match the logical result in the MongoDB repositories section of the documentation. -* DATAMONGO-1131 - Register converters for ThreeTen back port by default. -* DATAMONGO-1129 - Upgrade to latest MongoDB Java driver. -* DATAMONGO-1127 - Add support for geoNear queries with distance information. -* DATAMONGO-1126 - Repository keyword query findByInId with pageable not returning correctly. -* DATAMONGO-1123 - geoNear, does not return all matching elements, it returns only a max of 100 documents. -* DATAMONGO-1121 - "Cycle found" false positive. -* DATAMONGO-1120 - Pageable queries timeout or return incorrect counts. -* DATAMONGO-1118 - Custom converters not used for map keys. -* DATAMONGO-1110 - Add support for $minDistance to NearQuery. -* DATAMONGO-1082 - Improve JavaDoc and reference documentation on alias usage in aggregation framework support. -* DATAMONGO-1081 - Improve documentation on field mapping semantics. -* DATAMONGO-712 - Another round of potential performance improvements. -* DATAMONGO-479 - Support calling of MongoDB stored javascripts. - - -Changes in version 1.6.2.RELEASE (2015-01-28) ---------------------------------------------- -* DATAMONGO-1148 - Use EclipseLink provided JPA API JAR. -* DATAMONGO-1147 - Remove manual array copy. -* DATAMONGO-1145 - Upgrade MongoDB Java driver to 2.12.5. 
-* DATAMONGO-1144 - Release 1.6.2. -* DATAMONGO-1139 - MongoQueryCreator must not create $nearSphere query for neutral Distance. -* DATAMONGO-1132 - The sample does not match the logical result in the MongoDB repositories section of the documentation. -* DATAMONGO-1127 - Add support for geoNear queries with distance information. -* DATAMONGO-1126 - Repository keyword query findByInId with pageable not returning correctly. -* DATAMONGO-1123 - geoNear, does not return all matching elements, it returns only a max of 100 documents. -* DATAMONGO-1121 - "Cycle found" false positive. -* DATAMONGO-1120 - Pageable queries timeout or return incorrect counts. -* DATAMONGO-1118 - Custom converters not used for map keys. -* DATAMONGO-1108 - BasicMongoPersistentEntity doesn't need to parse expression on every invocation. -* DATAMONGO-1096 - RuntimeExceptions during debug query printing in MongoTemplate. -* DATAMONGO-1094 - Wrong reference to @DocumentField in error message. -* DATAMONGO-1093 - BasicQuery missing hashCode() and equals(…) methods. -* DATAMONGO-1087 - Incorrect warning for MongoPersistentEntityIndexResolver$CyclicPropertyReferenceException: Found cycle for field... -* DATAMONGO-1085 - Sort can not use the metamodel classes generated by QueryDSL. -* DATAMONGO-1082 - Improve JavaDoc and reference documentation on alias usage in aggregation framework support. -* DATAMONGO-1078 - @Query annotated repository query fails to map complex Id structure. -* DATAMONGO-1075 - Correctly evaluate CONTAINS keyword on collection properties. -* DATAMONGO-1054 - Improve performance of saving entities by using insert(…) if possible. -* DATAMONGO-1043 - SpEL Expressions in @Document annotations are not re-evaluated for query executions. -* DATAMONGO-712 - Another round of potential performance improvements. - - -Changes in version 1.5.5.RELEASE (2015-01-27) ---------------------------------------------- -* DATAMONGO-1148 - Use EclipseLink provided JPA API JAR. 
-* DATAMONGO-1147 - Remove manual array copy.
-* DATAMONGO-1143 - Release 1.5.5.
-* DATAMONGO-1139 - MongoQueryCreator must not create $nearSphere query for neutral Distance.
-* DATAMONGO-1123 - geoNear, does not return all matching elements, it returns only a max of 100 documents.
-* DATAMONGO-1121 - "Cycle found" false positive.
-* DATAMONGO-1118 - Custom converters not used for map keys.
-* DATAMONGO-1096 - RuntimeExceptions during debug query printing in MongoTemplate.
-* DATAMONGO-1094 - Wrong reference to @DocumentField in error message.
-* DATAMONGO-1087 - Incorrect warning for MongoPersistentEntityIndexResolver$CyclicPropertyReferenceException: Found cycle for field...
-* DATAMONGO-1078 - @Query annotated repository query fails to map complex Id structure.
-* DATAMONGO-1075 - Correctly evaluate CONTAINS keyword on collection properties.
-* DATAMONGO-1072 - Query placeholders in keys no longer correctly substituted.
-* DATAMONGO-1068 - elemMatch of Class Criteria fails to build special criteria.
-* DATAMONGO-1063 - IllegalStateException using any().in().
-* DATAMONGO-1062 - Fix failing test in ServerAddressPropertyEditorUnitTests.
-* DATAMONGO-1058 - Using @Field("foo") with @Dbref breaking behavior.
-* DATAMONGO-1045 - Make sure Spring Data MongoDB can build against Spring 4.1.
-* DATAMONGO-1043 - SpEL Expressions in @Document annotations are not re-evaluated for query executions.
-* DATAMONGO-1040 - deleteAll repository query don't use EntityMetadata collection name.
-* DATAMONGO-1039 - Polish implementation for cleaning up after tests.
-* DATAMONGO-712 - Another round of potential performance improvements.
-
-
-Changes in version 1.7.0.M1 (2014-12-01)
-----------------------------------------
-* DATAMONGO-1108 - BasicMongoPersistentEntity doesn't need to parse expression on every invocation.
-* DATAMONGO-1106 - Release 1.7 M1.
-* DATAMONGO-1105 - Add implementation for new QueryDslPredicateExecutor.findAll(OrderSpecifier... orders).
-* DATAMONGO-1102 - Auto-register JSR-310 converters to support JDK 8 date/time types.
-* DATAMONGO-1101 - Add support for $bit to Update.
-* DATAMONGO-1100 - Adapt to new PersistentPropertyAccessor API.
-* DATAMONGO-1097 - Add support for $mul to Update.
-* DATAMONGO-1096 - RuntimeExceptions during debug query printing in MongoTemplate.
-* DATAMONGO-1094 - Wrong reference to @DocumentField in error message.
-* DATAMONGO-1093 - BasicQuery missing hashCode() and equals(…) methods.
-* DATAMONGO-1092 - Ensure compatibility with MongoDB 2.8.0.rc0 and java driver 2.13.0-rc0.
-* DATAMONGO-1087 - Incorrect warning for MongoPersistentEntityIndexResolver$CyclicPropertyReferenceException: Found cycle for field…
-* DATAMONGO-1085 - Sort can not use the metamodel classes generated by QueryDSL.
-* DATAMONGO-1080 - AbstractMongoQuery must not eagerly post-process results.
-* DATAMONGO-1078 - @Query annotated repository query fails to map complex Id structure.
-* DATAMONGO-1077 - Update removes positional operator $ in key when used on DBRef property.
-* DATAMONGO-1076 - Finalizer hit db on lazy dbrefs.
-* DATAMONGO-1075 - Correctly evaluate CONTAINS keyword on collection properties.
-* DATAMONGO-1072 - Query placeholders in keys no longer correctly substituted.
-* DATAMONGO-1070 - Query annotation with $oid leads to a parse error.
-* DATAMONGO-1068 - elemMatch of Class Criteria fails to build special criteria.
-* DATAMONGO-1063 - IllegalStateException using any().in().
-* DATAMONGO-1062 - Fix failing test in ServerAddressPropertyEditorUnitTests.
-* DATAMONGO-1058 - Using @Field("foo") with @Dbref breaking behavior.
-* DATAMONGO-1057 - AbstractMongoQuery.SlicedExecution#execute() skips every nth element.
-* DATAMONGO-1054 - Improve performance of saving entities by using insert(…) if possible.
-* DATAMONGO-1053 - In 1.6, any field in a mapped object named "language" will fail to map if it is a type other than String.
-* DATAMONGO-1050 - SimpleMongoRepository.findById(id, class) don't return ids for nested documents.
-* DATAMONGO-1049 - Reserved field name 'language' causes trouble.
-* DATAMONGO-1043 - SpEL Expressions in @Document annotations are not re-evaluated for query executions.
-* DATAMONGO-943 - Add support for $position to Update $push $each.
-
-
-Changes in version 1.6.1.RELEASE (2014-10-30)
----------------------------------------------
-* DATAMONGO-1080 - AbstractMongoQuery must not eagerly post-process results.
-* DATAMONGO-1079 - Release 1.6.1.
-* DATAMONGO-1077 - Update removes positional operator $ in key when used on DBRef property.
-* DATAMONGO-1076 - Finalizer hit db on lazy dbrefs.
-* DATAMONGO-1072 - Query placeholders in keys no longer correctly substituted.
-* DATAMONGO-1070 - Query annotation with $oid leads to a parse error.
-* DATAMONGO-1068 - elemMatch of Class Criteria fails to build special criteria.
-* DATAMONGO-1063 - IllegalStateException using any().in().
-* DATAMONGO-1062 - Fix failing test in ServerAddressPropertyEditorUnitTests.
-* DATAMONGO-1058 - Using @Field("foo") with @Dbref breaking behavior.
-* DATAMONGO-1057 - AbstractMongoQuery.SlicedExecution#execute() skips every nth element.
-* DATAMONGO-1053 - In 1.6, any field in a mapped object named "language" will fail to map if it is a type other than String.
-* DATAMONGO-1049 - Reserved field name 'language' causes trouble.
-
-
-Changes in version 1.6.0.RELEASE (2014-09-05)
----------------------------------------------
-* DATAMONGO-1046 - Release 1.6 GA.
-* DATAMONGO-1045 - Make sure Spring Data MongoDB can build against Spring 4.1.
-* DATAMONGO-1040 - deleteAll repository query don't use EntityMetadata collection name.
-* DATAMONGO-1039 - Polish implementation for cleaning up after tests.
-* DATAMONGO-1038 - Assert Mongo instances cleaned up properly after test runs.
-* DATAMONGO-1036 - Custom repository implementations are not picked up when using CDI.
-* DATAMONGO-1034 - Improve error message when trying to convert incompatible types.
-* DATAMONGO-1032 - Polish Asciidoctor documentation.
-* DATAMONGO-1030 - Query methods returning a single entity does not work with projecting types.
-* DATAMONGO-1027 - Collection inherits complex index from embedded class/object.
-* DATAMONGO-1025 - Duplicate index creation on embedded documents.
-
-
-Changes in version 1.5.4.RELEASE (2014-08-27)
----------------------------------------------
-* DATAMONGO-1038 - Assert Mongo instances cleaned up properly after test runs.
-* DATAMONGO-1034 - Improve error message when trying to convert incompatible types.
-* DATAMONGO-1033 - Release 1.5.4.
-* DATAMONGO-1030 - Query methods returning a single entity does not work with projecting types.
-* DATAMONGO-1027 - Collection inherits complex index from embedded class/object.
-* DATAMONGO-1025 - Duplicate index creation on embedded documents.
-* DATAMONGO-1020 - LimitOperator should be a public class.
-* DATAMONGO-1008 - IndexOperations fail, when "2dsphere" index is present.
-
-
-Changes in version 1.6.0.RC1 (2014-08-13)
------------------------------------------
-* DATAMONGO-1024 - Upgrade to Java driver 2.12.3.
-* DATAMONGO-1021 - Release 1.6 RC1.
-* DATAMONGO-1020 - LimitOperator should be a public class.
-* DATAMONGO-1019 - Correct examples in reference documentation.
-* DATAMONGO-1017 - Add support for custom implementations in CDI repositories.
-* DATAMONGO-1016 - Remove deprecations in geospatial area.
-* DATAMONGO-1015 - Move to Asciidoctor for reference documentation.
-* DATAMONGO-1012 - Proxies for lazy DBRefs with field access should have their id values resolved eagerly.
-* DATAMONGO-1009 - Adapt to new multi-store configuration detection.
-* DATAMONGO-1008 - IndexOperations fail, when "2dsphere" index is present.
-* DATAMONGO-1005 - Improve cycle-detection for DbRef's.
-* DATAMONGO-1002 - Update.toString(…) might throw exception.
-* DATAMONGO-1001 - Can't save/update lazy load object. -* DATAMONGO-999 - Multiple Mongo Instances always have the same MongoOption Reference - MongoOptionsFactoryBean has a static instance of MongoOptions. -* DATAMONGO-996 - Pagination broken after introduction of the support for top/first. -* DATAMONGO-995 - Parameter binding in String-based query does not bind all parameters. -* DATAMONGO-993 - The system variables $$CURRENT and $$ROOT not handled correctly. -* DATAMONGO-992 - Entity can't be deserialized if @TypeAlias is used. -* DATAMONGO-991 - Adapt to deprecation removals in Spring Data Commons. -* DATAMONGO-989 - MatchOperation should accept CriteriaDefinition. -* DATAMONGO-987 - Problem with lazy loading in @DBRef when getting data using MongoTemplate. -* DATAMONGO-974 - synthetic field target's name is returned instead of the alias name. -* DATAMONGO-973 - Add support for deriving full text queries. -* DATAMONGO-957 - Add support for query modifiers. -* DATAMONGO-420 - Extra quotes being added to @Query values and fields. - - -Changes in version 1.5.2.RELEASE (2014-07-28) ---------------------------------------------- -* DATAMONGO-1007 - Release 1.5.2. -* DATAMONGO-1002 - Update.toString(…) might throw exception. -* DATAMONGO-1001 - Can't save/update lazy load object. -* DATAMONGO-999 - Multiple Mongo Instances always have the same MongoOption Reference - MongoOptionsFactoryBean has a static instance of MongoOptions. -* DATAMONGO-995 - Parameter binding in String-based query does not bind all parameters. -* DATAMONGO-992 - Entity can't be deserialized if @TypeAlias is used. -* DATAMONGO-989 - MatchOperation should accept CriteriaDefinition. -* DATAMONGO-987 - Problem with lazy loading in @DBRef when getting data using MongoTemplate. -* DATAMONGO-983 - Remove links to forum.spring.io. -* DATAMONGO-982 - Assure compatibility with upcoming MongoDB driver versions. 
-* DATAMONGO-978 - deleteBy/removeBy repository methods don't set type information in Before/AfterDeleteEvent. -* DATAMONGO-972 - References are not handled properly in Querydsl integration. -* DATAMONGO-969 - String @id field is not mapped to ObjectId when using QueryDSL .id.in(Collection). -* DATAMONGO-420 - Extra quotes being added to @Query values and fields. - - -Changes in version 1.6.0.M1 (2014-07-10) ----------------------------------------- -* DATAMONGO-983 - Remove links to forum.spring.io. -* DATAMONGO-982 - Assure compatibility with upcoming MongoDB driver versions. -* DATAMONGO-981 - Release 1.6 M1. -* DATAMONGO-980 - Use meta annotations from spring data commons for @Score. -* DATAMONGO-978 - deleteBy/removeBy repository methods don't set type information in Before/AfterDeleteEvent. -* DATAMONGO-977 - Adapt to Spring 4 upgrade. -* DATAMONGO-976 - Add support for reading $meta projection on textScore into document. -* DATAMONGO-975 - Add support for date/time operators in aggregation framework. -* DATAMONGO-973 - Add support for deriving full text queries. -* DATAMONGO-972 - References are not handled properly in Querydsl integration. -* DATAMONGO-970 - Id query cannot be created if object to remove is DBObject. -* DATAMONGO-969 - String @id field is not mapped to ObjectId when using QueryDSL .id.in(Collection). -* DATAMONGO-968 - Add support for $meta projections and sorting for textScore metadata. -* DATAMONGO-963 - Compound index with expireAfterSeconds causes repeating error on mongodb server. -* DATAMONGO-962 - “Cycle found” with Spring Data Mongo 1.5. -* DATAMONGO-960 - Allow to pass options to the Aggregation Pipeline. -* DATAMONGO-958 - Move to FieldNamingStrategy SPI in Spring Data Commons. -* DATAMONGO-954 - Add support for System Variables in Aggregations. -* DATAMONGO-953 - Update object should have a proper equals/hashcode/toString. -* DATAMONGO-952 - @Query annotation does not work with only field restrictions. 
-* DATAMONGO-950 - Add support for limiting the query result in the query derivation mechanism. -* DATAMONGO-949 - CyclicPropertyReferenceException in versions 1.5.0 + for MongoDB. -* DATAMONGO-948 - Assertion error in MongoTemplate.getMappedSortObject. -* DATAMONGO-944 - Add support $currentDate to Update. -* DATAMONGO-938 - Exception when creating geo within Criteria using MapReduce. -* DATAMONGO-937 - Add support for creating text index. -* DATAMONGO-850 - Add support for text search using $text. -* DATAMONGO-745 - @Query($in) and Pageable in result Page total = 0. - - -Changes in version 1.4.3.RELEASE (2014-06-18) ---------------------------------------------- -* DATAMONGO-955 - Release 1.4.3. -* DATAMONGO-953 - Update object should have a proper equals/hashcode/toString. -* DATAMONGO-952 - @Query annotation does not work with only field restrictions. -* DATAMONGO-948 - Assertion error in MongoTemplate.getMappedSortObject. -* DATAMONGO-938 - Exception when creating geo within Criteria using MapReduce. -* DATAMONGO-924 - Aggregation not working with as() method in project() pipeline operator. -* DATAMONGO-920 - Fix debug messages for delete events in AbstractMongoEventListener. -* DATAMONGO-917 - DefaultDbRefResolver throws NPE when bundled into an uberjar. -* DATAMONGO-914 - Improve resolving of LazyLoading proxies for classes that override equals/hashcode. -* DATAMONGO-913 - Can't query using lazy DBRef objects. -* DATAMONGO-912 - Aggregation#project followed by Aggregation#match with custom converter causes IllegalArgumentException. -* DATAMONGO-898 - MapReduce seems not to work when javascript not being escaped. -* DATAMONGO-847 - Allow usage of Criteria within Update. -* DATAMONGO-745 - @Query($in) and Pageable in result Page total = 0. -* DATAMONGO-647 - Using "OrderBy" in "query by method name" ignores the @Field annotation for field alias. 
- - -Changes in version 1.5.0.RELEASE (2014-05-20) ---------------------------------------------- -* DATAMONGO-936 - Release 1.5 GA. -* DATAMONGO-929 - Index key should be the properties dot path when creating index using @Indexed / @CompoundIndex. -* DATAMONGO-928 - Error when using field-naming-strategy-ref. -* DATAMONGO-926 - Stack Overflow Error with 1.5.0.RC1 Release. -* DATAMONGO-925 - MappingMongoConverterParser is incorrectly rejecting field-naming-strategy-ref XML configuration. -* DATAMONGO-647 - Using "OrderBy" in "query by method name" ignores the @Field annotation for field alias. -* DATAMONGO-367 - @Indexed field in embedded Object creates new collection. - - -Changes in version 1.5.0.RC1 (2014-05-02) ------------------------------------------ -* DATAMONGO-924 - Aggregation not working with as() method in project() pipeline operator. -* DATAMONGO-921 - Upgrade to MongoDB Java driver 2.12.1. -* DATAMONGO-920 - Fix debug messages for delete events in AbstractMongoEventListener. -* DATAMONGO-919 - Release 1.5 RC1. -* DATAMONGO-917 - DefaultDbRefResolver throws NPE when bundled into an uberjar. -* DATAMONGO-914 - Improve resolving of LazyLoading proxies for classes that override equals/hashcode. -* DATAMONGO-913 - Can't query using lazy DBRef objects. -* DATAMONGO-912 - Aggregation#project followed by Aggregation#match with custom converter causes IllegalArgumentException. -* DATAMONGO-910 - Upgrade to latest MongoDB Java driver (2.12). -* DATAMONGO-909 - @CompoundIndex on inherited entity classes. -* DATAMONGO-908 - Nested field references in group operations broken. -* DATAMONGO-907 - Assert compatibility with mongodb 2.6.0. -* DATAMONGO-905 - Remove obsolete CGLib dependency from cross store module. -* DATAMONGO-901 - MongoRepositoryConfigurationExtension fails to invoke super method. -* DATAMONGO-899 - Overhaul automatic index creation. -* DATAMONGO-898 - MapReduce seems not to work when javascript not being escaped. 
-* DATAMONGO-897 - FindAndUpdate broken when using @DbRef and interface as target. -* DATAMONGO-896 - Assert compatibility with latest MongoDB Java driver. -* DATAMONGO-895 - Use most specific type for checks against values in DBObjects. -* DATAMONGO-893 - Mapping Convertor does not remove "_class" property on collection of embedded objects. -* DATAMONGO-892 - can't be configured as nested bean definition. -* DATAMONGO-888 - Mapping is not applied to SortObject during queries. -* DATAMONGO-866 - Add new field naming strategy and make it configurable through XML/Java config. -* DATAMONGO-847 - Allow usage of Criteria within Update. -* DATAMONGO-827 - @Indexed and @CompundIndex cannot be created without giving index name. - - -Changes in version 1.4.2.RELEASE (2014-04-15) ---------------------------------------------- -** Fix - * [DATAMONGO-880] - Improved handling of persistence of lazy-loaded DBRefs. - * [DATAMONGO-884] - Improved handling for Object methods in LazyLoadingInterceptor. - * [DATAMONGO-887] - Added unit tests to verify TreeMaps can be converted. - * [DATAMONGO-888] - Sorting now considers mapping information. - * [DATAMONGO-890] - Fixed Point.toString(). - * [DATAMONGO-892] - Reject nested MappingMongoConverter declarations in XML. - * [DATAMONGO-893] - Converter must not write "_class" information for know types. - * [DATAMONGO-897] - Fixed potential NullPointerException in QueryMapper. - * [DATAMONGO-908] - Support for nested field references in group operations. - -** Improvement - * [DATAMONGO-881] - Allow custom conversions to override default conversions. - -** Task - * [DATAMONGO-895] - Use most specific type for checks against values in DBObjects. - * [DATAMONGO-896] - Assert compatibility with latest MongoDB Java driver. - * [DATAMONGO-905] - Removed obsolete dependency to CGLib from cross-store support. - * [DATAMONGO-907] - Assert compatibility with mongodb 2.6. 
- * [DATAMONGO-911] - Release 1.4.2 - -Changes in version 1.5.0.M1 (2014-03-31) ----------------------------------------- -** Fix - * [DATAMONGO-471] - Update operation $addToSet does not support adding a list with $each. - * [DATAMONGO-773] - Spring Data MongoDB projection search on @DBref fields. - * [DATAMONGO-821] - MappingException for $size queries on subcollections containing dbrefs. - * [DATAMONGO-829] - NearQuery, when used in conjunction with a Query, it sets num=0, unless Query specifies otherwise. - * [DATAMONGO-833] - EnumSet is not handled correctly. - * [DATAMONGO-843] - Unable to use @EnableMongoAuditing annotation in Java config. - * [DATAMONGO-862] - Update Array Field Using Positional Operator ($) Does Not Work. - * [DATAMONGO-863] - QueryMapper.getMappedValue Fails To Handle Arrays Mapped To $in. - * [DATAMONGO-868] - findAndModify method does not increment @Version field. - * [DATAMONGO-871] - Declarative query method with array return type causes NPE. - * [DATAMONGO-877] - AbstractMongoConfiguration.getMappingBasePackage() throws NullPointerException if config class resides in default package. - * [DATAMONGO-880] - Error when trying to persist an object containing a DBRef which was lazy loaded. - * [DATAMONGO-884] - Potential NullPointerException for lazy DBRefs. - * [DATAMONGO-887] - Repository not instantiated when entity contains field of type TreeMap. - * [DATAMONGO-890] - Point class toString method is confusing. - -** Improvement - * [DATAMONGO-809] - Make filename optional in GridFsOperations doc and GridFsTemplate implementation. - * [DATAMONGO-858] - Add support for common geospatial structures. - * [DATAMONGO-865] - Adjust test dependencies to avoid ClassNotFoundException during test runs. - * [DATAMONGO-881] - Cannot override default converters in CustomConversions. - * [DATAMONGO-882] - Adapt to removal of obsolete generics in BeanWrapper. 
- -** New Feature - * [DATAMONGO-566] - Provide support for removeBy… / deleteBy… methods like for findBy… on repository interfaces. - * [DATAMONGO-870] - Add support for sliced query method execution. - -** Task - * [DATAMONGO-876] - Adapt to changes introduced for property access configuration. - * [DATAMONGO-883] - Update auditing configuration to enable auditing annotations on accessors. - * [DATAMONGO-859] - Release 1.5 M1. - -Changes in version 1.4.1.RELEASE (2014-03-13) ---------------------------------------------- -** Fix - * [DATAMONGO-773] - Verify that @DBRef fields can be included in query. - * [DATAMONGO-821] - Fixed handling of keyword expressions for DBRefs. - * [DATAMONGO-829] - NearQuery should not default 'num' to zero. - * [DATAMONGO-833] - Support for EnumSet and EnumMap in MappingMongoConverter. - * [DATAMONGO-843] - Back-port of defaulting of the MappingContext for auditing. - * [DATAMONGO-862] - Fixed handling of unmapped paths for updates. - * [DATAMONGO-863] - UpdateMapper doesn't convert raw DBObjects anymore. - * [DATAMONGO-868] - MongoTemplate.findAndModify(…) increases version if not handled manually. - * [DATAMONGO-871] - Add support for arrays as query method return types. - * [DATAMONGO-877] - Added guard against null-package in AbstractMappingConfiguration. - -** Improvement - * [DATAMONGO-865] - Adjust test dependencies to avoid ClassNotFoundException during test runs. - -Changes in version 1.3.5.RELEASE (2014-03-10) ---------------------------------------------- -** Fix - * [DATAMONGO-829] - NearQuery, when used in conjunction with a Query, no longer sets num=0, unless Query specifies otherwise. - * [DATAMONGO-871] - Repository queries support array return type. - -** Improvement - * [DATAMONGO-865] - Avoid ClassNotFoundException during test runs. 
- -Changes in version 1.4.0.RELEASE (2014-02-24) ---------------------------------------------- - -** Fix - * [DATAMONGO-354] - MongoTemplate should support multiple $pushAll in one update. - * [DATAMONGO-404] - Removing a DBRef using pull does not work. - * [DATAMONGO-410] - Update with pushAll should recognize defined Converter. - * [DATAMONGO-812] - $pushAll is deprecated since mongodb 2.4 move to $push $each. - * [DATAMONGO-830] - Fix NPE during cache warmup in CustomConversions. - * [DATAMONGO-838] - Support for refering to expression based field in group operation. - * [DATAMONGO-840] - Support for nested MongoDB field references in SpEL expressions within Projections. - * [DATAMONGO-842] - Fix documentation error in GRIDFS section. - * [DATAMONGO-852] - Increase version for update should traverse DBObject correctly in order to find version property. - -** Improvement - * [DATAMONGO-468] - Simplification for updates of DBRef fields with mongoTemplate. - * [DATAMONGO-849] - Documentation on github should not reference invalid class. - -** Task - * [DATAMONGO-848] - Ensure compatibility with Mongo Java driver 2.12. - * [DATAMONGO-853] - Update no longer allows null keys. - * [DATAMONGO-856] - Update documentation. 
- -Changes in version 1.3.4.RELEASE (2014-02-17) ---------------------------------------------- -** Bug - * [DATAMONGO-407] - Collection with generics losing element type after $set update - * [DATAMONGO-410] - Update with pushAll doesnt recognize defined Converter - * [DATAMONGO-686] - ClassCastException while reusing Query object - * [DATAMONGO-805] - Excluding DBRef field in a query causes a MappingException - * [DATAMONGO-807] - using findAndModify removes the _class field of encapsulated classes, causing MappingInstantiationException - * [DATAMONGO-808] - Spring data mongoDB not working with IPv6 address directly - * [DATAMONGO-811] - updateFirst methods do not increment @Version field - * [DATAMONGO-816] - Unable to execute query with DocumentCallbackHandler when query contains Criteria with enums. - * [DATAMONGO-828] - UpdateFirst throws OptimisticLockingFailureException when updating document that does not exist - * [DATAMONGO-830] - NPE during cache warmup in CustomConversions - * [DATAMONGO-842] - Documentation error in GRIDFS section -** Improvement - * [DATAMONGO-813] - GridFsTemplate.getResource(location) throws NPE if don't find file -** Task - * [DATAMONGO-824] - Add contribution guidelines - * [DATAMONGO-846] - Release 1.3.4 - -Changes in version 1.4.0.RC1 (2014-01-29) ---------------------------------------------- - -** Bug - * [DATAMONGO-407] - Collection with generics losing element type after $set update - * [DATAMONGO-686] - ClassCastException while reusing Query object - * [DATAMONGO-726] - References to non existing classes in namespace XSD - * [DATAMONGO-804] - EnableMongoRepositories repositoryImplementationPostfix() default is empty String instead of "Impl" - * [DATAMONGO-805] - Excluding DBRef field in a query causes a MappingException - * [DATAMONGO-806] - Spring Data MongoDB - Aggregation Framework - No property _id found for type com.entity.User - * [DATAMONGO-807] - using findAndModify removes the _class field of encapsulated classes, 
causing MappingInstantiationException - * [DATAMONGO-808] - Spring data mongoDB not working with IPv6 address directly - * [DATAMONGO-811] - updateFirst methods do not increment @Version field - * [DATAMONGO-816] - Unable to execute query with DocumentCallbackHandler when query contains Criteria with enums. -** Improvement - * [DATAMONGO-778] - Create geospatial index of type other than 2d with @GeoSpatialIndexed - * [DATAMONGO-785] - Add support for geospatial 2Dsphere and geohaystack index types - * [DATAMONGO-787] - Guard against SpEL issue in Spring 3.2.4 - * [DATAMONGO-799] - Fix failing test in MongoTemplateTests on Mongo 2.5.x - * [DATAMONGO-802] - Change AbstractMongoConfiguration.mongoDbFactory() to return MongoDbFactory - * [DATAMONGO-813] - GridFsTemplate.getResource(location) throws NPE if don't find file - * [DATAMONGO-822] - Add support for eager CDI repository instantiation - * [DATAMONGO-823] - Add bucket attribute to - * [DATAMONGO-837] - Upgrade MongoDB Java driver to 2.11.4 -** Task - * [DATAMONGO-790] - Ensure compatibility with Spring Framework 4.0 - * [DATAMONGO-824] - Add contribution guidelines - * [DATAMONGO-826] - Release Spring Data MongoDB 1.4.0.RC1 - * [DATAMONGO-835] - Code cleanups - -Changes in version 1.3.3.RELEASE (2013-12-11) ---------------------------------------------- -** Bug - * [DATAMONGO-726] - Fixed classname references in namespace XSDs. - * [DATAMONGO-788] - Projection operations do not render synthetic fields properly. - * [DATAMONGO-795] - When adding custom converters to the mongo template it is possible to get unpredictable behaviour - * [DATAMONGO-804] - Fix default annotation attribute value for repositoryImplementationPostfix(). - * [DATAMONGO-806] - Fixed invalid rendering of id field references. - * [DATAMONGO-768] - Improve documentation of how to use @PersistenceConstructor - -** Improvement - * [DATAMONGO-791] - Added newAggregation(…) overloads to accept a List. 
- * [DATAMONGO-799] - Fix failing test in MongoTemplateTests on Mongo 2.5.x - * [DATAMONGO-800] - Improved AuditingIntegrationTests. -** Task - * [DATAMONGO-810] - Release 1.3.3 - -Changes in version 1.4.0.M1 (2013-11-19) ---------------------------------------------- -** Bug - * [DATAMONGO-534] - The GridFs query execution does not return sorted resources, when the sorting fields are defined in the query definition - * [DATAMONGO-630] - Add support of $setOnInsert modifier for upsert - * [DATAMONGO-746] - IndexInfo cannot be read for indices created via mongo shell - * [DATAMONGO-752] - QueryMapper prevents searching for values that start with a $ [dollarsign] - * [DATAMONGO-753] - Add support for nested field references in group operations - * [DATAMONGO-758] - Reject excludes other than _id in projection operations - * [DATAMONGO-759] - Render group operation without non synthetic fields correctly. - * [DATAMONGO-761] - ClassCastException in SpringDataMongodbSerializer.getKeyForPath - * [DATAMONGO-768] - Improve documentation of how to use @PersistenceConstructor - * [DATAMONGO-788] - Projection operations do not render synthetic fields properly. - * [DATAMONGO-789] - Support login via different (e.g. admin) authentication database - * [DATAMONGO-795] - When adding custom converters to the mongo template it is possible to get unpredictable behaviour -** Improvement - * [DATAMONGO-757] - Projections should follow mongodb conventions more precisely. - * [DATAMONGO-764] - Add support for SSL connections to Mongo - * [DATAMONGO-766] - Allow nested field references on properties through e.g. 
@Field("a.b") - * [DATAMONGO-769] - Support arithmetic operators for properties - * [DATAMONGO-770] - Repository - findByIgnoreCase doesnt work - * [DATAMONGO-771] - Saving raw JSON through MongoTemplate.insert(…) fails - * [DATAMONGO-774] - Support SpEL expressions to define arithmetical projection operations in the aggregation framework - * [DATAMONGO-776] - TypeBasedAggregationOperationContext should use MappingContext.getPersistentPropertyPath(String, Class) - * [DATAMONGO-780] - Add support for nested repositories - * [DATAMONGO-782] - Typo in reference documentation - * [DATAMONGO-785] - Add support for geospatial 2Dsphere and geohaystack index types - * [DATAMONGO-787] - Upgrade to Spring 3.2.4 - * [DATAMONGO-791] - make newAggregation() method to accept list - * [DATAMONGO-793] - Adapt to changes in Spring Data Commons triggered by repository initialization changes - * [DATAMONGO-800] - AuditingIntegrationTests fail on fast machines -** New Feature - * [DATAMONGO-348] - Lazy Load for DbRef - * [DATAMONGO-653] - Support for index operations in GridFsOperations - * [DATAMONGO-760] - Add support for custom findAll Queries - * [DATAMONGO-792] - Add support to configure Auditing via JavaConfig. -** Task - * [DATAMONGO-777] - Upgrade to Mongo Java Driver in 2.11 - -Changes in version 1.3.2.RELEASE (2013-10-25) ---------------------------------------------- -** Bug - * [DATAMONGO-746] IndexInfo cannot be read for indices created via mongo shell - * [DATAMONGO-752] QueryMapper prevents searching for values that start with a $ [dollarsign] - * [DATAMONGO-753] Add support for nested field references in group operations - * [DATAMONGO-758] Reject excludes other than _id in projection operations - * [DATAMONGO-759] Render group operation without non synthetic fields correctly. 
- * [DATAMONGO-761] ClassCastException in SpringDataMongodbSerializer.getKeyForPath - * [DATAMONGO-768] Improve documentation of how to use @PersistenceConstructor - -** Improvement - * [DATAMONGO-757] - Projections should follow mongodb conventions more precisely. - * [DATAMONGO-769] - Support arithmetic operators for properties - * [DATAMONGO-771] - Saving raw JSON through MongoTemplate.insert(…) fails -** Task - * [DATAMONGO-772] - Release 1.3.2 - -Changes in version 1.3.1.RELEASE (2013-09-09) ---------------------------------------------- -** Task - * [DATAMONGO-751] Upgraded to Spring Data Commons 1.6.1. - -Changes in version 1.3.0.RELEASE (2013-09-09) ---------------------------------------------- -** Bug - * [DATAMONGO-540] MongoTemplate upsert and findOne handle id queries differently. - * [DATAMONGO-445] GeoNear Query Doesn't Work with Pageable. - * [DATAMONGO-507] Criteria not() is not working. - * [DATAMONGO-602] Querying with $in operator on the id field of type BigInteger returns zero results. - -** Improvement - * [DATAMONGO-725] Improve configurability and documentation of TypeMapper on MappingMongoConverter. - * [DATAMONGO-738] Add methods to MongoTemplate and MongoOperations to allow calling class to pass both the entityClass and the collectionName for the update and upsert methods. - * [DATAMONGO-737] Extra MongoSynchronizations cause TransactionSynchronizationManager to throw IllegalStateException on transaction complete. - * [DATAMONGO-743] Support returning raw json from a query. - -** Task - * [DATAMONGO-742] Document CDI integration in reference documentation. 
- -Changes in version 1.3.0.RC1 (2013-08-05) ------------------------------------------ -** Bug - * [DATAMONGO-392] - Updating an object does not write type information for objects to be updated - * [DATAMONGO-685] - JMX ServerInfo bean may return wrong info - * [DATAMONGO-688] - There is no precedence between @Id annotation and field named "id" - both are attempted to be used - * [DATAMONGO-693] - MongoFactoryBean should create a mongo instance with host/port if replicaset is null or empty - * [DATAMONGO-702] - Spring Data MongoDB projection search, is not properly configured with respective Java Pojo - * [DATAMONGO-704] - Remove references to SimpleMongoConverter from JavaDoc. - * [DATAMONGO-705] - QueryMapper doesn't handles exists query with DBRef field - * [DATAMONGO-706] - QueryMapper does not transform DBRefs in nested keywords correctly - * [DATAMONGO-709] - Polymorphic query on documents in same collection - * [DATAMONGO-717] - Application context is not properly distributed to persistent entities - * [DATAMONGO-721] - Polymorphic attribute type not persisted on update operations - -** Improvement - * [DATAMONGO-701] - Improve performance of indexed starts-with queries - * [DATAMONGO-713] - Typos in readme.md - -** New Feature - * [DATAMONGO-544] - Support for TTL collection via Indexed annotation - * [DATAMONGO-586] - Add support for new Aggregation Framework - -** Task - * [DATAMONGO-714] - Add latest formatter to project sources - * [DATAMONGO-723] - Clean up test cases - * [DATAMONGO-728] - Add missing package-info.java files - * [DATAMONGO-731] - Adapt refactorings in Spring Data Commons - * [DATAMONGO-732] - Release 1.3 RC1 - - -Changes in version 1.2.3.GA (2013-07-24) ----------------------------------------- -** Task - * [DATAMONGO-728] - Add missing package-info.java files - * [DATAMONGO-729] - Release 1.2.3. 
- - -Changes in version 1.2.2.GA (2013-07-19) ----------------------------------------- -** Bug - * [DATAMONGO-663] - org.springframework.data.mongodb.core.query.Field needs an equals method - * [DATAMONGO-677] - QueryMapper does not handled correctly Map with DBRef value - * [DATAMONGO-679] - MongoTemplate.doSave(…) passed a JSON String doesn't save it. - * [DATAMONGO-683] - QueryMapper does not handle default _id when no MappingMetadata is present - * [DATAMONGO-685] - JMX ServerInfo bean may return wrong info - * [DATAMONGO-693] - MongoFactoryBean should create a mongo instance with host/port if replicaset is null or empty - * [DATAMONGO-704] - Remove references to SimpleMongoConverter from JavaDoc. - * [DATAMONGO-705] - QueryMapper doesn't handles exists query with DBRef field - * [DATAMONGO-706] - QueryMapper does not transform DBRefs in nested keywords correctly - * [DATAMONGO-717] - Application context is not properly distributed to persistent entities - -** Improvement - * [DATAMONGO-682] - Remove performance hotspots - * [DATAMONGO-701] - Improve performance of indexed starts-with queries - -** Task - * [DATAMONGO-658] - Minor formatting changes to README.md - * [DATAMONGO-678] - Performance improvements in CustomConversions - * [DATAMONGO-714] - Add latest formatter to project sources - * [DATAMONGO-723] - Clean up test cases - * [DATAMONGO-727] - Release 1.2.2 - - -Changes in version 1.3.0.M1 (2013-06-04) ----------------------------------------- -** Bug - * [DATAMONGO-571] - Spring Data for MongoDb doesn't save null values when @Version is added to domain class - * [DATAMONGO-612] - Fix PDF reference documentation name - * [DATAMONGO-613] - Images missing from reference documentation - * [DATAMONGO-617] - NullPointerException in MongoTemplate.initializeVersionProperty(…) - * [DATAMONGO-620] - MongoTemplate.doSaveVersioned(…) does not consider collection handed into the method - * [DATAMONGO-621] - MongoTemplate.initializeVersionProperty(…) does not use 
ConversionService - * [DATAMONGO-622] - An unversioned object should be created using insert(…) instead of save. - * [DATAMONGO-629] - Different results when using count and find with the same criteria with 'id' field - * [DATAMONGO-638] - MappingContext should not create PersistentEntity instances for native maps - * [DATAMONGO-640] - MongoLog4jAppender suffers from potential NullPointerException when closing Mongo instance - * [DATAMONGO-641] - MongoLog4jAppender suffers from potential NullPointerException when closing Mongo instance - * [DATAMONGO-642] - MongoChangeSetPersister does not use mapped collection name - * [DATAMONGO-646] - Can't insert DBObjects through MongoTemplate - * [DATAMONGO-648] - ID attributes in namespace shouldn't be XSD IDs - * [DATAMONGO-663] - org.springframework.data.mongodb.core.query.Field needs an equals method - * [DATAMONGO-669] - Incompatibility with Querydsl 3.1.1 - * [DATAMONGO-676] - SimpleMongoRepository fails if used with customized collection name - * [DATAMONGO-677] - QueryMapper does not handled correctly Map with DBRef value - * [DATAMONGO-679] - MongoTemplate.doSave(…) passed a JSON String doesn't save it. 
- * [DATAMONGO-683] - QueryMapper does not handle default _id when no MappingMetadata is present - -** Improvement - * [DATAMONGO-140] - Add XML namespace element for MongoTemplate - * [DATAMONGO-545] - Add before delete and after delete events for AbstractMongoEventListener - * [DATAMONGO-554] - Add background attribute to @Indexed and @CompoundIndex - * [DATAMONGO-569] - AbstractMongoConfiguration cannot be used on CloudFoundry - * [DATAMONGO-594] - cross-store=> Define document name using annotation - * [DATAMONGO-631] - Explicitly prevent an Order instance set to ignore case from being piped into a query - * [DATAMONGO-632] - Polish namespace XSD to avoid errors in STS - * [DATAMONGO-633] - Upgrade to Querydsl 3.0.0 - * [DATAMONGO-634] - Inherit application scope from basic CDI bean of Spring Data Commons - * [DATAMONGO-635] - Fix some Sonar warnings - * [DATAMONGO-636] - Add support for countBy projections - * [DATAMONGO-637] - Typo in Query.query(…) - * [DATAMONGO-651] - WriteResult not available from thrown Exception - * [DATAMONGO-652] - Add support for elemMatch and positional operator projections - * [DATAMONGO-656] - Potential NullPointerException when debugging in MongoTemplate - * [DATAMONGO-657] - Allow to write Map value as DBRef - * [DATAMONGO-666] - Fix architecture inconsistency created by MongoDataIntegrityViolationException - * [DATAMONGO-680] - SimpleMongoRepository.exists(ID) improvement - * [DATAMONGO-681] - Expose MongoTemplate.exists() method - * [DATAMONGO-682] - Remove performance hotspots - -** New Feature - * [DATAMONGO-607] - Add an abbreviating field naming strategy - * [DATAMONGO-628] - Add XML namespace elements for MongoTemplate and GridFsTemplate - -** Task - * [DATAMONGO-597] - Website is severely out-of-date - * [DATAMONGO-658] - Minor formatting changes to README.md - * [DATAMONGO-667] - Remove deprecations and further deprecate sorting/ordering types - * [DATAMONGO-672] - Upgrade to latest Spring Data Build and Commons - * 
[DATAMONGO-678] - Performance improvements in CustomConversions - * [DATAMONGO-690] - Release 1.3 M1 - -Changes in version 1.2.1.GA (2013-04-17) ----------------------------------------- -** Bug - * [DATAMONGO-571] - Spring Data for MongoDb doesn't save null values when @Version is added to domain class - * [DATAMONGO-612] - Fix PDF reference documentation name - * [DATAMONGO-613] - Images missing from reference documentation - * [DATAMONGO-617] - NullPointerException in MongoTemplate.initializeVersionProperty(…) - * [DATAMONGO-620] - MongoTemplate.doSaveVersioned(…) does not consider collection handed into the method - * [DATAMONGO-621] - MongoTemplate.initializeVersionProperty(…) does not use ConversionService - * [DATAMONGO-622] - An unversioned object should be created using insert(…) instead of save. - * [DATAMONGO-629] - Different results when using count and find with the same criteria with 'id' field - * [DATAMONGO-638] - MappingContext should not create PersistentEntity instances for native maps - * [DATAMONGO-640] - MongoLog4jAppender suffers from potential NullPointerException when closing Mongo instance - * [DATAMONGO-641] - MongoLog4jAppender suffers from potential NullPointerException when closing Mongo instance - * [DATAMONGO-642] - MongoChangeSetPersister does not use mapped collection name - * [DATAMONGO-646] - Can't insert DBObjects through MongoTemplate - * [DATAMONGO-648] - ID attributes in namespace shouldn't be XSD IDs - -** Improvement - * [DATAMONGO-594] - cross-store=> Define document name using annotation - * [DATAMONGO-632] - Polish namespace XSD to avoid errors in STS - * [DATAMONGO-635] - Fix some Sonar warnings - * [DATAMONGO-637] - Typo in Query.query(…) - * [DATAMONGO-651] - WriteResult not available from thrown Exception - * [DATAMONGO-656] - Potential NullPointerException when debugging in MongoTemplate - -** Task - * [DATAMONGO-597] - Website is severely out-of-date - * [DATAMONGO-654] - Release 1.2.1 - -Changes in version 
1.2.0.GA (2013-02-08) ----------------------------------------- -** Bug - * [DATAMONGO-378] - MapReduceResults ClassCastException due to raw results counts as Long - * [DATAMONGO-568] - MongoTemplate.find(...) method causes Nullpointer if query parameter is null - * [DATAMONGO-570] - Query methods on @DBRef field with the qualifier isNull throws Exception - * [DATAMONGO-583] - Check if you are using for loop with a DBCursor - * [DATAMONGO-585] - Exception during authentication in multithreaded access - * [DATAMONGO-588] - MongoTemplate.insert does not initialize null versions to zero - * [DATAMONGO-592] - Persisting Objects containing Objects with PersistenceConstructor causes MappingInstantiationException - * [DATAMONGO-593] - Persisting objects containing primitive arrays with PersistenceConstructor causes MappingInstantiationException - * [DATAMONGO-600] - Issues with polymorphism of nested types - * [DATAMONGO-601] - CannotGetMongoDbConnectionException should not print password in logfile - * [DATAMONGO-603] - Results of geo queries in repository dont't get metrics of supplied distance applied - -** Improvement - * [DATAMONGO-503] - GridFsTemplate is not setting the file Content-Type - * [DATAMONGO-573] - Move to Logback for test logging - * [DATAMONGO-580] - Polish BeanDefinitionParsers to avoid warnings in STS - * [DATAMONGO-581] - Expose managed PersistentEntity in MongoRepositoryFactoryBean - * [DATAMONGO-606] - Register converter for JodaTime types if present on classpath - -** New Feature - * [DATAMONGO-577] - Add support for auditing - -** Task - * [DATAMONGO-81] - Create unit tests for exception translation in MongoTemplate - * [DATAMONGO-576] - Configure java.util.logging to reduce verbose test logging - * [DATAMONGO-590] - Clean up code in MongoTemplate - * [DATAMONGO-598] - Upgrade to new build infrastructure - * [DATAMONGO-609] - Release 1.2.0 - -Changes in version 1.1.2.GA (2013-02-08) ----------------------------------------- -** Bug - * 
[DATAMONGO-562] - Cannot create entity with OptimisticLocking (@Version) and initial id - * [DATAMONGO-568] - MongoTemplate.find(...) method causes Nullpointer if query parameter is null - * [DATAMONGO-570] - Query methods on @DBRef field with the qualifier isNull throws Exception - * [DATAMONGO-578] - pom version issues in 1.1.x branch - * [DATAMONGO-583] - Check if you are using for loop with a DBCursor - * [DATAMONGO-585] - Exception during authentication in multithreaded access - * [DATAMONGO-588] - MongoTemplate.insert does not initialize null versions to zero - * [DATAMONGO-600] - Issues with polymorphism of nested types - * [DATAMONGO-601] - CannotGetMongoDbConnectionException should not print password in logfile - -** Improvement - * [DATAMONGO-573] - Move to Logback for test logging - * [DATAMONGO-580] - Polish BeanDefinitionParsers to avoid warnings in STS - -** Task - * [DATAMONGO-81] - Create unit tests for exception translation in MongoTemplate - * [DATAMONGO-563] - Upgrade to MongoDB driver 2.9.2 as it fixes a serious regression introduced in 2.9.0 - * [DATAMONGO-576] - Configure java.util.logging to reduce verbose test logging - * [DATAMONGO-590] - Clean up code in MongoTemplate - * [DATAMONGO-608] - Release 1.1.2 - -Changes in version 1.1.1.GA (2012-10-17) ----------------------------------------- -** Bug - * [DATAMONGO-549] - MongoTemplate.save(…) suffers from potential NullPointException - * [DATAMONGO-550] - MongoTemplate.save(BasicDBObject, String) results in NPE (after upgrading to 1.1.0.RELEASE - * [DATAMONGO-551] - MongoTemplate.save(String, String) results in NPE (after upgrading to 1.1.0.RELEASE - -** Task - * [DATAMONGO-559] - Release 1.1.1.RELEASE - - -Changes in version 1.1.0.GA (2012-10-10) ----------------------------------------- -** Bug - * [DATAMONGO-523] - @TypeAlias annotation not used with AbstractMongoConfiguration - * [DATAMONGO-527] - Criteria.equals(…) broken for complex criterias - * [DATAMONGO-530] - 
MongoMappingContext.setApplicationContext(…) does not invoke superclass method - * [DATAMONGO-531] - StackOverflowError when persisting Groovy beans - * [DATAMONGO-532] - Multithreading authentication issue - * [DATAMONGO-533] - Default MongoPersistentEntityIndexCreator not registered if ApplicationContext already contains one for different MappingContext - * [DATAMONGO-535] - Retrieve of existing Mongo DB from Transaction is not working - * [DATAMONGO-539] - Document remove doesn't work when giving collection name as a parameter - -** Improvement - * [DATAMONGO-279] - Optimistic locking using @Version field - * [DATAMONGO-456] - XSD incorrectly states the default value for the mongo-ref attribute of the mongo:db-factory configuration element - * [DATAMONGO-457] - broken links "Spring Data MongoDB - Reference Documentation" - * [DATAMONGO-526] - Polish README.md - * [DATAMONGO-529] - Improve Querydsl setup - * [DATAMONGO-538] - Unify usage of Sort APIs in Query API - -** New Feature - * [DATAMONGO-389] - stable release spring-data-mongodb should work with stable spring spring-data-jpa - -** Task - * [DATAMONGO-484] - Migrate to latest MongoDB Java driver - * [DATAMONGO-528] - Document GridFS support - * [DATAMONGO-536] - Fix package cycle introduced by SerializationUtils - * [DATAMONGO-541] - Release 1.1 GA - * [DATAMONGO-543] - Polish reference documentation - * [DATAMONGO-548] - Upgrade to Querydsl 2.8.0 - - -Changes in version 1.1.0.RC1 (2012-08-24) ----------------------------------------- -** Bug - * [DATAMONGO-493] - Criteria.ne() method converts all value into ObjectId - * [DATAMONGO-494] - $or/$nor expressions do not consider entity class mapping - * [DATAMONGO-495] - JSON can't serialize Enum when printing Query in DEBUG message - * [DATAMONGO-497] - Reading an empty List throws a MappingInstantiationException because it returns an HashSet instead of returning an ArrayList - * [DATAMONGO-505] - Conversion of associations doesn't work for collection values 
- * [DATAMONGO-508] - DBRef can accidentally get added as PersistentProperty - * [DATAMONGO-517] - QueryMapping incorrectly translates complex keywords - -** Improvement - * [DATAMONGO-496] - AbstractMongoConfiguration.getMappingBasePackage() could default to config class' package - * [DATAMONGO-499] - Namespace XSDs of current release version should refer to repositories XSD in version 1.0 - * [DATAMONGO-500] - Index creation reacts on events not intended for it - * [DATAMONGO-502] - QueryMapper should transparently translate property names to field names - * [DATAMONGO-509] - SimpleMongoRepository.exists(…) can be improved. - * [DATAMONGO-510] - Criteria should only use BasicDBList internally - * [DATAMONGO-511] - QueryMapper should correctly transform associations - * [DATAMONGO-516] - Make Spring 3.1.2.RELEASE default Spring dependency version - -** Task - * [DATAMONGO-513] - Release 1.1 RC1 - - -Changes in version 1.0.4.RELEASE MongoDB (2012-08-24) ------------------------------------------------------ -** Bug - * [DATAMONGO-493] - Criteria.ne() method converts all value into ObjectId - * [DATAMONGO-494] - $or/$nor expressions do not consider entity class mapping - * [DATAMONGO-495] - JSON can't serialize Enum when printing Query in DEBUG message - -** Improvement - * [DATAMONGO-499] - Namespace XSDs of current release version should refer to repositories XSD in version 1.0 - -** Task - * [DATAMONGO-514] - Release 1.0.4. 
- -Changes in version 1.1.0.M2 (2012-07-24) ---------------------------------------- -** Bug - * [DATAMONGO-378] - MapReduceResults ClassCastException due to raw results counts as Long - * [DATAMONGO-424] - Declaring a list of DBRef in a domain class results in Null for each DBRef when reading from mongo database - * [DATAMONGO-425] - Binding a Date to a manually defined repository query fails - * [DATAMONGO-428] - ClassCastException when using outputDatabase option in map-reduce - * [DATAMONGO-446] - Pageable query methods returning List are broken - * [DATAMONGO-447] - Removal of Documents fails in in debug mode for Documents with complex ids - * [DATAMONGO-450] - enabling DEBUG causes RuntimeException - * [DATAMONGO-454] - ServerAddressPropertyEditor fails if a hostname is unresolvable - * [DATAMONGO-458] - When reading back empty collections unmodifiable instances of Collections.emptyList/Set is returned. - * [DATAMONGO-462] - findAll() fails with NPE - discovering the root cause - * [DATAMONGO-465] - Mongo inserts document with "_id" as an integer but saves with "_id" as a string. - * [DATAMONGO-467] - String @id field is not mapped to ObjectId when using QueryDSL ".id" path - * [DATAMONGO-469] - Query creation from method names using AND criteria does not work anymore - * [DATAMONGO-474] - Wrong property is used for Id mapping - * [DATAMONGO-475] - 'group' operation fails where query references non primitive property - * [DATAMONGO-480] - The WriteResultChecking is not used in case of insert or save of documents. 
- * [DATAMONGO-483] - @Indexed(unique=true, name="foo") puts name's value to the 'key' in the MongoDB - * [DATAMONGO-489] - ClassCastException when loading Map - -** Improvement - * [DATAMONGO-448] - Remove the need for Converters for complex classes that are used as IDs - * [DATAMONGO-455] - Document how to use raw queries using BasicQuery - * [DATAMONGO-460] - Improve Querydsl implementation internals - * [DATAMONGO-466] - QueryMapper shouldn't map id properties of nested classes - * [DATAMONGO-470] - Criteria and Query should have proper equals(…) and hashCode() method. - * [DATAMONGO-477] - Change upper bound of Google Guava package import to 13 - * [DATAMONGO-482] - typo in documentation - 2 i's in usiing - * [DATAMONGO-486] - Polish namespace implementation - * [DATAMONGO-491] - Release 1.1.0.M2 - -** New Feature - * [DATAMONGO-476] - JavaConfig support for Mongo repositories - -** Task - * [DATAMONGO-451] - Tweak pom.xml to let Sonar build run without Bundlor - * [DATAMONGO-490] - Fix minor typos - - -Changes in version 1.0.3.RELEASE (2012-07-24) --------------------------------------------- -** Bug - * [DATAMONGO-467] - String @id field is not mapped to ObjectId when using QueryDSL ".id" path - * [DATAMONGO-469] - Query creation from method names using AND criteria does not work anymore - * [DATAMONGO-474] - Wrong property is used for Id mapping - * [DATAMONGO-475] - 'group' operation fails where query references non primitive property - * [DATAMONGO-480] - The WriteResultChecking is not used in case of insert or save of documents. - * [DATAMONGO-483] - @Indexed(unique=true, name="foo") puts name's value to the 'key' in the MongoDB - * [DATAMONGO-489] - ClassCastException when loading Map - -** Improvement - * [DATAMONGO-466] - QueryMapper shouldn't map id properties of nested classes - * [DATAMONGO-470] - Criteria and Query should have proper equals(…) and hashCode() method. 
- * [DATAMONGO-482] - typo in documentation - 2 i's in usiing - -** Task - * [DATAMONGO-492] - Release 1.0.3 - - -Changes in version 1.0.2.RELEASE (2012-06-20) ---------------------------------------------- -** Bug - * [DATAMONGO-360] - java.lang.ClassCastException when placing GeospatialIndex into IndexOperations and invoking IndexOperations.getIndexInfo() - * [DATAMONGO-366] - Chapter 3.2. points to wrong bugtracker - * [DATAMONGO-378] - MapReduceResults ClassCastException due to raw results counts as Long - * [DATAMONGO-382] - ClassCastException: "com.mongodb.BasicDBObject cannot be cast to com.mongodb.BasicDBList" during find() - * [DATAMONGO-411] - Potential ClassCastExceptions in MongoPersistentEntityIndexCreator - * [DATAMONGO-412] - getUserCredentials() is called twice in AbstractMongoConfiguration::mongoDbFactory() - * [DATAMONGO-413] - Using "Or" in repository query yields a ClassCastException - * [DATAMONGO-422] - UUIDToBinaryConverter not compatible with mongo java driver - * [DATAMONGO-423] - Criteria.regex should use java.util.Pattern instead of $regex - * [DATAMONGO-425] - Binding a Date to a manually defined repository query fails - * [DATAMONGO-428] - ClassCastException when using outputDatabase option in map-reduce - * [DATAMONGO-429] - using @Query annotation, arrays are translated somewhere between query creation and mongo interpretation - * [DATAMONGO-446] - Pageable query methods returning List are broken - * [DATAMONGO-447] - Removal of Documents fails in in debug mode for Documents with complex ids - * [DATAMONGO-450] - enabling DEBUG causes RuntimeException - * [DATAMONGO-454] - ServerAddressPropertyEditor fails if a hostname is unresolvable - * [DATAMONGO-461] - MappedConstructor potentially throws NullPointerException - * [DATAMONGO-462] - findAll() fails with NPE - discovering the root cause - -** Improvement - * [DATAMONGO-448] - Remove the need for Converters for complex classes that are used as IDs - * [DATAMONGO-455] - Document how 
to use raw queries using BasicQuery - -** Task - * [DATAMONGO-463] - Release 1.0.2 - - -Changes in version 1.1.0.M1 (2012-05-07) ----------------------------------------- - -** Bug - * [DATAMONGO-299] - Mongodb Query Does not allow for multiple query conditionals of the same time. - * [DATAMONGO-360] - java.lang.ClassCastException when placing GeospatialIndex into IndexOperations and invoking IndexOperations.getIndexInfo() - * [DATAMONGO-363] - Criteria.and() cannot be chained when using Criteria.gte and Criteria.lte - * [DATAMONGO-364] - Chaining of Criteria when including a GeoSpatial field is inconsistent. - * [DATAMONGO-366] - Chapter 3.2. points to wrong bugtracker - * [DATAMONGO-368] - Empty values in collections are not supported - * [DATAMONGO-369] - Wrong query created when one value is a DBObject - * [DATAMONGO-373] - QueryMapper is getting a ClassCasteException When trying to Convert an ArrayList to a BSONList - * [DATAMONGO-376] - Fix potential NPE in SpringDataMongodbSerializer - * [DATAMONGO-380] - maps with mongo reserved characters for keys generate on save: java.lang.IllegalArgumentException: fields stored in the db can't have . 
in them - * [DATAMONGO-387] - Executing query methods with GeoPage results doesn't work - * [DATAMONGO-401] - StringBasedMongoQuery suffers from NullPointerException in case a null parameter gets bound to a parameter placeholder - * [DATAMONGO-402] - Inner class not supported - * [DATAMONGO-403] - Conflicts between MongoDB and JPA - * [DATAMONGO-411] - Potential ClassCastExceptions in MongoPersistentEntityIndexCreator - * [DATAMONGO-412] - getUserCredentials() is called twice in AbstractMongoConfiguration::mongoDbFactory() - * [DATAMONGO-413] - Using "Or" in repository query yields a ClassCastException - * [DATAMONGO-423] - Criteria.regex should use java.util.Pattern instead of $regex - -** Defect - * [DATAMONGO-429] - using @Query annotation, arrays are translated somewhere between query creation and mongo interpretation - -** Improvement - * [DATAMONGO-347] - Repositories and DBRef - * [DATAMONGO-375] - Polish versions of referenced XSD schemas - * [DATAMONGO-379] - Exception when trying to instantiate an entity having a primitive constructor argument and no according document field - * [DATAMONGO-390] - Add Converter for UUID - * [DATAMONGO-391] - Move to SLF4J for logging - * [DATAMONGO-397] - MongoRepositoryFactoryBean should refer to MongoOperations instead of MongoTemplate - * [DATAMONGO-441] - Improve MongoDbUtils API - -** New Feature - * [DATAMONGO-6] - Integration with GridFS features - * [DATAMONGO-36] - Validation support to MongoTemplate - * [DATAMONGO-356] - Provide CDI integration - * [DATAMONGO-418] - Add support for newly introduced StartingWith, EndingWith and Containing keywords - * [DATAMONGO-427] - Support After and Before keywords for query creation - -** Refactoring - * [DATAMONGO-383] - Adapt new entity instantiation API from Spring Data Commons - * [DATAMONGO-431] - Adapt changes in CrudRepository - -** Task - * [DATAMONGO-443] - Upgrade to Querydsl 2.5.0 - * [DATAMONGO-394] - Upgrade to Querydsl 2.3.2 - * [DATAMONGO-396] - Release 
1.1.0.M1. - * [DATAMONGO-432] - Upgrade to Spring Data Commons 1.3.0.RC1 - * [DATAMONGO-439] - Add performance tests - -Changes in version 1.0.1.RELEASE MongoDB (2012-02-11) ------------------------------------------------------ - -** Bug - * [DATAMONGO-363] - Criteria.and() cannot be chained when using Criteria.gte and Criteria.lte - * [DATAMONGO-364] - Chaining of Criteria when including a GeoSpatial field is inconsistent. - * [DATAMONGO-368] - Empty values in collections are not supported - * [DATAMONGO-369] - Wrong query created when one value is a DBObject - * [DATAMONGO-376] - Fix potential NPE in SpringDataMongodbSerializer - * [DATAMONGO-380] - maps with mongo reserved characters for keys generate on save: java.lang.IllegalArgumentException: fields stored in the db can't have . in them - * [DATAMONGO-387] - Executing query methods with GeoPage results doesn't work - * [DATAMONGO-401] - StringBasedMongoQuery suffers from NullPointerException in case a null parameter gets bound to a parameter placeholder - -** Improvement - * [DATAMONGO-375] - Polish versions of referenced XSD schemas - * [DATAMONGO-379] - Exception when trying to instantiate an entity having a primitive constructor argument and no according document field - * [DATAMONGO-390] - Add Converter for UUID - * [DATAMONGO-397] - MongoRepositoryFactoryBean should refer to MongoOperations instead of MongoTemplate - -** Task - * [DATAMONGO-395] - Release 1.0.1. - - -Changes in version 1.0.0.RELEASE MongoDB (2011-12-22) ------------------------------------------------------ - -** Bug - * [DATAMONGO-260] - MapReduce fails when using with Long as key-type. 
- * [DATAMONGO-319] - WriteConcern not parsed correctly in namespace handlers - * [DATAMONGO-336] - MongoDB GeoNear returning null pointer exception when giving data more precision than test data - * [DATAMONGO-343] - ServerAddressPropertyEditor disables default Spring conversion - * [DATAMONGO-346] - MongoTemplate.remove(Object arg) not working - * [DATAMONGO-349] - remove doesn't work in RC1 for mongo db - -** Improvement - * [DATAMONGO-139] - Startup behavior should be that MongoTemplate does not eagerly try to connect to MongoDB - * [DATAMONGO-296] - Add hook to use MongoConverter for Querydsl argument handling - * [DATAMONGO-326] - Enums can't be used in Criteria - * [DATAMONGO-341] - Tighten implementation of MongoTemplate's geoNear(...) methods - -** Task - * [DATAMONGO-81] - Create unit tests for exception translation in MongoTemplate - * [DATAMONGO-93] - Create integration tests for authentication - * [DATAMONGO-257] - Document TypeMapper abstraction to control how type information is stored and retrieved from documents - * [DATAMONGO-330] - Document classpath scanning for Converters - * [DATAMONGO-350] - Upgrade to latest Querydsl - * [DATAMONGO-355] - Upgrade to Spring 3.0.7 - * [DATAMONGO-357] - Release 1.0 GA - - -Changes in version 1.0.0.RC1 MongoDB (2011-12-6) ------------------------------------------------- - -** Bug - * [DATAMONGO-199] - Synchronisation during performance tests - * [DATAMONGO-298] - Spring custom converters do not work for subclasses of java.lang.Number - * [DATAMONGO-306] - NullPointerException if mongo factory created via URI with out credentials - * [DATAMONGO-309] - POJO containing a List of Maps not persisting properly - * [DATAMONGO-312] - Cannot retrieve persisted Enum implementing an abstract method - * [DATAMONGO-315] - MongoTemplate.findOne(query) methods ignore SortOrder on query - * [DATAMONGO-316] - Replica Set configuration via properties file throws ArrayIndexOutOfBoundsException - * [DATAMONGO-318] - Distinguishing 
write errors and writes with zero documents affected - * [DATAMONGO-321] - An ID field of type integer is always saved as zero if not set by the user before calling save. Throw exception to indicate an int field will not be autopopulated. - * [DATAMONGO-322] - Throw exception in a save operation if the POJO's ID field is null and field type is not String, BigInteger or ObjectId. - * [DATAMONGO-325] - MongoTemplate fails to correctly report a js file not found on classpath while calling mapReduce - * [DATAMONGO-328] - Fix the import statement in mongodb manifest - * [DATAMONGO-329] - Map value not converted correctly - * [DATAMONGO-333] - AbstractMongoEventListener throws NullPointerException if used without generic parameter - -** Improvement - * [DATAMONGO-26] - Investigate performance of POJO serialization. - * [DATAMONGO-174] - Add additional constructor to MongoTemplate that take com.mongodb.Mongo, database name, user credentials and MongoConverter. - * [DATAMONGO-208] - Add support for group() operation on collection in MongoOperations - * [DATAMONGO-213] - Provide additional options for setting WriteConcern on a per operation basis - * [DATAMONGO-234] - MongoTemplate should support the findAndModify operation to update version fields - * [DATAMONGO-292] - Several mongo for different database names - * [DATAMONGO-301] - Allow converters to be included through scanning - * [DATAMONGO-305] - Remove synchronized(this) from sort() and fields() methods in the Query class - * [DATAMONGO-310] - Allow Collections as parameters in @Query - * [DATAMONGO-320] - Remove use of slaveOk boolean option in MongoTemplate as it is deprecated. Replace with ReadPreference - * [DATAMONGO-323] - Using @Query and a Sort parameter on the same method should produce sorted results - * [DATAMONGO-324] - Support for JSON in mongo template - * [DATAMONGO-337] - The "nin" and "all" methods on Criteria should take a collection like the "in" method. 
- * [DATAMONGO-338] - Add query derivation implementations for newly introduced Regex, Exists, True and False keywords - -** New Feature - * [DATAMONGO-185] - Add hint to Query - * [DATAMONGO-251] - Support getting index information on a collection or mapped class. - * [DATAMONGO-308] - Add support for upsert methods - -** Refactoring - * [DATAMONGO-304] - Change package name for Class MongoLog4jAppender - * [DATAMONGO-313] - Use MongoOperations interface instead of MongoTemplate class - -** Task - * [DATAMONGO-195] - Add description of @Field mapping annotation to reference docs - * [DATAMONGO-262] - Ensure Cloud Foundry Runtime works with RC1 - * [DATAMONGO-263] - Ensure Cloud Foundry Examples work with RC1 - * [DATAMONGO-311] - Update MongoDB driver to v 2.7.x - * [DATAMONGO-332] - Update reference documentation to list correct necessary dependencies - * [DATAMONGO-334] - Use repository URLs pointing to Artifactory - * [DATAMONGO-335] - Create hybrid Spring 3.0.6 / 3.1 build - - -Changes in version 1.0.0.M5 MongoDB (2011-10-24) ------------------------------------------------ -** Bug - * [DATAMONGO-259] - Maps inside collections are not written correctly - * [DATAMONGO-268] - CustomConversions is too liberal in registering "simple types" (asymmetric conversion) - * [DATAMONGO-269] - XML configuration for replica sets is not working - * [DATAMONGO-275] - DBRef fields and collections are returning nulls - * [DATAMONGO-281] - Improve the to handle blank username and password when using property placeholders like ${mongo.username} - * [DATAMONGO-282] - Cannot create a "range" query - * [DATAMONGO-284] - Execution of Querydsl query maps id incorrectly - * [DATAMONGO-285] - NPE in MappingMongoConverter.writeMapInternal when saving a Map with val instance of Collection - * [DATAMONGO-288] - querying same property multiple times produces incorrect query - * [DATAMONGO-289] - AbstractMongoEventListener will never call onAfterLoad - * [DATAMONGO-294] - List elements 
nested in Map lose their type when persisted - -** Improvement - * [DATAMONGO-65] - Allow Spring EL usage in collection name attribute of @Document - * [DATAMONGO-183] - Query count() support for pagination - * [DATAMONGO-258] - M4 documentation states SD Commons 1.1.0.M1 required but actually needs 1.2.0.M1 - * [DATAMONGO-261] - Reference documentation for geoNear queries has no stable section id - * [DATAMONGO-270] - Approach Sonar results to improve code quality - * [DATAMONGO-271] - Remove 'document' from cross-store package names - * [DATAMONGO-272] - Namespace configuration file still resides in 'document' package - * [DATAMONGO-276] - QueryUtils should be public - * [DATAMONGO-280] - Add maxAutoConnectRetryTime for and MongoOptionsFactoryBean - * [DATAMONGO-283] - $and support - * [DATAMONGO-286] - MongoDB Repository no query methods for $lte and $gte - * [DATAMONGO-291] - Path expressions in repository methods should honour mapping metadata - * [DATAMONGO-293] - Add support for new polygon based within search in Mongo 2.0. 
- * [DATAMONGO-295] - Allow MongoTemplate to be configured using MongoURI - * [DATAMONGO-300] - Re-work the Query/Criteria to better support $and, $or and $nor queries - * [DATAMONGO-302] - Consistently handle null values given to CrudRepository implementation - -** New Feature - * [DATAMONGO-230] - MongoTemplate missing method remove(Object object, String collectionName) - -** Refactoring - * [DATAMONGO-274] - Split up repository package according to the structure in Spring Data JPA - -** Task - * [DATAMONGO-264] - Ensure Data Document examples work - * [DATAMONGO-265] - Create new github repository for mongodb - * [DATAMONGO-266] - Create new github repository for CouchDB - * [DATAMONGO-297] - Prune project directory - * [DATAMONGO-303] - Update to QueryDsl 2.2.4 - - -Changes in version 1.0.0.M4 MongoDB (2011-09-01) ------------------------------------------------- - -** Bug - * [DATADOC-134] - MongoDB: No exception when saving duplicate value to an attribute annotated with @Indexed(unique=true) - * [DATADOC-162] - Exception thrown on toString of Point class - * [DATADOC-167] - @Document annotation is not inherited - * [DATADOC-168] - Registering a custom converter from String to UUID causes all Strings to be converted to UUIDs - * [DATADOC-172] - Unable to force property order when saving document - * [DATADOC-176] - @DBRef annotation only supports ids of type ObjectId - * [DATADOC-177] - Sorting on multiple fields does not maintain order - * [DATADOC-181] - MongoFactoryBean does not call Mongo.close() on shutdown - * [DATADOC-190] - SimpleMongoRepository.exists(…) returns false for existing entities with non-ObjectId id - * [DATADOC-192] - MappingMongoConverter does not read empty Sets correctly - * [DATADOC-199] - Synchronisation during performance tests - * [DATADOC-207] - MappingMongoConverter fails when reading empty java.util.SortedMaps - * [DATADOC-209] - Collections of enums not handled correctly - * [DATADOC-210] - spring-data-mongodb requires Java 1.6, 
should require 1.5 - * [DATADOC-212] - NPE during MongoTemplate.update() if no ID field is defined (via field name or annotation) - * [DATADOC-217] - Set cannot be used as a collection in a Document - * [DATADOC-218] - Adding of custom simple types is not easy - * [DATADOC-221] - BigDecimal values not read correctly in maps - * [DATADOC-224] - MappingMongoConverter does not inspect value type for Object properties - * [DATADOC-228] - NullPointerException when persisting Map with null values - * [DATADOC-229] - When a parameterized List is used in the PersistentConstructor, conversion fail - * [DATADOC-231] - spring-data-mongodb does not work in an OSGi server because of unresolved dependencies - * [DATADOC-232] - mongodb allow to $inc many fields in one query, but Updat().inc(firs).inc(last) do only last inc - * [DATADOC-235] - Unable to map unstructured data - * [DATADOC-236] - Repository queries do not honour order defined in method name - * [DATADOC-237] - @Indexed annotation doesn't honor field name from @Field annotation - * [DATADOC-240] - Update with id key is not working - * [DATADOC-243] - mongo:mapping-converter schema does not allow db-factory-ref - * [DATADOC-246] - Stack overflow when Update.pushAll(push) - * [DATADOC-247] - QueryMapper does not handle BigInteger ids correctly - * [DATADOC-248] - MongoDB Query and Collection mapping - * [DATADOC-249] - ConcurrentModificationException when calling MongoTemplate.updateFirst - * [DATADOC-254] - SimpleMongoDbFactory should handle dots in database names correctly - -** Improvement - * [DATADOC-32] - SimpleMongoConverter could support identifying Spring EL expressions in keys - * [DATADOC-63] - Converters to support use of a 'typeId' strategy to determine class to marshall/unmarshal from Mongo - * [DATADOC-166] - Check for null if various template CRUD methods - * [DATADOC-169] - Registering custom converters for a type requires treating the type as simple in mapping context - * [DATADOC-171] - 
IllegalArgumentException when persisting entity with BigDecimal field - * [DATADOC-178] - System.out.println in the in method of Criteria. Line 179. - * [DATADOC-188] - Allow means to disable repository infrastructure creating indexes - * [DATADOC-189] - Improve extensibility of MongoRepositoryFactoryBean - * [DATADOC-215] - Allow configuring WriteConcern via MongoFactoryBean and thus the namespace - * [DATADOC-223] - Registering of customSimpleTypes should be available through the mongo namespace of spring-data-mongodb - * [DATADOC-225] - BasicMongoPersistentEntity shouldn't reject root entities without an id property - * [DATADOC-241] - Allow Map conversion behavior overriding - * [DATADOC-255] - Add to MongoOperations and executeCommand with an additional integer options argument - * [DATADOC-256] - Update to use MongoDB driver version 2.6.5 - -** New Feature - * [DATADOC-7] - Support for map-reduce operations in MongoTemplate - * [DATADOC-64] - Allow defining the collections a query is ran against on finder methods - * [DATADOC-68] - Support for geoNear command - * [DATADOC-87] - Provide @GeoSpatialIndexed annotation that mirrors GeoSpatialIndex class for use in mapping. - * [DATADOC-100] - Provide means to externalize manually defined queries - * [DATADOC-202] - Add a 'DocumentCallbackHandler' so that a callback can process each DBObject returned from a query - * [DATADOC-216] - Allow configuring a WriteConcern on SimpleMongoDbFactory for DB wide WriteConcern - * [DATADOC-226] - Add QuerydslRepositorySupport helper class similar to the one in Spring Data JPA - -** Refactoring - * [DATADOC-170] - Review listener design - * [DATADOC-191] - Remove 'document' from package names - * [DATADOC-214] - MongoConverter refactorings - -** Task - * [DATADOC-91] - Add more TestCases for the basic functionality - * [DATADOC-152] - Investigate failing of test for repository.findbyLocationWithinBox - * [DATADOC-175] - Review DSM matrix and remove package cycles if found. 
- * [DATADOC-194] - Remove use of Class.forName to support use in an OSGi environment - * [DATADOC-206] - Upgrade to Querydsl 2.2.0 - * [DATADOC-253] - Upgrade to Spring 3.0.6 - - -Changes in version 1.0.0.M3 MongoDB (2011-06-02) ------------------------------------------------- - -General -* [DATADOC-92] - Exception translation to catch RuntimeException instead of MongoException -* [DATADOC-111] - Ensure all MongoTemplate methods execute within the context of a callback method. -* [DATADOC-120] - Remove MongoReaderWriter -* [DATADOC-160] - Review MongoOperations and make the parameter ordering more consistent across methods - -Core Data Access -* [DATADOC-80] - Rename addConverters to setCustomConverters in MappingMongoConverter and SimpleMongoConverter -* [DATADOC-89] - Support setting slaveOk in MongoTemplate per query -* [DATADOC-108] - Add findById method to MongoTemplate. -* [DATADOC-112] - Storing a DBRef uses the wrong collection name -* [DATADOC-116] - Nesting DBRefs doesn't work > 1 layers -* [DATADOC-117] - Remove the default collection name on the MongoTemplate -* [DATADOC-118] - Remove MongoTemplate methods that take a Reader/Writer parameter -* [DATADOC-121] - Deprecate SimpleMongoConverter -* [DATADOC-124] - Add method to MappingContext to get the collection name used for a specific type -* [DATADOC-127] - @Document(collection="SOME_COLLECTION_NAME") doesn't take effect -* [DATADOC-141] - Provide a prepareCollection protected method in MongoTemplate to allow customization of behavior such as slaveOk or writeConcern via a subclass -* [DATADOC-142] - Change constructors in MongoTemplate that take Mongo object -* [DATADOC-143] - MappingMongoConverter should be MongoTemplate's default converter -* [DATADOC-149] - Remove setter for MongoDbFactory from MappingMongoConverter -* [DATADOC-157] - MongoTemplate updateFirst/updateMulti methods to take java.lang.Class parameter as last in method param list to be consistent with other usage -* [DATADOC-158] - 
Change default conventions of java.lang.Class to collection name in MappingMongoConverter to be first letter lower case camel casing. - -Configuration -* [DATADOC-42] - Provide option for configuring replica sets using the Mongo namespace -* [DATADOC-88] - Create MongoDbFactory to consolidate DB, Server location, and user credentials into one location -* [DATADOC-119] - Modify XML schema and @Configuration base class to make it easy to register custom Spring converters with the mapper. -* [DATADOC-133] - Support Property Placeholder as MongoDB Port Number in Application Context Configuration -* [DATADOC-135] - should use - instead of camel case to be consistent with other attribute names -* [DATADOC-138] - Expose all properties of the MongoOptions class in Mongo namespace - -Querying / Updating -* [DATADOC-43] - Query creator should support Near and Within keyword -* [DATADOC-96] - Query#or(Query) does not work -* [DATADOC-102] - Update does not let you $set across multiple fields -* [DATADOC-106] - Add additional mongo operators to Criteria class -* [DATADOC-107] - Criteria "in" operator should correctly handle passing in a collection instead of an array. -* [DATADOC-113] - NotNull/IsNotNull not implemented in MongoQueryCreator -* [DATADOC-146] - Advanced Regexp Queries - -Mapping -* [DATADOC-95] - Can not save an object that has not had any of its properties set -* [DATADOC-97] - ID replacement not working correctly when using updateFirst/updateMulti -* [DATADOC-98] - Collection or Object[][] doesn't save correctly -* [DATADOC-109] - Add MappingContext to MongoConverter interface -* [DATADOC-110] - Improve implementation of substituteMappedIdIfNecessary -* [DATADOC-101] - Explicit Converters only registered one way. -* [DATADOC-114] - UpdateFirst/Multi operations on MongoTemplate not properly using converter to store objects. 
-* [DATADOC-122] - Use same default collection name for MappingMongoConverter and SimpleMongoConverter -* [DATADOC-123] - Use the same id/_id mapping for MappingMongoConverter and SimpleMongoConverter -* [DATADOC-128] - Support inheritance with Document mappings -* [DATADOC-130] - Problem with Converters (java.util.Locale) -* [DATADOC-144] - Add an @Key annotation to allow defining the key a property is stored under -* [DATADOC-145] - MappingMongoConverter does not convert objects for collections of interfaces -* [DATADOC-155] - Need to support plain POJOs with non-ObjectId-compatible ID properties -* [DATADOC-156] - MongoOperations.find(query(where("id").in(ids)) fails where ids aren't ObjectIds -* [DATADOC-159] - Saving the same Entity multiple times creates multiple entries in the database -* [DATADOC-161] - MappingMongoConverter now supports nested Maps - -Repository -* [DATADOC-115] - Upgrade to QueryDsl 2.2.0-beta4 -* [DATADOC-137] - Parameter values in MongoDB JSON Query are not being replaced properly - -Documentation -* [DATADOC-99] - Reference documentation shows invalid field spec for @Query usage with repositories - - -Changes in version 1.0.0.M2 MongoDB (2011-04-09) ------------------------------------------------ - -General -* Spring configuration support using Java based @Configuration classes - -Core Data Access -* Persistence and mapping lifecycle events -* GeoSpatial integration -* [DATADOC-76] - Add support for findAndRemove to MongoTemplate/MongoOperations -* [DATADOC-5] - Provide detailed mapping of Mongo errors onto Spring DAO exception -* [DATADOC-51] - Fixed issue with exceptions thrown when authenticating multiple times for same DB instance - -Querying -* [DATADOC-72] - Add support for Mongo's $elemMatch and chained Criteria -* [DATADOC-77] - Rename "and" method in Query to "addCriteria" -* [DATADOC-67] - Criteria API to support keywords for geo search - -Mapping -* Feature Rich Object Mapping integrated with Spring's Conversion Service -* 
Annotation based mapping metadata but extensible to support other metadata formats -* [DATADOC-60] - Add namespace support to setup a MappingMongoConverter -* [DATADOC-33] - Introduce annotation to demarcate id field in a domain object - -Repository -* [DATADOC-47, DATACMNS-17] - Adapted new metamodel API -* [DATADOC-46] - Added support for 'In' and 'NotIn' keyword -* [DATADOC-49] - Fixed 'And' and 'Or' keywords -* [DATADOC-41] - Added support for executing QueryDsl predicates -* [DATADOC-69] - Let repository namespace pickup the default mapping context bean and allow configuration -* [DATADOC-24] - Allow use of @Query annotation to define queries -* [DATADOC-34] - Create indexes for columns that are mentioned in query methods - -Cross-Store -* [DATADOC-48] - Cross-store persistance - support for JPA Entities with fields transparently persisted/retrieved using MongoDB - -Logging -* [DATADOC-66] - Log4j log appender - -Changes in version 1.0.0.M1 MongoDB (2011-02-14) ------------------------------------------------- - -General -* BeanFactory for basic configuration of Mongo environment -* Namespace for basic configuration of Mongo environment - -Core Data Access -* Introduce MongoTemplate implementation with methods defined in MongoOperations interface -* MongoTemplate support for insert, find, save, update, remove -* MongoTemplate support for basic POJO serialization based on bean properties -* Allow MongoTemplate methods to use a default collection name -* Exception translation in MongoTemplate to Spring's DAO exception hierarchy -* Support for update modifiers to allow for partial document updates -* Expose WriteConcern settings on MongoTemplate used for any write operations -* Support in MongoTemplate for enabling either logging or throwing exceptions based on value of WriteResult return value. - -Repository -* Introducing generic repository implementation for MongoDB -* Automatic implementation of interface query method names on repositories. 
-* Namespace support for Mongo repositories -* Allow usage of pagination and sorting with repositories - diff --git a/src/main/resources/license.txt b/src/main/resources/license.txt index 7584e2dfe2..964a55d1c3 100644 --- a/src/main/resources/license.txt +++ b/src/main/resources/license.txt @@ -1,6 +1,6 @@ Apache License Version 2.0, January 2004 - http://www.apache.org/licenses/ + https://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +192,7 @@ you may not use this file except in compliance with the License. You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, @@ -207,7 +207,7 @@ similar licenses that require the source code and/or modifications to source code to be made available (as would be noted above), you may obtain a copy of the source code corresponding to the binaries for such open source components and modifications thereto, if any, (the "Source Files"), by -downloading the Source Files from http://www.springsource.org/download, +downloading the Source Files from https://www.springsource.org/download, or by sending a request, with your name and address to: VMware, Inc., 3401 Hillview Avenue, Palo Alto, CA 94304, United States of America or email info@vmware.com. All such requests should clearly specify: OPEN SOURCE FILES REQUEST, Attention General diff --git a/src/main/resources/notice.txt b/src/main/resources/notice.txt index e1387879c2..52ee00c4f5 100644 --- a/src/main/resources/notice.txt +++ b/src/main/resources/notice.txt @@ -1,5 +1,5 @@ -Spring Data MongoDB 2.1 M1 -Copyright (c) [2010-2015] Pivotal Software, Inc. +Spring Data MongoDB 4.5 RC1 (2025.0.0) +Copyright (c) [2010-2019] Pivotal Software, Inc. This product is licensed to you under the Apache License, Version 2.0 (the "License"). 
You may not use this product except in compliance with the License. @@ -8,3 +8,57 @@ This product may include a number of subcomponents with separate copyright notices and license terms. Your use of the source code for the these subcomponents is subject to the terms and conditions of the subcomponent's license, as noted in the LICENSE file. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +